|
25 | 25 | TEmbedding = TypeVar("TEmbedding") |
26 | 26 | TChatResponse = TypeVar("TChatResponse", bound="ChatResponse") |
27 | 27 | TChatToolMode = TypeVar("TChatToolMode", bound="ChatToolMode") |
| 28 | +TAgentRunResponse = TypeVar("TAgentRunResponse", bound="AgentRunResponse") |
28 | 29 |
|
29 | 30 | CreatedAtT = str # Use a datetimeoffset type? Or a more specific type like datetime.datetime? |
30 | 31 |
|
@@ -152,7 +153,9 @@ def __iadd__(self, other: "UsageDetails | None") -> Self: |
152 | 153 | return self |
153 | 154 |
|
154 | 155 |
|
155 | | -def _process_update(response: "ChatResponse", update: "ChatResponseUpdate") -> None: |
| 156 | +def _process_update( |
| 157 | + response: "ChatResponse | AgentRunResponse", update: "ChatResponseUpdate | AgentRunResponseUpdate" |
| 158 | +) -> None: |
156 | 159 | """Processes a single update and modifies the response in place.""" |
157 | 160 | is_new_message = False |
158 | 161 | if not response.messages or (update.message_id and response.messages[-1].message_id != update.message_id): |
@@ -189,19 +192,21 @@ def _process_update(response: "ChatResponse", update: "ChatResponseUpdate") -> N |
189 | 192 | # Incorporate the update's properties into the response. |
190 | 193 | if update.response_id: |
191 | 194 | response.response_id = update.response_id |
192 | | - if update.conversation_id is not None: |
193 | | - response.conversation_id = update.conversation_id |
194 | 195 | if update.created_at is not None: |
195 | 196 | response.created_at = update.created_at |
196 | | - if update.finish_reason is not None: |
197 | | - response.finish_reason = update.finish_reason |
198 | | - if update.ai_model_id is not None: |
199 | | - response.ai_model_id = update.ai_model_id |
200 | 197 | if update.additional_properties is not None: |
201 | 198 | if response.additional_properties is None: |
202 | 199 | response.additional_properties = {} |
203 | 200 | response.additional_properties.update(update.additional_properties) |
204 | 201 |
|
| 202 | + if isinstance(response, ChatResponse) and isinstance(update, ChatResponseUpdate): |
| 203 | + if update.conversation_id is not None: |
| 204 | + response.conversation_id = update.conversation_id |
| 205 | + if update.finish_reason is not None: |
| 206 | + response.finish_reason = update.finish_reason |
| 207 | + if update.ai_model_id is not None: |
| 208 | + response.ai_model_id = update.ai_model_id |
| 209 | + |
205 | 210 |
|
206 | 211 | def _coalesce_text_content( |
207 | 212 | contents: list["AIContents"], type_: type["TextContent"] | type["TextReasoningContent"] |
@@ -235,8 +240,8 @@ def _coalesce_text_content( |
235 | 240 | contents.extend(coalesced_contents) |
236 | 241 |
|
237 | 242 |
|
238 | | -def _finalize_response(response: "ChatResponse") -> None: |
239 | | - """Finalizes the chat response by performing any necessary post-processing.""" |
| 243 | +def _finalize_response(response: "ChatResponse | AgentRunResponse") -> None: |
| 244 | + """Finalizes the response by performing any necessary post-processing.""" |
240 | 245 | for msg in response.messages: |
241 | 246 | _coalesce_text_content(msg.contents, TextContent) |
242 | 247 | _coalesce_text_content(msg.contents, TextReasoningContent) |
@@ -1554,6 +1559,110 @@ def __iadd__(self, values: Iterable[TEmbedding] | Self) -> Self: |
1554 | 1559 | return self |
1555 | 1560 |
|
1556 | 1561 |
|
| 1562 | +# region AgentRunResponse |
| 1563 | + |
| 1564 | + |
class AgentRunResponse(AFBaseModel):
    """Represents the response to an Agent run request.

    Provides one or more response messages and metadata about the response.
    A typical response will contain a single message, but may contain multiple
    messages in scenarios involving function calls, RAG retrievals, or complex logic.
    """

    messages: list[ChatMessage] = Field(default_factory=list[ChatMessage])
    response_id: str | None = None
    created_at: CreatedAtT | None = None  # use a datetimeoffset type?
    usage_details: UsageDetails | None = None
    raw_representation: Any | None = None
    additional_properties: dict[str, Any] | None = None

    def __init__(
        self,
        messages: ChatMessage | list[ChatMessage] | None = None,
        response_id: str | None = None,
        created_at: CreatedAtT | None = None,
        usage_details: UsageDetails | None = None,
        raw_representation: Any | None = None,
        additional_properties: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> None:
        """Initialize an AgentRunResponse.

        Args:
            messages: A single message or the list of messages in the response.
            response_id: The ID of the agent run response.
            created_at: A timestamp for the agent run response.
            usage_details: The usage details for the agent run response.
            raw_representation: The raw representation of the response from an
                underlying implementation.
            additional_properties: Any additional properties associated with the response.
            **kwargs: Additional properties to set on the response.
        """
        # Normalize ``messages`` so a single message and any sequence of
        # messages are both accepted (previously only an exact ``list`` was).
        processed_messages: list[ChatMessage] = []
        if isinstance(messages, ChatMessage):
            processed_messages.append(messages)
        elif messages is not None:
            processed_messages.extend(messages)

        super().__init__(
            messages=processed_messages,  # type: ignore[reportCallIssue]
            response_id=response_id,  # type: ignore[reportCallIssue]
            created_at=created_at,  # type: ignore[reportCallIssue]
            usage_details=usage_details,  # type: ignore[reportCallIssue]
            additional_properties=additional_properties,  # type: ignore[reportCallIssue]
            raw_representation=raw_representation,  # type: ignore[reportCallIssue]
            **kwargs,
        )

    @property
    def text(self) -> str:
        """Get the concatenated text of all messages."""
        # "".join over an empty list is already "", so no emptiness guard is needed.
        return "".join(msg.text for msg in self.messages)

    @classmethod
    def from_agent_run_response_updates(
        cls: type[TAgentRunResponse], updates: Sequence["AgentRunResponseUpdate"]
    ) -> TAgentRunResponse:
        """Joins multiple updates into a single AgentRunResponse."""
        response = cls(messages=[])
        for update in updates:
            _process_update(response, update)
        _finalize_response(response)
        return response

    def __str__(self) -> str:
        return self.text
| 1636 | + |
| 1637 | + |
| 1638 | +# region AgentRunResponseUpdate |
| 1639 | + |
| 1640 | + |
class AgentRunResponseUpdate(AFBaseModel):
    """Represents a single streaming response chunk from an Agent."""

    contents: list[AIContents] = Field(default_factory=list[AIContents])
    role: ChatRole | None = None
    author_name: str | None = None
    response_id: str | None = None
    message_id: str | None = None
    created_at: CreatedAtT | None = None  # use a datetimeoffset type?
    additional_properties: dict[str, Any] | None = None
    raw_representation: Any | None = None

    @property
    def text(self) -> str:
        """Get the concatenated text of all TextContent objects in contents."""
        if not self.contents:
            return ""
        text_parts = [item.text for item in self.contents if isinstance(item, TextContent)]
        return "".join(text_parts)

    def __str__(self) -> str:
        return self.text
| 1664 | + |
| 1665 | + |
1557 | 1666 | # region: SpeechToTextOptions |
1558 | 1667 |
|
1559 | 1668 |
|
|
0 commit comments