Source code for autogen_core.model_context._chat_completion_context

from abc import ABC, abstractmethod
from typing import Any, List, Mapping

from pydantic import BaseModel, Field

from .._component_config import ComponentBase
from ..models import LLMMessage


class ChatCompletionContext(ABC, ComponentBase[BaseModel]):
    """An abstract base class defining the interface of a chat completion context.

    A chat completion context lets an agent store and retrieve LLM messages.
    It can be implemented with different recall strategies.

    Args:
        initial_messages (List[LLMMessage] | None): The initial list of messages.

    Example:
        Create a custom model context that filters out the thought field from
        AssistantMessage. This is useful for reasoning models like DeepSeek R1,
        which produce very long thought content that is not needed for
        subsequent completions.

        .. code-block:: python

            from typing import List

            from autogen_core.model_context import UnboundedChatCompletionContext
            from autogen_core.models import AssistantMessage, LLMMessage


            class ReasoningModelContext(UnboundedChatCompletionContext):
                \"\"\"A model context for reasoning models.\"\"\"

                async def get_messages(self) -> List[LLMMessage]:
                    messages = await super().get_messages()
                    # Filter out the thought field from AssistantMessage.
                    messages_out: List[LLMMessage] = []
                    for message in messages:
                        if isinstance(message, AssistantMessage):
                            message.thought = None
                        messages_out.append(message)
                    return messages_out
    """

    component_type = "chat_completion_context"

    def __init__(self, initial_messages: List[LLMMessage] | None = None) -> None:
        # Working message buffer, seeded with a copy of the initial messages
        # so later mutation does not touch the caller's list.
        self._messages: List[LLMMessage] = list(initial_messages) if initial_messages else []
        # Kept verbatim (may be None) for subclasses/serialization to inspect.
        self._initial_messages = initial_messages

    async def add_message(self, message: LLMMessage) -> None:
        """Append a message to the context."""
        self._messages.append(message)

    @abstractmethod
    async def get_messages(self) -> List[LLMMessage]: ...

    async def clear(self) -> None:
        """Clear the context (drops all stored messages, including initial ones)."""
        self._messages = []

    async def save_state(self) -> Mapping[str, Any]:
        """Serialize the stored messages into a plain mapping."""
        return ChatCompletionContextState(messages=self._messages).model_dump()

    async def load_state(self, state: Mapping[str, Any]) -> None:
        """Restore messages from a mapping produced by :meth:`save_state`."""
        self._messages = ChatCompletionContextState.model_validate(state).messages
class ChatCompletionContextState(BaseModel):
    """Serializable state of a :class:`ChatCompletionContext`.

    Used by ``save_state``/``load_state`` to round-trip the stored messages
    through ``model_dump()`` / ``model_validate()``.
    """

    messages: List[LLMMessage] = Field(default_factory=list)