Source code for autogen_core.model_context._buffered_chat_completion_context

from typing import List

from pydantic import BaseModel
from typing_extensions import Self

from .._component_config import Component
from ..models import FunctionExecutionResultMessage, LLMMessage
from ._chat_completion_context import ChatCompletionContext


class BufferedChatCompletionContextConfig(BaseModel):
    buffer_size: int
    initial_messages: List[LLMMessage] | None = None


class BufferedChatCompletionContext(ChatCompletionContext, Component[BufferedChatCompletionContextConfig]):
    """A buffered chat completion context that keeps a view of the last n messages,
    where n is the buffer size. The buffer size is set at initialization.

    Args:
        buffer_size (int): The size of the buffer.
        initial_messages (List[LLMMessage] | None): The initial messages.
    """

    component_config_schema = BufferedChatCompletionContextConfig
    component_provider_override = "autogen_core.model_context.BufferedChatCompletionContext"

    def __init__(self, buffer_size: int, initial_messages: List[LLMMessage] | None = None) -> None:
        super().__init__(initial_messages)
        if buffer_size <= 0:
            raise ValueError("buffer_size must be greater than 0.")
        self._buffer_size = buffer_size

    async def get_messages(self) -> List[LLMMessage]:
        """Get at most `buffer_size` recent messages."""
        messages = self._messages[-self._buffer_size :]
        # If the first message in the view is a function execution result, drop it:
        # a function result with no preceding assistant tool-call message would be
        # an invalid sequence for chat completion APIs.
        if messages and isinstance(messages[0], FunctionExecutionResultMessage):
            messages = messages[1:]
        return messages

    def _to_config(self) -> BufferedChatCompletionContextConfig:
        return BufferedChatCompletionContextConfig(
            buffer_size=self._buffer_size, initial_messages=self._initial_messages
        )

    @classmethod
    def _from_config(cls, config: BufferedChatCompletionContextConfig) -> Self:
        return cls(**config.model_dump())
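
Below is a minimal usage sketch, not part of the module above. It assumes the public import path autogen_core.model_context.BufferedChatCompletionContext (matching component_provider_override), the add_message coroutine inherited from ChatCompletionContext, the UserMessage and AssistantMessage types from autogen_core.models, and the dump_component / load_component helpers supplied by the Component machinery.

import asyncio

from autogen_core.model_context import BufferedChatCompletionContext
from autogen_core.models import AssistantMessage, UserMessage


async def main() -> None:
    # Keep only the two most recent messages in view.
    context = BufferedChatCompletionContext(buffer_size=2)
    await context.add_message(UserMessage(content="What is 2 + 2?", source="user"))
    await context.add_message(AssistantMessage(content="2 + 2 = 4.", source="assistant"))
    await context.add_message(UserMessage(content="And 3 + 3?", source="user"))

    recent = await context.get_messages()
    # Only the last two messages remain in view; the first user message fell out of the buffer.
    assert len(recent) == 2

    # Config round-trip (assumes dump_component / load_component from autogen_core's
    # component system). Note that _to_config only captures buffer_size and the
    # constructor's initial_messages, so messages added via add_message are not carried over.
    config = context.dump_component()
    restored = BufferedChatCompletionContext.load_component(config)
    assert await restored.get_messages() == []


asyncio.run(main())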