from typing import Any, Optional
from autogen_core import CancellationToken
from autogen_core.memory import (
    Memory,
    MemoryContent,
    MemoryMimeType,
    MemoryQueryResult,
    UpdateContextResult,
)
from autogen_core.model_context import ChatCompletionContext
from autogen_core.models import SystemMessage
from ._canvas_writer import ApplyPatchTool, UpdateFileTool
from ._text_canvas import TextCanvas
class TextCanvasMemory(Memory):
    """
    A memory implementation that uses a Canvas to store file-like content.
    It inserts the current state of the canvas into the ChatCompletionContext on every turn.

    .. warning::

        This is an experimental API and may change in the future.

    The TextCanvasMemory provides a persistent, file-like storage mechanism that agents can
    use to read and write content. Before each inference, it automatically injects the
    current state of all files on the canvas into the model context.

    It is particularly useful for:

    - Allowing agents to create and modify documents across multiple turns
    - Enabling collaborative document editing between multiple agents
    - Maintaining persistent state across conversation turns
    - Working with content that is too large to fit into a single message

    The canvas provides tools to:

    - Create or update a file with new content
    - Apply a patch (in unified diff format) to an existing file

    Examples:

    **Example: Using TextCanvasMemory with an AssistantAgent**

    The following example demonstrates how to create a TextCanvasMemory and use it with an
    AssistantAgent to write and update a story file.

    .. code-block:: python

        import asyncio

        from autogen_core import CancellationToken
        from autogen_ext.models.openai import OpenAIChatCompletionClient
        from autogen_agentchat.agents import AssistantAgent
        from autogen_agentchat.messages import TextMessage
        from autogen_ext.memory.canvas import TextCanvasMemory


        async def main():
            # Create a model client
            model_client = OpenAIChatCompletionClient(
                model="gpt-4o",
                # api_key = "your_openai_api_key"
            )

            # Create the canvas memory
            text_canvas_memory = TextCanvasMemory()

            # Get tools for working with the canvas
            update_file_tool = text_canvas_memory.get_update_file_tool()
            apply_patch_tool = text_canvas_memory.get_apply_patch_tool()

            # Create an agent with the canvas memory and tools
            writer_agent = AssistantAgent(
                name="Writer",
                model_client=model_client,
                description="A writer agent that creates and updates stories.",
                system_message='''
                You are a Writer Agent. Your focus is to generate a story based on the user's request.

                Instructions for using the canvas:

                - The story should be stored on the canvas in a file named "story.md".
                - If "story.md" does not exist, create it by calling the 'update_file' tool.
                - If "story.md" already exists, generate a unified diff (patch) from the current
                  content to the new version, and call the 'apply_patch' tool to apply the changes.

                IMPORTANT: Do not include the full story text in your chat messages.
                Only write the story content to the canvas using the tools.
                ''',
                tools=[update_file_tool, apply_patch_tool],
                memory=[text_canvas_memory],
            )

            # Send a message to the agent
            await writer_agent.on_messages(
                [TextMessage(content="Write a short story about a bunny and a sunflower.", source="user")],
                CancellationToken(),
            )

            # Retrieve the content from the canvas
            story_content = text_canvas_memory.canvas.get_latest_content("story.md")
            print("Story content from canvas:")
            print(story_content)


        if __name__ == "__main__":
            asyncio.run(main())

    **Example: Using TextCanvasMemory with multiple agents**

    The following example shows how to use TextCanvasMemory when multiple agents collaborate
    on the same document.

    .. code-block:: python

        import asyncio

        from autogen_ext.models.openai import OpenAIChatCompletionClient
        from autogen_agentchat.agents import AssistantAgent
        from autogen_agentchat.teams import RoundRobinGroupChat
        from autogen_agentchat.conditions import TextMentionTermination
        from autogen_ext.memory.canvas import TextCanvasMemory


        async def main():
            # Create a model client
            model_client = OpenAIChatCompletionClient(
                model="gpt-4o",
                # api_key = "your_openai_api_key"
            )

            # Create the shared canvas memory
            text_canvas_memory = TextCanvasMemory()
            update_file_tool = text_canvas_memory.get_update_file_tool()
            apply_patch_tool = text_canvas_memory.get_apply_patch_tool()

            # Create a writer agent
            writer_agent = AssistantAgent(
                name="Writer",
                model_client=model_client,
                description="A writer agent that creates stories.",
                system_message="You write children's stories on the canvas in story.md.",
                tools=[update_file_tool, apply_patch_tool],
                memory=[text_canvas_memory],
            )

            # Create a critique agent
            critique_agent = AssistantAgent(
                name="Critique",
                model_client=model_client,
                description="A critique agent that provides feedback on stories.",
                system_message="You review the story.md file and provide constructive feedback.",
                memory=[text_canvas_memory],
            )

            # Create a team with both agents
            team = RoundRobinGroupChat(
                participants=[writer_agent, critique_agent],
                termination_condition=TextMentionTermination("TERMINATE"),
                max_turns=10,
            )

            # Run the team on a task
            await team.run(task="Create a children's book about a bunny and a sunflower")

            # Get the final story
            story = text_canvas_memory.canvas.get_latest_content("story.md")
            print(story)


        if __name__ == "__main__":
            asyncio.run(main())
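
    **Example: Inspecting the canvas directly**

    The snapshot that is injected into the model context can also be read directly from the
    ``canvas`` attribute, which is handy for debugging. This is a small sketch that only uses
    the same method update_context() relies on internally.

    .. code-block:: python

        from autogen_ext.memory.canvas import TextCanvasMemory

        text_canvas_memory = TextCanvasMemory()

        # The same snapshot string that update_context injects as a SystemMessage.
        print(text_canvas_memory.canvas.get_all_contents_for_context())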
"""

    def __init__(self, canvas: Optional[TextCanvas] = None):
        super().__init__()
        self.canvas = canvas if canvas is not None else TextCanvas()

    async def update_context(self, model_context: ChatCompletionContext) -> UpdateContextResult:
        """
        Inject the entire canvas summary (or a selected subset) as reference data.
        Here it is simply placed into a SystemMessage, but the handling can be customized.
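
        The sketch below shows one way to call this method directly and inspect what it
        returns; ``UnboundedChatCompletionContext`` is just a convenient concrete context
        implementation from ``autogen_core`` used here for illustration.

        .. code-block:: python

            import asyncio

            from autogen_core.model_context import UnboundedChatCompletionContext
            from autogen_ext.memory.canvas import TextCanvasMemory


            async def demo() -> None:
                memory = TextCanvasMemory()
                context = UnboundedChatCompletionContext()
                result = await memory.update_context(context)
                # Whatever snapshot was injected (if any) is mirrored in the returned
                # memories, which is useful for debugging and logging.
                for item in result.memories.results:
                    print(item.mime_type, len(str(item.content)))


            asyncio.run(demo())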
"""
        snapshot = self.canvas.get_all_contents_for_context()
        if snapshot.strip():
            msg = SystemMessage(content=snapshot)
            await model_context.add_message(msg)

            # Return it for debugging/logging
            memory_content = MemoryContent(content=snapshot, mime_type=MemoryMimeType.TEXT)
            return UpdateContextResult(memories=MemoryQueryResult(results=[memory_content]))
        return UpdateContextResult(memories=MemoryQueryResult(results=[]))

    async def query(
        self, query: str | MemoryContent, cancellation_token: Optional[CancellationToken] = None, **kwargs: Any
    ) -> MemoryQueryResult:
        """
        Could search for matching filenames or file contents.
        This implementation returns an empty result.
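
        One possible extension is sketched below. It assumes a plain substring match over the
        canvas snapshot is sufficient; ``SearchableCanvasMemory`` is a hypothetical subclass,
        not part of the library.

        .. code-block:: python

            from typing import Any, Optional

            from autogen_core import CancellationToken
            from autogen_core.memory import MemoryContent, MemoryMimeType, MemoryQueryResult
            from autogen_ext.memory.canvas import TextCanvasMemory


            class SearchableCanvasMemory(TextCanvasMemory):
                async def query(
                    self,
                    query: str | MemoryContent,
                    cancellation_token: Optional[CancellationToken] = None,
                    **kwargs: Any,
                ) -> MemoryQueryResult:
                    # Normalize the query to plain text, then match it against the canvas snapshot.
                    text = query if isinstance(query, str) else str(query.content)
                    snapshot = self.canvas.get_all_contents_for_context()
                    if text and text in snapshot:
                        return MemoryQueryResult(
                            results=[MemoryContent(content=snapshot, mime_type=MemoryMimeType.TEXT)]
                        )
                    return MemoryQueryResult(results=[])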
"""
        return MemoryQueryResult(results=[])

    async def add(self, content: MemoryContent, cancellation_token: Optional[CancellationToken] = None) -> None:
        """
        Example usage: the content could be interpreted as a patch or as a direct file update.
        This could also be done via a dedicated "CanvasTool".
        """
        # No-op here, leaving actual changes to the canvas tools.
        pass

    async def clear(self) -> None:
        """Clear the entire canvas by replacing it with a new, empty instance."""
        # Create a new TextCanvas instance instead of calling __init__ directly
        self.canvas = TextCanvas()

    async def close(self) -> None:
        pass

    def get_update_file_tool(self) -> UpdateFileTool:
        """
        Return an UpdateFileTool instance that works with this memory's canvas.
        """
        return UpdateFileTool(self.canvas)

    def get_apply_patch_tool(self) -> ApplyPatchTool:
        """
        Return an ApplyPatchTool instance that works with this memory's canvas.
        """
        return ApplyPatchTool(self.canvas)