autogen_core.logging#

class AgentConstructionExceptionEvent(*, agent_id: AgentId, exception: BaseException, **kwargs: Any)[source]#

Bases: object

class DeliveryStage(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)[source]#

Bases: Enum

DELIVER = 2#
SEND = 1#
class LLMCallEvent(*, messages: List[Dict[str, Any]], response: Dict[str, Any], prompt_tokens: int, completion_tokens: int, **kwargs: Any)[source]#

Bases: object

property completion_tokens: int#
property prompt_tokens: int#
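Example

A minimal sketch based on the constructor signature above; the message and response dictionaries and the token counts are illustrative placeholders, not values produced by a real model call.

import logging
from autogen_core import EVENT_LOGGER_NAME
from autogen_core.logging import LLMCallEvent

logger = logging.getLogger(EVENT_LOGGER_NAME)
logger.info(
    LLMCallEvent(
        messages=[{"role": "user", "content": "Hello, world!"}],
        response={"content": "Hi there!"},
        prompt_tokens=10,
        completion_tokens=5,
    )
)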
class LLMStreamEndEvent(*, response: Dict[str, Any], prompt_tokens: int, completion_tokens: int, **kwargs: Any)[source]#

Bases: object

property completion_tokens: int#
property prompt_tokens: int#
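Example

A minimal sketch based on the constructor signature above, logging the end of a streaming call; the response dictionary and token counts are illustrative placeholders.

import logging
from autogen_core import EVENT_LOGGER_NAME
from autogen_core.logging import LLMStreamEndEvent

logger = logging.getLogger(EVENT_LOGGER_NAME)
logger.info(
    LLMStreamEndEvent(
        response={"content": "Hi there!"},
        prompt_tokens=10,
        completion_tokens=5,
    )
)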
class LLMStreamStartEvent(*, messages: List[Dict[str, Any]], **kwargs: Any)[source]#

Bases: object

To be used by model clients to log the start of a streaming call.

Parameters:

messages (List[Dict[str, Any]]) -- The list of messages used in the call. Must be JSON serializable.

Example

import logging
from autogen_core import EVENT_LOGGER_NAME
from autogen_core.logging import LLMStreamStartEvent

messages = [{"role": "user", "content": "Hello, world!"}]
logger = logging.getLogger(EVENT_LOGGER_NAME)
logger.info(LLMStreamStartEvent(messages=messages))
class MessageDroppedEvent(*, payload: str, sender: AgentId | None, receiver: AgentId | TopicId | None, kind: MessageKind, **kwargs: Any)[source]#

Bases: object

class MessageEvent(*, payload: str, sender: AgentId | None, receiver: AgentId | TopicId | None, kind: MessageKind, delivery_stage: DeliveryStage, **kwargs: Any)[source]#

Bases: object

class MessageHandlerExceptionEvent(*, payload: str, handling_agent: AgentId, exception: BaseException, **kwargs: Any)[source]#

Bases: object

class MessageKind(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)[source]#

Bases: Enum

DIRECT = 1#
PUBLISH = 2#
RESPOND = 3#
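Example

A minimal sketch tying together MessageEvent, MessageKind, and DeliveryStage from above; the payload string and the AgentId type/key values are illustrative placeholders.

import logging
from autogen_core import EVENT_LOGGER_NAME, AgentId
from autogen_core.logging import DeliveryStage, MessageEvent, MessageKind

logger = logging.getLogger(EVENT_LOGGER_NAME)
logger.info(
    MessageEvent(
        payload='{"task": "Hello, world!"}',
        sender=AgentId("sender_agent", "default"),
        receiver=AgentId("receiver_agent", "default"),
        kind=MessageKind.DIRECT,
        delivery_stage=DeliveryStage.SEND,
    )
)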
class ToolCallEvent(*, tool_name: str, arguments: Dict[str, Any], result: str)[source]#

Bases: object
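Example

A minimal sketch based on the constructor signature above; the tool name, arguments, and result string are illustrative placeholders.

import logging
from autogen_core import EVENT_LOGGER_NAME
from autogen_core.logging import ToolCallEvent

logger = logging.getLogger(EVENT_LOGGER_NAME)
logger.info(
    ToolCallEvent(
        tool_name="get_weather",
        arguments={"city": "Seattle"},
        result='{"temperature_f": 72}',
    )
)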