diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_society_of_mind_agent.py b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_society_of_mind_agent.py index 713b518bd7d..c43c472c291 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_society_of_mind_agent.py +++ b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_society_of_mind_agent.py @@ -1,7 +1,9 @@ from typing import Any, AsyncGenerator, List, Mapping, Sequence -from autogen_core import CancellationToken +from autogen_core import CancellationToken, Component, ComponentModel from autogen_core.models import ChatCompletionClient, LLMMessage, SystemMessage, UserMessage +from pydantic import BaseModel +from typing_extensions import Self from autogen_agentchat.base import Response from autogen_agentchat.state import SocietyOfMindAgentState @@ -16,7 +18,18 @@ from ._base_chat_agent import BaseChatAgent -class SocietyOfMindAgent(BaseChatAgent): +class SocietyOfMindAgentConfig(BaseModel): + """The declarative configuration for a SocietyOfMindAgent.""" + + name: str + team: ComponentModel + model_client: ComponentModel + description: str + instruction: str + response_prompt: str + + +class SocietyOfMindAgent(BaseChatAgent, Component[SocietyOfMindAgentConfig]): """An agent that uses an inner team of agents to generate responses. Each time the agent's :meth:`on_messages` or :meth:`on_messages_stream` @@ -74,6 +87,9 @@ async def main() -> None: asyncio.run(main()) """ + component_config_schema = SocietyOfMindAgentConfig + component_provider_override = "autogen_agentchat.agents.SocietyOfMindAgent" + DEFAULT_INSTRUCTION = "Earlier you were asked to fulfill a request. You and your team worked diligently to address that request. Here is a transcript of that conversation:" """str: The default instruction to use when generating a response using the inner team's messages. 
The instruction will be prepended to the inner team's @@ -173,3 +189,26 @@ async def save_state(self) -> Mapping[str, Any]: async def load_state(self, state: Mapping[str, Any]) -> None: society_of_mind_state = SocietyOfMindAgentState.model_validate(state) await self._team.load_state(society_of_mind_state.inner_team_state) + + def _to_config(self) -> SocietyOfMindAgentConfig: + return SocietyOfMindAgentConfig( + name=self.name, + team=self._team.dump_component(), + model_client=self._model_client.dump_component(), + description=self.description, + instruction=self._instruction, + response_prompt=self._response_prompt, + ) + + @classmethod + def _from_config(cls, config: SocietyOfMindAgentConfig) -> Self: + model_client = ChatCompletionClient.load_component(config.model_client) + team = Team.load_component(config.team) + return cls( + name=config.name, + team=team, + model_client=model_client, + description=config.description, + instruction=config.instruction, + response_prompt=config.response_prompt, + ) diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/base/_chat_agent.py b/python/packages/autogen-agentchat/src/autogen_agentchat/base/_chat_agent.py index 36a80efe019..34f0f37ee6e 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/base/_chat_agent.py +++ b/python/packages/autogen-agentchat/src/autogen_agentchat/base/_chat_agent.py @@ -2,7 +2,8 @@ from dataclasses import dataclass from typing import Any, AsyncGenerator, Mapping, Sequence -from autogen_core import CancellationToken +from autogen_core import CancellationToken, ComponentBase +from pydantic import BaseModel from ..messages import AgentEvent, ChatMessage from ._task import TaskRunner @@ -20,9 +21,11 @@ class Response: or :class:`ChatMessage`.""" -class ChatAgent(ABC, TaskRunner): +class ChatAgent(ABC, TaskRunner, ComponentBase[BaseModel]): """Protocol for a chat agent.""" + component_type = "agent" + @property @abstractmethod def name(self) -> str: diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/base/_team.py b/python/packages/autogen-agentchat/src/autogen_agentchat/base/_team.py index 565ad225b86..0d25edf6d26 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/base/_team.py +++ b/python/packages/autogen-agentchat/src/autogen_agentchat/base/_team.py @@ -1,9 +1,15 @@ -from typing import Any, Mapping from abc import ABC, abstractmethod +from typing import Any, Mapping + +from autogen_core import ComponentBase +from pydantic import BaseModel + from ._task import TaskRunner -class Team(ABC, TaskRunner): +class Team(ABC, TaskRunner, ComponentBase[BaseModel]): + component_type = "team" + @abstractmethod async def reset(self) -> None: """Reset the team and all its participants to its initial state.""" diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_base_group_chat.py b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_base_group_chat.py index d27865e0a4a..61e3783a80e 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_base_group_chat.py +++ b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_base_group_chat.py @@ -11,11 +11,13 @@ AgentType, CancellationToken, ClosureAgent, + ComponentBase, MessageContext, SingleThreadedAgentRuntime, TypeSubscription, ) from autogen_core._closure_agent import ClosureContext +from pydantic import BaseModel from ... 
import EVENT_LOGGER_NAME from ...base import ChatAgent, TaskResult, Team, TerminationCondition @@ -28,13 +30,15 @@ event_logger = logging.getLogger(EVENT_LOGGER_NAME) -class BaseGroupChat(Team, ABC): +class BaseGroupChat(Team, ABC, ComponentBase[BaseModel]): """The base class for group chat teams. To implement a group chat team, first create a subclass of :class:`BaseGroupChatManager` and then create a subclass of :class:`BaseGroupChat` that uses the group chat manager. """ + component_type = "team" + def __init__( self, participants: List[ChatAgent], diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_magentic_one_group_chat.py b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_magentic_one_group_chat.py index f09904fb332..5ce7a71ebe2 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_magentic_one_group_chat.py +++ b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_magentic_one/_magentic_one_group_chat.py @@ -1,7 +1,10 @@ import logging from typing import Callable, List +from autogen_core import Component, ComponentModel from autogen_core.models import ChatCompletionClient +from pydantic import BaseModel +from typing_extensions import Self from .... import EVENT_LOGGER_NAME, TRACE_LOGGER_NAME from ....base import ChatAgent, TerminationCondition @@ -13,7 +16,18 @@ event_logger = logging.getLogger(EVENT_LOGGER_NAME) -class MagenticOneGroupChat(BaseGroupChat): +class MagenticOneGroupChatConfig(BaseModel): + """The declarative configuration for a MagenticOneGroupChat.""" + + participants: List[ComponentModel] + model_client: ComponentModel + termination_condition: ComponentModel | None = None + max_turns: int | None = None + max_stalls: int + final_answer_prompt: str + + +class MagenticOneGroupChat(BaseGroupChat, Component[MagenticOneGroupChatConfig]): """A team that runs a group chat with participants managed by the MagenticOneOrchestrator. 
The orchestrator handles the conversation flow, ensuring that the task is completed @@ -73,6 +87,9 @@ async def main() -> None: } """ + component_config_schema = MagenticOneGroupChatConfig + component_provider_override = "autogen_agentchat.teams.MagenticOneGroupChat" + def __init__( self, participants: List[ChatAgent], @@ -117,3 +134,31 @@ def _create_group_chat_manager_factory( self._final_answer_prompt, termination_condition, ) + + def _to_config(self) -> MagenticOneGroupChatConfig: + participants = [participant.dump_component() for participant in self._participants] + termination_condition = self._termination_condition.dump_component() if self._termination_condition else None + return MagenticOneGroupChatConfig( + participants=participants, + model_client=self._model_client.dump_component(), + termination_condition=termination_condition, + max_turns=self._max_turns, + max_stalls=self._max_stalls, + final_answer_prompt=self._final_answer_prompt, + ) + + @classmethod + def _from_config(cls, config: MagenticOneGroupChatConfig) -> Self: + participants = [ChatAgent.load_component(participant) for participant in config.participants] + model_client = ChatCompletionClient.load_component(config.model_client) + termination_condition = ( + TerminationCondition.load_component(config.termination_condition) if config.termination_condition else None + ) + return cls( + participants, + model_client, + termination_condition=termination_condition, + max_turns=config.max_turns, + max_stalls=config.max_stalls, + final_answer_prompt=config.final_answer_prompt, + ) diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_round_robin_group_chat.py b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_round_robin_group_chat.py index d6901f04c98..c7d34f0b332 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_round_robin_group_chat.py +++ b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_round_robin_group_chat.py @@ -1,5 +1,9 @@ from typing import Any, Callable, List, Mapping +from autogen_core import Component, ComponentModel +from pydantic import BaseModel +from typing_extensions import Self + from ...base import ChatAgent, TerminationCondition from ...messages import AgentEvent, ChatMessage from ...state import RoundRobinManagerState @@ -61,7 +65,15 @@ async def select_speaker(self, thread: List[AgentEvent | ChatMessage]) -> str: return current_speaker -class RoundRobinGroupChat(BaseGroupChat): +class RoundRobinGroupChatConfig(BaseModel): + """The declarative configuration for a RoundRobinGroupChat.""" + + participants: List[ComponentModel] + termination_condition: ComponentModel | None = None + max_turns: int | None = None + + +class RoundRobinGroupChat(BaseGroupChat, Component[RoundRobinGroupChatConfig]): """A team that runs a group chat with participants taking turns in a round-robin fashion to publish a message to all.
@@ -133,6 +145,9 @@ async def main() -> None: asyncio.run(main()) """ + component_config_schema = RoundRobinGroupChatConfig + component_provider_override = "autogen_agentchat.teams.RoundRobinGroupChat" + def __init__( self, participants: List[ChatAgent], @@ -166,3 +181,20 @@ def _factory() -> RoundRobinGroupChatManager: ) return _factory + + def _to_config(self) -> RoundRobinGroupChatConfig: + participants = [participant.dump_component() for participant in self._participants] + termination_condition = self._termination_condition.dump_component() if self._termination_condition else None + return RoundRobinGroupChatConfig( + participants=participants, + termination_condition=termination_condition, + max_turns=self._max_turns, + ) + + @classmethod + def _from_config(cls, config: RoundRobinGroupChatConfig) -> Self: + participants = [ChatAgent.load_component(participant) for participant in config.participants] + termination_condition = ( + TerminationCondition.load_component(config.termination_condition) if config.termination_condition else None + ) + return cls(participants, termination_condition=termination_condition, max_turns=config.max_turns) diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_selector_group_chat.py b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_selector_group_chat.py index dcc399ee297..2cc3c02a645 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_selector_group_chat.py +++ b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_selector_group_chat.py @@ -2,9 +2,13 @@ import re from typing import Any, Callable, Dict, List, Mapping, Sequence +from autogen_core import Component, ComponentModel from autogen_core.models import ChatCompletionClient, SystemMessage +from pydantic import BaseModel +from typing_extensions import Self from ... import TRACE_LOGGER_NAME +from ...agents import BaseChatAgent from ...base import ChatAgent, TerminationCondition from ...messages import ( AgentEvent, @@ -184,7 +188,19 @@ def _mentioned_agents(self, message_content: str, agent_names: List[str]) -> Dic return mentions -class SelectorGroupChat(BaseGroupChat): +class SelectorGroupChatConfig(BaseModel): + """The declarative configuration for SelectorGroupChat.""" + + participants: List[ComponentModel] + model_client: ComponentModel + termination_condition: ComponentModel | None = None + max_turns: int | None = None + selector_prompt: str + allow_repeated_speaker: bool + # selector_func: ComponentModel | None + + +class SelectorGroupChat(BaseGroupChat, Component[SelectorGroupChatConfig]): """A group chat team that has participants take turns publishing a message to all, using a ChatCompletion model to select the next speaker after each message.
@@ -321,6 +337,9 @@ def selector_func(messages: Sequence[AgentEvent | ChatMessage]) -> str | None: asyncio.run(main()) """ + component_config_schema = SelectorGroupChatConfig + component_provider_override = "autogen_agentchat.teams.SelectorGroupChat" + def __init__( self, participants: List[ChatAgent], @@ -381,3 +400,30 @@ def _create_group_chat_manager_factory( self._allow_repeated_speaker, self._selector_func, ) + + def _to_config(self) -> SelectorGroupChatConfig: + return SelectorGroupChatConfig( + participants=[participant.dump_component() for participant in self._participants], + model_client=self._model_client.dump_component(), + termination_condition=self._termination_condition.dump_component() if self._termination_condition else None, + max_turns=self._max_turns, + selector_prompt=self._selector_prompt, + allow_repeated_speaker=self._allow_repeated_speaker, + # selector_func=self._selector_func.dump_component() if self._selector_func else None, + ) + + @classmethod + def _from_config(cls, config: SelectorGroupChatConfig) -> Self: + return cls( + participants=[BaseChatAgent.load_component(participant) for participant in config.participants], + model_client=ChatCompletionClient.load_component(config.model_client), + termination_condition=TerminationCondition.load_component(config.termination_condition) + if config.termination_condition + else None, + max_turns=config.max_turns, + selector_prompt=config.selector_prompt, + allow_repeated_speaker=config.allow_repeated_speaker, + # selector_func=ComponentLoader.load_component(config.selector_func, Callable[[Sequence[AgentEvent | ChatMessage]], str | None]) + # if config.selector_func + # else None, + ) diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_swarm_group_chat.py b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_swarm_group_chat.py index a31a693e086..0ca02420537 100644 --- a/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_swarm_group_chat.py +++ b/python/packages/autogen-agentchat/src/autogen_agentchat/teams/_group_chat/_swarm_group_chat.py @@ -1,5 +1,8 @@ from typing import Any, Callable, List, Mapping +from autogen_core import Component, ComponentModel +from pydantic import BaseModel + from ...base import ChatAgent, TerminationCondition from ...messages import AgentEvent, ChatMessage, HandoffMessage from ...state import SwarmManagerState @@ -92,7 +95,15 @@ async def load_state(self, state: Mapping[str, Any]) -> None: self._current_speaker = swarm_state.current_speaker -class Swarm(BaseGroupChat): +class SwarmConfig(BaseModel): + """The declarative configuration for Swarm.""" + + participants: List[ComponentModel] + termination_condition: ComponentModel | None = None + max_turns: int | None = None + + +class Swarm(BaseGroupChat, Component[SwarmConfig]): """A group chat team that selects the next speaker based on handoff message only. The first participant in the list of participants is the initial speaker. 
@@ -180,6 +191,9 @@ async def main() -> None: asyncio.run(main()) """ + component_config_schema = SwarmConfig + component_provider_override = "autogen_agentchat.teams.Swarm" + def __init__( self, participants: List[ChatAgent], @@ -217,3 +231,20 @@ def _factory() -> SwarmGroupChatManager: ) return _factory + + def _to_config(self) -> SwarmConfig: + participants = [participant.dump_component() for participant in self._participants] + termination_condition = self._termination_condition.dump_component() if self._termination_condition else None + return SwarmConfig( + participants=participants, + termination_condition=termination_condition, + max_turns=self._max_turns, + ) + + @classmethod + def _from_config(cls, config: SwarmConfig) -> "Swarm": + participants = [ChatAgent.load_component(participant) for participant in config.participants] + termination_condition = ( + TerminationCondition.load_component(config.termination_condition) if config.termination_condition else None + ) + return cls(participants, termination_condition=termination_condition, max_turns=config.max_turns) diff --git a/python/packages/autogen-agentchat/tests/test_group_chat.py b/python/packages/autogen-agentchat/tests/test_group_chat.py index 3d51e1b58a4..c04d7344029 100644 --- a/python/packages/autogen-agentchat/tests/test_group_chat.py +++ b/python/packages/autogen-agentchat/tests/test_group_chat.py @@ -24,11 +24,7 @@ ToolCallRequestEvent, ToolCallSummaryMessage, ) -from autogen_agentchat.teams import ( - RoundRobinGroupChat, - SelectorGroupChat, - Swarm, -) +from autogen_agentchat.teams import MagenticOneGroupChat, RoundRobinGroupChat, SelectorGroupChat, Swarm from autogen_agentchat.teams._group_chat._round_robin_group_chat import RoundRobinGroupChatManager from autogen_agentchat.teams._group_chat._selector_group_chat import SelectorGroupChatManager from autogen_agentchat.teams._group_chat._swarm_group_chat import SwarmGroupChatManager @@ -1219,3 +1215,65 @@ async def test_round_robin_group_chat_with_message_list() -> None: # Test with empty message list with pytest.raises(ValueError, match="Task list cannot be empty"): await team.run(task=[]) + + +@pytest.mark.asyncio +async def test_declarative_groupchats_with_config() -> None: + # Create basic agents and components for testing + agent1 = AssistantAgent( + "agent_1", + model_client=OpenAIChatCompletionClient(model="gpt-4o-2024-05-13", api_key=""), + handoffs=["agent_2"], + ) + agent2 = AssistantAgent("agent_2", model_client=OpenAIChatCompletionClient(model="gpt-4o-2024-05-13", api_key="")) + termination = MaxMessageTermination(4) + model_client = OpenAIChatCompletionClient(model="gpt-4o-2024-05-13", api_key="") + + # Test round robin - verify config is preserved + round_robin = RoundRobinGroupChat(participants=[agent1, agent2], termination_condition=termination, max_turns=5) + config = round_robin.dump_component() + loaded = RoundRobinGroupChat.load_component(config) + assert loaded.dump_component() == config + + # Test selector group chat - verify config is preserved + selector_prompt = "Custom selector prompt with {roles}, {participants}, {history}" + selector = SelectorGroupChat( + participants=[agent1, agent2], + model_client=model_client, + termination_condition=termination, + max_turns=10, + selector_prompt=selector_prompt, + allow_repeated_speaker=True, + ) + selector_config = selector.dump_component() + selector_loaded = SelectorGroupChat.load_component(selector_config) + assert selector_loaded.dump_component() == selector_config + + # Test swarm with handoff 
termination + handoff_termination = HandoffTermination(target="Agent2") + swarm = Swarm(participants=[agent1, agent2], termination_condition=handoff_termination, max_turns=5) + swarm_config = swarm.dump_component() + swarm_loaded = Swarm.load_component(swarm_config) + assert swarm_loaded.dump_component() == swarm_config + + # Test MagenticOne with custom parameters + magentic = MagenticOneGroupChat( + participants=[agent1], + model_client=model_client, + max_turns=15, + max_stalls=5, + final_answer_prompt="Custom prompt", + ) + magentic_config = magentic.dump_component() + magentic_loaded = MagenticOneGroupChat.load_component(magentic_config) + assert magentic_loaded.dump_component() == magentic_config + + # Verify component types are correctly set for each + for team in [loaded, selector, swarm, magentic]: + assert team.component_type == "team" + + # Verify provider strings are correctly set + assert round_robin.dump_component().provider == "autogen_agentchat.teams.RoundRobinGroupChat" + assert selector.dump_component().provider == "autogen_agentchat.teams.SelectorGroupChat" + assert swarm.dump_component().provider == "autogen_agentchat.teams.Swarm" + assert magentic.dump_component().provider == "autogen_agentchat.teams.MagenticOneGroupChat" diff --git a/python/packages/autogen-agentchat/tests/test_society_of_mind_agent.py b/python/packages/autogen-agentchat/tests/test_society_of_mind_agent.py index 9bf4713d9c4..f71ba67d5d7 100644 --- a/python/packages/autogen-agentchat/tests/test_society_of_mind_agent.py +++ b/python/packages/autogen-agentchat/tests/test_society_of_mind_agent.py @@ -89,3 +89,13 @@ async def test_society_of_mind_agent(monkeypatch: pytest.MonkeyPatch) -> None: await society_of_mind_agent2.load_state(state) state2 = await society_of_mind_agent2.save_state() assert state == state2 + + # Test serialization. + + soc_agent_config = society_of_mind_agent.dump_component() + assert soc_agent_config.provider == "autogen_agentchat.agents.SocietyOfMindAgent" + + # Test deserialization. + loaded_soc_agent = SocietyOfMindAgent.load_component(soc_agent_config) + assert isinstance(loaded_soc_agent, SocietyOfMindAgent) + assert loaded_soc_agent.name == "society_of_mind" diff --git a/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/serialize-components.ipynb b/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/serialize-components.ipynb index ff29efa9100..017fcd54c97 100644 --- a/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/serialize-components.ipynb +++ b/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/serialize-components.ipynb @@ -1,170 +1,224 @@ { - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Serializing Components \n", - "\n", - "AutoGen provides a {py:class}`~autogen_core.Component` configuration class that defines behaviours to serialize/deserialize component into declarative specifications. We can accomplish this by calling `.dump_component()` and `.load_component()` respectively. This is useful for debugging, visualizing, and even for sharing your work with others. In this notebook, we will demonstrate how to serialize multiple components to a declarative specification like a JSON file. 
\n", - "\n", - "\n", - "```{warning}\n", - "\n", - "ONLY LOAD COMPONENTS FROM TRUSTED SOURCES.\n", - "\n", - "With serilized components, each component implements the logic for how it is serialized and deserialized - i.e., how the declarative specification is generated and how it is converted back to an object. \n", - "\n", - "In some cases, creating an object may include executing code (e.g., a serialized function). ONLY LOAD COMPONENTS FROM TRUSTED SOURCES. \n", - " \n", - "```\n", - "\n", - " \n", - "### Termination Condition Example \n", - "\n", - "In the example below, we will define termination conditions (a part of an agent team) in python, export this to a dictionary/json and also demonstrate how the termination condition object can be loaded from the dictionary/json. \n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ + "cells": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "Config: {\"provider\":\"autogen_agentchat.base.OrTerminationCondition\",\"component_type\":\"termination\",\"version\":1,\"component_version\":1,\"description\":null,\"config\":{\"conditions\":[{\"provider\":\"autogen_agentchat.conditions.MaxMessageTermination\",\"component_type\":\"termination\",\"version\":1,\"component_version\":1,\"config\":{\"max_messages\":5}},{\"provider\":\"autogen_agentchat.conditions.StopMessageTermination\",\"component_type\":\"termination\",\"version\":1,\"component_version\":1,\"config\":{}}]}}\n" - ] - } - ], - "source": [ - "from autogen_agentchat.conditions import MaxMessageTermination, StopMessageTermination\n", - "\n", - "max_termination = MaxMessageTermination(5)\n", - "stop_termination = StopMessageTermination()\n", - "\n", - "or_termination = max_termination | stop_termination\n", - "\n", - "or_term_config = or_termination.dump_component()\n", - "print(\"Config: \", or_term_config.model_dump_json())\n", - "\n", - "new_or_termination = or_termination.load_component(or_term_config)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Agent Example \n", - "\n", - "In the example below, we will define an agent in python, export this to a dictionary/json and also demonstrate how the agent object can be loaded from the dictionary/json." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from autogen_agentchat.agents import AssistantAgent, UserProxyAgent\n", - "from autogen_ext.models.openai import OpenAIChatCompletionClient\n", - "\n", - "# Create an agent that uses the OpenAI GPT-4o model.\n", - "model_client = OpenAIChatCompletionClient(\n", - " model=\"gpt-4o\",\n", - " # api_key=\"YOUR_API_KEY\",\n", - ")\n", - "agent = AssistantAgent(\n", - " name=\"assistant\",\n", - " model_client=model_client,\n", - " handoffs=[\"flights_refunder\", \"user\"],\n", - " # tools=[], # serializing tools is not yet supported\n", - " system_message=\"Use tools to solve tasks.\",\n", - ")\n", - "user_proxy = UserProxyAgent(name=\"user\")" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Serializing Components \n", + "\n", + "AutoGen provides a {py:class}`~autogen_core.Component` configuration class that defines behaviours to serialize/deserialize component into declarative specifications. We can accomplish this by calling `.dump_component()` and `.load_component()` respectively. 
This is useful for debugging, visualizing, and even for sharing your work with others. In this notebook, we will demonstrate how to serialize multiple components to a declarative specification like a JSON file. \n", + "\n", + "\n", + "```{warning}\n", + "\n", + "ONLY LOAD COMPONENTS FROM TRUSTED SOURCES.\n", + "\n", + "With serialized components, each component implements the logic for how it is serialized and deserialized - i.e., how the declarative specification is generated and how it is converted back to an object. \n", + "\n", + "In some cases, creating an object may include executing code (e.g., a serialized function). ONLY LOAD COMPONENTS FROM TRUSTED SOURCES. \n", + " \n", + "```\n", + "\n", + " \n", + "### Termination Condition Example \n", + "\n", + "In the example below, we will define termination conditions (a part of an agent team) in python, export this to a dictionary/json and also demonstrate how the termination condition object can be loaded from the dictionary/json. \n", + " " + ] + }, { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Config: {\"provider\":\"autogen_agentchat.base.OrTerminationCondition\",\"component_type\":\"termination\",\"version\":1,\"component_version\":1,\"description\":null,\"config\":{\"conditions\":[{\"provider\":\"autogen_agentchat.conditions.MaxMessageTermination\",\"component_type\":\"termination\",\"version\":1,\"component_version\":1,\"config\":{\"max_messages\":5}},{\"provider\":\"autogen_agentchat.conditions.StopMessageTermination\",\"component_type\":\"termination\",\"version\":1,\"component_version\":1,\"config\":{}}]}}\n" + ] + } + ], + "source": [ + "from autogen_agentchat.conditions import MaxMessageTermination, StopMessageTermination\n", + "\n", + "max_termination = MaxMessageTermination(5)\n", + "stop_termination = StopMessageTermination()\n", + "\n", + "or_termination = max_termination | stop_termination\n", + "\n", + "or_term_config = or_termination.dump_component()\n", + "print(\"Config: \", or_term_config.model_dump_json())\n", + "\n", + "new_or_termination = or_termination.load_component(or_term_config)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Agent Example \n", + "\n", + "In the example below, we will define an agent in python, export this to a dictionary/json and also demonstrate how the agent object can be loaded from the dictionary/json."
+ ] + }, { - "name": "stdout", - "output_type": "stream", - "text": [ - "{\"provider\":\"autogen_agentchat.agents.AssistantAgent\",\"component_type\":\"agent\",\"version\":1,\"component_version\":1,\"description\":null,\"config\":{\"name\":\"assistant\",\"model_client\":{\"provider\":\"autogen_ext.models.openai.OpenAIChatCompletionClient\",\"component_type\":\"model\",\"version\":1,\"component_version\":1,\"config\":{\"model\":\"gpt-4o\"}},\"handoffs\":[{\"target\":\"flights_refunder\",\"description\":\"Handoff to flights_refunder.\",\"name\":\"transfer_to_flights_refunder\",\"message\":\"Transferred to flights_refunder, adopting the role of flights_refunder immediately.\"},{\"target\":\"user\",\"description\":\"Handoff to user.\",\"name\":\"transfer_to_user\",\"message\":\"Transferred to user, adopting the role of user immediately.\"}],\"model_context\":{\"provider\":\"autogen_core.model_context.UnboundedChatCompletionContext\",\"component_type\":\"chat_completion_context\",\"version\":1,\"component_version\":1,\"config\":{}},\"description\":\"An agent that provides assistance with ability to use tools.\",\"system_message\":\"Use tools to solve tasks.\",\"reflect_on_tool_use\":false,\"tool_call_summary_format\":\"{result}\"}}\n" - ] + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "from autogen_agentchat.agents import AssistantAgent, UserProxyAgent\n", + "from autogen_ext.models.openai import OpenAIChatCompletionClient\n", + "\n", + "# Create an agent that uses the OpenAI GPT-4o model.\n", + "model_client = OpenAIChatCompletionClient(\n", + " model=\"gpt-4o\",\n", + " # api_key=\"YOUR_API_KEY\",\n", + ")\n", + "agent = AssistantAgent(\n", + " name=\"assistant\",\n", + " model_client=model_client,\n", + " handoffs=[\"flights_refunder\", \"user\"],\n", + " # tools=[], # serializing tools is not yet supported\n", + " system_message=\"Use tools to solve tasks.\",\n", + ")\n", + "user_proxy = UserProxyAgent(name=\"user\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\"provider\":\"autogen_agentchat.agents.UserProxyAgent\",\"component_type\":\"agent\",\"version\":1,\"component_version\":1,\"description\":null,\"config\":{\"name\":\"user\",\"description\":\"A human user\"}}\n" + ] + } + ], + "source": [ + "user_proxy_config = user_proxy.dump_component() # dump component\n", + "print(user_proxy_config.model_dump_json())\n", + "up_new = user_proxy.load_component(user_proxy_config) # load component" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\"provider\":\"autogen_agentchat.agents.AssistantAgent\",\"component_type\":\"agent\",\"version\":1,\"component_version\":1,\"description\":null,\"config\":{\"name\":\"assistant\",\"model_client\":{\"provider\":\"autogen_ext.models.openai.OpenAIChatCompletionClient\",\"component_type\":\"model\",\"version\":1,\"component_version\":1,\"config\":{\"model\":\"gpt-4o\"}},\"handoffs\":[{\"target\":\"flights_refunder\",\"description\":\"Handoff to flights_refunder.\",\"name\":\"transfer_to_flights_refunder\",\"message\":\"Transferred to flights_refunder, adopting the role of flights_refunder immediately.\"},{\"target\":\"user\",\"description\":\"Handoff to user.\",\"name\":\"transfer_to_user\",\"message\":\"Transferred to user, adopting the role of user 
immediately.\"}],\"model_context\":{\"provider\":\"autogen_core.model_context.UnboundedChatCompletionContext\",\"component_type\":\"chat_completion_context\",\"version\":1,\"component_version\":1,\"config\":{}},\"description\":\"An agent that provides assistance with ability to use tools.\",\"system_message\":\"Use tools to solve tasks.\",\"reflect_on_tool_use\":false,\"tool_call_summary_format\":\"{result}\"}}\n" + ] + } + ], + "source": [ + "agent_config = agent.dump_component() # dump component\n", + "print(agent_config.model_dump_json())\n", + "agent_new = agent.load_component(agent_config) # load component" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A similar approach can be used to serialize the `MultiModalWebSurfer` agent.\n", + "\n", + "```python\n", + "from autogen_ext.agents.web_surfer import MultimodalWebSurfer\n", + "\n", + "agent = MultimodalWebSurfer(\n", + " name=\"web_surfer\",\n", + " model_client=model_client,\n", + " headless=False,\n", + ")\n", + "\n", + "web_surfer_config = agent.dump_component() # dump component\n", + "print(web_surfer_config.model_dump_json())\n", + "\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Team Example\n", + "\n", + "In the example below, we will define a team in python, export this to a dictionary/json and also demonstrate how the team object can be loaded from the dictionary/json." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\"provider\":\"autogen_agentchat.teams.RoundRobinGroupChat\",\"component_type\":\"team\",\"version\":1,\"component_version\":1,\"description\":null,\"config\":{\"participants\":[{\"provider\":\"autogen_agentchat.agents.AssistantAgent\",\"component_type\":\"agent\",\"version\":1,\"component_version\":1,\"config\":{\"name\":\"assistant\",\"model_client\":{\"provider\":\"autogen_ext.models.openai.OpenAIChatCompletionClient\",\"component_type\":\"model\",\"version\":1,\"component_version\":1,\"config\":{\"model\":\"gpt-4o\"}},\"handoffs\":[{\"target\":\"flights_refunder\",\"description\":\"Handoff to flights_refunder.\",\"name\":\"transfer_to_flights_refunder\",\"message\":\"Transferred to flights_refunder, adopting the role of flights_refunder immediately.\"},{\"target\":\"user\",\"description\":\"Handoff to user.\",\"name\":\"transfer_to_user\",\"message\":\"Transferred to user, adopting the role of user immediately.\"}],\"model_context\":{\"provider\":\"autogen_core.model_context.UnboundedChatCompletionContext\",\"component_type\":\"chat_completion_context\",\"version\":1,\"component_version\":1,\"config\":{}},\"description\":\"An agent that provides assistance with ability to use tools.\",\"system_message\":\"Use tools to solve tasks.\",\"reflect_on_tool_use\":false,\"tool_call_summary_format\":\"{result}\"}}],\"termination_condition\":{\"provider\":\"autogen_agentchat.conditions.MaxMessageTermination\",\"component_type\":\"termination\",\"version\":1,\"component_version\":1,\"config\":{\"max_messages\":2}}}}\n" + ] + } + ], + "source": [ + "from autogen_agentchat.agents import AssistantAgent, UserProxyAgent\n", + "from autogen_agentchat.conditions import MaxMessageTermination\n", + "from autogen_agentchat.teams import RoundRobinGroupChat\n", + "from autogen_ext.models.openai import OpenAIChatCompletionClient\n", + "\n", + "# Create an agent that uses the OpenAI GPT-4o model.\n", + "model_client = OpenAIChatCompletionClient(\n", + " 
model=\"gpt-4o\",\n", + " # api_key=\"YOUR_API_KEY\",\n", + ")\n", + "agent = AssistantAgent(\n", + " name=\"assistant\",\n", + " model_client=model_client,\n", + " handoffs=[\"flights_refunder\", \"user\"],\n", + " # tools=[], # serializing tools is not yet supported\n", + " system_message=\"Use tools to solve tasks.\",\n", + ")\n", + "\n", + "team = RoundRobinGroupChat(participants=[agent], termination_condition=MaxMessageTermination(2))\n", + "\n", + "team_config = team.dump_component() # dump component\n", + "print(team_config.model_dump_json())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" } - ], - "source": [ - "agent_config = agent.dump_component() # dump component\n", - "print(agent_config.model_dump_json())\n", - "agent_new = agent.load_component(agent_config) # load component" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "A similar approach can be used to serialize the `MultiModalWebSurfer` agent.\n", - "\n", - "```python\n", - "from autogen_ext.agents.web_surfer import MultimodalWebSurfer\n", - "\n", - "agent = MultimodalWebSurfer(\n", - " name=\"web_surfer\",\n", - " model_client=model_client,\n", - " headless=False,\n", - ")\n", - "\n", - "web_surfer_config = agent.dump_component() # dump component\n", - "print(web_surfer_config.model_dump_json())\n", - "\n", - "```" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.9" - } - }, - "nbformat": 4, - "nbformat_minor": 2 + "nbformat": 4, + "nbformat_minor": 2 } diff --git a/python/packages/autogen-core/src/autogen_core/memory/_base_memory.py b/python/packages/autogen-core/src/autogen_core/memory/_base_memory.py index bfccafdf0a9..2ae79b4106b 100644 --- a/python/packages/autogen-core/src/autogen_core/memory/_base_memory.py +++ b/python/packages/autogen-core/src/autogen_core/memory/_base_memory.py @@ -1,6 +1,6 @@ +from abc import ABC, abstractmethod from enum import Enum from typing import Any, Dict, List, Union -from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict diff --git a/python/packages/autogen-core/tests/test_memory.py b/python/packages/autogen-core/tests/test_memory.py index 7d0f6b6ea5b..04054e1b225 100644 --- a/python/packages/autogen-core/tests/test_memory.py +++ b/python/packages/autogen-core/tests/test_memory.py @@ -1,4 +1,5 @@ from typing import Any + import pytest from autogen_core import CancellationToken from autogen_core.memory import (