diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 74595b02e8ee..3b6c785d1c26 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -33,7 +33,7 @@ jobs:
[
# For main use the workflow target
{ ref: "${{github.ref}}", dest-dir: dev, uv-version: "0.5.13", sphinx-release-override: "dev" },
- { ref: "v0.4.0.post1", dest-dir: stable, uv-version: "0.5.13", sphinx-release-override: "stable" },
+ { ref: "v0.4.1", dest-dir: stable, uv-version: "0.5.13", sphinx-release-override: "stable" },
{ ref: "v0.4.0.dev0", dest-dir: "0.4.0.dev0", uv-version: "0.5.11", sphinx-release-override: "" },
{ ref: "v0.4.0.dev1", dest-dir: "0.4.0.dev1", uv-version: "0.5.11", sphinx-release-override: "" },
{ ref: "v0.4.0.dev2", dest-dir: "0.4.0.dev2", uv-version: "0.5.11", sphinx-release-override: "" },
@@ -49,6 +49,7 @@ jobs:
{ ref: "v0.4.0.dev12", dest-dir: "0.4.0.dev12", uv-version: "0.5.13", sphinx-release-override: "" },
{ ref: "v0.4.0.dev13", dest-dir: "0.4.0.dev13", uv-version: "0.5.13", sphinx-release-override: "" },
{ ref: "v0.4.0.post1", dest-dir: "0.4.0", uv-version: "0.5.13", sphinx-release-override: "" },
+ { ref: "v0.4.1", dest-dir: "0.4.1", uv-version: "0.5.13", sphinx-release-override: "" },
]
steps:
- name: Checkout
diff --git a/README.md b/README.md
index 5e5299469a09..179067fe9106 100644
--- a/README.md
+++ b/README.md
@@ -25,6 +25,13 @@ pip install -U "autogen-agentchat" "autogen-ext[openai]"
The current stable version is v0.4. If you are upgrading from AutoGen v0.2, please refer to the [Migration Guide](https://microsoft.github.io/autogen/dev/user-guide/agentchat-user-guide/migration-guide.html) for detailed instructions on how to update your code and configurations.
+```bash
+# Install AutoGen Studio for no-code GUI
+pip install -U "autogenstudio"
+```
+
+## Quickstart
+
### Hello World
Create an assistant agent using OpenAI's GPT-4o model.
@@ -69,6 +76,15 @@ async def main() -> None:
asyncio.run(main())
```
+### AutoGen Studio
+
+Use AutoGen Studio to prototype and run multi-agent workflows without writing code.
+
+```bash
+# Run AutoGen Studio on http://localhost:8080
+autogenstudio ui --port 8080 --appdir ./my-app
+```
+
## Why Use AutoGen?
diff --git a/docs/switcher.json b/docs/switcher.json
index f90f52ef0847..ab4e1d2e5c75 100644
--- a/docs/switcher.json
+++ b/docs/switcher.json
@@ -1,6 +1,6 @@
[
{
- "name": "0.4.0 (stable)",
+ "name": "0.4.1 (stable)",
"version": "stable",
"url": "/autogen/stable/",
"preferred": true
@@ -15,6 +15,11 @@
"version": "0.2",
"url": "/autogen/0.2/"
},
+ {
+ "name": "0.4.0",
+ "version": "0.4.0",
+ "url": "/autogen/0.4.0/"
+ },
{
"name": "0.4.0.dev0",
"version": "0.4.0.dev0",
diff --git a/python/packages/autogen-agentchat/pyproject.toml b/python/packages/autogen-agentchat/pyproject.toml
index 2aad2d35b1b7..8e9329f0d49a 100644
--- a/python/packages/autogen-agentchat/pyproject.toml
+++ b/python/packages/autogen-agentchat/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "autogen-agentchat"
-version = "0.4.0"
+version = "0.4.1"
license = {file = "LICENSE-CODE"}
description = "AutoGen agents and teams library"
readme = "README.md"
@@ -15,7 +15,7 @@ classifiers = [
"Operating System :: OS Independent",
]
dependencies = [
- "autogen-core==0.4.0",
+ "autogen-core==0.4.1",
"aioconsole>=0.8.1"
]
diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_user_proxy_agent.py b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_user_proxy_agent.py
index 2ad9a24682f0..89e0b61a50ee 100644
--- a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_user_proxy_agent.py
+++ b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_user_proxy_agent.py
@@ -1,15 +1,17 @@
import asyncio
+import uuid
+from contextlib import contextmanager
+from contextvars import ContextVar
from inspect import iscoroutinefunction
-from typing import Awaitable, Callable, Optional, Sequence, Union, cast
+from typing import Any, AsyncGenerator, Awaitable, Callable, ClassVar, Generator, Optional, Sequence, Union, cast
from aioconsole import ainput # type: ignore
from autogen_core import CancellationToken
from ..base import Response
-from ..messages import ChatMessage, HandoffMessage, TextMessage
+from ..messages import AgentEvent, ChatMessage, HandoffMessage, TextMessage, UserInputRequestedEvent
from ._base_chat_agent import BaseChatAgent
-# Define input function types more precisely
SyncInputFunc = Callable[[str], str]
AsyncInputFunc = Callable[[str, Optional[CancellationToken]], Awaitable[str]]
InputFuncType = Union[SyncInputFunc, AsyncInputFunc]
@@ -109,6 +111,33 @@ async def cancellable_user_agent():
print(f"BaseException: {e}")
"""
+ class InputRequestContext:
+ def __init__(self) -> None:
+ raise RuntimeError(
+ "InputRequestContext cannot be instantiated. It is a static class that provides context management for user input requests."
+ )
+
+ _INPUT_REQUEST_CONTEXT_VAR: ClassVar[ContextVar[str]] = ContextVar("_INPUT_REQUEST_CONTEXT_VAR")
+
+ @classmethod
+ @contextmanager
+ def populate_context(cls, ctx: str) -> Generator[None, Any, None]:
+ """:meta private:"""
+ token = UserProxyAgent.InputRequestContext._INPUT_REQUEST_CONTEXT_VAR.set(ctx)
+ try:
+ yield
+ finally:
+ UserProxyAgent.InputRequestContext._INPUT_REQUEST_CONTEXT_VAR.reset(token)
+
+ @classmethod
+ def request_id(cls) -> str:
+ try:
+ return cls._INPUT_REQUEST_CONTEXT_VAR.get()
+ except LookupError as e:
+ raise RuntimeError(
+ "InputRequestContext.request_id() must be called within the input callback of a UserProxyAgent."
+ ) from e
+
def __init__(
self,
name: str,
@@ -153,9 +182,15 @@ async def _get_input(self, prompt: str, cancellation_token: Optional[Cancellatio
except Exception as e:
raise RuntimeError(f"Failed to get user input: {str(e)}") from e
- async def on_messages(
- self, messages: Sequence[ChatMessage], cancellation_token: Optional[CancellationToken] = None
- ) -> Response:
+ async def on_messages(self, messages: Sequence[ChatMessage], cancellation_token: CancellationToken) -> Response:
+ async for message in self.on_messages_stream(messages, cancellation_token):
+ if isinstance(message, Response):
+ return message
+ raise AssertionError("The stream should have returned the final result.")
+
+ async def on_messages_stream(
+ self, messages: Sequence[ChatMessage], cancellation_token: CancellationToken
+ ) -> AsyncGenerator[AgentEvent | ChatMessage | Response, None]:
"""Handle incoming messages by requesting user input."""
try:
# Check for handoff first
@@ -164,15 +199,18 @@ async def on_messages(
f"Handoff received from {handoff.source}. Enter your response: " if handoff else "Enter your response: "
)
- user_input = await self._get_input(prompt, cancellation_token)
+ request_id = str(uuid.uuid4())
+
+ input_requested_event = UserInputRequestedEvent(request_id=request_id, source=self.name)
+ yield input_requested_event
+ with UserProxyAgent.InputRequestContext.populate_context(request_id):
+ user_input = await self._get_input(prompt, cancellation_token)
# Return appropriate message type based on handoff presence
if handoff:
- return Response(
- chat_message=HandoffMessage(content=user_input, target=handoff.source, source=self.name)
- )
+ yield Response(chat_message=HandoffMessage(content=user_input, target=handoff.source, source=self.name))
else:
- return Response(chat_message=TextMessage(content=user_input, source=self.name))
+ yield Response(chat_message=TextMessage(content=user_input, source=self.name))
except asyncio.CancelledError:
raise
diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/messages.py b/python/packages/autogen-agentchat/src/autogen_agentchat/messages.py
index 07fc3123eb4c..21fb32d9d584 100644
--- a/python/packages/autogen-agentchat/src/autogen_agentchat/messages.py
+++ b/python/packages/autogen-agentchat/src/autogen_agentchat/messages.py
@@ -103,25 +103,40 @@ class ToolCallSummaryMessage(BaseChatMessage):
type: Literal["ToolCallSummaryMessage"] = "ToolCallSummaryMessage"
+class UserInputRequestedEvent(BaseAgentEvent):
+ """An event signaling that the user proxy has requested user input. Published prior to invoking the input callback."""
+
+ request_id: str
+ """Identifier for the user input request."""
+
+ content: Literal[""] = ""
+ """Empty content for compat with consumers expecting a content field."""
+
+ type: Literal["UserInputRequestedEvent"] = "UserInputRequestedEvent"
+
+
ChatMessage = Annotated[
TextMessage | MultiModalMessage | StopMessage | ToolCallSummaryMessage | HandoffMessage, Field(discriminator="type")
]
"""Messages for agent-to-agent communication only."""
-AgentEvent = Annotated[ToolCallRequestEvent | ToolCallExecutionEvent, Field(discriminator="type")]
+AgentEvent = Annotated[
+ ToolCallRequestEvent | ToolCallExecutionEvent | UserInputRequestedEvent, Field(discriminator="type")
+]
"""Events emitted by agents and teams when they work, not used for agent-to-agent communication."""
__all__ = [
+ "AgentEvent",
"BaseMessage",
- "TextMessage",
+ "ChatMessage",
+ "HandoffMessage",
"MultiModalMessage",
"StopMessage",
- "HandoffMessage",
- "ToolCallRequestEvent",
+ "TextMessage",
"ToolCallExecutionEvent",
+ "ToolCallRequestEvent",
"ToolCallSummaryMessage",
- "ChatMessage",
- "AgentEvent",
+ "UserInputRequestedEvent",
]
diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/ui/__init__.py b/python/packages/autogen-agentchat/src/autogen_agentchat/ui/__init__.py
index 65c4f1e07ad9..9cc0837c58c2 100644
--- a/python/packages/autogen-agentchat/src/autogen_agentchat/ui/__init__.py
+++ b/python/packages/autogen-agentchat/src/autogen_agentchat/ui/__init__.py
@@ -2,6 +2,6 @@
This module implements utility classes for formatting/printing agent messages.
"""
-from ._console import Console
+from ._console import Console, UserInputManager
-__all__ = ["Console"]
+__all__ = ["Console", "UserInputManager"]
diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/ui/_console.py b/python/packages/autogen-agentchat/src/autogen_agentchat/ui/_console.py
index 79d39d6add7f..767dc68d8b4e 100644
--- a/python/packages/autogen-agentchat/src/autogen_agentchat/ui/_console.py
+++ b/python/packages/autogen-agentchat/src/autogen_agentchat/ui/_console.py
@@ -1,14 +1,17 @@
+import asyncio
import os
import sys
import time
-from typing import AsyncGenerator, List, Optional, TypeVar, cast
+from inspect import iscoroutinefunction
+from typing import AsyncGenerator, Awaitable, Callable, Dict, List, Optional, TypeVar, Union, cast
from aioconsole import aprint # type: ignore
-from autogen_core import Image
+from autogen_core import CancellationToken, Image
from autogen_core.models import RequestUsage
+from autogen_agentchat.agents import UserProxyAgent
from autogen_agentchat.base import Response, TaskResult
-from autogen_agentchat.messages import AgentEvent, ChatMessage, MultiModalMessage
+from autogen_agentchat.messages import AgentEvent, ChatMessage, MultiModalMessage, UserInputRequestedEvent
def _is_running_in_iterm() -> bool:
@@ -19,25 +22,76 @@ def _is_output_a_tty() -> bool:
return sys.stdout.isatty()
+SyncInputFunc = Callable[[str], str]
+AsyncInputFunc = Callable[[str, Optional[CancellationToken]], Awaitable[str]]
+InputFuncType = Union[SyncInputFunc, AsyncInputFunc]
+
T = TypeVar("T", bound=TaskResult | Response)
+class UserInputManager:
+ def __init__(self, callback: InputFuncType):
+ self.input_events: Dict[str, asyncio.Event] = {}
+ self.callback = callback
+
+ def get_wrapped_callback(self) -> AsyncInputFunc:
+ async def user_input_func_wrapper(prompt: str, cancellation_token: Optional[CancellationToken]) -> str:
+ # Lookup the event for the prompt, if it exists wait for it.
+ # If it doesn't exist, create it and store it.
+ # Get request ID:
+ request_id = UserProxyAgent.InputRequestContext.request_id()
+ if request_id in self.input_events:
+ event = self.input_events[request_id]
+ else:
+ event = asyncio.Event()
+ self.input_events[request_id] = event
+
+ await event.wait()
+
+ del self.input_events[request_id]
+
+ if iscoroutinefunction(self.callback):
+ # Cast to AsyncInputFunc for proper typing
+ async_func = cast(AsyncInputFunc, self.callback)
+ return await async_func(prompt, cancellation_token)
+ else:
+ # Cast to SyncInputFunc for proper typing
+ sync_func = cast(SyncInputFunc, self.callback)
+ loop = asyncio.get_event_loop()
+ return await loop.run_in_executor(None, sync_func, prompt)
+
+ return user_input_func_wrapper
+
+ def notify_event_received(self, request_id: str) -> None:
+ if request_id in self.input_events:
+ self.input_events[request_id].set()
+ else:
+ event = asyncio.Event()
+ self.input_events[request_id] = event
+
+
async def Console(
stream: AsyncGenerator[AgentEvent | ChatMessage | T, None],
*,
no_inline_images: bool = False,
- output_stats: bool = True,
+ output_stats: bool = False,
+ user_input_manager: UserInputManager | None = None,
) -> T:
"""
Consumes the message stream from :meth:`~autogen_agentchat.base.TaskRunner.run_stream`
or :meth:`~autogen_agentchat.base.ChatAgent.on_messages_stream` and renders the messages to the console.
Returns the last processed TaskResult or Response.
+ .. note::
+
+ `output_stats` is experimental and the stats may not be accurate.
+ It will be improved in future releases.
+
Args:
stream (AsyncGenerator[AgentEvent | ChatMessage | TaskResult, None] | AsyncGenerator[AgentEvent | ChatMessage | Response, None]): Message stream to render.
This can be from :meth:`~autogen_agentchat.base.TaskRunner.run_stream` or :meth:`~autogen_agentchat.base.ChatAgent.on_messages_stream`.
no_inline_images (bool, optional): If terminal is iTerm2 will render images inline. Use this to disable this behavior. Defaults to False.
- output_stats (bool, optional): If True, will output a summary of the messages and inline token usage info. Defaults to True.
+ output_stats (bool, optional): (Experimental) If True, will output a summary of the messages and inline token usage info. Defaults to False.
Returns:
last_processed: A :class:`~autogen_agentchat.base.TaskResult` if the stream is from :meth:`~autogen_agentchat.base.TaskRunner.run_stream`
@@ -62,6 +116,7 @@ async def Console(
f"Duration: {duration:.2f} seconds\n"
)
await aprint(output, end="")
+
# mypy ignore
last_processed = message # type: ignore
@@ -91,9 +146,13 @@ async def Console(
f"Duration: {duration:.2f} seconds\n"
)
await aprint(output, end="")
+
# mypy ignore
last_processed = message # type: ignore
-
+ # We don't want to print UserInputRequestedEvent messages, we just use them to signal the user input event.
+ elif isinstance(message, UserInputRequestedEvent):
+ if user_input_manager is not None:
+ user_input_manager.notify_event_received(message.request_id)
else:
# Cast required for mypy to be happy
message = cast(AgentEvent | ChatMessage, message) # type: ignore
diff --git a/python/packages/autogen-core/docs/src/index.md b/python/packages/autogen-core/docs/src/index.md
index 8b2b651ac5e8..1f13cc8dc237 100644
--- a/python/packages/autogen-core/docs/src/index.md
+++ b/python/packages/autogen-core/docs/src/index.md
@@ -60,7 +60,7 @@ A console-based multi-agent assistant for web and file-based tasks.
Built on AgentChat.
```bash
-pip install magentic-one-cli
+pip install -U magentic-one-cli
m1 "Find flights from Seattle to Paris and format the result in a table"
```
@@ -83,8 +83,8 @@ An app for prototyping and managing agents without writing code.
Built on AgentChat.
```bash
-pip install autogenstudio
-autogenstudio ui --port 8080
+pip install -U autogenstudio
+autogenstudio ui --port 8080 --appdir ./myapp
```
+++
diff --git a/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/installation.md b/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/installation.md
index e4e49591df95..ed47a8114037 100644
--- a/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/installation.md
+++ b/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/installation.md
@@ -80,5 +80,5 @@ pip install "autogen-ext[openai]"
If you are using Azure OpenAI with AAD authentication, you need to install the following:
```bash
-pip install "autogen-ext[azure]==0.4.0.dev13"
+pip install "autogen-ext[azure]"
```
diff --git a/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/tutorial/teams.ipynb b/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/tutorial/teams.ipynb
index ce0f39664158..d12a273edbda 100644
--- a/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/tutorial/teams.ipynb
+++ b/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/tutorial/teams.ipynb
@@ -83,7 +83,7 @@
"source": [
"## Running a Team\n",
"\n",
- "Let's calls the {py:meth}`~autogen_agentchat.teams.BaseGroupChat.run` method\n",
+ "Let's call the {py:meth}`~autogen_agentchat.teams.BaseGroupChat.run` method\n",
"to start the team with a task."
]
},
diff --git a/python/packages/autogen-core/docs/src/user-guide/core-user-guide/framework/component-config.ipynb b/python/packages/autogen-core/docs/src/user-guide/core-user-guide/framework/component-config.ipynb
index 6d21c33d2a87..1f335d0b59e8 100644
--- a/python/packages/autogen-core/docs/src/user-guide/core-user-guide/framework/component-config.ipynb
+++ b/python/packages/autogen-core/docs/src/user-guide/core-user-guide/framework/component-config.ipynb
@@ -20,7 +20,7 @@
"\n",
"## Usage\n",
"\n",
- "If you have a component in Python and want to get the config for it, simply call {py:meth}`~autogen_core.ComponentConfig.dump_component` on it. The resulting object can be passed back into {py:meth}`~autogen_core.ComponentLoader.load_component` to get the component back.\n",
+ "If you have a component in Python and want to get the config for it, simply call {py:meth}`~autogen_core.ComponentToConfig.dump_component` on it. The resulting object can be passed back into {py:meth}`~autogen_core.ComponentLoader.load_component` to get the component back.\n",
"\n",
"### Loading a component from a config\n",
"\n",
@@ -52,7 +52,7 @@
"To add component functionality to a given class:\n",
"\n",
"1. Add a call to {py:meth}`~autogen_core.Component` in the class inheritance list.\n",
- "2. Implment the {py:meth}`~autogen_core.ComponentConfigImpl._to_config` and {py:meth}`~autogen_core.ComponentConfigImpl._from_config` methods\n",
+ "2. Implement the {py:meth}`~autogen_core.ComponentToConfig._to_config` and {py:meth}`~autogen_core.ComponentFromConfig._from_config` methods\n",
"\n",
"For example:"
]
@@ -63,7 +63,7 @@
"metadata": {},
"outputs": [],
"source": [
- "from autogen_core import Component\n",
+ "from autogen_core import Component, ComponentBase\n",
"from pydantic import BaseModel\n",
"\n",
"\n",
@@ -71,7 +71,7 @@
" value: str\n",
"\n",
"\n",
- "class MyComponent(Component[Config]):\n",
+ "class MyComponent(ComponentBase[Config], Component[Config]):\n",
" component_type = \"custom\"\n",
" component_config_schema = Config\n",
"\n",
@@ -129,7 +129,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.12.7"
+ "version": "3.12.5"
}
},
"nbformat": 4,
diff --git a/python/packages/autogen-core/docs/src/user-guide/core-user-guide/installation.md b/python/packages/autogen-core/docs/src/user-guide/core-user-guide/installation.md
index 3fd181c7feeb..1b53cfb2f466 100644
--- a/python/packages/autogen-core/docs/src/user-guide/core-user-guide/installation.md
+++ b/python/packages/autogen-core/docs/src/user-guide/core-user-guide/installation.md
@@ -67,13 +67,13 @@ To use the OpenAI and Azure OpenAI models, you need to install the following
extensions:
```bash
-pip install "autogen-ext[openai]==0.4.0.dev13"
+pip install "autogen-ext[openai]"
```
If you are using Azure OpenAI with AAD authentication, you need to install the following:
```bash
-pip install "autogen-ext[azure]==0.4.0.dev13"
+pip install "autogen-ext[azure]"
```
## Install Docker for Code Execution (Optional)
diff --git a/python/packages/autogen-core/pyproject.toml b/python/packages/autogen-core/pyproject.toml
index 4d6aa4ba6410..27269f2e1c3b 100644
--- a/python/packages/autogen-core/pyproject.toml
+++ b/python/packages/autogen-core/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "autogen-core"
-version = "0.4.0"
+version = "0.4.1"
license = {file = "LICENSE-CODE"}
description = "Foundational interfaces and agent runtime implementation for AutoGen"
readme = "README.md"
@@ -69,7 +69,7 @@ dev = [
"pygments",
"sphinxext-rediraffe",
- "autogen_ext==0.4.0",
+ "autogen_ext==0.4.1",
# Documentation tooling
"sphinx-autobuild",
diff --git a/python/packages/autogen-core/src/autogen_core/__init__.py b/python/packages/autogen-core/src/autogen_core/__init__.py
index c9d12872dd9d..478ecc422e03 100644
--- a/python/packages/autogen-core/src/autogen_core/__init__.py
+++ b/python/packages/autogen-core/src/autogen_core/__init__.py
@@ -14,10 +14,15 @@
from ._closure_agent import ClosureAgent, ClosureContext
from ._component_config import (
Component,
- ComponentConfigImpl,
+ ComponentBase,
+ ComponentFromConfig,
ComponentLoader,
ComponentModel,
+ ComponentSchemaType,
+ ComponentToConfig,
ComponentType,
+ is_component_class,
+ is_component_instance,
)
from ._constants import (
EVENT_LOGGER_NAME as EVENT_LOGGER_NAME_ALIAS,
@@ -112,10 +117,15 @@
"EVENT_LOGGER_NAME",
"TRACE_LOGGER_NAME",
"Component",
+ "ComponentBase",
+ "ComponentFromConfig",
"ComponentLoader",
- "ComponentConfigImpl",
"ComponentModel",
+ "ComponentSchemaType",
+ "ComponentToConfig",
"ComponentType",
+ "is_component_class",
+ "is_component_instance",
"DropMessage",
"InterventionHandler",
"DefaultInterventionHandler",
diff --git a/python/packages/autogen-core/src/autogen_core/_component_config.py b/python/packages/autogen-core/src/autogen_core/_component_config.py
index 1045282921f2..f64165703832 100644
--- a/python/packages/autogen-core/src/autogen_core/_component_config.py
+++ b/python/packages/autogen-core/src/autogen_core/_component_config.py
@@ -2,13 +2,15 @@
import importlib
import warnings
-from typing import Any, ClassVar, Dict, Generic, List, Literal, Protocol, Type, cast, overload, runtime_checkable
+from typing import Any, ClassVar, Dict, Generic, Literal, Type, TypeGuard, cast, overload
from pydantic import BaseModel
from typing_extensions import Self, TypeVar
ComponentType = Literal["model", "agent", "tool", "termination", "token_provider"] | str
ConfigT = TypeVar("ConfigT", bound=BaseModel)
+FromConfigT = TypeVar("FromConfigT", bound=BaseModel, contravariant=True)
+ToConfigT = TypeVar("ToConfigT", bound=BaseModel, covariant=True)
T = TypeVar("T", bound=BaseModel, covariant=True)
@@ -47,36 +49,9 @@ def _type_to_provider_str(t: type) -> str:
}
-@runtime_checkable
-class ComponentConfigImpl(Protocol[ConfigT]):
- # Ideally would be ClassVar[Type[ConfigT]], but this is disallowed https://github.com/python/typing/discussions/1424 (despite being valid in this context)
- component_config_schema: Type[ConfigT]
- """The Pydantic model class which represents the configuration of the component."""
- component_type: ClassVar[ComponentType]
- """The logical type of the component."""
- component_version: ClassVar[int] = 1
- """The version of the component, if schema incompatibilities are introduced this should be updated."""
- component_provider_override: ClassVar[str | None] = None
- """Override the provider string for the component. This should be used to prevent internal module names being a part of the module name."""
-
- """The two methods a class must implement to be a component.
-
- Args:
- Protocol (ConfigT): Type which derives from :py:class:`pydantic.BaseModel`.
- """
-
- def _to_config(self) -> ConfigT:
- """Dump the configuration that would be requite to create a new instance of a component matching the configuration of this instance.
-
- Returns:
- T: The configuration of the component.
-
- :meta public:
- """
- ...
-
+class ComponentFromConfig(Generic[FromConfigT]):
@classmethod
- def _from_config(cls, config: ConfigT) -> Self:
+ def _from_config(cls, config: FromConfigT) -> Self:
"""Create a new instance of the component from a configuration object.
Args:
@@ -87,7 +62,7 @@ def _from_config(cls, config: ConfigT) -> Self:
:meta public:
"""
- ...
+ raise NotImplementedError("This component does not support dumping to config")
@classmethod
def _from_config_past_version(cls, config: Dict[str, Any], version: int) -> Self:
@@ -104,7 +79,69 @@ def _from_config_past_version(cls, config: Dict[str, Any], version: int) -> Self
:meta public:
"""
- raise NotImplementedError()
+ raise NotImplementedError("This component does not support loading from past versions")
+
+
+class ComponentToConfig(Generic[ToConfigT]):
+ """The two methods a class must implement to be a component.
+
+ Args:
+ Protocol (ConfigT): Type which derives from :py:class:`pydantic.BaseModel`.
+ """
+
+ component_type: ClassVar[ComponentType]
+ """The logical type of the component."""
+ component_version: ClassVar[int] = 1
+ """The version of the component, if schema incompatibilities are introduced this should be updated."""
+ component_provider_override: ClassVar[str | None] = None
+ """Override the provider string for the component. This should be used to prevent internal module names being a part of the module name."""
+
+ def _to_config(self) -> ToConfigT:
+ """Dump the configuration that would be required to create a new instance of a component matching the configuration of this instance.
+
+ Returns:
+ T: The configuration of the component.
+
+ :meta public:
+ """
+ raise NotImplementedError("This component does not support dumping to config")
+
+ def dump_component(self) -> ComponentModel:
+ """Dump the component to a model that can be loaded back in.
+
+ Raises:
+ TypeError: If the component is a local class.
+
+ Returns:
+ ComponentModel: The model representing the component.
+ """
+ if self.component_provider_override is not None:
+ provider = self.component_provider_override
+ else:
+ provider = _type_to_provider_str(self.__class__)
+ # Warn if internal module name is used,
+ if "._" in provider:
+ warnings.warn(
+ "Internal module name used in provider string. This is not recommended and may cause issues in the future. Silence this warning by setting component_provider_override to this value.",
+ stacklevel=2,
+ )
+
+ if "<locals>" in provider:
+ raise TypeError("Cannot dump component with local class")
+
+ if not hasattr(self, "component_type"):
+ raise AttributeError("component_type not defined")
+
+ obj_config = self._to_config().model_dump(exclude_none=True)
+ model = ComponentModel(
+ provider=provider,
+ component_type=self.component_type,
+ version=self.component_version,
+ component_version=self.component_version,
+ description=None,
+ config=obj_config,
+ )
+ return model
ExpectedType = TypeVar("ExpectedType")
@@ -171,9 +208,9 @@ def load_component(
module_path, class_name = output
module = importlib.import_module(module_path)
- component_class = cast(ComponentConfigImpl[BaseModel], module.__getattribute__(class_name))
+ component_class = module.__getattribute__(class_name)
- if not isinstance(component_class, ComponentConfigImpl):
+ if not is_component_class(component_class):
raise TypeError("Invalid component class")
# We need to check the schema is valid
@@ -192,7 +229,7 @@ def load_component(
f"Tried to load component {component_class} which is on version {component_class.component_version} with a config on version {loaded_config_version} but _from_config_past_version is not implemented"
) from e
else:
- schema = component_class.component_config_schema
+ schema = component_class.component_config_schema # type: ignore
validated_config = schema.model_validate(loaded_model.config)
# We're allowed to use the private method here
@@ -208,8 +245,35 @@ def load_component(
return cast(ExpectedType, instance)
-class Component(ComponentConfigImpl[ConfigT], ComponentLoader, Generic[ConfigT]):
- """To create a component class, inherit from this class. Then implement two class variables:
+class ComponentSchemaType(Generic[ConfigT]):
+ # Ideally would be ClassVar[Type[ConfigT]], but this is disallowed https://github.com/python/typing/discussions/1424 (despite being valid in this context)
+ component_config_schema: Type[ConfigT]
+ """The Pydantic model class which represents the configuration of the component."""
+
+ required_class_vars = ["component_config_schema", "component_type"]
+
+ def __init_subclass__(cls, **kwargs: Any):
+ super().__init_subclass__(**kwargs)
+
+ if cls.__name__ != "Component" and not cls.__name__ == "_ConcreteComponent":
+ # TODO: validate provider is loadable
+ for var in cls.required_class_vars:
+ if not hasattr(cls, var):
+ warnings.warn(
+ f"Class variable '{var}' must be defined in {cls.__name__} to be a valid component",
+ stacklevel=2,
+ )
+
+
+class ComponentBase(ComponentToConfig[ConfigT], ComponentLoader, Generic[ConfigT]): ...
+
+
+class Component(
+ ComponentFromConfig[ConfigT],
+ ComponentSchemaType[ConfigT],
+ Generic[ConfigT],
+):
+ """To create a component class, inherit from this class for the concrete class and ComponentBase on the interface. Then implement two class variables:
- :py:attr:`component_config_schema` - A Pydantic model class which represents the configuration of the component. This is also the type parameter of Component.
- :py:attr:`component_type` - What is the logical type of the component.
@@ -243,55 +307,39 @@ def _from_config(cls, config: Config) -> MyComponent:
return cls(value=config.value)
"""
- required_class_vars: ClassVar[List[str]] = ["component_config_schema", "component_type"]
-
- def __init_subclass__(cls, **kwargs: Any) -> None:
+ def __init_subclass__(cls, **kwargs: Any):
super().__init_subclass__(**kwargs)
- # TODO: validate provider is loadable
- for var in cls.required_class_vars:
- if not hasattr(cls, var):
- warnings.warn(
- f"Class variable '{var}' must be defined in {cls.__name__} to be a valid component", stacklevel=2
- )
-
- def dump_component(self) -> ComponentModel:
- """Dump the component to a model that can be loaded back in.
-
- Raises:
- TypeError: If the component is a local class.
-
- Returns:
- ComponentModel: The model representing the component.
- """
- if self.component_provider_override is not None:
- provider = self.component_provider_override
- else:
- provider = _type_to_provider_str(self.__class__)
- # Warn if internal module name is used,
- if "._" in provider:
- warnings.warn(
- "Internal module name used in provider string. This is not recommended and may cause issues in the future. Silence this warning by setting component_provider_override to this value.",
- stacklevel=2,
- )
-
- if "<locals>" in provider:
- raise TypeError("Cannot dump component with local class")
-
- if not hasattr(self, "component_type"):
- raise AttributeError("component_type not defined")
-
- obj_config = self._to_config().model_dump(exclude_none=True)
- model = ComponentModel(
- provider=provider,
- component_type=self.component_type,
- version=self.component_version,
- component_version=self.component_version,
- description=None,
- config=obj_config,
- )
- return model
-
- @classmethod
- def _from_config_past_version(cls, config: Dict[str, Any], version: int) -> Self:
- raise NotImplementedError()
+ if not is_component_class(cls):
+ warnings.warn(
+ f"Component class '{cls.__name__}' must subclass the following: ComponentFromConfig, ComponentToConfig, ComponentSchemaType, ComponentLoader, individually or with ComponentBase and Component. Look at the component config documentation or how OpenAIChatCompletionClient does it.",
+ stacklevel=2,
+ )
+
+
+# Should never be used directly, only for type checking
+class _ConcreteComponent(
+ ComponentFromConfig[ConfigT],
+ ComponentSchemaType[ConfigT],
+ ComponentToConfig[ConfigT],
+ ComponentLoader,
+ Generic[ConfigT],
+): ...
+
+
+def is_component_instance(cls: Any) -> TypeGuard[_ConcreteComponent[BaseModel]]:
+ return (
+ isinstance(cls, ComponentFromConfig)
+ and isinstance(cls, ComponentToConfig)
+ and isinstance(cls, ComponentSchemaType)
+ and isinstance(cls, ComponentLoader)
+ )
+
+
+def is_component_class(cls: type) -> TypeGuard[Type[_ConcreteComponent[BaseModel]]]:
+ return (
+ issubclass(cls, ComponentFromConfig)
+ and issubclass(cls, ComponentToConfig)
+ and issubclass(cls, ComponentSchemaType)
+ and issubclass(cls, ComponentLoader)
+ )
diff --git a/python/packages/autogen-core/src/autogen_core/models/_model_client.py b/python/packages/autogen-core/src/autogen_core/models/_model_client.py
index a952ad43458c..356fad5487ca 100644
--- a/python/packages/autogen-core/src/autogen_core/models/_model_client.py
+++ b/python/packages/autogen-core/src/autogen_core/models/_model_client.py
@@ -4,10 +4,11 @@
from abc import ABC, abstractmethod
from typing import Literal, Mapping, Optional, Sequence, TypeAlias
+from pydantic import BaseModel
from typing_extensions import Any, AsyncGenerator, Required, TypedDict, Union, deprecated
from .. import CancellationToken
-from .._component_config import ComponentLoader
+from .._component_config import ComponentBase
from ..tools import Tool, ToolSchema
from ._types import CreateResult, LLMMessage, RequestUsage
@@ -47,7 +48,7 @@ class ModelInfo(TypedDict, total=False):
"""Model family should be one of the constants from :py:class:`ModelFamily` or a string representing an unknown model family."""
-class ChatCompletionClient(ABC, ComponentLoader):
+class ChatCompletionClient(ComponentBase[BaseModel], ABC):
# Caching has to be handled internally as they can depend on the create args that were stored in the constructor
@abstractmethod
async def create(
diff --git a/python/packages/autogen-core/src/autogen_core/models/_types.py b/python/packages/autogen-core/src/autogen_core/models/_types.py
index fb118562e4d3..a3d6af1edde4 100644
--- a/python/packages/autogen-core/src/autogen_core/models/_types.py
+++ b/python/packages/autogen-core/src/autogen_core/models/_types.py
@@ -52,7 +52,7 @@ class RequestUsage:
completion_tokens: int
-FinishReasons = Literal["stop", "length", "function_calls", "content_filter"]
+FinishReasons = Literal["stop", "length", "function_calls", "content_filter", "unknown"]
@dataclass
diff --git a/python/packages/autogen-core/tests/test_component_config.py b/python/packages/autogen-core/tests/test_component_config.py
index fe726227acc1..d59fde59c1b6 100644
--- a/python/packages/autogen-core/tests/test_component_config.py
+++ b/python/packages/autogen-core/tests/test_component_config.py
@@ -4,7 +4,7 @@
from typing import Any, Dict
import pytest
-from autogen_core import Component, ComponentLoader, ComponentModel
+from autogen_core import Component, ComponentBase, ComponentLoader, ComponentModel
from autogen_core._component_config import _type_to_provider_str # type: ignore
from autogen_core.models import ChatCompletionClient
from autogen_test_utils import MyInnerComponent, MyOuterComponent
@@ -16,7 +16,7 @@ class MyConfig(BaseModel):
info: str
-class MyComponent(Component[MyConfig]):
+class MyComponent(ComponentBase[MyConfig], Component[MyConfig]):
component_config_schema = MyConfig
component_type = "custom"
@@ -95,7 +95,7 @@ def test_cannot_import_locals() -> None:
class InvalidModelClientConfig(BaseModel):
info: str
- class MyInvalidModelClient(Component[InvalidModelClientConfig]):
+ class MyInvalidModelClient(ComponentBase[InvalidModelClientConfig], Component[InvalidModelClientConfig]):
component_config_schema = InvalidModelClientConfig
component_type = "model"
@@ -119,7 +119,7 @@ class InvalidModelClientConfig(BaseModel):
info: str
-class MyInvalidModelClient(Component[InvalidModelClientConfig]):
+class MyInvalidModelClient(ComponentBase[InvalidModelClientConfig], Component[InvalidModelClientConfig]):
component_config_schema = InvalidModelClientConfig
component_type = "model"
@@ -143,7 +143,7 @@ def test_type_error_on_creation() -> None:
with pytest.warns(UserWarning):
- class MyInvalidMissingAttrs(Component[InvalidModelClientConfig]):
+ class MyInvalidMissingAttrs(ComponentBase[InvalidModelClientConfig], Component[InvalidModelClientConfig]):
def __init__(self, info: str):
self.info = info
@@ -189,7 +189,7 @@ def test_config_optional_values() -> None:
assert component.__class__ == MyComponent
-class ConfigProviderOverrided(Component[MyConfig]):
+class ConfigProviderOverrided(ComponentBase[MyConfig], Component[MyConfig]):
component_provider_override = "InvalidButStillOverridden"
component_config_schema = MyConfig
component_type = "custom"
@@ -215,7 +215,7 @@ class MyConfig2(BaseModel):
info2: str
-class ComponentNonOneVersion(Component[MyConfig2]):
+class ComponentNonOneVersion(ComponentBase[MyConfig2], Component[MyConfig2]):
component_config_schema = MyConfig2
component_version = 2
component_type = "custom"
@@ -231,7 +231,7 @@ def _from_config(cls, config: MyConfig2) -> Self:
return cls(info=config.info2)
-class ComponentNonOneVersionWithUpgrade(Component[MyConfig2]):
+class ComponentNonOneVersionWithUpgrade(ComponentBase[MyConfig2], Component[MyConfig2]):
component_config_schema = MyConfig2
component_version = 2
component_type = "custom"
diff --git a/python/packages/autogen-ext/pyproject.toml b/python/packages/autogen-ext/pyproject.toml
index f4a3fa85b299..a5a2bce803a6 100644
--- a/python/packages/autogen-ext/pyproject.toml
+++ b/python/packages/autogen-ext/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "autogen-ext"
-version = "0.4.0"
+version = "0.4.1"
license = {file = "LICENSE-CODE"}
description = "AutoGen extensions library"
readme = "README.md"
@@ -15,7 +15,7 @@ classifiers = [
"Operating System :: OS Independent",
]
dependencies = [
- "autogen-core==0.4.0",
+ "autogen-core==0.4.1",
]
[project.optional-dependencies]
@@ -24,23 +24,23 @@ azure = ["azure-core", "azure-identity"]
docker = ["docker~=7.0"]
openai = ["openai>=1.52.2", "tiktoken>=0.8.0", "aiofiles"]
file-surfer = [
- "autogen-agentchat==0.4.0",
+ "autogen-agentchat==0.4.1",
"markitdown>=0.0.1a2",
]
web-surfer = [
- "autogen-agentchat==0.4.0",
+ "autogen-agentchat==0.4.1",
"playwright>=1.48.0",
"pillow>=11.0.0",
"markitdown>=0.0.1a2",
]
magentic-one = [
- "autogen-agentchat==0.4.0",
+ "autogen-agentchat==0.4.1",
"markitdown>=0.0.1a2",
"playwright>=1.48.0",
"pillow>=11.0.0",
]
video-surfer = [
- "autogen-agentchat==0.4.0",
+ "autogen-agentchat==0.4.1",
"opencv-python>=4.5",
"ffmpeg-python",
"openai-whisper",
diff --git a/python/packages/autogen-ext/src/autogen_ext/auth/azure/__init__.py b/python/packages/autogen-ext/src/autogen_ext/auth/azure/__init__.py
index 607a3e01c3a4..08de1e723cd5 100644
--- a/python/packages/autogen-ext/src/autogen_ext/auth/azure/__init__.py
+++ b/python/packages/autogen-ext/src/autogen_ext/auth/azure/__init__.py
@@ -1,6 +1,6 @@
from typing import List
-from autogen_core import Component
+from autogen_core import Component, ComponentBase
from pydantic import BaseModel
from typing_extensions import Self
@@ -13,10 +13,10 @@ class TokenProviderConfig(BaseModel):
scopes: List[str]
-class AzureTokenProvider(Component[TokenProviderConfig]):
+class AzureTokenProvider(ComponentBase[TokenProviderConfig], Component[TokenProviderConfig]):
component_type = "token_provider"
component_config_schema = TokenProviderConfig
- component_provider_override = "autogen_ext.models.openai.AzureTokenProvider"
+ component_provider_override = "autogen_ext.auth.azure.AzureTokenProvider"
def __init__(self, credential: TokenProvider, *scopes: str):
self.credential = credential
diff --git a/python/packages/autogen-ext/src/autogen_ext/models/openai/_openai_client.py b/python/packages/autogen-ext/src/autogen_ext/models/openai/_openai_client.py
index 5b9f51129a88..b525e6340fd0 100644
--- a/python/packages/autogen-ext/src/autogen_ext/models/openai/_openai_client.py
+++ b/python/packages/autogen-ext/src/autogen_ext/models/openai/_openai_client.py
@@ -30,6 +30,7 @@
Image,
MessageHandlerContext,
)
+from autogen_core.models import FinishReasons
from autogen_core.logging import LLMCallEvent
from autogen_core.models import (
AssistantMessage,
@@ -327,6 +328,21 @@ def assert_valid_name(name: str) -> str:
return name
+def normalize_stop_reason(stop_reason: str | None) -> FinishReasons:
+ if stop_reason is None:
+ return "unknown"
+
+ # Convert to lower case
+ stop_reason = stop_reason.lower()
+
+ KNOWN_STOP_MAPPINGS: Dict[str, FinishReasons] = {
+ "end_turn": "stop",
+ "tool_calls": "function_calls",
+ }
+
+ return KNOWN_STOP_MAPPINGS.get(stop_reason, "unknown")
+
+
class BaseOpenAIChatCompletionClient(ChatCompletionClient):
def __init__(
self,
@@ -747,8 +763,8 @@ async def create_stream(
else:
prompt_tokens = 0
- if stop_reason is None:
- raise ValueError("No stop reason found")
+ if stop_reason == "function_call":
+ raise ValueError("Function calls are not supported in this context")
content: Union[str, List[FunctionCall]]
if len(content_deltas) > 1:
@@ -770,13 +786,9 @@ async def create_stream(
prompt_tokens=prompt_tokens,
completion_tokens=completion_tokens,
)
- if stop_reason == "function_call":
- raise ValueError("Function calls are not supported in this context")
- if stop_reason == "tool_calls":
- stop_reason = "function_calls"
result = CreateResult(
- finish_reason=stop_reason, # type: ignore
+ finish_reason=normalize_stop_reason(stop_reason),
content=content,
usage=usage,
cached=False,
@@ -1102,7 +1114,7 @@ class AzureOpenAIChatCompletionClient(
"azure_deployment": "{your-azure-deployment}",
"api_version": "2024-06-01",
"azure_ad_token_provider": {
- "provider": "autogen_ext.models.openai.AzureTokenProvider",
+ "provider": "autogen_ext.auth.azure.AzureTokenProvider",
"config": {
"provider_kind": "DefaultAzureCredential",
"scopes": ["https://cognitiveservices.azure.com/.default"],
diff --git a/python/packages/autogen-ext/src/autogen_ext/teams/magentic_one.py b/python/packages/autogen-ext/src/autogen_ext/teams/magentic_one.py
index 23aca97014c3..fc2e4f6b9129 100644
--- a/python/packages/autogen-ext/src/autogen_ext/teams/magentic_one.py
+++ b/python/packages/autogen-ext/src/autogen_ext/teams/magentic_one.py
@@ -1,9 +1,10 @@
import warnings
-from typing import List
+from typing import Awaitable, Callable, List, Optional, Union
from autogen_agentchat.agents import CodeExecutorAgent, UserProxyAgent
from autogen_agentchat.base import ChatAgent
from autogen_agentchat.teams import MagenticOneGroupChat
+from autogen_core import CancellationToken
from autogen_core.models import ChatCompletionClient
from autogen_ext.agents.file_surfer import FileSurfer
@@ -12,6 +13,10 @@
from autogen_ext.code_executors.local import LocalCommandLineCodeExecutor
from autogen_ext.models.openai._openai_client import BaseOpenAIChatCompletionClient
+SyncInputFunc = Callable[[str], str]
+AsyncInputFunc = Callable[[str, Optional[CancellationToken]], Awaitable[str]]
+InputFuncType = Union[SyncInputFunc, AsyncInputFunc]
+
class MagenticOne(MagenticOneGroupChat):
"""
@@ -116,7 +121,12 @@ async def example_usage_hil():
"""
- def __init__(self, client: ChatCompletionClient, hil_mode: bool = False):
+ def __init__(
+ self,
+ client: ChatCompletionClient,
+ hil_mode: bool = False,
+ input_func: InputFuncType | None = None,
+ ):
self.client = client
self._validate_client_capabilities(client)
@@ -126,7 +136,7 @@ def __init__(self, client: ChatCompletionClient, hil_mode: bool = False):
executor = CodeExecutorAgent("Executor", code_executor=LocalCommandLineCodeExecutor())
agents: List[ChatAgent] = [fs, ws, coder, executor]
if hil_mode:
- user_proxy = UserProxyAgent("User")
+ user_proxy = UserProxyAgent("User", input_func=input_func)
agents.append(user_proxy)
super().__init__(agents, model_client=client)
diff --git a/python/packages/autogen-magentic-one/README.md b/python/packages/autogen-magentic-one/README.md
index a0573df8c52a..d6aef754204a 100644
--- a/python/packages/autogen-magentic-one/README.md
+++ b/python/packages/autogen-magentic-one/README.md
@@ -143,7 +143,7 @@ To configure for Azure OpenAI service, use the following config:
"azure_deployment": "{your-azure-deployment}",
"api_version": "2024-06-01",
"azure_ad_token_provider": {
- "provider": "autogen_ext.models.openai.AzureTokenProvider",
+ "provider": "autogen_ext.auth.azure.AzureTokenProvider",
"config": {
"provider_kind": "DefaultAzureCredential",
"scopes": [
diff --git a/python/packages/autogen-studio/pyproject.toml b/python/packages/autogen-studio/pyproject.toml
index 869bdb78b8f6..5c9cbcc6cf47 100644
--- a/python/packages/autogen-studio/pyproject.toml
+++ b/python/packages/autogen-studio/pyproject.toml
@@ -35,8 +35,8 @@ dependencies = [
"pyyaml",
"autogen-core==0.4.0",
"autogen-agentchat==0.4.0",
- "autogen-ext[magentic-one]==0.4.0",
- "azure-identity",
+ "autogen-ext[magentic-one, openai, azure]==0.4.0",
+ "azure-identity"
]
optional-dependencies = {web = ["fastapi", "uvicorn"], database = ["psycopg"]}
diff --git a/python/packages/autogen-test-utils/src/autogen_test_utils/__init__.py b/python/packages/autogen-test-utils/src/autogen_test_utils/__init__.py
index e3539ac9ede9..b917194b1d82 100644
--- a/python/packages/autogen-test-utils/src/autogen_test_utils/__init__.py
+++ b/python/packages/autogen-test-utils/src/autogen_test_utils/__init__.py
@@ -6,13 +6,14 @@
from autogen_core import (
BaseAgent,
Component,
+ ComponentBase,
+ ComponentModel,
DefaultTopicId,
MessageContext,
RoutedAgent,
default_subscription,
message_handler,
)
-from autogen_core._component_config import ComponentModel
from pydantic import BaseModel
@@ -76,7 +77,7 @@ class MyInnerConfig(BaseModel):
inner_message: str
-class MyInnerComponent(Component[MyInnerConfig]):
+class MyInnerComponent(ComponentBase[MyInnerConfig], Component[MyInnerConfig]):
component_config_schema = MyInnerConfig
component_type = "custom"
@@ -96,7 +97,7 @@ class MyOuterConfig(BaseModel):
inner_class: ComponentModel
-class MyOuterComponent(Component[MyOuterConfig]):
+class MyOuterComponent(ComponentBase[MyOuterConfig], Component[MyOuterConfig]):
component_config_schema = MyOuterConfig
component_type = "custom"
diff --git a/python/packages/component-schema-gen/src/component_schema_gen/__main__.py b/python/packages/component-schema-gen/src/component_schema_gen/__main__.py
index bf0a21f1f141..810d5ec84455 100644
--- a/python/packages/component-schema-gen/src/component_schema_gen/__main__.py
+++ b/python/packages/component-schema-gen/src/component_schema_gen/__main__.py
@@ -5,7 +5,8 @@
from autogen_core import ComponentModel
from autogen_core._component_config import (
WELL_KNOWN_PROVIDERS,
- ComponentConfigImpl,
+ ComponentSchemaType,
+ ComponentToConfig,
_type_to_provider_str, # type: ignore
)
from autogen_ext.auth.azure import AzureTokenProvider
@@ -17,10 +18,13 @@
T = TypeVar("T", bound=BaseModel)
-def build_specific_component_schema(component: type[ComponentConfigImpl[T]], provider_str: str) -> Dict[str, Any]:
+def build_specific_component_schema(component: type[ComponentSchemaType[T]], provider_str: str) -> Dict[str, Any]:
model = component.component_config_schema # type: ignore
model_schema = model.model_json_schema()
+ # We can't specify component to be the union of two types, so we assert it here
+ assert issubclass(component, ComponentToConfig)
+
component_model_schema = ComponentModel.model_json_schema()
if "$defs" not in component_model_schema:
component_model_schema["$defs"] = {}
@@ -70,7 +74,9 @@ def main() -> None:
for key, value in WELL_KNOWN_PROVIDERS.items():
reverse_provider_lookup_table[value].append(key)
- def add_type(type: type[ComponentConfigImpl[T]]) -> None:
+ def add_type(type: type[ComponentSchemaType[T]]) -> None:
+ # We can't specify component to be the union of two types, so we assert it here
+ assert issubclass(type, ComponentToConfig)
canonical = type.component_provider_override or _type_to_provider_str(type)
reverse_provider_lookup_table[canonical].append(canonical)
for provider_str in reverse_provider_lookup_table[canonical]:
diff --git a/python/packages/magentic-one-cli/pyproject.toml b/python/packages/magentic-one-cli/pyproject.toml
index 5b14ed6f73d7..79ce1075c6a3 100644
--- a/python/packages/magentic-one-cli/pyproject.toml
+++ b/python/packages/magentic-one-cli/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "magentic-one-cli"
-version = "0.2.0"
+version = "0.2.1"
license = {file = "LICENSE-CODE"}
description = "Magentic-One is a generalist multi-agent system, built on `AutoGen-AgentChat`, for solving complex web and file-based tasks. This package installs the `m1` command-line utility to quickly get started with Magentic-One."
readme = "README.md"
@@ -15,8 +15,8 @@ classifiers = [
"Operating System :: OS Independent",
]
dependencies = [
- "autogen-agentchat>=0.4.0,<0.5",
- "autogen-ext[openai,magentic-one]>=0.4.0,<0.5",
+ "autogen-agentchat>=0.4.1,<0.5",
+ "autogen-ext[openai,magentic-one]>=0.4.1,<0.5",
]
[project.scripts]
diff --git a/python/packages/magentic-one-cli/src/magentic_one_cli/_m1.py b/python/packages/magentic-one-cli/src/magentic_one_cli/_m1.py
index e5a07b164939..e7a3f2ed1e89 100644
--- a/python/packages/magentic-one-cli/src/magentic_one_cli/_m1.py
+++ b/python/packages/magentic-one-cli/src/magentic_one_cli/_m1.py
@@ -1,8 +1,11 @@
import argparse
import asyncio
import warnings
+from typing import Optional
-from autogen_agentchat.ui import Console
+from aioconsole import ainput # type: ignore
+from autogen_agentchat.ui import Console, UserInputManager
+from autogen_core import CancellationToken
from autogen_ext.models.openai import OpenAIChatCompletionClient
from autogen_ext.teams.magentic_one import MagenticOne
@@ -10,6 +13,13 @@
warnings.filterwarnings(action="ignore", message="unclosed", category=ResourceWarning)
+async def cancellable_input(prompt: str, cancellation_token: Optional[CancellationToken]) -> str:
+ task: asyncio.Task[str] = asyncio.create_task(ainput(prompt)) # type: ignore
+ if cancellation_token is not None:
+ cancellation_token.link_future(task)
+ return await task
+
+
def main() -> None:
"""
Command-line interface for running a complex task using MagenticOne.
@@ -37,9 +47,10 @@ def main() -> None:
args = parser.parse_args()
async def run_task(task: str, hil_mode: bool) -> None:
+ input_manager = UserInputManager(callback=cancellable_input)
client = OpenAIChatCompletionClient(model="gpt-4o")
- m1 = MagenticOne(client=client, hil_mode=hil_mode)
- await Console(m1.run_stream(task=task), output_stats=False)
+ m1 = MagenticOne(client=client, hil_mode=hil_mode, input_func=input_manager.get_wrapped_callback())
+ await Console(m1.run_stream(task=task), output_stats=False, user_input_manager=input_manager)
task = args.task[0]
asyncio.run(run_task(task, not args.no_hil))
diff --git a/python/samples/agentchat_chainlit/requirements.txt b/python/samples/agentchat_chainlit/requirements.txt
index db122ba31ce4..a73d6abbe443 100644
--- a/python/samples/agentchat_chainlit/requirements.txt
+++ b/python/samples/agentchat_chainlit/requirements.txt
@@ -1,2 +1,2 @@
chainlit
-autogen-agentchat==0.4.0
+autogen-agentchat>=0.4.1,<0.5
diff --git a/python/samples/core_async_human_in_the_loop/model_config_template.json b/python/samples/core_async_human_in_the_loop/model_config_template.json
index bfa6913900ae..a66457f3b8e3 100644
--- a/python/samples/core_async_human_in_the_loop/model_config_template.json
+++ b/python/samples/core_async_human_in_the_loop/model_config_template.json
@@ -7,7 +7,7 @@
// "azure_deployment": "{your-azure-deployment}",
// "api_version": "2024-06-01",
// "azure_ad_token_provider": {
-// "provider": "autogen_ext.models.openai.AzureTokenProvider",
+// "provider": "autogen_ext.auth.azure.AzureTokenProvider",
// "config": {
// "provider_kind": "DefaultAzureCredential",
// "scopes": [
diff --git a/python/samples/core_chess_game/model_config_template.json b/python/samples/core_chess_game/model_config_template.json
index bfa6913900ae..a66457f3b8e3 100644
--- a/python/samples/core_chess_game/model_config_template.json
+++ b/python/samples/core_chess_game/model_config_template.json
@@ -7,7 +7,7 @@
// "azure_deployment": "{your-azure-deployment}",
// "api_version": "2024-06-01",
// "azure_ad_token_provider": {
-// "provider": "autogen_ext.models.openai.AzureTokenProvider",
+// "provider": "autogen_ext.auth.azure.AzureTokenProvider",
// "config": {
// "provider_kind": "DefaultAzureCredential",
// "scopes": [
diff --git a/python/uv.lock b/python/uv.lock
index ea317daf5954..113c967f85ca 100644
--- a/python/uv.lock
+++ b/python/uv.lock
@@ -383,7 +383,7 @@ wheels = [
[[package]]
name = "autogen-agentchat"
-version = "0.4.0"
+version = "0.4.1"
source = { editable = "packages/autogen-agentchat" }
dependencies = [
{ name = "aioconsole" },
@@ -398,7 +398,7 @@ requires-dist = [
[[package]]
name = "autogen-core"
-version = "0.4.0"
+version = "0.4.1"
source = { editable = "packages/autogen-core" }
dependencies = [
{ name = "jsonref" },
@@ -513,7 +513,7 @@ dev = [
[[package]]
name = "autogen-ext"
-version = "0.4.0"
+version = "0.4.1"
source = { editable = "packages/autogen-ext" }
dependencies = [
{ name = "autogen-core" },
@@ -739,7 +739,7 @@ dependencies = [
{ name = "alembic" },
{ name = "autogen-agentchat" },
{ name = "autogen-core" },
- { name = "autogen-ext", extra = ["magentic-one"] },
+ { name = "autogen-ext", extra = ["azure", "magentic-one", "openai"] },
{ name = "azure-identity" },
{ name = "fastapi" },
{ name = "loguru" },
@@ -770,7 +770,7 @@ requires-dist = [
{ name = "alembic" },
{ name = "autogen-agentchat", editable = "packages/autogen-agentchat" },
{ name = "autogen-core", editable = "packages/autogen-core" },
- { name = "autogen-ext", extras = ["magentic-one"], editable = "packages/autogen-ext" },
+ { name = "autogen-ext", extras = ["azure", "magentic-one", "openai"], editable = "packages/autogen-ext" },
{ name = "azure-identity" },
{ name = "fastapi" },
{ name = "fastapi", marker = "extra == 'web'" },
@@ -3086,7 +3086,7 @@ wheels = [
[[package]]
name = "magentic-one-cli"
-version = "0.2.0"
+version = "0.2.1"
source = { editable = "packages/magentic-one-cli" }
dependencies = [
{ name = "autogen-agentchat" },