From a7ee98b9084fe575a9ef10fbbc73cec7c9eb3772 Mon Sep 17 00:00:00 2001
From: gagb
Date: Sun, 26 Jan 2025 22:26:52 -0800
Subject: [PATCH] Run poe check

---
 .../src/autogen_agentchat/agents/_assistant_agent.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py
index 6744e725d91d..384dcf55fc2b 100644
--- a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py
+++ b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py
@@ -28,7 +28,7 @@
     SystemMessage,
     UserMessage,
 )
-from autogen_core.tools import FunctionTool, BaseTool
+from autogen_core.tools import BaseTool, FunctionTool
 from pydantic import BaseModel
 from typing_extensions import Self
 
@@ -388,7 +388,10 @@ async def on_messages_stream(
         # Generate an inference result based on the current model context.
         llm_messages = self._system_messages + await self._model_context.get_messages()
         model_result = await self._model_client.create(
-            llm_messages, tools=self._tools + self._handoff_tools, extra_create_args=self._extra_create_args, cancellation_token=cancellation_token
+            llm_messages,
+            tools=self._tools + self._handoff_tools,
+            extra_create_args=self._extra_create_args,
+            cancellation_token=cancellation_token,
         )
 
         # Add the response to the model context.