Merge branch 'main' into split_component_infra
jackgerrits authored Jan 13, 2025
2 parents 1ce0bba + 1a6e976 commit 3f63c6f
Showing 5 changed files with 25 additions and 13 deletions.
@@ -83,7 +83,7 @@
"source": [
"## Running a Team\n",
"\n",
"Let's calls the {py:meth}`~autogen_agentchat.teams.BaseGroupChat.run` method\n",
"Let's call the {py:meth}`~autogen_agentchat.teams.BaseGroupChat.run` method\n",
"to start the team with a task."
]
},
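For reference, the corrected sentence introduces `BaseGroupChat.run`. Below is a minimal sketch of that call, assuming the autogen-agentchat 0.4 APIs pinned elsewhere in this commit; the agent name, model choice, task, and termination condition are illustrative, not taken from the notebook.

```python
# Minimal sketch only: illustrates the `run` call the corrected docs sentence
# refers to. The names, model, task, and termination condition here are
# illustrative assumptions, not part of this commit.
import asyncio

from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.conditions import MaxMessageTermination
from autogen_agentchat.teams import RoundRobinGroupChat
from autogen_ext.models.openai import OpenAIChatCompletionClient


async def main() -> None:
    model_client = OpenAIChatCompletionClient(model="gpt-4o")
    agent = AssistantAgent("assistant", model_client=model_client)
    team = RoundRobinGroupChat(
        [agent],
        termination_condition=MaxMessageTermination(3),  # stop after a few messages
    )
    # BaseGroupChat.run starts the team with a task and returns the final TaskResult.
    result = await team.run(task="Summarize the benefits of unit tests in two sentences.")
    print(result.messages[-1].content)


asyncio.run(main())
```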
@@ -52,7 +52,7 @@ class RequestUsage:
    completion_tokens: int


- FinishReasons = Literal["stop", "length", "function_calls", "content_filter"]
+ FinishReasons = Literal["stop", "length", "function_calls", "content_filter", "unknown"]


@dataclass
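Downstream code that branches on a result's finish reason now has to account for the added "unknown" member. A small sketch of exhaustive handling, assuming `FinishReasons` is importable from `autogen_core.models` (as the client change below does); the helper function itself is illustrative.

```python
# Illustrative helper, not part of this commit: shows exhaustive handling of
# the widened FinishReasons Literal, including the new "unknown" member.
from autogen_core.models import FinishReasons


def describe_finish_reason(reason: FinishReasons) -> str:
    descriptions: dict[FinishReasons, str] = {
        "stop": "the model ended its turn normally",
        "length": "the output hit the token limit",
        "function_calls": "the model requested tool/function calls",
        "content_filter": "the output was blocked by a content filter",
        "unknown": "the provider reported no recognizable stop reason",
    }
    return descriptions[reason]
```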
@@ -30,6 +30,7 @@
Image,
MessageHandlerContext,
)
+ from autogen_core.models import FinishReasons
from autogen_core.logging import LLMCallEvent
from autogen_core.models import (
AssistantMessage,
@@ -327,6 +328,21 @@ def assert_valid_name(name: str) -> str:
    return name


+ def normalize_stop_reason(stop_reason: str | None) -> FinishReasons:
+     if stop_reason is None:
+         return "unknown"
+
+     # Convert to lower case
+     stop_reason = stop_reason.lower()
+
+     KNOWN_STOP_MAPPINGS: Dict[str, FinishReasons] = {
+         "end_turn": "stop",
+         "tool_calls": "function_calls",
+     }
+
+     return KNOWN_STOP_MAPPINGS.get(stop_reason, "unknown")


class BaseOpenAIChatCompletionClient(ChatCompletionClient):
    def __init__(
        self,
@@ -747,8 +763,8 @@ async def create_stream(
else:
    prompt_tokens = 0

- if stop_reason is None:
-     raise ValueError("No stop reason found")
+ if stop_reason == "function_call":
+     raise ValueError("Function calls are not supported in this context")

content: Union[str, List[FunctionCall]]
if len(content_deltas) > 1:
@@ -770,13 +786,9 @@ async def create_stream(
prompt_tokens=prompt_tokens,
completion_tokens=completion_tokens,
)
- if stop_reason == "function_call":
-     raise ValueError("Function calls are not supported in this context")
- if stop_reason == "tool_calls":
-     stop_reason = "function_calls"

result = CreateResult(
- finish_reason=stop_reason, # type: ignore
+ finish_reason=normalize_stop_reason(stop_reason),
content=content,
usage=usage,
cached=False,
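Taken together, these hunks stop raising on a missing stop reason and instead funnel every provider value through the new normalize_stop_reason helper. Its behaviour, inferred only from the function body added above, in a short sketch:

```python
# Expected behaviour of normalize_stop_reason as added in this commit,
# inferred from its body: None and unrecognized values collapse to "unknown",
# matching is case-insensitive, and provider-specific values are remapped.
assert normalize_stop_reason(None) == "unknown"
assert normalize_stop_reason("end_turn") == "stop"              # Anthropic-style value
assert normalize_stop_reason("TOOL_CALLS") == "function_calls"  # lower-cased before lookup
assert normalize_stop_reason("made_up_reason") == "unknown"     # anything unmapped
```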
4 changes: 2 additions & 2 deletions python/packages/autogen-studio/pyproject.toml
@@ -35,8 +35,8 @@ dependencies = [
"pyyaml",
"autogen-core==0.4.0",
"autogen-agentchat==0.4.0",
"autogen-ext[magentic-one]==0.4.0",
"azure-identity",
"autogen-ext[magentic-one, openai, azure]==0.4.0",
"azure-identity"
]
optional-dependencies = {web = ["fastapi", "uvicorn"], database = ["psycopg"]}

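The widened extras make the OpenAI and Azure OpenAI model clients available in AutoGen Studio's environment (with azure-identity kept for Azure authentication). A sketch of what they make importable, assuming the autogen-ext 0.4.0 import layout; the model, endpoint, deployment, and key values are placeholders.

```python
# Sketch only: the "openai" extra provides these model clients, while the
# "azure" extra and azure-identity support Azure authentication. All values
# below are illustrative placeholders, not part of this commit.
from autogen_ext.models.openai import (
    AzureOpenAIChatCompletionClient,
    OpenAIChatCompletionClient,
)

openai_client = OpenAIChatCompletionClient(model="gpt-4o", api_key="sk-placeholder")

azure_client = AzureOpenAIChatCompletionClient(
    azure_deployment="my-deployment",
    model="gpt-4o",
    api_version="2024-06-01",
    azure_endpoint="https://my-resource.openai.azure.com/",
    api_key="placeholder",  # or pass an azure.identity token provider instead
)
```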
4 changes: 2 additions & 2 deletions python/uv.lock

Some generated files are not rendered by default.
