Skip to content

Commit

Permalink
unit tests now fully passing with params, example works with httpbin …
Browse files Browse the repository at this point in the history
…base64 decode
  • Loading branch information
EItanya committed Jan 25, 2025
1 parent 873ac5c commit dfb9b50
Show file tree
Hide file tree
Showing 2 changed files with 68 additions and 60 deletions.
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import re
import json
from typing import Any, Literal, Optional, Type
import urllib.parse

Expand Down Expand Up @@ -66,50 +67,54 @@ class HttpTool(BaseTool[BaseModel, Any], Component[HttpToolConfig]):
Example:
Simple use case::
import asyncio
from autogen_ext.tools.http import HttpTool
from autogen_agentchat.agents import AssistantAgent
from autogen_ext.models.openai import OpenAIChatCompletionClient
# Define a JSON schema for a weather API
weather_schema = {
"type": "object",
"properties": {
"city": {"type": "string", "description": "The city to get weather for"},
"country": {"type": "string", "description": "The country code"}
},
"required": ["city"]
}
# Create an HTTP tool for the weather API
weather_tool = HttpTool(
name="get_weather",
description="Get the current weather for a city",
url="https://api.weatherapi.com/v1/current.json",
method="GET",
headers={"key": "your-api-key"}, # Replace with your API key
json_schema=weather_schema
)
async def main():
# Create an assistant with the weather tool
model = OpenAIChatCompletionClient(model="gpt-4")
assistant = AssistantAgent(
"weather_assistant",
model_client=model,
tools=[weather_tool]
)
# The assistant can now use the weather tool to get weather data
response = await assistant.on_messages([
TextMessage(content="What's the weather like in London?")
])
print(response.chat_message.content)
asyncio.run(main())
"""

component_type = "agent"
import asyncio
from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.messages import TextMessage
from autogen_core import CancellationToken
from autogen_ext.models.openai import OpenAIChatCompletionClient
from autogen_ext.tools.http import HttpTool
# Define a JSON schema for a base64 decode tool
base64_schema = {
"type": "object",
"properties": {
"value": {"type": "string", "description": "The base64 value to decode"},
},
"required": ["value"]
}
# Create an HTTP tool for the httpbin.org base64 decode API
base64_tool = HttpTool(
name="base64_decode",
description="base64 decode a value",
scheme="https",
host="httpbin.org",
port=443,
path="/base64/{value}",
method="GET",
json_schema=base64_schema
)
async def main():
# Create an assistant with the base64 tool
model = OpenAIChatCompletionClient(model="gpt-4")
assistant = AssistantAgent(
"base64_assistant",
model_client=model,
tools=[base64_tool]
)
# The assistant can now use the base64 tool to decode the string
response = await assistant.on_messages([
TextMessage(content="Can you base64 decode the value 'YWJjZGU=', please?", source="user")
], CancellationToken())
print(response.chat_message.content)
asyncio.run(main())
"""

component_type = "tool"
component_provider_override = "autogen_ext.tools.http.HttpTool"
component_config_schema = HttpToolConfig

Expand All @@ -119,7 +124,7 @@ def __init__(
host: str,
port: int,
json_schema: dict[str, Any],
headers: Optional[dict[str, Any]],
headers: Optional[dict[str, Any]] = None,
description: str = "HTTP tool",
path: str = "/",
scheme: Literal["http", "https"] = "http",
Expand Down Expand Up @@ -200,4 +205,5 @@ async def run(self, args: BaseModel, cancellation_token: CancellationToken) -> A
case _: # Default case POST
response = await client.post(url, json=model_dump)

return response.json()
# TODO: (EItanya): Think about adding the ability to parse the response as JSON, or check a schema
return response.text
30 changes: 16 additions & 14 deletions python/packages/autogen-ext/tests/tools/http/test_http_tool.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import json
import pytest
import httpx
from pydantic import ValidationError
Expand Down Expand Up @@ -48,8 +49,8 @@ async def test_post_request(test_config: ComponentModel, test_server: None) -> N
tool = HttpTool.load_component(test_config)
result = await tool.run_json({"query": "test query", "value": 42}, CancellationToken())

assert isinstance(result, dict)
assert result["result"] == "Received: test query with value 42"
assert isinstance(result, str)
assert json.loads(result)["result"] == "Received: test query with value 42"


@pytest.mark.asyncio
Expand All @@ -61,8 +62,8 @@ async def test_get_request(test_config: ComponentModel, test_server: None) -> No

result = await tool.run_json({"query": "test query", "value": 42}, CancellationToken())

assert isinstance(result, dict)
assert result["result"] == "Received: test query with value 42"
assert isinstance(result, str)
assert json.loads(result)["result"] == "Received: test query with value 42"


@pytest.mark.asyncio
Expand All @@ -74,8 +75,8 @@ async def test_put_request(test_config: ComponentModel, test_server: None) -> No

result = await tool.run_json({"query": "test query", "value": 42}, CancellationToken())

assert isinstance(result, dict)
assert result["result"] == "Received: test query with value 42"
assert isinstance(result, str)
assert json.loads(result)["result"] == "Received: test query with value 42"

@pytest.mark.asyncio
async def test_path_params(test_config: ComponentModel, test_server: None) -> None:
Expand All @@ -86,8 +87,8 @@ async def test_path_params(test_config: ComponentModel, test_server: None) -> No

result = await tool.run_json({"query": "test query", "value": 42}, CancellationToken())

assert isinstance(result, dict)
assert result["result"] == "Received: test query with value 42"
assert isinstance(result, str)
assert json.loads(result)["result"] == "Received: test query with value 42"

@pytest.mark.asyncio
async def test_path_params_and_body(test_config: ComponentModel, test_server: None) -> None:
Expand All @@ -112,8 +113,9 @@ async def test_path_params_and_body(test_config: ComponentModel, test_server: No
"extra": "extra data"
}, CancellationToken())

assert isinstance(result, dict)
assert result["result"] == "Received: test query with value 42 and extra extra data"
assert isinstance(result, str)
assert json.loads(result)["result"] == "Received: test query with value 42 and extra extra data"




Expand All @@ -126,8 +128,8 @@ async def test_delete_request(test_config: ComponentModel, test_server: None) ->

result = await tool.run_json({"query": "test query", "value": 42}, CancellationToken())

assert isinstance(result, dict)
assert result["result"] == "Received: test query with value 42"
assert isinstance(result, str)
assert json.loads(result)["result"] == "Received: test query with value 42"


@pytest.mark.asyncio
Expand All @@ -139,8 +141,8 @@ async def test_patch_request(test_config: ComponentModel, test_server: None) ->

result = await tool.run_json({"query": "test query", "value": 42}, CancellationToken())

assert isinstance(result, dict)
assert result["result"] == "Received: test query with value 42"
assert isinstance(result, str)
assert json.loads(result)["result"] == "Received: test query with value 42"


@pytest.mark.asyncio
Expand Down

0 comments on commit dfb9b50

Please sign in to comment.