Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions posthog/ai/anthropic/anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import uuid
from typing import Any, Dict, Optional

from posthog.ai.utils import call_llm_and_track_usage, get_model_params, with_privacy_mode
from posthog.ai.utils import call_llm_and_track_usage, get_model_params, merge_system_prompt, with_privacy_mode
from posthog.client import Client as PostHogClient


Expand Down Expand Up @@ -171,7 +171,11 @@ def _capture_streaming_event(
"$ai_provider": "anthropic",
"$ai_model": kwargs.get("model"),
"$ai_model_parameters": get_model_params(kwargs),
"$ai_input": with_privacy_mode(self._client._ph_client, posthog_privacy_mode, kwargs.get("messages")),
"$ai_input": with_privacy_mode(
self._client._ph_client,
posthog_privacy_mode,
merge_system_prompt(kwargs, "anthropic"),
),
"$ai_output_choices": with_privacy_mode(
self._client._ph_client,
posthog_privacy_mode,
Expand Down
8 changes: 6 additions & 2 deletions posthog/ai/anthropic/anthropic_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import uuid
from typing import Any, Dict, Optional

from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params, with_privacy_mode
from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params, merge_system_prompt, with_privacy_mode
from posthog.client import Client as PostHogClient


Expand Down Expand Up @@ -171,7 +171,11 @@ async def _capture_streaming_event(
"$ai_provider": "anthropic",
"$ai_model": kwargs.get("model"),
"$ai_model_parameters": get_model_params(kwargs),
"$ai_input": with_privacy_mode(self._client._ph_client, posthog_privacy_mode, kwargs.get("messages")),
"$ai_input": with_privacy_mode(
self._client._ph_client,
posthog_privacy_mode,
merge_system_prompt(kwargs, "anthropic"),
),
"$ai_output_choices": with_privacy_mode(
self._client._ph_client,
posthog_privacy_mode,
Expand Down
17 changes: 15 additions & 2 deletions posthog/ai/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,15 @@ def format_response_openai(response):
return output


def merge_system_prompt(kwargs: Dict[str, Any], provider: str) -> Optional[List[Dict[str, Any]]]:
    """Return the call's messages with Anthropic's ``system`` prompt merged in.

    Anthropic accepts the system prompt as a separate top-level ``system``
    kwarg rather than as a message, so for analytics it is re-inserted as a
    leading ``{"role": "system"}`` message. For any other provider the
    ``messages`` value is returned unchanged.

    Args:
        kwargs: Keyword arguments that were passed to the provider's
            ``create`` call.
        provider: Provider identifier, e.g. ``"anthropic"``.

    Returns:
        The (possibly merged) list of messages. For non-anthropic providers
        this is whatever ``kwargs["messages"]`` holds and may be ``None``;
        for anthropic a list is always returned.
    """
    if provider != "anthropic":
        return kwargs.get("messages")
    messages = kwargs.get("messages") or []
    # Look the system prompt up once instead of twice.
    system = kwargs.get("system")
    if system is None:
        return messages
    return [{"role": "system", "content": system}] + messages


def call_llm_and_track_usage(
posthog_distinct_id: Optional[str],
ph_client: PostHogClient,
Expand Down Expand Up @@ -123,11 +132,13 @@ def call_llm_and_track_usage(
if response and hasattr(response, "usage"):
usage = get_usage(response, provider)

messages = merge_system_prompt(kwargs, provider)

event_properties = {
"$ai_provider": provider,
"$ai_model": kwargs.get("model"),
"$ai_model_parameters": get_model_params(kwargs),
"$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, kwargs.get("messages")),
"$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, messages),
"$ai_output_choices": with_privacy_mode(
ph_client, posthog_privacy_mode, format_response(response, provider)
),
Expand Down Expand Up @@ -191,11 +202,13 @@ async def call_llm_and_track_usage_async(
if response and hasattr(response, "usage"):
usage = get_usage(response, provider)

messages = merge_system_prompt(kwargs, provider)

event_properties = {
"$ai_provider": provider,
"$ai_model": kwargs.get("model"),
"$ai_model_parameters": get_model_params(kwargs),
"$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, kwargs.get("messages")),
"$ai_input": with_privacy_mode(ph_client, posthog_privacy_mode, messages),
"$ai_output_choices": with_privacy_mode(
ph_client, posthog_privacy_mode, format_response(response, provider)
),
Expand Down
62 changes: 58 additions & 4 deletions posthog/test/ai/anthropic/test_anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -218,25 +218,29 @@ def test_basic_integration(mock_client):
client = Anthropic(posthog_client=mock_client)
client.messages.create(
model="claude-3-opus-20240229",
messages=[{"role": "user", "content": "You must always answer with 'Bar'."}],
messages=[{"role": "user", "content": "Foo"}],
max_tokens=1,
temperature=0,
posthog_distinct_id="test-id",
posthog_properties={"foo": "bar"},
system="You must always answer with 'Bar'.",
)

assert mock_client.capture.call_count == 1

call_args = mock_client.capture.call_args[1]
props = call_args["properties"]

assert call_args["distinct_id"] == "test-id"
assert call_args["event"] == "$ai_generation"
assert props["$ai_provider"] == "anthropic"
assert props["$ai_model"] == "claude-3-opus-20240229"
assert props["$ai_input"] == [{"role": "user", "content": "You must always answer with 'Bar'."}]
assert props["$ai_input"] == [
{"role": "system", "content": "You must always answer with 'Bar'."},
{"role": "user", "content": "Foo"},
]
assert props["$ai_output_choices"][0]["role"] == "assistant"
assert props["$ai_input_tokens"] == 16
assert props["$ai_output_choices"][0]["content"] == "Bar"
assert props["$ai_input_tokens"] == 18
assert props["$ai_output_tokens"] == 1
assert props["$ai_http_status"] == 200
assert props["foo"] == "bar"
Expand Down Expand Up @@ -271,3 +275,53 @@ async def test_basic_async_integration(mock_client):
assert props["$ai_http_status"] == 200
assert props["foo"] == "bar"
assert isinstance(props["$ai_latency"], float)


def test_streaming_system_prompt(mock_client, mock_anthropic_stream):
    """Streaming calls should report the `system` kwarg as a leading
    system message in the captured $ai_input property."""
    with patch("anthropic.resources.Messages.create", return_value=mock_anthropic_stream):
        client = Anthropic(api_key="test-key", posthog_client=mock_client)
        stream = client.messages.create(
            model="claude-3-opus-20240229",
            system="Foo",
            messages=[{"role": "user", "content": "Bar"}],
            stream=True,
        )

        # Drain the stream so the wrapper finishes and fires its capture.
        for _chunk in stream:
            pass

        # Give the capture callback a moment to run.
        time.sleep(0.1)
        assert mock_client.capture.call_count == 1

        captured_props = mock_client.capture.call_args[1]["properties"]
        expected_input = [
            {"role": "system", "content": "Foo"},
            {"role": "user", "content": "Bar"},
        ]
        assert captured_props["$ai_input"] == expected_input


@pytest.mark.skipif(not ANTHROPIC_API_KEY, reason="ANTHROPIC_API_KEY is not set")
async def test_async_streaming_system_prompt(mock_client, mock_anthropic_stream):
    """The async streaming path should also merge the `system` kwarg into
    the captured $ai_input as a leading system message.

    NOTE(review): unlike the sync streaming test this one does not patch the
    client and so hits the real API (hence the skipif guard); the
    `mock_anthropic_stream` fixture appears unused here — confirm whether a
    patch was intended.
    """
    import asyncio

    client = AsyncAnthropic(posthog_client=mock_client)
    response = await client.messages.create(
        model="claude-3-opus-20240229",
        system="You must always answer with 'Bar'.",
        messages=[{"role": "user", "content": "Foo"}],
        stream=True,
        max_tokens=1,
    )

    # Consume the stream without building a throwaway list.
    async for _chunk in response:
        pass

    # Yield to the event loop while waiting for the capture to land;
    # time.sleep would block the loop and could starve the callback.
    await asyncio.sleep(0.1)
    assert mock_client.capture.call_count == 1

    call_args = mock_client.capture.call_args[1]
    props = call_args["properties"]

    assert props["$ai_input"] == [
        {"role": "system", "content": "You must always answer with 'Bar'."},
        {"role": "user", "content": "Foo"},
    ]
Loading