Skip to content

Commit

Permalink
fix: Quick fix for history bug + bump sentry
Browse files · Browse the repository at this point in the history
  • Branch information:
RamiAwar committed Jul 29, 2024
1 parent dfb6d5c commit 59dd946
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 5 deletions.
2 changes: 1 addition & 1 deletion backend/dataline/sentry.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ def setup_sentry() -> None:
sentry_sdk.init(
dsn="https://eb866cebe8c8378fd689c1ad3d39bcb5@o4507329853915136.ingest.de.sentry.io/4507335339278416",
environment=config.environment,
release="v1.0.8",
release="v1.0.11",
enable_tracing=True,
integrations=[FastApiIntegration()],
traces_sample_rate=1.0,
Expand Down
7 changes: 3 additions & 4 deletions backend/dataline/services/llm_flow/nodes.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from abc import ABC, abstractmethod
from typing import cast

from langchain_core.messages import AIMessage, BaseMessage, ToolMessage, ToolCall
from langchain_core.messages import AIMessage, BaseMessage, ToolCall, ToolMessage
from langchain_core.utils.function_calling import convert_to_openai_function
from langchain_openai import ChatOpenAI
from langgraph.graph import END
Expand Down Expand Up @@ -56,10 +56,9 @@ def run(cls, state: QueryGraphState) -> QueryGraphStateUpdate:
all_tools = sql_tools + [ChartGeneratorTool()]
tools = [convert_to_openai_function(t) for t in all_tools]
model = cast(ChatOpenAI, model.bind_tools(tools))
# We only want to pass the last 20 messages to the model
# This includes tool messages and ai messages at this point
# Useful to limit tokens when graph recursion is very deep
last_n_messages = state.messages[-20:]
# TODO: Useful to limit tokens when graph recursion is very deep
last_n_messages = state.messages
try:
response = model.invoke(last_n_messages)
except RateLimitError as e:
Expand Down

0 comments on commit 59dd946

Please sign in to comment.