Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit c7e7291

Browse files
committed Apr 7, 2025
fix: logging prints
1 parent ed81111 commit c7e7291

File tree

3 files changed

+8
-3
lines changed

3 files changed

+8
-3
lines changed
 

‎index.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ def handler(event, context):
99
Lambda handler function
1010
"""
1111
# Log the input event for debugging purposes
12-
print("Received event:", json.dumps(event, indent=2))
12+
print("Received event:", " ".join(json.dumps(event, indent=2).splitlines()))
1313

1414
if "body" in event:
1515
try:

‎src/agents/informational_agent/informational_agent.py

+5
Original file line numberDiff line numberDiff line change
@@ -79,6 +79,7 @@ def call_model(self, state: State, config: RunnableConfig) -> str:
7979
messages = [SystemMessage(content=system_message)] + state['messages']
8080

8181
valid_messages = self.check_for_valid_messages(messages)
82+
print("Informational agent valid messages, ready for LLM call...")
8283
response = self.llm.invoke(valid_messages)
8384

8485
# Save summary for fetching outside the class
@@ -131,6 +132,7 @@ def summarize_conversation(self, state: State, config: RunnableConfig) -> dict:
131132
valid_messages = self.check_for_valid_messages(messages)
132133
conversationalStyle_response = self.summarisation_llm.invoke(valid_messages)
133134

135+
print("Informational agent summary and conversational style responses successfully received.")
134136
# Delete messages that are no longer wanted, except the last ones
135137
delete_messages: list[AllMessageTypes] = [RemoveMessage(id=m.id) for m in state["messages"][:-3]]
136138

@@ -153,6 +155,7 @@ def should_summarize(self, state: State) -> str:
153155

154156
# always pairs of (sent, response) + 1 latest message
155157
if nr_messages > self.max_messages_to_summarize:
158+
print("Informational agent: summarizing conversation needed...")
156159
return "summarize_conversation"
157160
return "call_llm"
158161

@@ -185,7 +188,9 @@ def invoke_informational_agent(query: str, conversation_history: list, summary:
185188
print(f'in invoke_informational_agent(), thread_id = {session_id}')
186189

187190
config = {"configurable": {"thread_id": session_id, "summary": summary, "conversational_style": conversationalStyle, "question_response_details": question_response_details}}
191+
print("Informational agent invoking...")
188192
response_events = agent.app.invoke({"messages": conversation_history, "summary": summary, "conversational_style": conversationalStyle}, config=config, stream_mode="values") #updates
193+
print("Informational agent response received.")
189194
pretty_printed_response = agent.pretty_response_value(response_events) # get last event/ai answer in the response
190195

191196
# Gather Metadata from the agent

‎src/module.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -63,9 +63,9 @@ def chat_module(message: Any, params: Params) -> Result:
6363
question_information,
6464
question_access_information
6565
)
66-
print("INFO:: ", question_response_details_prompt)
66+
print("INFO:: ", " ".join(question_response_details_prompt.splitlines()))
6767
except Exception as e:
68-
print("ERROR:: ", e)
68+
print("ERROR on parsing the JSON event to prompt:: ", e)
6969
raise Exception("Internal Error: The question response details could not be parsed.")
7070
if "agent_type" in params:
7171
agent_type = params["agent_type"]

0 commit comments

Comments (0)
Please sign in to comment.