Commit 3aa8d64

fix a bug where an example question was always displayed
1 parent 894d930 commit 3aa8d64

1 file changed: +43 −13 lines changed

wizard/to_question.py  (+43 −13)
@@ -3,6 +3,8 @@
 
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
+from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate, FewShotChatMessagePromptTemplate
+
 
 #from in2lambda.api.question import Question
 from chains.llm_factory import LLMFactory
@@ -19,6 +21,7 @@ def __init__(self):
         llm_factory_instance = LLMFactory()
         self.llm = llm_factory_instance.get_llm()
 
+
         self.examples = [
             {
                 "input":
@@ -90,22 +93,49 @@ def convert(self, question:str, solution:str) -> Question:
         Convert a question and solution to a Question object
         it's possible solution is a list of solutions or no solution at all
         '''
+
+        # This is a prompt template used to format each individual example.
+        example_prompt = ChatPromptTemplate.from_messages(
+            [
+                ("human", "{input}"),
+                ("ai", "{output}"),
+            ]
+        )
+        few_shot_prompt = FewShotChatMessagePromptTemplate(
+            example_prompt=example_prompt,
+            examples=self.examples,
+        )
+
+        final_prompt = ChatPromptTemplate.from_messages(
+            [
+                ("system", """You are intelligent assistant to process the given input question,
+                    Please analyze the input question and respond with:
+                    1. Main Content (String).
+                    2. Relevant parts (Comma and new line separated list).
+                    Use format: "Main Content: <string>\\nParts: <Part1>, \\n<Part2>, \\n..."""),
+                few_shot_prompt,
+                ("human", "{input}"),
+            ]
+        )
+
+        # print(few_shot_prompt.format())
 
-        prompt = ChatPromptTemplate.from_messages([
-            SystemMessage(content="""Analyze text and respond with:
-            1. Main Content (string)
-            2. Relevant parts (coma and new line seperated list)
-            Use format: "Main Content: <string>\\nParts: <Part1>, \\n<Part2>, \\n..."""),
-            *[
-                HumanMessage(content=ex["input"])
-                for ex in self.examples
-            ],
-            *[AIMessage(content=f"Main Content: {ex['output'][0]} nParts: {', '.join(ex['output'][1])}") for ex in self.examples],
-            HumanMessage(content="{input}")
-        ])
+        # prompt = ChatPromptTemplate.from_messages([
+        #     SystemMessage(content="""Analyze text and respond with:
+        #     1. Main Content (string)
+        #     2. Relevant parts (coma and new line seperated list)
+        #     Use format: "Main Content: <string>\\nParts: <Part1>, \\n<Part2>, \\n..."""),
+        #     *[
+        #         HumanMessage(content=ex["input"])
+        #         for ex in self.examples
+        #     ],
+        #     *[AIMessage(content=f"Main Content: {ex['output'][0]} Parts: {', '.join(ex['output'][1])}") for ex in self.examples],
+        #     HumanMessage(content="{input}")
+        # ])
 
-        chain = prompt | self.llm
+        chain = final_prompt | self.llm
 
+        print(question)
         result = chain.invoke({"input":question})
         print(result)
         return Question()
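Why this fixes the bug (an inference from the diff, not stated in the commit message): in the removed code, HumanMessage(content="{input}") is a ready-made message object, and ChatPromptTemplate.from_messages does not treat the content of message objects as a template, so {input} was never substituted and the last real question the model saw was a hard-coded example — hence it kept answering an example question. Tuples like ("human", "{input}") and FewShotChatMessagePromptTemplate keep {input} as a genuine template variable. Below is a minimal, self-contained sketch of the same construction that runs without an LLM; the system text and example data are stand-ins, not the module's real prompt or self.examples.

from langchain_core.prompts import ChatPromptTemplate, FewShotChatMessagePromptTemplate

# Stand-in examples (illustrative only; the real ones live in ToQuestion.__init__).
examples = [
    {"input": "State Newton's second law.",
     "output": "Main Content: F = ma\nParts: definition"},
]

# Each example is rendered as a human/AI message pair.
example_prompt = ChatPromptTemplate.from_messages(
    [("human", "{input}"), ("ai", "{output}")]
)
few_shot_prompt = FewShotChatMessagePromptTemplate(
    example_prompt=example_prompt,
    examples=examples,
)

final_prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "Analyze the question and respond with its main content and parts."),
        few_shot_prompt,
        ("human", "{input}"),  # real template slot, unlike a literal HumanMessage("{input}")
    ]
)

# {input} is actually substituted here; with the old message-object approach the
# placeholder stayed literal, which is the likely cause of the reported bug.
for message in final_prompt.format_messages(input="Explain conservation of momentum."):
    print(f"{message.type}: {message.content}")

Printing the formatted messages shows the system message, the example pair, and a final human message containing the actual question; the committed code then pipes the same kind of prompt into the model with chain = final_prompt | self.llm.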
