# json_langchain.py
# pip install langchain-openai
from langchain_openai.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.schema import HumanMessage, SystemMessage

prompt = """Write a weather report for a major city
in ten words or less.
Do not include any additional explanation.
"""
guided_prompt = prompt + """
Return the result as JSON as follows:
{ "city": "<CITY_NAME>",
"report": "<BRIEF_REPORT>" }
"""
chat = ChatOpenAI(
    # base_url="http://localhost:1234/v1",  # point at a local OpenAI-compatible server instead
    temperature=0.7,
    max_tokens=500,
    model='gpt-4-1106-preview'
)

def baseline():
    # No formatting guidance at all: the model answers in free text.
    print("baseline:")
    print(chat.invoke(prompt).content)

def with_guided_prompt():
    # 1. Ask nicely: append explicit JSON-shape instructions to the prompt.
    print("1. Ask nicely")
    print(chat.invoke(guided_prompt).content)

def with_pydantic_output_formatter():
    # 2. PydanticOutputParser: describe the schema as a Pydantic model and let
    #    LangChain generate the format instructions and validate the output.
    print("2. Pydantic OutputParser")
    from langchain.output_parsers import PydanticOutputParser
    from langchain.pydantic_v1 import BaseModel, Field

    class WeatherReport(BaseModel):
        city: str = Field(description="City name")
        report: str = Field(description="Brief weather report")

    parser = PydanticOutputParser(pydantic_object=WeatherReport)
    # print(f"Parser instructions: {parser.get_format_instructions()}")
    runnable_prompt = ChatPromptTemplate.from_messages(
        [
            SystemMessage(content=parser.get_format_instructions()),
            HumanMessage(content=prompt)
        ]
    )
    chain = runnable_prompt | chat | parser
    py_obj = chain.invoke({})
    print(py_obj.city, py_obj.report)
    # To be extra robust about fixing JSON errors you can add
    # from langchain.output_parsers import OutputFixingParser
    # robust_parser = OutputFixingParser.from_llm(
    #     parser=parser,
    #     llm=chat
    # )
    # This will re-prompt to get a conforming output format;
    # see with_output_fixing_parser() below for a sketch.
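

# A sketch of the OutputFixingParser idea described in the comments above
# (an addition to the original script, not part of it). It reuses the same
# `chat` model and WeatherReport schema; if the model's first answer is not
# valid JSON, OutputFixingParser re-prompts the LLM to repair the output
# before Pydantic validation runs.
def with_output_fixing_parser():
    print("3. OutputFixingParser (sketch)")
    from langchain.output_parsers import OutputFixingParser, PydanticOutputParser
    from langchain.pydantic_v1 import BaseModel, Field

    class WeatherReport(BaseModel):
        city: str = Field(description="City name")
        report: str = Field(description="Brief weather report")

    parser = PydanticOutputParser(pydantic_object=WeatherReport)
    # Wrap the strict parser; malformed output is sent back to the LLM to fix.
    robust_parser = OutputFixingParser.from_llm(parser=parser, llm=chat)
    runnable_prompt = ChatPromptTemplate.from_messages(
        [
            SystemMessage(content=parser.get_format_instructions()),
            HumanMessage(content=prompt)
        ]
    )
    chain = runnable_prompt | chat | robust_parser
    py_obj = chain.invoke({})
    print(py_obj.city, py_obj.report)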
if __name__ == "__main__":
baseline()
with_guided_prompt()
with_pydantic_output_formatter()
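    # with_output_fixing_parser()  # uncomment to run the OutputFixingParser sketch above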