
Commit 8f22b92

Merge pull request #12 from pamelafox/chainedcalls
Adding chained calls example
2 parents 802165c + 90fe76b commit 8f22b92

File tree

1 file changed: +80 -0 lines changed


chained_calls.py

Lines changed: 80 additions & 0 deletions
@@ -0,0 +1,80 @@
import os

import azure.identity
import openai
from dotenv import load_dotenv

# Set up the OpenAI client to use either the Azure, OpenAI.com, or Ollama API
load_dotenv(override=True)
API_HOST = os.getenv("API_HOST")

if API_HOST == "azure":

    token_provider = azure.identity.get_bearer_token_provider(
        azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
    )
    client = openai.AzureOpenAI(
        api_version=os.getenv("AZURE_OPENAI_VERSION"),
        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
        azure_ad_token_provider=token_provider,
    )
    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")

elif API_HOST == "ollama":

    client = openai.OpenAI(
        base_url=os.getenv("OLLAMA_ENDPOINT"),
        api_key="nokeyneeded",
    )
    MODEL_NAME = os.getenv("OLLAMA_MODEL")

elif API_HOST == "github":

    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
    MODEL_NAME = os.getenv("GITHUB_MODEL")

else:

    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
    MODEL_NAME = os.getenv("OPENAI_MODEL")


# First call: ask the model to explain how LLMs work
response = client.chat.completions.create(
    model=MODEL_NAME,
    temperature=0.7,
    messages=[{"role": "user", "content": "Explain how LLMs work in a single paragraph."}],
)

explanation = response.choices[0].message.content
print("Explanation: ", explanation)

# Second call: feed the explanation back in and ask for editorial feedback
response = client.chat.completions.create(
    model=MODEL_NAME,
    temperature=0.7,
    messages=[
        {
            "role": "user",
            "content": "You're an editor. Review the explanation and provide feedback (but don't edit yourself):\n\n"
            + explanation,
        }
    ],
)

feedback = response.choices[0].message.content
print("\n\nFeedback: ", feedback)

# Third call: revise the original explanation using the feedback
response = client.chat.completions.create(
    model=MODEL_NAME,
    temperature=0.7,
    messages=[
        {
            "role": "user",
            "content": (
                "Revise the article using the following feedback, but keep it to a single paragraph."
                f"\nExplanation:\n{explanation}\n\nFeedback:\n{feedback}"
            ),
        }
    ],
)

final_article = response.choices[0].message.content
print("\n\nFinal Article: ", final_article)

0 commit comments
