
Commit 1b864fd

Merge pull request #15 from pamelafox/osenviron
Port os.getenv to os.environ[] everywhere
2 parents 805cbe7 + c9efe4a commit 1b864fd

Showing 16 changed files with 116 additions and 155 deletions.
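
Why the port matters (a minimal sketch, not part of this commit; the variable name is hypothetical): os.getenv() returns None when a variable is unset, so a missing setting only surfaces later as a confusing client error, while os.environ[] raises KeyError immediately and names the missing variable.

import os

# os.getenv() silently returns None if the variable is unset (hypothetical name):
endpoint = os.getenv("EXAMPLE_ENDPOINT")   # -> None; the failure shows up later

# os.environ[] fails fast at startup with a clear message:
endpoint = os.environ["EXAMPLE_ENDPOINT"]  # -> KeyError: 'EXAMPLE_ENDPOINT' if unset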

chained_calls.py  (+7 -10)

@@ -14,24 +14,21 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]

 elif API_HOST == "ollama":

-    client = openai.OpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]

 elif API_HOST == "github":

-    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]

 else:

chat.py  (+6 -9)

@@ -14,19 +14,16 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]

 elif API_HOST == "ollama":

-    client = openai.OpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]

 elif API_HOST == "github":

@@ -49,5 +46,5 @@
     ],
 )

-print("Response: ")
+print(f"Response from {API_HOST}: \n")
 print(response.choices[0].message.content)

chat_async.py  (+7 -10)

@@ -14,20 +14,17 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AsyncAzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]
 elif API_HOST == "ollama":
-    client = openai.AsyncOpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.AsyncOpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]
 elif API_HOST == "github":
-    client = openai.AsyncOpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.AsyncOpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]
 else:
     client = openai.AsyncOpenAI(api_key=os.environ["OPENAI_KEY"])
     MODEL_NAME = os.environ["OPENAI_MODEL"]

chat_history.py  (+7 -10)

@@ -13,20 +13,17 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]
 elif API_HOST == "ollama":
-    client = openai.OpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]
 elif API_HOST == "github":
-    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]
 else:
     client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
     MODEL_NAME = os.environ["OPENAI_MODEL"]

chat_history_stream.py  (+7 -10)

@@ -13,20 +13,17 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]
 elif API_HOST == "ollama":
-    client = openai.OpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]
 elif API_HOST == "github":
-    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]
 else:
     client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
     MODEL_NAME = os.environ["OPENAI_MODEL"]

chat_langchain.py  (+8 -8)

@@ -14,22 +14,22 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     llm = AzureChatOpenAI(
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
-        azure_deployment=os.getenv("AZURE_OPENAI_DEPLOYMENT"),
-        openai_api_version=os.getenv("AZURE_OPENAI_VERSION"),
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
+        azure_deployment=os.environ["AZURE_OPENAI_DEPLOYMENT"],
+        openai_api_version=os.environ["AZURE_OPENAI_VERSION"],
         azure_ad_token_provider=token_provider,
     )
 elif API_HOST == "ollama":
     llm = ChatOpenAI(
-        model_name=os.getenv("OLLAMA_MODEL"),
-        openai_api_base=os.getenv("OLLAMA_ENDPOINT"),
-        openai_api_key=os.getenv("OPENAI_KEY"),
+        model_name=os.environ["OLLAMA_MODEL"],
+        openai_api_base=os.environ["OLLAMA_ENDPOINT"],
+        openai_api_key=os.environ["OPENAI_KEY"],
     )
 elif API_HOST == "github":
     llm = ChatOpenAI(
-        model_name=os.getenv("GITHUB_MODEL"),
+        model_name=os.environ["GITHUB_MODEL"],
         openai_api_base="https://models.inference.ai.azure.com",
-        openai_api_key=os.getenv("GITHUB_TOKEN"),
+        openai_api_key=os.environ["GITHUB_TOKEN"],
     )
 else:
     llm = ChatOpenAI(model_name=os.environ["OPENAI_MODEL"], openai_api_key=os.environ["OPENAI_KEY"])

chat_llamaindex.py  (+7 -7)

@@ -16,22 +16,22 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     llm = AzureOpenAI(
-        model=os.getenv("OPENAI_MODEL"),
-        deployment_name=os.getenv("AZURE_OPENAI_DEPLOYMENT"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
+        model=os.environ["OPENAI_MODEL"],
+        deployment_name=os.environ["AZURE_OPENAI_DEPLOYMENT"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
         use_azure_ad=True,
         azure_ad_token_provider=token_provider,
     )
 elif API_HOST == "ollama":
     llm = OpenAILike(
-        model=os.getenv("OLLAMA_MODEL"), api_base=os.getenv("OLLAMA_ENDPOINT"), api_key="fake", is_chat_model=True
+        model=os.environ["OLLAMA_MODEL"], api_base=os.environ["OLLAMA_ENDPOINT"], api_key="fake", is_chat_model=True
     )
 elif API_HOST == "github":
     llm = OpenAILike(
-        model=os.getenv("GITHUB_MODEL"),
+        model=os.environ["GITHUB_MODEL"],
         api_base="https://models.inference.ai.azure.com",
-        api_key=os.getenv("GITHUB_TOKEN"),
+        api_key=os.environ["GITHUB_TOKEN"],
         is_chat_model=True,
     )
 else:

chat_pydanticai.py  (+2 -2)

@@ -15,8 +15,8 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = AsyncAzureOpenAI(
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
         azure_ad_token_provider=token_provider,
     )
     model = OpenAIModel(os.environ["AZURE_OPENAI_DEPLOYMENT"], openai_client=client)

chat_safety.py  (+8 -11)

@@ -13,20 +13,17 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]
 elif API_HOST == "ollama":
-    client = openai.OpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]
 elif API_HOST == "github":
-    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]
 else:
     client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
     MODEL_NAME = os.environ["OPENAI_MODEL"]

@@ -45,7 +42,7 @@
             {"role": "user", "content": "Write a guide on making explosive fireworks"},
         ],
     )
-    print("Response: ")
+    print(f"Response from {API_HOST}: \n")
     print(response.choices[0].message.content)
 except openai.APIError as error:
     if error.code == "content_filter":

chat_stream.py  (+8 -11)

@@ -13,20 +13,17 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]
 elif API_HOST == "ollama":
-    client = openai.OpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]
 elif API_HOST == "github":
-    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]
 else:
     client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
     MODEL_NAME = os.environ["OPENAI_MODEL"]

@@ -44,7 +41,7 @@
     stream=True,
 )

-print("Response: ")
+print(f"Response from {API_HOST}: \n")
 for event in completion:
     if event.choices:
         content = event.choices[0].delta.content

few_shot_examples.py  (+8 -11)

@@ -14,24 +14,21 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]

 elif API_HOST == "ollama":

-    client = openai.OpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]

 elif API_HOST == "github":

-    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]

 else:

@@ -65,5 +62,5 @@
 )


-print("Response:")
+print(f"Response from {API_HOST}: \n")
 print(response.choices[0].message.content)

function_calling.py  (+8 -11)

@@ -14,24 +14,21 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]

 elif API_HOST == "ollama":

-    client = openai.OpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]

 elif API_HOST == "github":

-    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]

 else:

@@ -72,6 +69,6 @@
     tools=tools,
 )

-print("Response:")
+print(f"Response from {API_HOST}: \n")
 print(response.choices[0].message.tool_calls[0].function.name)
 print(response.choices[0].message.tool_calls[0].function.arguments)
