
Change os.environ calls #14


Merged · 1 commit · Mar 3, 2025
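
This PR replaces os.getenv(...) with direct os.environ[...] lookups for the settings each sample requires. The practical difference: os.getenv silently returns None when a variable is unset, so the failure only surfaces later (for example as an authentication error on the first API call), whereas os.environ[...] raises a KeyError immediately and names the missing variable. A minimal sketch of that difference, using one of the variables touched in this diff:

import os

# os.getenv returns None when the variable is missing; the error shows up later,
# e.g. as an invalid-key failure once the client makes its first request.
key = os.getenv("OPENAI_KEY")

# os.environ[...] fails fast with KeyError: 'OPENAI_KEY' at startup,
# which makes the missing configuration obvious.
key = os.environ["OPENAI_KEY"]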
13 changes: 0 additions & 13 deletions CHANGELOG.md

This file was deleted.

4 changes: 2 additions & 2 deletions chained_calls.py
@@ -35,8 +35,8 @@
 
 else:
 
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 response = client.chat.completions.create(
8 changes: 4 additions & 4 deletions chat.py
@@ -30,13 +30,13 @@
 
 elif API_HOST == "github":
 
-    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]
 
 else:
 
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 response = client.chat.completions.create(
4 changes: 2 additions & 2 deletions chat_async.py
@@ -29,8 +29,8 @@
     client = openai.AsyncOpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
     MODEL_NAME = os.getenv("GITHUB_MODEL")
 else:
-    client = openai.AsyncOpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.AsyncOpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 async def generate_response(location):
4 changes: 2 additions & 2 deletions chat_history.py
@@ -28,8 +28,8 @@
     client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
     MODEL_NAME = os.getenv("GITHUB_MODEL")
 else:
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 messages = [
4 changes: 2 additions & 2 deletions chat_history_stream.py
@@ -28,8 +28,8 @@
     client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
     MODEL_NAME = os.getenv("GITHUB_MODEL")
 else:
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 messages = [
2 changes: 1 addition & 1 deletion chat_langchain.py
@@ -32,7 +32,7 @@
         openai_api_key=os.getenv("GITHUB_TOKEN"),
     )
 else:
-    llm = ChatOpenAI(model_name=os.getenv("OPENAI_MODEL"), openai_api_key=os.getenv("OPENAI_KEY"))
+    llm = ChatOpenAI(model_name=os.environ["OPENAI_MODEL"], openai_api_key=os.environ["OPENAI_KEY"])
 
 
 prompt = ChatPromptTemplate.from_messages(
2 changes: 1 addition & 1 deletion chat_llamaindex.py
@@ -35,7 +35,7 @@
         is_chat_model=True,
     )
 else:
-    llm = OpenAI(model=os.getenv("OPENAI_MODEL"), api_key=os.getenv("OPENAI_KEY"))
+    llm = OpenAI(model=os.environ["OPENAI_MODEL"], api_key=os.environ["OPENAI_KEY"])
 
 chat_msgs = [
     ChatMessage(
4 changes: 2 additions & 2 deletions chat_pydanticai.py
@@ -31,9 +31,9 @@
     model = OpenAIModel(os.environ["OPENAI_MODEL"], api_key=os.environ["OPENAI_KEY"])
 
 
-agent = Agent(model, system_prompt="Be concise: 1 sentence only.")
+agent = Agent(model, system_prompt="You are a helpful assistant that makes lots of cat references and uses emojis.")
 
-result = agent.run_sync("Where does 'hello world' come from?")
+result = agent.run_sync("Write a haiku about a hungry cat who wants tuna")
 
 print(f"Response from {API_HOST}: \n")
 print(result.data)
4 changes: 2 additions & 2 deletions chat_safety.py
@@ -28,8 +28,8 @@
     client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
     MODEL_NAME = os.getenv("GITHUB_MODEL")
 else:
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 try:
     response = client.chat.completions.create(
4 changes: 2 additions & 2 deletions chat_stream.py
@@ -28,8 +28,8 @@
     client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
     MODEL_NAME = os.getenv("GITHUB_MODEL")
 else:
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 completion = client.chat.completions.create(
4 changes: 2 additions & 2 deletions few_shot_examples.py
@@ -35,8 +35,8 @@
 
 else:
 
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 SYSTEM_MESSAGE = """
4 changes: 2 additions & 2 deletions function_calling.py
@@ -35,8 +35,8 @@
 
 else:
 
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 tools = [
4 changes: 2 additions & 2 deletions function_calling_call.py
@@ -32,8 +32,8 @@
     MODEL_NAME = os.getenv("GITHUB_MODEL")
 
 else:
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 def lookup_weather(city_name=None, zip_code=None):
4 changes: 2 additions & 2 deletions function_calling_multiple.py
@@ -35,8 +35,8 @@
 
 else:
 
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 tools = [
5 changes: 3 additions & 2 deletions http/.env.sample
@@ -1,2 +1,3 @@
-AZURE_OPENAI_SERVICE=
-AUTH_TOKEN=
+SERVICE=
+DEPLOYMENT=
+TOKEN=
File renamed without changes.
10 changes: 10 additions & 0 deletions http/chat_completion_ollama.http
@@ -0,0 +1,10 @@
+POST http://localhost:11434/v1/chat/completions
+Content-Type: application/json
+
+{
+    "model": "phi3.5:latest",
+    "messages": [{"role":"system","content":"You are an AI assistant that answers questions with short clear answers."},
+        {"role":"user","content":"How fast is the Prius V?"}],
+    "max_tokens": 800,
+    "temperature": 0.7
+}
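
For reference, the same request expressed with the Python openai package — a minimal sketch that is not part of this PR. It assumes Ollama's OpenAI-compatible endpoint running locally with the phi3.5 model pulled; the api_key value is only a placeholder, since that endpoint does not check it.

import openai

# Sketch only: base_url points at the local Ollama server's OpenAI-compatible API;
# "nokeyneeded" is an arbitrary placeholder string, not a real credential.
client = openai.OpenAI(base_url="http://localhost:11434/v1", api_key="nokeyneeded")

response = client.chat.completions.create(
    model="phi3.5:latest",
    messages=[
        {"role": "system", "content": "You are an AI assistant that answers questions with short clear answers."},
        {"role": "user", "content": "How fast is the Prius V?"},
    ],
    max_tokens=800,
    temperature=0.7,
)
print(response.choices[0].message.content)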
File renamed without changes.
14 changes: 7 additions & 7 deletions prompt_engineering.py
@@ -35,19 +35,19 @@
 
 else:
 
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 SYSTEM_MESSAGE = """
-I want you to act like Elmo from Sesame Street.
-I want you to respond and answer like Elmo using the tone, manner and vocabulary that Elmo would use.
-Do not write any explanations. Only answer like Elmo.
-You must know all of the knowledge of Elmo, and nothing more.
+I want you to act like Yoda from Star Wars.
+I want you to respond and answer like Yoda using the tone, manner and vocabulary that Yoda would use.
+Do not write any explanations. Only answer like Yoda.
+You must know all of the knowledge of Yoda, and nothing more.
 """
 
 USER_MESSAGE = """
-Hi Elmo, how are you doing today?
+What is an LLM?
 """
 
 response = client.chat.completions.create(
1 change: 1 addition & 0 deletions requirements.txt
@@ -6,3 +6,4 @@ langchain-openai
 llama-index-llms-azure-openai
 llama-index-llms-openai
 llama-index-llms-openai-like
+pydantic-ai
4 changes: 2 additions & 2 deletions retrieval_augmented_generation.py
@@ -32,8 +32,8 @@
     MODEL_NAME = os.getenv("GITHUB_MODEL")
 
 else:
-    client = openai.OpenAI(api_key=os.getenv("OPENAI_KEY"))
-    MODEL_NAME = os.getenv("OPENAI_MODEL")
+    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
+    MODEL_NAME = os.environ["OPENAI_MODEL"]
 
 
 USER_MESSAGE = "how fast is the prius v?"