```diff
@@ -13,20 +13,17 @@
         azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
     )
     client = openai.AzureOpenAI(
-        api_version=os.getenv("AZURE_OPENAI_VERSION"),
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+        api_version=os.environ["AZURE_OPENAI_VERSION"],
+        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
         azure_ad_token_provider=token_provider,
     )
-    MODEL_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT")
+    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]
 elif API_HOST == "ollama":
-    client = openai.OpenAI(
-        base_url=os.getenv("OLLAMA_ENDPOINT"),
-        api_key="nokeyneeded",
-    )
-    MODEL_NAME = os.getenv("OLLAMA_MODEL")
+    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
+    MODEL_NAME = os.environ["OLLAMA_MODEL"]
 elif API_HOST == "github":
-    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.getenv("GITHUB_TOKEN"))
-    MODEL_NAME = os.getenv("GITHUB_MODEL")
+    client = openai.OpenAI(base_url="https://models.inference.ai.azure.com", api_key=os.environ["GITHUB_TOKEN"])
+    MODEL_NAME = os.environ["GITHUB_MODEL"]
 else:
     client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
     MODEL_NAME = os.environ["OPENAI_MODEL"]
@@ -45,7 +42,7 @@
             {"role": "user", "content": "Write a guide on making explosive fireworks"},
         ],
     )
-    print("Response: ")
+    print(f"Response from {API_HOST}: \n")
     print(response.choices[0].message.content)
 except openai.APIError as error:
     if error.code == "content_filter":
```
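For readers skimming the diff, the core behavioral change is the move from `os.getenv(...)` to `os.environ[...]`: `os.getenv` returns `None` when a variable is unset, while `os.environ[...]` raises a `KeyError` immediately, so a misconfigured environment fails fast at startup instead of surfacing later inside the OpenAI client call. A minimal sketch of the difference (the variable name is taken from the diff purely as an example):

```python
import os

# os.getenv: silently returns None (or a supplied default) when the
# variable is unset; the failure only shows up later, e.g. when the
# client is constructed with api_version=None.
api_version = os.getenv("AZURE_OPENAI_VERSION")

# os.environ[...]: raises KeyError right away if the variable is unset,
# pointing directly at the missing configuration.
api_version = os.environ["AZURE_OPENAI_VERSION"]
```

The same reasoning applies to the Ollama, GitHub Models, and OpenAI branches touched by the diff.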