
Commit 6d8cb30

Merge pull request #2 from Unsupervisedcom/azure-token-provider: Azure token provider

2 parents: 3c9411f + 4dc6ece

File tree: 7 files changed, +255 -5 lines


README.md

Lines changed: 15 additions & 0 deletions
@@ -218,6 +218,21 @@ To use the [Azure OpenAI Service](https://learn.microsoft.com/en-us/azure/cognit
 
 where `AZURE_OPENAI_URI` is e.g. `https://custom-domain.openai.azure.com/openai/deployments/gpt-35-turbo`
 
+##### Azure with Azure AD tokens
+
+To use Azure AD tokens you can configure the gem with a proc like this:
+
+```ruby
+OpenAI.configure do |config|
+  config.azure_token_provider = ->() { your_code_caches_or_refreshes_token }
+  config.uri_base = ENV.fetch("AZURE_OPENAI_URI")
+  config.api_type = :azure
+  config.api_version = "2023-03-15-preview"
+end
+```
+
+The azure_token_provider will be called on every request. This allows tokens to be cached and periodically refreshed by your custom code.
+
 #### Ollama
 
 Ollama allows you to run open-source LLMs, such as Llama 3, locally. It [offers chat compatibility](https://github.com/ollama/ollama/blob/main/docs/openai.md) with the OpenAI API.
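
A concrete sketch of the "caches or refreshes" idea from the snippet above: a provider object that memoizes an Azure AD token and refreshes it shortly before expiry. The `AzureAdTokenCache` class and its `fetch_fresh_token` placeholder are illustrative, not part of the gem; plug in whatever mechanism you actually use to obtain Azure AD tokens.

```ruby
require "openai"

# Hypothetical caching token provider: memoizes an Azure AD token and
# refreshes it a few minutes before it expires.
class AzureAdTokenCache
  REFRESH_MARGIN = 300 # seconds before expiry at which we refresh

  def initialize
    @token = nil
    @expires_at = Time.at(0)
  end

  # Called (via the configured proc) on every request the gem makes.
  def token
    refresh! if Time.now >= @expires_at - REFRESH_MARGIN
    @token
  end

  private

  def refresh!
    @token = fetch_fresh_token
    @expires_at = Time.now + 3600 # use the real expiry reported by your identity provider
  end

  def fetch_fresh_token
    # Placeholder: acquire a real token here (MSAL, managed identity endpoint, Azure CLI, ...).
    "dummy-azure-ad-token"
  end
end

cache = AzureAdTokenCache.new

OpenAI.configure do |config|
  config.azure_token_provider = -> { cache.token }
  config.uri_base = ENV.fetch("AZURE_OPENAI_URI")
  config.api_type = :azure
  config.api_version = "2023-03-15-preview"
end
```

Because the provider is invoked once per request, the expensive token acquisition only happens when the cached token is close to expiry.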

lib/openai.rb

Lines changed: 3 additions & 1 deletion
@@ -47,7 +47,8 @@ class Configuration
     :organization_id,
     :uri_base,
     :request_timeout,
-    :extra_headers
+    :extra_headers,
+    :azure_token_provider
 
   DEFAULT_API_VERSION = "v1".freeze
   DEFAULT_URI_BASE = "https://api.openai.com/".freeze
@@ -63,6 +64,7 @@ def initialize
     @uri_base = DEFAULT_URI_BASE
     @request_timeout = DEFAULT_REQUEST_TIMEOUT
     @extra_headers = {}
+    @azure_token_provider = nil
   end
 end

lib/openai/client.rb

Lines changed: 28 additions & 0 deletions
@@ -12,6 +12,7 @@ class Client
     uri_base
     request_timeout
     extra_headers
+    azure_token_provider
   ].freeze
   attr_reader *CONFIG_KEYS, :faraday_middleware
 
@@ -25,6 +26,8 @@ def initialize(config = {}, &faraday_middleware)
       )
     end
     @faraday_middleware = faraday_middleware
+    validate_credential_config!
+    validate_azure_credential_provider!
   end
 
   def chat(parameters: {})
@@ -109,6 +112,31 @@ def beta(apis)
     end
   end
 
+  private
+
+  def validate_credential_config!
+    if @access_token && @azure_token_provider
+      raise ConfigurationError,
+            "Only one of OpenAI access token or Azure token provider can be set! See https://github.com/alexrudall/ruby-openai#usage"
+    end
+
+    return if @access_token || @azure_token_provider
+
+    raise ConfigurationError,
+          "OpenAI access token or Azure token provider missing! See https://github.com/alexrudall/ruby-openai#usage"
+  end
+
+  def validate_azure_credential_provider!
+    return if @azure_token_provider.nil?
+
+    unless @azure_token_provider.respond_to?(:to_proc)
+      raise ConfigurationError,
+            "OpenAI Azure AD token provider must be a Proc, Lambda, or respond to to_proc."
+    end
+
+    @azure_token_provider = @azure_token_provider&.to_proc
+  end
+
   def inspect
     vars = instance_variables.map do |var|
       value = instance_variable_get(var)
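
For illustration, a rough sketch of how these validations behave from the caller's side. The `TokenSource` class below is hypothetical; the point is that anything responding to `to_proc` (a Proc, a Lambda, or a Method object) is accepted, while configuring both an access token and a token provider, or neither, raises a `ConfigurationError`.

```ruby
require "openai"

# Hypothetical token source, used only to show the to_proc conversion.
class TokenSource
  def current_token
    "azure-ad-token" # replace with real acquisition/refresh logic
  end
end

# A Method object responds to #to_proc, so it passes validate_azure_credential_provider!
# and is converted to a proc internally.
client = OpenAI::Client.new(
  access_token: nil, # must not be combined with an access token (per-client or global)
  azure_token_provider: TokenSource.new.method(:current_token),
  api_type: :azure,
  uri_base: "https://custom-domain.openai.azure.com/openai/deployments/gpt-35-turbo",
  api_version: "2024-02-01"
)

# Configuring both credentials at once raises a ConfigurationError, as does configuring neither:
# OpenAI::Client.new(access_token: "sk-...", azure_token_provider: -> { "token" })
```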

lib/openai/http_headers.rb

Lines changed: 12 additions & 3 deletions
@@ -24,9 +24,18 @@ def openai_headers
 
     def azure_headers
       {
-        "Content-Type" => "application/json",
-        "api-key" => @access_token
-      }
+        "Content-Type" => "application/json"
+      }.merge(azure_auth_headers)
+    end
+
+    def azure_auth_headers
+      if @access_token
+        { "api-key" => @access_token }
+      elsif @azure_token_provider
+        { "Authorization" => "Bearer #{@azure_token_provider.call}" }
+      else
+        raise ConfigurationError, "access_token or azure_token_provider must be set."
+      end
     end
 
     def extra_headers
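
To make the new branch concrete, a small sketch of the header hashes each auth mode ends up producing; the token value is an illustrative placeholder, not something the gem generates.

```ruby
# Static Azure API key (access_token set):
#   { "Content-Type" => "application/json", "api-key" => "<your Azure API key>" }

# Azure AD token provider (the proc is called once per request):
token_provider = -> { "aad-token" } # placeholder for your caching/refreshing provider

base_headers = { "Content-Type" => "application/json" }
headers = base_headers.merge("Authorization" => "Bearer #{token_provider.call}")
# => { "Content-Type" => "application/json", "Authorization" => "Bearer aad-token" }
```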

spec/fixtures/cassettes/http_json_post_with_azure_token_provider.yml

Lines changed: 125 additions & 0 deletions
Generated file; contents not rendered.

spec/openai/client/client_spec.rb

Lines changed: 0 additions & 1 deletion
@@ -30,7 +30,6 @@
   let!(:c2) do
     OpenAI::Client.new(
       access_token: "access_token2",
-      organization_id: nil,
       request_timeout: 1,
       uri_base: "https://example.com/"
     )

spec/openai/client/http_spec.rb

Lines changed: 72 additions & 0 deletions
@@ -120,6 +120,56 @@
     end
   end
 
+  describe ".json_post" do
+    context "with azure_token_provider" do
+      let(:token_provider) do
+        counter = 0
+        lambda do
+          counter += 1
+          "some dynamic token #{counter}"
+        end
+      end
+
+      let(:client) do
+        OpenAI::Client.new(
+          access_token: nil,
+          azure_token_provider: token_provider,
+          api_type: :azure,
+          uri_base: "https://custom-domain.openai.azure.com/openai/deployments/gpt-35-turbo",
+          api_version: "2024-02-01"
+        )
+      end
+
+      let(:cassette) { "http json post with azure token provider" }
+
+      it "calls the token provider on every request" do
+        expect(token_provider).to receive(:call).twice.and_call_original
+        VCR.use_cassette(cassette, record: :none) do
+          client.chat(
+            parameters: {
+              messages: [
+                {
+                  "role" => "user",
+                  "content" => "Hello world!"
+                }
+              ]
+            }
+          )
+          client.chat(
+            parameters: {
+              messages: [
+                {
+                  "role" => "user",
+                  "content" => "Who were the founders of Microsoft?"
+                }
+              ]
+            }
+          )
+        end
+      end
+    end
+  end
+
   describe ".to_json_stream" do
     context "with a proc" do
       let(:user_proc) { proc { |x| x } }
@@ -281,6 +331,28 @@
         expect(headers).to eq({ "Content-Type" => "application/json",
                                 "api-key" => OpenAI.configuration.access_token })
       }
+
+      context "with azure_token_provider" do
+        let(:token) { "some dynamic token" }
+        let(:token_provider) { -> { token } }
+
+        around do |ex|
+          old_access_token = OpenAI.configuration.access_token
+          OpenAI.configuration.access_token = nil
+          OpenAI.configuration.azure_token_provider = token_provider
+
+          ex.call
+        ensure
+          OpenAI.configuration.azure_token_provider = nil
+          OpenAI.configuration.access_token = old_access_token
+        end
+
+        it {
+          expect(token_provider).to receive(:call).once.and_call_original
+          expect(headers).to eq({ "Content-Type" => "application/json",
+                                  "Authorization" => "Bearer #{token}" })
+        }
+      end
     end
   end

0 commit comments