Skip to content

Commit de139f6

Browse files
committed
fix: OPENAI_API_KEY Required Even When Not Using OpenAI Provider with undefined apiKey #135
1 parent b68f2a4 commit de139f6

File tree

3 files changed

+35
-4
lines changed

3 files changed

+35
-4
lines changed

.changeset/cold-cherries-serve.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'token.js': patch
3+
---
4+
5+
Default to a hardcoded empty string for the API key in the OpenAI-compatible provider if no API key is provided

scripts/example.ts

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import * as dotenv from 'dotenv'
22
import { OpenAI } from 'openai'
33

4-
import { TokenJS } from '../dist/index.cjs'
4+
import { TokenJS } from '../src'
55
dotenv.config()
66

77
const messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[] = [
@@ -12,10 +12,12 @@ const messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[] = [
1212
]
1313

1414
const callLLM = async () => {
15-
const tokenjs = new TokenJS()
15+
const tokenjs = new TokenJS({
16+
baseURL: 'http://localhost:3000/api/',
17+
})
1618
const result = await tokenjs.chat.completions.create({
1719
// stream: true,
18-
provider: 'gemini',
20+
provider: 'openai-compatible',
1921
model: 'gemini-1.5-pro',
2022
messages,
2123
})

src/handlers/openai-compatible.ts

Lines changed: 25 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,30 @@ export class OpenAICompatibleHandler extends BaseHandler<OpenAICompatibleModel>
3434
}
3535
}
3636

37+
determineAPIKey = () => {
38+
if (this.opts.apiKey) {
39+
return this.opts.apiKey
40+
} else if (process.env.OPENAI_COMPATIBLE_API_KEY) {
41+
return process.env.OPENAI_COMPATIBLE_API_KEY
42+
} else {
43+
/**
44+
* We hardcode an empty API key if none is defined because the OpenAI SDK throws an error if we do not do this.
45+
* There are plenty of reasonable cases where an API is not required by an openai compartible model provider (locally
46+
* hosted models for example), so we want to avoid runtime errors in those situations.
47+
* See this issue for an example: https://github.com/twinnydotdev/twinny/issues/440
48+
*
49+
* However, the tradeoff with this is that if the underlying provider requires an API key and the user does not provide one,
50+
* they may get an unpredictable error. We deem this tradeoff acceptible in this case because using an unvetted openai-compatible
51+
* model provider is inherently less safe than using a provider officially integrated and supported by Token.js. If users often,
52+
* report errors related to this, we should consider officially supporting the behavior of the underlying provider that is causing issues.
53+
*
54+
* For example, we may want to officially support ollama local models if users often report problems related to using that provider via this
55+
* generic implementation.
56+
*/
57+
return ''
58+
}
59+
}
60+
3761
async create(
3862
body: ProviderCompletionParams<'openai'>
3963
): Promise<CompletionResponse | StreamCompletionResponse> {
@@ -42,7 +66,7 @@ export class OpenAICompatibleHandler extends BaseHandler<OpenAICompatibleModel>
4266
// Uses the OPENAI_COMPATIBLE_API_KEY environment variable, if the apiKey is not provided.
4367
// This makes the UX better for switching between providers because you can just
4468
// define all the environment variables and then change the model field without doing anything else.
45-
const apiKey = this.opts.apiKey ?? process.env.OPENAI_COMPATIBLE_API_KEY
69+
const apiKey = this.determineAPIKey()
4670
const openai = new OpenAI({
4771
...this.opts,
4872
apiKey,

0 commit comments

Comments
 (0)