
Commit 29e5479

feat(api): update enum values, comments, and examples (#181)
1 parent 2c21e34 commit 29e5479

11 files changed, +79 -86 lines

.stats.yml

+1 -1

@@ -1,2 +1,2 @@
 configured_endpoints: 68
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-b5b0e2c794b012919701c3fd43286af10fa25d33ceb8a881bec2636028f446e0.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-3904ef6b29a89c98f93a9b7da19879695f3c440564be6384db7af1b734611ede.yml

audiospeech.go

+10 -7

@@ -53,9 +53,9 @@ type AudioSpeechNewParams struct {
     // One of the available [TTS models](https://platform.openai.com/docs/models#tts):
     // `tts-1` or `tts-1-hd`
     Model param.Field[SpeechModel] `json:"model,required"`
-    // The voice to use when generating the audio. Supported voices are `alloy`,
-    // `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are
-    // available in the
+    // The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
+    // `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the
+    // voices are available in the
     // [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
     Voice param.Field[AudioSpeechNewParamsVoice] `json:"voice,required"`
     // The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`,
@@ -70,24 +70,27 @@ func (r AudioSpeechNewParams) MarshalJSON() (data []byte, err error) {
     return apijson.MarshalRoot(r)
 }

-// The voice to use when generating the audio. Supported voices are `alloy`,
-// `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are
-// available in the
+// The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
+// `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the
+// voices are available in the
 // [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
 type AudioSpeechNewParamsVoice string

 const (
     AudioSpeechNewParamsVoiceAlloy   AudioSpeechNewParamsVoice = "alloy"
+    AudioSpeechNewParamsVoiceAsh     AudioSpeechNewParamsVoice = "ash"
+    AudioSpeechNewParamsVoiceCoral   AudioSpeechNewParamsVoice = "coral"
     AudioSpeechNewParamsVoiceEcho    AudioSpeechNewParamsVoice = "echo"
     AudioSpeechNewParamsVoiceFable   AudioSpeechNewParamsVoice = "fable"
     AudioSpeechNewParamsVoiceOnyx    AudioSpeechNewParamsVoice = "onyx"
     AudioSpeechNewParamsVoiceNova    AudioSpeechNewParamsVoice = "nova"
+    AudioSpeechNewParamsVoiceSage    AudioSpeechNewParamsVoice = "sage"
     AudioSpeechNewParamsVoiceShimmer AudioSpeechNewParamsVoice = "shimmer"
 )

 func (r AudioSpeechNewParamsVoice) IsKnown() bool {
     switch r {
-    case AudioSpeechNewParamsVoiceAlloy, AudioSpeechNewParamsVoiceEcho, AudioSpeechNewParamsVoiceFable, AudioSpeechNewParamsVoiceOnyx, AudioSpeechNewParamsVoiceNova, AudioSpeechNewParamsVoiceShimmer:
+    case AudioSpeechNewParamsVoiceAlloy, AudioSpeechNewParamsVoiceAsh, AudioSpeechNewParamsVoiceCoral, AudioSpeechNewParamsVoiceEcho, AudioSpeechNewParamsVoiceFable, AudioSpeechNewParamsVoiceOnyx, AudioSpeechNewParamsVoiceNova, AudioSpeechNewParamsVoiceSage, AudioSpeechNewParamsVoiceShimmer:
         return true
     }
     return false
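The new `ash`, `coral`, and `sage` constants slot into the existing request flow unchanged. A minimal sketch (not part of this commit) of selecting one of the added voices; the SpeechModelTTS1 constant and the Input field are assumed from the existing SDK surface rather than taken from this diff:

package main

import "github.com/openai/openai-go"

func main() {
    // Build speech params with a voice added in this commit.
    params := openai.AudioSpeechNewParams{
        Model: openai.F(openai.SpeechModelTTS1),                // assumed existing model constant
        Input: openai.F("Hello from one of the new voices."),   // assumed existing required field
        Voice: openai.F(openai.AudioSpeechNewParamsVoiceCoral), // added here, alongside Ash and Sage
    }
    _ = params

    // IsKnown now reports true for the added constants.
    _ = openai.AudioSpeechNewParamsVoiceSage.IsKnown()
}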

betathread.go

+11 -11

@@ -218,7 +218,7 @@ func (r AssistantToolChoiceFunctionParam) MarshalJSON() (data []byte, err error)
 // `{"type": "function", "function": {"name": "my_function"}}` forces the model to
 // call that tool.
 //
-// Union satisfied by [AssistantToolChoiceOptionBehavior] or [AssistantToolChoice].
+// Union satisfied by [AssistantToolChoiceOptionAuto] or [AssistantToolChoice].
 type AssistantToolChoiceOptionUnion interface {
     implementsAssistantToolChoiceOptionUnion()
 }
@@ -229,7 +229,7 @@ func init() {
         "",
         apijson.UnionVariant{
             TypeFilter: gjson.String,
-            Type:       reflect.TypeOf(AssistantToolChoiceOptionBehavior("")),
+            Type:       reflect.TypeOf(AssistantToolChoiceOptionAuto("")),
         },
         apijson.UnionVariant{
             TypeFilter: gjson.JSON,
@@ -242,25 +242,25 @@ func init() {
 // `auto` means the model can pick between generating a message or calling one or
 // more tools. `required` means the model must call one or more tools before
 // responding to the user.
-type AssistantToolChoiceOptionBehavior string
+type AssistantToolChoiceOptionAuto string

 const (
-    AssistantToolChoiceOptionBehaviorNone     AssistantToolChoiceOptionBehavior = "none"
-    AssistantToolChoiceOptionBehaviorAuto     AssistantToolChoiceOptionBehavior = "auto"
-    AssistantToolChoiceOptionBehaviorRequired AssistantToolChoiceOptionBehavior = "required"
+    AssistantToolChoiceOptionAutoNone     AssistantToolChoiceOptionAuto = "none"
+    AssistantToolChoiceOptionAutoAuto     AssistantToolChoiceOptionAuto = "auto"
+    AssistantToolChoiceOptionAutoRequired AssistantToolChoiceOptionAuto = "required"
 )

-func (r AssistantToolChoiceOptionBehavior) IsKnown() bool {
+func (r AssistantToolChoiceOptionAuto) IsKnown() bool {
     switch r {
-    case AssistantToolChoiceOptionBehaviorNone, AssistantToolChoiceOptionBehaviorAuto, AssistantToolChoiceOptionBehaviorRequired:
+    case AssistantToolChoiceOptionAutoNone, AssistantToolChoiceOptionAutoAuto, AssistantToolChoiceOptionAutoRequired:
         return true
     }
     return false
 }

-func (r AssistantToolChoiceOptionBehavior) implementsAssistantToolChoiceOptionUnion() {}
+func (r AssistantToolChoiceOptionAuto) implementsAssistantToolChoiceOptionUnion() {}

-func (r AssistantToolChoiceOptionBehavior) implementsAssistantToolChoiceOptionUnionParam() {}
+func (r AssistantToolChoiceOptionAuto) implementsAssistantToolChoiceOptionUnionParam() {}

 // Controls which (if any) tool is called by the model. `none` means the model will
 // not call any tools and instead generates a message. `auto` is the default value
@@ -270,7 +270,7 @@ func (r AssistantToolChoiceOptionBehavior) implementsAssistantToolChoiceOptionUn
 // `{"type": "function", "function": {"name": "my_function"}}` forces the model to
 // call that tool.
 //
-// Satisfied by [AssistantToolChoiceOptionBehavior], [AssistantToolChoiceParam].
+// Satisfied by [AssistantToolChoiceOptionAuto], [AssistantToolChoiceParam].
 type AssistantToolChoiceOptionUnionParam interface {
     implementsAssistantToolChoiceOptionUnionParam()
 }
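For callers, the Behavior-to-Auto rename is mechanical: the wire values (`none`, `auto`, `required`) are unchanged and only the Go identifiers move. A hedged sketch of the rename at an assignment site, using only identifiers that appear in this diff:

package main

import "github.com/openai/openai-go"

func main() {
    // Previously spelled openai.AssistantToolChoiceOptionBehavior(openai.AssistantToolChoiceOptionBehaviorNone).
    var toolChoice openai.AssistantToolChoiceOptionUnionParam = openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoNone)
    _ = toolChoice
}

The test updates below show the same substitution inside the full ToolChoice field expression.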

betathread_test.go

+1 -1

@@ -193,7 +193,7 @@ func TestBetaThreadNewAndRunWithOptionalParams(t *testing.T) {
                 }),
             }),
         }),
-        ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionBehavior(openai.AssistantToolChoiceOptionBehaviorNone)),
+        ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoNone)),
         ToolResources: openai.F(openai.BetaThreadNewAndRunParamsToolResources{
             CodeInterpreter: openai.F(openai.BetaThreadNewAndRunParamsToolResourcesCodeInterpreter{
                 FileIDs: openai.F([]string{"string"}),

betathreadrun_test.go

+1 -1

@@ -50,7 +50,7 @@ func TestBetaThreadRunNewWithOptionalParams(t *testing.T) {
         Model:             openai.F(openai.ChatModelGPT4o),
         ParallelToolCalls: openai.F(true),
         Temperature:       openai.F(1.000000),
-        ToolChoice:        openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionBehavior(openai.AssistantToolChoiceOptionBehaviorNone)),
+        ToolChoice:        openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoNone)),
         Tools: openai.F([]openai.AssistantToolUnionParam{openai.CodeInterpreterToolParam{
             Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
         }}),

chatcompletion.go

+19 -30

@@ -157,8 +157,7 @@ type ChatCompletion struct {
     Model string `json:"model,required"`
     // The object type, which is always `chat.completion`.
     Object ChatCompletionObject `json:"object,required"`
-    // The service tier used for processing the request. This field is only included if
-    // the `service_tier` parameter is specified in the request.
+    // The service tier used for processing the request.
     ServiceTier ChatCompletionServiceTier `json:"service_tier,nullable"`
     // This fingerprint represents the backend configuration that the model runs with.
     //
@@ -293,8 +292,7 @@ func (r ChatCompletionObject) IsKnown() bool {
     return false
 }

-// The service tier used for processing the request. This field is only included if
-// the `service_tier` parameter is specified in the request.
+// The service tier used for processing the request.
 type ChatCompletionServiceTier string

 const (
@@ -541,8 +539,7 @@ type ChatCompletionChunk struct {
     Model string `json:"model,required"`
     // The object type, which is always `chat.completion.chunk`.
     Object ChatCompletionChunkObject `json:"object,required"`
-    // The service tier used for processing the request. This field is only included if
-    // the `service_tier` parameter is specified in the request.
+    // The service tier used for processing the request.
     ServiceTier ChatCompletionChunkServiceTier `json:"service_tier,nullable"`
     // This fingerprint represents the backend configuration that the model runs with.
     // Can be used in conjunction with the `seed` request parameter to understand when
@@ -839,8 +836,7 @@ func (r ChatCompletionChunkObject) IsKnown() bool {
     return false
 }

-// The service tier used for processing the request. This field is only included if
-// the `service_tier` parameter is specified in the request.
+// The service tier used for processing the request.
 type ChatCompletionChunkServiceTier string

 const (
@@ -1687,7 +1683,7 @@ func (r ChatCompletionToolType) IsKnown() bool {
 // `none` is the default when no tools are present. `auto` is the default if tools
 // are present.
 //
-// Satisfied by [ChatCompletionToolChoiceOptionBehavior],
+// Satisfied by [ChatCompletionToolChoiceOptionAuto],
 // [ChatCompletionNamedToolChoiceParam].
 type ChatCompletionToolChoiceOptionUnionParam interface {
     implementsChatCompletionToolChoiceOptionUnionParam()
@@ -1696,24 +1692,23 @@ type ChatCompletionToolChoiceOptionUnionParam interface {
 // `none` means the model will not call any tool and instead generates a message.
 // `auto` means the model can pick between generating a message or calling one or
 // more tools. `required` means the model must call one or more tools.
-type ChatCompletionToolChoiceOptionBehavior string
+type ChatCompletionToolChoiceOptionAuto string

 const (
-    ChatCompletionToolChoiceOptionBehaviorNone     ChatCompletionToolChoiceOptionBehavior = "none"
-    ChatCompletionToolChoiceOptionBehaviorAuto     ChatCompletionToolChoiceOptionBehavior = "auto"
-    ChatCompletionToolChoiceOptionBehaviorRequired ChatCompletionToolChoiceOptionBehavior = "required"
+    ChatCompletionToolChoiceOptionAutoNone     ChatCompletionToolChoiceOptionAuto = "none"
+    ChatCompletionToolChoiceOptionAutoAuto     ChatCompletionToolChoiceOptionAuto = "auto"
+    ChatCompletionToolChoiceOptionAutoRequired ChatCompletionToolChoiceOptionAuto = "required"
 )

-func (r ChatCompletionToolChoiceOptionBehavior) IsKnown() bool {
+func (r ChatCompletionToolChoiceOptionAuto) IsKnown() bool {
     switch r {
-    case ChatCompletionToolChoiceOptionBehaviorNone, ChatCompletionToolChoiceOptionBehaviorAuto, ChatCompletionToolChoiceOptionBehaviorRequired:
+    case ChatCompletionToolChoiceOptionAutoNone, ChatCompletionToolChoiceOptionAutoAuto, ChatCompletionToolChoiceOptionAutoRequired:
         return true
     }
     return false
 }

-func (r ChatCompletionToolChoiceOptionBehavior) implementsChatCompletionToolChoiceOptionUnionParam() {
-}
+func (r ChatCompletionToolChoiceOptionAuto) implementsChatCompletionToolChoiceOptionUnionParam() {}

 type ChatCompletionToolMessageParam struct {
     // The contents of the tool message.
@@ -1913,9 +1908,6 @@ type ChatCompletionNewParams struct {
     //   - If set to 'default', the request will be processed using the default service
     //     tier with a lower uptime SLA and no latency guarentee.
     //   - When not set, the default behavior is 'auto'.
-    //
-    // When this parameter is set, the response body will include the `service_tier`
-    // utilized.
     ServiceTier param.Field[ChatCompletionNewParamsServiceTier] `json:"service_tier"`
     // Up to 4 sequences where the API will stop generating further tokens.
     Stop param.Field[ChatCompletionNewParamsStopUnion] `json:"stop"`
@@ -1979,7 +1971,7 @@ func (r ChatCompletionNewParams) MarshalJSON() (data []byte, err error) {
 // `none` is the default when no functions are present. `auto` is the default if
 // functions are present.
 //
-// Satisfied by [ChatCompletionNewParamsFunctionCallBehavior],
+// Satisfied by [ChatCompletionNewParamsFunctionCallAuto],
 // [ChatCompletionFunctionCallOptionParam].
 //
 // Deprecated: deprecated
@@ -1990,22 +1982,22 @@ type ChatCompletionNewParamsFunctionCallUnion interface {
 // `none` means the model will not call a function and instead generates a message.
 // `auto` means the model can pick between generating a message or calling a
 // function.
-type ChatCompletionNewParamsFunctionCallBehavior string
+type ChatCompletionNewParamsFunctionCallAuto string

 const (
-    ChatCompletionNewParamsFunctionCallBehaviorNone ChatCompletionNewParamsFunctionCallBehavior = "none"
-    ChatCompletionNewParamsFunctionCallBehaviorAuto ChatCompletionNewParamsFunctionCallBehavior = "auto"
+    ChatCompletionNewParamsFunctionCallAutoNone ChatCompletionNewParamsFunctionCallAuto = "none"
+    ChatCompletionNewParamsFunctionCallAutoAuto ChatCompletionNewParamsFunctionCallAuto = "auto"
 )

-func (r ChatCompletionNewParamsFunctionCallBehavior) IsKnown() bool {
+func (r ChatCompletionNewParamsFunctionCallAuto) IsKnown() bool {
     switch r {
-    case ChatCompletionNewParamsFunctionCallBehaviorNone, ChatCompletionNewParamsFunctionCallBehaviorAuto:
+    case ChatCompletionNewParamsFunctionCallAutoNone, ChatCompletionNewParamsFunctionCallAutoAuto:
         return true
     }
     return false
 }

-func (r ChatCompletionNewParamsFunctionCallBehavior) implementsChatCompletionNewParamsFunctionCallUnion() {
+func (r ChatCompletionNewParamsFunctionCallAuto) implementsChatCompletionNewParamsFunctionCallUnion() {
 }

 // Deprecated: deprecated
@@ -2113,9 +2105,6 @@ func (r ChatCompletionNewParamsResponseFormatType) IsKnown() bool {
 //   - If set to 'default', the request will be processed using the default service
 //     tier with a lower uptime SLA and no latency guarentee.
 //   - When not set, the default behavior is 'auto'.
-//
-// When this parameter is set, the response body will include the `service_tier`
-// utilized.
 type ChatCompletionNewParamsServiceTier string

 const (
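The chat-completions side gets the same treatment for both the tool_choice union and the deprecated function_call union. A hedged sketch of the renamed identifiers at assignment sites, using only names that appear in this diff:

package main

import "github.com/openai/openai-go"

func main() {
    // Previously ChatCompletionToolChoiceOptionBehavior / ChatCompletionToolChoiceOptionBehaviorNone.
    var toolChoice openai.ChatCompletionToolChoiceOptionUnionParam = openai.ChatCompletionToolChoiceOptionAuto(openai.ChatCompletionToolChoiceOptionAutoNone)

    // Previously ChatCompletionNewParamsFunctionCallBehavior / ChatCompletionNewParamsFunctionCallBehaviorNone (deprecated union).
    var functionCall openai.ChatCompletionNewParamsFunctionCallUnion = openai.ChatCompletionNewParamsFunctionCallAuto(openai.ChatCompletionNewParamsFunctionCallAutoNone)

    _, _ = toolChoice, functionCall
}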

chatcompletion_test.go

+3 -3

@@ -38,7 +38,7 @@ func TestChatCompletionNewWithOptionalParams(t *testing.T) {
             Voice: openai.F(openai.ChatCompletionAudioParamVoiceAlloy),
         }),
         FrequencyPenalty: openai.F(-2.000000),
-        FunctionCall:     openai.F[openai.ChatCompletionNewParamsFunctionCallUnion](openai.ChatCompletionNewParamsFunctionCallBehavior(openai.ChatCompletionNewParamsFunctionCallBehaviorNone)),
+        FunctionCall:     openai.F[openai.ChatCompletionNewParamsFunctionCallUnion](openai.ChatCompletionNewParamsFunctionCallAuto(openai.ChatCompletionNewParamsFunctionCallAutoNone)),
         Functions: openai.F([]openai.ChatCompletionNewParamsFunction{{
             Name:        openai.F("name"),
             Description: openai.F("description"),
@@ -67,15 +67,15 @@ func TestChatCompletionNewWithOptionalParams(t *testing.T) {
         ResponseFormat: openai.F[openai.ChatCompletionNewParamsResponseFormatUnion](shared.ResponseFormatTextParam{
             Type: openai.F(shared.ResponseFormatTextTypeText),
         }),
-        Seed:        openai.F(int64(-9007199254740991)),
+        Seed:        openai.F(int64(0)),
         ServiceTier: openai.F(openai.ChatCompletionNewParamsServiceTierAuto),
         Stop:        openai.F[openai.ChatCompletionNewParamsStopUnion](shared.UnionString("string")),
         Store:       openai.F(true),
         StreamOptions: openai.F(openai.ChatCompletionStreamOptionsParam{
             IncludeUsage: openai.F(true),
         }),
         Temperature: openai.F(1.000000),
-        ToolChoice:  openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](openai.ChatCompletionToolChoiceOptionBehavior(openai.ChatCompletionToolChoiceOptionBehaviorNone)),
+        ToolChoice:  openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](openai.ChatCompletionToolChoiceOptionAuto(openai.ChatCompletionToolChoiceOptionAutoNone)),
         Tools: openai.F([]openai.ChatCompletionToolParam{{
             Function: openai.F(shared.FunctionDefinitionParam{
                 Name: openai.F("name"),

completion_test.go

+1 -1

@@ -39,7 +39,7 @@ func TestCompletionNewWithOptionalParams(t *testing.T) {
         MaxTokens:       openai.F(int64(16)),
         N:               openai.F(int64(1)),
         PresencePenalty: openai.F(-2.000000),
-        Seed:            openai.F(int64(-9007199254740991)),
+        Seed:            openai.F(int64(0)),
         Stop:            openai.F[openai.CompletionNewParamsStopUnion](shared.UnionString("\n")),
         StreamOptions: openai.F(openai.ChatCompletionStreamOptionsParam{
             IncludeUsage: openai.F(true),

embedding.go

+4 -2

@@ -171,7 +171,8 @@ type EmbeddingNewParams struct {
     // `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
     // dimensions or less.
     // [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-    // for counting tokens.
+    // for counting tokens. Some models may also impose a limit on total number of
+    // tokens summed across inputs.
     Input param.Field[EmbeddingNewParamsInputUnion] `json:"input,required"`
     // ID of the model to use. You can use the
     // [List models](https://platform.openai.com/docs/api-reference/models/list) API to
@@ -201,7 +202,8 @@ func (r EmbeddingNewParams) MarshalJSON() (data []byte, err error) {
 // `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
 // dimensions or less.
 // [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-// for counting tokens.
+// for counting tokens. Some models may also impose a limit on total number of
+// tokens summed across inputs.
 //
 // Satisfied by [shared.UnionString], [EmbeddingNewParamsInputArrayOfStrings],
 // [EmbeddingNewParamsInputArrayOfTokens],
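The added sentence clarifies that, beyond the per-input token cap, some models also bound the total tokens summed across a batched request. A hedged sketch (not part of this commit) of the batched-input shape that the note applies to; the EmbeddingNewParamsInputArrayOfStrings conversion is one of the union variants listed above, while the EmbeddingModelTextEmbedding3Small constant and the exact Model field type are assumed from the existing SDK surface:

package main

import "github.com/openai/openai-go"

func main() {
    // Each string is embedded separately, but the summed token count across
    // all inputs may also be limited, per the updated doc comment.
    params := openai.EmbeddingNewParams{
        Input: openai.F[openai.EmbeddingNewParamsInputUnion](
            openai.EmbeddingNewParamsInputArrayOfStrings([]string{"first document", "second document"}),
        ),
        Model: openai.F(openai.EmbeddingModelTextEmbedding3Small), // assumed existing constant
    }
    _ = params
}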
