[JS] feat: Updates to OpenAIModel and OpenAIEmbeddings classes. (#1541)
## Linked issues

closes: #1201 

## Details

These changes update the `OpenAIModel` and `OpenAIEmbeddings` classes to
support recent parameter changes by OpenAI. They also add support for
calling other OpenAI-compliant services such as
[LLaMA.cpp](https://github.com/ggerganov/llama.cpp/blob/master/examples/server/README.md).
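
As a rough usage sketch (not part of this commit): a model can now point at a local LLaMA.cpp server. The endpoint URL and model name below are hypothetical, and `OpenAIModel` is assumed to be exported from the package root.

```ts
import { OpenAIModel } from '@microsoft/teams-ai';

// Target a local LLaMA.cpp server. Because no `apiKey` is supplied,
// the client omits the Authorization header when calling the server.
const model = new OpenAIModel({
    endpoint: 'http://localhost:8080',       // hypothetical local server URL
    defaultModel: 'llama-2-7b-chat',         // hypothetical model name
    responseFormat: { type: 'json_object' }, // force valid JSON responses
    seed: 42,                                // makes completions deterministic
    logRequests: true
});
```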

#### Change details

- Added new `OpenAILikeModelOptions` interface and updated `OpenAIModel`
class to conditionally send an Authorization header to the model server.
- Updated `BaseOpenAIModelOptions` interface and `OpenAIModel` class to
support OpenAI's `response_format` parameter.
- Updated `BaseOpenAIModelOptions` interface and `OpenAIModel` class to
support OpenAI's `seed` parameter.
- Added new `OpenAILikeEmbeddingsOptions` interface and updated the
`OpenAIEmbeddings` class to conditionally send an Authorization header
to the model server.
- Updated `BaseOpenAIEmbeddingsOptions` interface and `OpenAIEmbeddings`
class to support OpenAI's new `dimensions` parameter (see the sketch
after this list).
- Tweaked the documentation comment for the embeddings class.
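
A similarly hedged sketch for the embeddings side; the model name and dimension count are illustrative, and the `dimensions` option only applies to embedding models that support dynamic dimensionality:

```ts
import { OpenAIEmbeddings } from '@microsoft/teams-ai';

// OpenAI-hosted embeddings trimmed to 256 dimensions via the new
// `dimensions` option.
const embeddings = new OpenAIEmbeddings({
    apiKey: process.env.OPENAI_API_KEY ?? '',
    model: 'text-embedding-3-small',
    dimensions: 256
});
```

The option is only forwarded to the server when it is set, so existing configurations are unaffected.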

## Attestation Checklist

- [x] My code follows the style guidelines of this project

- I have checked for/fixed spelling, linting, and other errors
- I have commented my code for clarity
- I have made corresponding changes to the documentation (updating the
doc strings in the code is sufficient)
- My changes generate no new warnings
- I have added tests that validate my changes and provide sufficient
test coverage. I have tested with:
  - Local testing
  - E2E testing in Teams
- New and existing unit tests pass locally with my changes

---------

Co-authored-by: Corina <[email protected]>
Stevenic and corinagum authored Apr 15, 2024
1 parent ab13f85 commit 81634fa
Showing 3 changed files with 105 additions and 14 deletions.
56 changes: 47 additions & 9 deletions js/packages/teams-ai/src/embeddings/OpenAIEmbeddings.ts
@@ -14,6 +14,13 @@ import { CreateEmbeddingRequest, CreateEmbeddingResponse, OpenAICreateEmbeddingR
* Base model options common to both OpenAI and Azure OpenAI services.
*/
export interface BaseOpenAIEmbeddingsOptions {
/**
* Optional. Number of dimensions to use when generating embeddings.
* @remarks
* Only valid for embedding models that support dynamic dimensionality.
*/
dimensions?: number;

/**
* Optional. Whether to log requests to the console.
* @remarks
@@ -47,9 +54,7 @@ export interface OpenAIEmbeddingsOptions extends BaseOpenAIEmbeddingsOptions {
apiKey: string;

/**
* Model to use for completion.
* @remarks
* For Azure OpenAI this is the name of the deployment to use.
* Embeddings Model to use.
*/
model: string;

@@ -66,6 +71,35 @@ export interface OpenAIEmbeddingsOptions extends BaseOpenAIEmbeddingsOptions {
endpoint?: string;
}

/**
* Options for configuring an embeddings object that calls an `OpenAI` compliant endpoint.
* @remarks
* The endpoint should comply with the OpenAPI spec for OpenAI's API:
*
* https://github.com/openai/openai-openapi
*
* And an example of a compliant endpoint is LLaMA.cpp's reference server:
*
* https://github.com/ggerganov/llama.cpp/blob/master/examples/server/README.md
*
*/
export interface OpenAILikeEmbeddingsOptions extends BaseOpenAIEmbeddingsOptions {
/**
* Endpoint of the embeddings server to call.
*/
endpoint: string;

/**
* Embeddings Model to use.
*/
model: string;

/**
* Optional. API key to use when calling the embeddings server.
*/
apiKey?: string;
}

/**
* Options for configuring an `OpenAIEmbeddings` to generate embeddings using an Azure OpenAI hosted model.
*/
@@ -103,11 +137,11 @@ export class OpenAIEmbeddings implements EmbeddingsModel {
/**
* Options the client was configured with.
*/
public readonly options: OpenAIEmbeddingsOptions | AzureOpenAIEmbeddingsOptions;
public readonly options: OpenAIEmbeddingsOptions | AzureOpenAIEmbeddingsOptions | OpenAILikeEmbeddingsOptions;

/**
* Creates a new `OpenAIEmbeddings` instance.
* @param {OpenAIEmbeddingsOptions | AzureOpenAIEmbeddingsOptions} options Options for configuring the embeddings client.
* @param {OpenAIEmbeddingsOptions | AzureOpenAIEmbeddingsOptions | OpenAILikeEmbeddingsOptions} options Options for configuring the embeddings client.
*/
public constructor(options: OpenAIEmbeddingsOptions | AzureOpenAIEmbeddingsOptions) {
// Check for azure config
@@ -162,12 +196,16 @@ export class OpenAIEmbeddings implements EmbeddingsModel {
console.log(Colorize.output(inputs));
}

const startTime = Date.now();
const response = await this.createEmbeddingRequest({
const request: CreateEmbeddingRequest = {
model: model,
input: inputs
});
};
if (this.options.dimensions) {
request.dimensions = this.options.dimensions;
}

const startTime = Date.now();
const response = await this.createEmbeddingRequest(request);
if (this.options.logRequests) {
console.log(Colorize.title('RESPONSE:'));
console.log(Colorize.value('status', response.status));
@@ -236,7 +274,7 @@ export class OpenAIEmbeddings implements EmbeddingsModel {
if (this._useAzure) {
const options = this.options as AzureOpenAIEmbeddingsOptions;
requestConfig.headers['api-key'] = options.azureApiKey;
} else {
} else if ((this.options as OpenAIEmbeddingsOptions).apiKey) {
const options = this.options as OpenAIEmbeddingsOptions;
requestConfig.headers['Authorization'] = `Bearer ${options.apiKey}`;
if (options.organization) {
1 change: 1 addition & 0 deletions js/packages/teams-ai/src/internals/types.ts
@@ -270,6 +270,7 @@ export interface CreateEmbeddingRequest {
model: string;
encoding_format?: string;
user?: string;
dimensions?: number;
}

/**
62 changes: 57 additions & 5 deletions js/packages/teams-ai/src/models/OpenAIModel.ts
@@ -31,6 +31,13 @@ export interface BaseOpenAIModelOptions {
*/
logRequests?: boolean;

/**
* Optional. Forces the model to return a specific response format.
* @remarks
* This can be used to force the model to always return a valid JSON object.
*/
responseFormat?: { "type": "json_object" };

/**
* Optional. Retry policy to use when calling the OpenAI API.
* @remarks
@@ -44,6 +51,13 @@ export interface BaseOpenAIModelOptions {
*/
requestConfig?: AxiosRequestConfig;

/**
* Optional. A static seed to use when making model calls.
* @remarks
* The default is to use a random seed. Specifying a seed will make the model deterministic.
*/
seed?: number;

/**
* Optional. Whether to use `system` messages when calling the OpenAI API.
* @remarks
@@ -52,6 +66,7 @@ export interface BaseOpenAIModelOptions {
* prompt to be sent as `user` messages instead.
*/
useSystemMessages?: boolean;

}

/**
@@ -83,6 +98,35 @@ export interface OpenAIModelOptions extends BaseOpenAIModelOptions {
endpoint?: string;
}

/**
* Options for configuring a model that calls an `OpenAI` compliant endpoint.
* @remarks
* The endpoint should comply with the OpenAPI spec for OpenAI's API:
*
* https://github.com/openai/openai-openapi
*
* And an example of a compliant endpoint is LLaMA.cpp's reference server:
*
* https://github.com/ggerganov/llama.cpp/blob/master/examples/server/README.md
*
*/
export interface OpenAILikeModelOptions extends BaseOpenAIModelOptions {
/**
* Endpoint of the model server to call.
*/
endpoint: string;

/**
* Default model to use for completions.
*/
defaultModel: string;

/**
* Optional. API key to use when calling the models endpoint.
*/
apiKey?: string;
}

/**
* Options for configuring an `OpenAIModel` to call an Azure OpenAI hosted model.
*/
@@ -120,13 +164,13 @@ export class OpenAIModel implements PromptCompletionModel {
/**
* Options the client was configured with.
*/
public readonly options: OpenAIModelOptions | AzureOpenAIModelOptions;
public readonly options: OpenAIModelOptions | AzureOpenAIModelOptions | OpenAILikeModelOptions;

/**
* Creates a new `OpenAIModel` instance.
* @param {OpenAIModelOptions} options - Options for configuring the model client.
*/
public constructor(options: OpenAIModelOptions | AzureOpenAIModelOptions) {
public constructor(options: OpenAIModelOptions | AzureOpenAIModelOptions | OpenAILikeModelOptions) {
// Check for azure config
if ((options as AzureOpenAIModelOptions).azureApiKey) {
this._useAzure = true;
@@ -221,7 +265,7 @@ export class OpenAIModel implements PromptCompletionModel {
input = result.output[last];
}

// Call chat completion API
// Initialize chat completion request
const request: CreateChatCompletionRequest = this.copyOptionsToRequest<CreateChatCompletionRequest>(
{
messages: result.output as ChatCompletionRequestMessage[]
@@ -243,9 +287,17 @@ export class OpenAIModel implements PromptCompletionModel {
'user',
'functions',
'function_call',
'data_sources'
'data_sources',
]
);
if (this.options.responseFormat) {
request.response_format = this.options.responseFormat;
}
if (this.options.seed !== undefined) {
request.seed = this.options.seed;
}

// Call chat completion API
const response = await this.createChatCompletion(request, model);
if (this.options.logRequests) {
console.log(Colorize.title('CHAT RESPONSE:'));
@@ -346,7 +398,7 @@ export class OpenAIModel implements PromptCompletionModel {
if (this._useAzure) {
const options = this.options as AzureOpenAIModelOptions;
requestConfig.headers['api-key'] = options.azureApiKey;
} else {
} else if ((this.options as OpenAIModelOptions).apiKey) {
const options = this.options as OpenAIModelOptions;
requestConfig.headers['Authorization'] = `Bearer ${options.apiKey}`;
if (options.organization) {