Skip to content

Commit

Permalink
[C#] fix: use TeamsAIException for PromptResponse.Error (#924)
Browse files Browse the repository at this point in the history
## Linked issues

closes: #915 
## Details

Provide a list of your changes here. If you are fixing a bug, please
provide steps to reproduce the bug.

#### Change details

- use TeamsAIException instead of bot schema error for
PromptResponse.Error

**code snippets**:

**screenshots**:

## Attestation Checklist

- [ ] My code follows the style guidelines of this project

- I have checked for/fixed spelling, linting, and other errors
- I have commented my code for clarity
- I have made corresponding changes to the documentation (we use
[TypeDoc](https://typedoc.org/) to document our code)
- My changes generate no new warnings
- I have added tests that validate my changes and provide sufficient
  test coverage. I have tested with:
  - Local testing
  - E2E testing in Teams
- New and existing unit tests pass locally with my changes

### Additional information

> Feel free to add other relevant information below
  • Loading branch information
kuojianlu authored Nov 29, 2023
1 parent e573fed commit 9235231
Show file tree
Hide file tree
Showing 3 changed files with 11 additions and 33 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,7 @@ public async Task<PromptResponse> CompletePromptAsync(
return new()
{
Status = PromptResponseStatus.Error,
Error = new() { Message = ex.Message ?? string.Empty }
Error = new(ex.Message ?? string.Empty)
};
}
}
Expand Down Expand Up @@ -366,10 +366,7 @@ CancellationToken cancellationToken
return new()
{
Status = PromptResponseStatus.InvalidResponse,
Error = new()
{
Message = feedback ?? "The response was invalid. Try another strategy."
}
Error = new(feedback ?? "The response was invalid. Try another strategy.")
};
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
using Azure.Core;
using Azure.Core.Pipeline;
using Microsoft.Bot.Builder;
using Microsoft.Bot.Schema;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Teams.AI.AI.Prompts;
Expand Down Expand Up @@ -135,10 +134,7 @@ public async Task<PromptResponse> CompletePromptAsync(ITurnContext turnContext,
return new PromptResponse
{
Status = PromptResponseStatus.TooLong,
Error = new Error
{
Message = $"The generated text completion prompt had a length of {prompt.Length} tokens which exceeded the MaxInputTokens of {maxInputTokens}."
}
Error = new($"The generated text completion prompt had a length of {prompt.Length} tokens which exceeded the MaxInputTokens of {maxInputTokens}.")
};
}
if (_options.LogRequests!.Value)
Expand Down Expand Up @@ -177,18 +173,12 @@ public async Task<PromptResponse> CompletePromptAsync(ITurnContext turnContext,
if (httpOperationException.StatusCode == (HttpStatusCode)429)
{
promptResponse.Status = PromptResponseStatus.RateLimited;
promptResponse.Error = new Error
{
Message = "The text completion API returned a rate limit error."
};
promptResponse.Error = new("The text completion API returned a rate limit error.");
}
else
{
promptResponse.Status = PromptResponseStatus.Error;
promptResponse.Error = new Error
{
Message = $"The text completion API returned an error status of {httpOperationException.StatusCode}: {httpOperationException.Message}"
};
promptResponse.Error = new($"The text completion API returned an error status of {httpOperationException.StatusCode}: {httpOperationException.Message}");
}
}

Expand Down Expand Up @@ -220,10 +210,7 @@ public async Task<PromptResponse> CompletePromptAsync(ITurnContext turnContext,
return new PromptResponse
{
Status = PromptResponseStatus.TooLong,
Error = new Error
{
Message = $"The generated chat completion prompt had a length of {prompt.Length} tokens which exceeded the MaxInputTokens of {maxInputTokens}."
}
Error = new($"The generated chat completion prompt had a length of {prompt.Length} tokens which exceeded the MaxInputTokens of {maxInputTokens}.")
};
}
if (!_options.UseSystemMessages!.Value && prompt.Output.Count > 0 && prompt.Output[0].Role == ChatRole.System)
Expand Down Expand Up @@ -265,18 +252,12 @@ public async Task<PromptResponse> CompletePromptAsync(ITurnContext turnContext,
if (httpOperationException.StatusCode == (HttpStatusCode)429)
{
promptResponse.Status = PromptResponseStatus.RateLimited;
promptResponse.Error = new Error
{
Message = "The chat completion API returned a rate limit error."
};
promptResponse.Error = new("The chat completion API returned a rate limit error.");
}
else
{
promptResponse.Status = PromptResponseStatus.Error;
promptResponse.Error = new Error
{
Message = $"The chat completion API returned an error status of {httpOperationException.StatusCode}: {httpOperationException.Message}"
};
promptResponse.Error = new($"The chat completion API returned an error status of {httpOperationException.StatusCode}: {httpOperationException.Message}");
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
using Microsoft.Bot.Schema;
using Microsoft.Teams.AI.AI.Models;
using Microsoft.Teams.AI.AI.Models;
using Microsoft.Teams.AI.Exceptions;

namespace Microsoft.Teams.AI.AI.Prompts
{
Expand All @@ -21,7 +21,7 @@ public class PromptResponse
/// <summary>
/// Error returned.
/// </summary>
public Error? Error { get; set; }
public TeamsAIException? Error { get; set; }
}

/// <summary>
Expand Down

0 comments on commit 9235231

Please sign in to comment.