diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AITests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AITests.cs index 77483a81a..e3b9e0cae 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AITests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AITests.cs @@ -197,7 +197,7 @@ public string DoCommand([ActionName] string action) [Action(AIConstants.SayCommandActionName)] public string SayCommand([ActionParameters] PredictedSayCommand command) { - SayActionRecord.Add(command.Response.Content); + SayActionRecord.Add(command.Response.GetContent()); return string.Empty; } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs new file mode 100644 index 000000000..d310837ae --- /dev/null +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs @@ -0,0 +1,32 @@ +using Microsoft.Teams.AI.AI.Models; + +namespace Microsoft.Teams.AI.Tests.AITests +{ + public class ChatMessageTests + { + [Fact] + public void Test_Get_Content() + { + // Arrange + ChatMessage msg = new(ChatRole.Assistant); + msg.Content = "test"; + + // Act + var content = msg.GetContent(); + + // Assert + Assert.Equal("test", content); + } + + [Fact] + public void Test_Get_Content_TypeMismatch_ThrowsException() + { + // Arrange + ChatMessage msg = new(ChatRole.Assistant); + msg.Content = "test"; + + // Act & Assert + Assert.Throws(() => msg.GetContent()); + } + } +} diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs index 1fc9e067c..ea089bd77 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs @@ -132,8 +132,10 @@ public async Task Test_CompletePromptAsync_PromptResponse_Success() } }); + memory.SetValue("temp.input", "hello"); + // Act - var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager()); // Assert Assert.NotNull(response); @@ -161,7 +163,7 @@ public async Task Test_CompletePromptAsync_PromptResponse_Exception() TestMemory memory = new(); // Act - var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager()); // Assert Assert.NotNull(response); @@ -211,8 +213,10 @@ public async Task Test_CompletePromptAsync_PromptResponse_Repair() Valid = true }); + memory.SetValue("temp.input", "hello"); + // Act - var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager()); // Assert Assert.NotNull(response); @@ -277,8 +281,10 @@ public async Task Test_CompletePromptAsync_PromptResponse_RepairNotSuccess() Valid = true }); + memory.SetValue("temp.input", "hello"); + // Act - var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager()); // 
Assert Assert.NotNull(response); @@ -344,8 +350,10 @@ public async Task Test_CompletePromptAsync_PromptResponse_Repair_ExceedMaxRepair Valid = true }); + memory.SetValue("temp.input", "hello"); + // Act - var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager()); // Assert Assert.NotNull(response); @@ -368,7 +376,6 @@ public async Task Test_CompletePromptAsync_PromptResponse_DisableHistory() LLMClientOptions options = new(promptCompletionModel, promptTemplate) { HistoryVariable = string.Empty, - InputVariable = string.Empty }; LLMClient client = new(options, null); TestMemory memory = new(); @@ -391,7 +398,7 @@ public async Task Test_CompletePromptAsync_PromptResponse_DisableHistory() Assert.NotNull(response.Message); Assert.Equal(ChatRole.Assistant, response.Message.Role); Assert.Equal("welcome", response.Message.Content); - Assert.Equal(0, memory.Values.Count); + Assert.Equal(1, memory.Values.Count); } [Fact] @@ -425,8 +432,10 @@ public async Task Test_CompletePromptAsync_PromptResponse_DisableRepair() Valid = false }); + memory.SetValue("temp.input", "hello"); + // Act - var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager()); // Assert Assert.NotNull(response); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs index 608ff75d0..d42e60b7b 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs @@ -10,7 +10,10 @@ public class ChatMessageExtensionsTests public void Test_InvalidRole_ToAzureSdkChatMessage() { // Arrange - var chatMessage = new ChatMessage(new AI.Models.ChatRole("InvalidRole")); + var chatMessage = new ChatMessage(new AI.Models.ChatRole("InvalidRole")) + { + Content = "test" + }; // Act var ex = Assert.Throws(() => chatMessage.ToChatRequestMessage()); @@ -20,7 +23,7 @@ public void Test_InvalidRole_ToAzureSdkChatMessage() } [Fact] - public void Test_UserRole_ToAzureSdkChatMessage() + public void Test_UserRole_StringContent_ToAzureSdkChatMessage() { // Arrange var chatMessage = new ChatMessage(AI.Models.ChatRole.User) @@ -39,6 +42,32 @@ public void Test_UserRole_ToAzureSdkChatMessage() Assert.Equal("author", ((ChatRequestUserMessage)result).Name); } + [Fact] + public void Test_UserRole_MultiModalContent_ToAzureSdkChatMessage() + { + // Arrange + var messageContentParts = new List() { new TextContentPart() { Text = "test" }, new ImageContentPart { ImageUrl = "https://www.testurl.com" } }; + var chatMessage = new ChatMessage(AI.Models.ChatRole.User) + { + Content = messageContentParts, + Name = "author" + }; + + // Act + var result = chatMessage.ToChatRequestMessage(); + + // Assert + Assert.Equal(Azure.AI.OpenAI.ChatRole.User, result.Role); + Assert.Equal(typeof(ChatRequestUserMessage), result.GetType()); + + var userMessage = (ChatRequestUserMessage)result; + + Assert.Equal(null, userMessage.Content); + Assert.Equal("test", ((ChatMessageTextContentItem)userMessage.MultimodalContentItems[0]).Text); + Assert.Equal(typeof(ChatMessageImageContentItem), 
userMessage.MultimodalContentItems[1].GetType()); + Assert.Equal("author", userMessage.Name); + } + [Fact] public void Test_AssistantRole_ToAzureSdkChatMessage() { diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs index 31cb0792c..d608eee79 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs @@ -142,7 +142,7 @@ public async void Test_CompletePromptAsync_AzureOpenAI_Text_RequestFailed() // Assert Assert.Equal(PromptResponseStatus.Error, result.Status); Assert.NotNull(result.Error); - Assert.Equal("The text completion API returned an error status of InternalServerError: Service request failed.\r\nStatus: 500 (exception)\r\n\r\nHeaders:\r\n", result.Error.Message); + Assert.True(result.Error.Message.StartsWith("The text completion API returned an error status of InternalServerError: Service request failed.\r\nStatus: 500 (exception)")); } [Fact] @@ -273,7 +273,7 @@ public async void Test_CompletePromptAsync_AzureOpenAI_Chat_RequestFailed() // Assert Assert.Equal(PromptResponseStatus.Error, result.Status); Assert.NotNull(result.Error); - Assert.Equal("The chat completion API returned an error status of InternalServerError: Service request failed.\r\nStatus: 500 (exception)\r\n\r\nHeaders:\r\n", result.Error.Message); + Assert.True(result.Error.Message.StartsWith("The chat completion API returned an error status of InternalServerError: Service request failed.\r\nStatus: 500 (exception)")); } [Fact] diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/ConversationHistorySectionTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/ConversationHistorySectionTests.cs index 0d27d5286..a5cd93625 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/ConversationHistorySectionTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/ConversationHistorySectionTests.cs @@ -13,6 +13,7 @@ public class ConversationHistorySectionTests [Fact] public async void Test_RenderAsTextAsync_ShouldRender() { + // Arrange ConversationHistorySection section = new("history"); Mock context = new(); MemoryFork memory = new(); @@ -26,7 +27,10 @@ public async void Test_RenderAsTextAsync_ShouldRender() new(ChatRole.Assistant) { Content = "hi, how may I assist you?" 
} }); + // Act RenderedPromptSection rendered = await section.RenderAsTextAsync(context.Object, memory, manager, tokenizer, 50); + + // Assert Assert.Equal("assistant: hi, how may I assist you?\nuser: hi\nyou are a unit test bot", rendered.Output); Assert.Equal(21, rendered.Length); } @@ -34,15 +38,46 @@ public async void Test_RenderAsTextAsync_ShouldRender() [Fact] public async void Test_RenderAsTextAsync_ShouldRenderEmpty() { + // Arrange ConversationHistorySection section = new("history"); Mock context = new(); MemoryFork memory = new(); GPTTokenizer tokenizer = new(); PromptManager manager = new(); + // Act RenderedPromptSection rendered = await section.RenderAsTextAsync(context.Object, memory, manager, tokenizer, 50); + + // Assert Assert.Equal("", rendered.Output); Assert.Equal(0, rendered.Length); } + + + [Fact] + public async void Test_RenderAsMessagesAsync_ShoulderRender() + { + // Arrange + ConversationHistorySection section = new("history"); + Mock context = new(); + MemoryFork memory = new(); + GPTTokenizer tokenizer = new(); + PromptManager manager = new(); + + // Act + memory.SetValue("history", new List() + { + new(ChatRole.System) { Content = "you are a unit test bot" }, + new(ChatRole.User) { Content = "hi" }, + new(ChatRole.Assistant) { Content = "hi, how may I assist you?" } + }); + + // Assert + RenderedPromptSection> rendered = await section.RenderAsMessagesAsync(context.Object, memory, manager, tokenizer, 50); + Assert.Equal("you are a unit test bot", rendered.Output[2].GetContent()); + Assert.Equal("hi", rendered.Output[1].GetContent()); + Assert.Equal("hi, how may I assist you?", rendered.Output[0].GetContent()); + Assert.Equal(15, rendered.Length); + } } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/PromptSectionTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/PromptSectionTests.cs index 94a49a69e..f8703e65f 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/PromptSectionTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/PromptSectionTests.cs @@ -22,6 +22,11 @@ public override async Task>> RenderAsMes return await Task.FromResult(this.TruncateMessages(messages, tokenizer, maxTokens)); } + + public string GetMessage(ChatMessage message) + { + return this.GetMessageText(message); + } } public class PromptSectionTests @@ -53,5 +58,29 @@ public async void Test_RenderAsTextAsync_ShouldTruncate() Assert.Equal("Hello World", rendered.Output); Assert.Equal(2, rendered.Length); } + + [Fact] + public void Test_GetMessage() + { + ChatMessage message = new(ChatRole.User) + { + Content = new List() + { + new TextContentPart() + { + Text = "Hello", + }, + + new TextContentPart() + { + Text = "World" + } + } + }; + + string msg = new TestSection().GetMessage(message); + + Assert.Equal("Hello World", msg); + } } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/UserInputMessageSectionTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/UserInputMessageSectionTests.cs new file mode 100644 index 000000000..fbe00b619 --- /dev/null +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/PromptsTests/SectionsTests/UserInputMessageSectionTests.cs @@ -0,0 +1,51 @@ +using Microsoft.Bot.Builder; +using Microsoft.Teams.AI.AI.Models; +using 
Microsoft.Teams.AI.AI.Prompts.Sections; +using Microsoft.Teams.AI.AI.Prompts; +using Microsoft.Teams.AI.AI.Tokenizers; +using Microsoft.Teams.AI.State; +using Moq; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Teams.AI.Application; +using static System.Net.Mime.MediaTypeNames; + +namespace Microsoft.Teams.AI.Tests.AITests.PromptsTests.SectionsTests +{ + public class UserInputMessageSectionTest + { + [Fact] + public async void Test_RenderAsMessagesAsync_ShoulderRender() + { + // Arrange + UserInputMessageSection section = new(); + Mock context = new(); + MemoryFork memory = new(); + GPTTokenizer tokenizer = new(); + PromptManager manager = new(); + + // Act + memory.SetValue("input", "hi"); + + memory.SetValue("inputFiles", new List() + { + new(BinaryData.FromString("testData"), "image/png") + }); + + // Assert + RenderedPromptSection> rendered = await section.RenderAsMessagesAsync(context.Object, memory, manager, tokenizer, 200); + var messageContentParts = rendered.Output[0].GetContent>(); + + Assert.Equal("hi", ((TextContentPart)messageContentParts[0]).Text); + + // the base64 string is an encoding of "hi" + var imageUrl = $"data:image/png;base64,dGVzdERhdGE="; + Assert.Equal(imageUrl, ((ImageContentPart)messageContentParts[1]).ImageUrl); + + Assert.Equal(86, rendered.Length); + } + } +} diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/IntegrationTests/OpenAIModelTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/IntegrationTests/OpenAIModelTests.cs index c7de18a6f..4fe603757 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/IntegrationTests/OpenAIModelTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/IntegrationTests/OpenAIModelTests.cs @@ -66,7 +66,7 @@ public async Task OpenAIModel_CompletePrompt(string input, string expectedAnswer // Assert Assert.Equal(PromptResponseStatus.Success, result.Status); - Assert.Contains(expectedAnswer, result.Message!.Content); + Assert.Contains(expectedAnswer, result.Message!.GetContent()); } } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj index 13eda191a..24ac89040 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj @@ -1,4 +1,4 @@ - + net6.0 @@ -7,10 +7,11 @@ false true + x64 - + diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/AI.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/AI.cs index 3b9f1b3c2..26b242699 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/AI.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/AI.cs @@ -312,6 +312,7 @@ await _actions[AIConstants.TooManyStepsActionName] // Copy the actions output to the input turnState.Temp!.Input = output; + turnState.Temp.InputFiles = new(); } // Check for looping diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Action/DefaultActions.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Action/DefaultActions.cs index 0fd13639a..c82653b4d 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Action/DefaultActions.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Action/DefaultActions.cs @@ -83,12 +83,12 @@ public async 
Task SayCommandAsync([ActionTurnContext] ITurnContext turnC Verify.ParamNotNull(command); Verify.ParamNotNull(command.Response); - if (command.Response.Content == null || command.Response.Content == string.Empty) + if (command.Response.Content == null || command.Response.GetContent() == string.Empty) { return ""; } - string content = command.Response.Content; + string content = command.Response.GetContent(); bool isTeamsChannel = turnContext.Activity.ChannelId == Channels.Msteams; diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/DefaultAugmentation.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/DefaultAugmentation.cs index b5e04badc..53ea66f2f 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/DefaultAugmentation.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/DefaultAugmentation.cs @@ -31,7 +31,7 @@ public DefaultAugmentation() /// public async Task CreatePlanFromResponseAsync(ITurnContext context, IMemory memory, PromptResponse response, CancellationToken cancellationToken = default) { - PredictedSayCommand say = new(response.Message?.Content ?? ""); + PredictedSayCommand say = new(response.Message?.GetContent() ?? ""); if (response.Message != null) { diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/MonologueAugmentation.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/MonologueAugmentation.cs index a4fd54a58..5de86ef9b 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/MonologueAugmentation.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/MonologueAugmentation.cs @@ -237,7 +237,7 @@ public MonologueAugmentation(List actions) { try { - InnerMonologue? monologue = JsonSerializer.Deserialize(response.Message?.Content ?? ""); + InnerMonologue? monologue = JsonSerializer.Deserialize(response.Message?.GetContent() ?? ""); if (monologue == null) { diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/SequenceAugmentation.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/SequenceAugmentation.cs index b20b395d8..ea60889e5 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/SequenceAugmentation.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Augmentations/SequenceAugmentation.cs @@ -47,7 +47,7 @@ public SequenceAugmentation(List actions) { try { - Plan? plan = JsonSerializer.Deserialize(response.Message?.Content ?? ""); + Plan? plan = JsonSerializer.Deserialize(response.Message?.GetContent() ?? ""); if (plan != null) { diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs index 2e51a2708..58ed3f5d7 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs @@ -21,7 +21,7 @@ namespace Microsoft.Teams.AI.AI.Clients /// model. At least not without it being flagged as an `invalid_response`. /// /// Using the , for example, guarantees that you only ever get a valid - /// object back from . In fact, you'll get back a fully parsed object and any + /// object back from . In fact, you'll get back a fully parsed object and any /// additional response text from the model will be dropped. 
If you give the /// a JSON Schema, you will get back a strongly typed and validated instance of an object in /// the returned `response.message.content`. @@ -135,26 +135,15 @@ public void AddFunctionResultToHistory(IMemory memory, string name, object resul /// Current turn context. /// An interface for accessing state values. /// Functions to use when rendering the prompt. - /// Input to use when completing the prompt. /// /// A `PromptResponse` with the status and message. public async Task CompletePromptAsync( ITurnContext context, IMemory memory, IPromptFunctions> functions, - string? input = null, CancellationToken cancellationToken = default ) { - if (input != null) - { - memory.SetValue(this.Options.InputVariable, input); - } - else - { - input = memory.GetValue(this.Options.InputVariable)?.ToString() ?? string.Empty; - } - try { PromptResponse response = await this.Options.Model.CompletePromptAsync( @@ -171,6 +160,16 @@ public async Task CompletePromptAsync( return response; } + // Get input message + string inputVariable = Options.InputVariable; + ChatMessage? inputMsg = response.Input; + if (inputMsg == null) + { + object? content = memory.GetValue(inputVariable); + inputMsg = new ChatMessage(ChatRole.User); + inputMsg.Content = content; + } + Validation validation = await this.Options.Validator.ValidateResponseAsync( context, memory, @@ -187,7 +186,7 @@ public async Task CompletePromptAsync( response.Message.Content = validation.Value.ToString(); } - this.AddInputToHistory(memory, this.Options.HistoryVariable, input); + this.AddInputToHistory(memory, this.Options.HistoryVariable, inputMsg); if (response.Message != null) { @@ -207,7 +206,7 @@ public async Task CompletePromptAsync( if (this.Options.LogRepairs) { this._logger.LogInformation("REPAIRING RESPONSE:"); - this._logger.LogInformation(response.Message?.Content ?? string.Empty); + this._logger.LogInformation(response.Message?.Content?.ToString() ?? string.Empty); } PromptResponse repairResponse = await this.RepairResponseAsync( @@ -227,7 +226,7 @@ public async Task CompletePromptAsync( if (repairResponse.Status == PromptResponseStatus.Success) { - this.AddInputToHistory(memory, this.Options.HistoryVariable, input); + this.AddInputToHistory(memory, this.Options.HistoryVariable, inputMsg); if (repairResponse.Message != null) { @@ -247,19 +246,16 @@ public async Task CompletePromptAsync( } } - private void AddInputToHistory(IMemory memory, string variable, string input) + private void AddInputToHistory(IMemory memory, string variable, ChatMessage input) { - if (variable == string.Empty || input == string.Empty) + if (variable == null) { return; } List history = (List?)memory.GetValue(variable) ?? new() { }; - history.Insert(0, new(ChatRole.User) - { - Content = input - }); + history.Insert(0, input); if (history.Count > this.Options.MaxHistoryMessages) { @@ -300,7 +296,7 @@ CancellationToken cancellationToken { string feedback = validation.Feedback ?? "The response was invalid. 
Try another strategy."; - this.AddInputToHistory(fork, $"{this.Options.HistoryVariable}-repair", feedback); + this.AddInputToHistory(fork, $"{this.Options.HistoryVariable}-repair", new(ChatRole.User) { Content = feedback }); if (response.Message != null) { diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs index ff663faf1..07f0de4e1 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs @@ -1,7 +1,7 @@ namespace Microsoft.Teams.AI.AI.Models { /// - /// Chat Message + /// Represents a message that will be passed to the Chat Completions API /// public class ChatMessage { @@ -13,7 +13,7 @@ public class ChatMessage /// /// The text associated with this message payload. /// - public string Content { get; set; } = string.Empty; + public object? Content; /// /// The name of the author of this message. `name` is required if role is `function`, and it should be the name of the @@ -42,6 +42,17 @@ public class ChatMessage /// public IList? ToolCalls { get; set; } + + /// + /// Gets the content with the given type. + /// Will throw an exception if the content is not of the given type. + /// + /// The content. + public TContent GetContent() + { + return (TContent)Content!; + } + /// Initializes a new instance of ChatMessage. /// The role associated with this message payload. public ChatMessage(ChatRole role) @@ -79,4 +90,56 @@ public FunctionCall(string name, string arguments) this.Arguments = arguments; } } + + /// + /// Represents the ChatMessage content. + /// + public abstract class MessageContentParts + { + /// + /// The type of message content. + /// + public string Type { get; } + + /// + /// The chat message content. + /// + /// + public MessageContentParts(string type) + { + this.Type = type; + } + } + + /// + /// The image content part of the ChatMessage + /// + public class TextContentPart : MessageContentParts + { + /// + /// The constructor + /// + public TextContentPart() : base("text") { } + + /// + /// The text of the message + /// + public string Text = string.Empty; + } + + /// + /// The image content part of the ChatMessage + /// + public class ImageContentPart : MessageContentParts + { + /// + /// The constructor + /// + public ImageContentPart() : base("image") { } + + /// + /// The URL of the image. + /// + public string ImageUrl = string.Empty; + } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessageExtensions.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessageExtensions.cs index 7c8595591..b0f8d94f5 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessageExtensions.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessageExtensions.cs @@ -22,9 +22,42 @@ public static ChatRequestMessage ToChatRequestMessage(this ChatMessage chatMessa ChatRole role = chatMessage.Role; ChatRequestMessage? message = null; + string? content = null; + List contentItems = new(); + + // Content is a text + if (chatMessage.Content is string textContent) + { + content = textContent; + } + else if (chatMessage.Content is IEnumerable contentParts) + { + // Content is has multiple possibly multi-modal parts. 
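A minimal sketch of how the reworked ChatMessage content model is meant to be consumed, based on the types declared above: Content may now hold either a plain string or a list of MessageContentParts, and GetContent<TContent>() is a straight cast, which is why the new ChatMessageTests case expects an exception on a type mismatch. The sample values and image URL are illustrative only.

```csharp
using System;
using System.Collections.Generic;
using Microsoft.Teams.AI.AI.Models;

// Plain text content, as before the change.
ChatMessage text = new(ChatRole.User) { Content = "hi" };
string asText = text.GetContent<string>();                    // "hi"

// Multi-modal content: one text part plus one image part.
ChatMessage multiModal = new(ChatRole.User)
{
    Content = new List<MessageContentParts>
    {
        new TextContentPart { Text = "what is in this image?" },
        new ImageContentPart { ImageUrl = "https://example.com/cat.png" }
    }
};
IList<MessageContentParts> parts = multiModal.GetContent<IList<MessageContentParts>>();
Console.WriteLine($"{parts.Count} content parts");            // "2 content parts"
```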
+ foreach (MessageContentParts contentPart in contentParts) + { + if (contentPart is TextContentPart textPart) + { + contentItems.Add(new ChatMessageTextContentItem(textPart.Text)); + } + else if (contentPart is ImageContentPart imagePart) + { + contentItems.Add(new ChatMessageImageContentItem(new Uri(imagePart.ImageUrl))); + } + } + } + + // Different roles map to different classes if (role == ChatRole.User) { - ChatRequestUserMessage userMessage = new(chatMessage.Content); + ChatRequestUserMessage userMessage; + if (content != null) + { + userMessage = new(content); + } + else + { + userMessage = new(contentItems); + } if (chatMessage.Name != null) { @@ -36,7 +69,7 @@ public static ChatRequestMessage ToChatRequestMessage(this ChatMessage chatMessa if (role == ChatRole.Assistant) { - ChatRequestAssistantMessage assistantMessage = new(chatMessage.Content); + ChatRequestAssistantMessage assistantMessage = new(chatMessage.GetContent()); if (chatMessage.FunctionCall != null) { @@ -61,7 +94,7 @@ public static ChatRequestMessage ToChatRequestMessage(this ChatMessage chatMessa if (role == ChatRole.System) { - ChatRequestSystemMessage systemMessage = new(chatMessage.Content); + ChatRequestSystemMessage systemMessage = new(chatMessage.GetContent()); if (chatMessage.Name != null) { @@ -73,12 +106,12 @@ public static ChatRequestMessage ToChatRequestMessage(this ChatMessage chatMessa if (role == ChatRole.Function) { - message = new ChatRequestFunctionMessage(chatMessage.Name ?? "", chatMessage.Content); + message = new ChatRequestFunctionMessage(chatMessage.Name ?? "", chatMessage.GetContent()); } if (role == ChatRole.Tool) { - message = new ChatRequestToolMessage(chatMessage.Content, chatMessage.ToolCallId ?? ""); + message = new ChatRequestToolMessage(chatMessage.GetContent(), chatMessage.ToolCallId ?? ""); } if (message == null) diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs index 0dcbb2d4c..5d26f7f68 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs @@ -3,7 +3,6 @@ using Azure.Core; using Azure.Core.Pipeline; using Microsoft.Bot.Builder; -using Microsoft.Extensions.Azure; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Teams.AI.AI.Prompts; @@ -128,6 +127,8 @@ public async Task CompletePromptAsync(ITurnContext turnContext, { DateTime startTime = DateTime.UtcNow; int maxInputTokens = promptTemplate.Configuration.Completion.MaxInputTokens; + + if (_options.CompletionType == CompletionType.Text) { // Render prompt @@ -227,6 +228,15 @@ public async Task CompletePromptAsync(ITurnContext turnContext, _logger.LogTrace(JsonSerializer.Serialize(prompt.Output, _serializerOptions)); } + // Get input message + // - we're doing this here because the input message can be complex and include images. + ChatMessage? 
input = null; + int last = prompt.Output.Count - 1; + if (last >= 0 && prompt.Output[last].Role == "user") + { + input = prompt.Output[last]; + } + // Call chat completion API IEnumerable chatMessages = prompt.Output.Select(chatMessage => chatMessage.ToChatRequestMessage()); ChatCompletionsOptions chatCompletionsOptions = new(_deploymentName, chatMessages) @@ -250,6 +260,7 @@ public async Task CompletePromptAsync(ITurnContext turnContext, rawResponse = chatCompletionsResponse.GetRawResponse(); promptResponse.Status = PromptResponseStatus.Success; promptResponse.Message = chatCompletionsResponse.Value.Choices[0].Message.ToChatMessage(); + promptResponse.Input = input; } catch (RequestFailedException e) { diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModerator.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModerator.cs index ff65d70c1..c2a6efc44 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModerator.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModerator.cs @@ -56,7 +56,7 @@ public async Task ReviewOutputAsync(ITurnContext turnContext, TState turnS { if (command is PredictedSayCommand sayCommand) { - string output = sayCommand.Response.Content; + string output = sayCommand.Response.GetContent(); // If plan is flagged it will be replaced Plan? newPlan = await _HandleTextModeration(output, false); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/OpenAIModerator.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/OpenAIModerator.cs index 663c9e044..a55707977 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/OpenAIModerator.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/OpenAIModerator.cs @@ -65,7 +65,7 @@ public async Task ReviewOutputAsync(ITurnContext turnContext, TState turnS { if (command is PredictedSayCommand sayCommand) { - string output = sayCommand.Response.Content; + string output = sayCommand.Response.GetContent(); // If plan is flagged it will be replaced Plan? newPlan = await _HandleTextModerationAsync(output, false, cancellationToken); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs index 499f1f39f..e34196516 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs @@ -167,7 +167,7 @@ public async Task CompletePromptAsync( LogRepairs = this.Options.LogRepairs }, this._logger); - return await client.CompletePromptAsync(context, memory, this.Prompts, null, cancellationToken); + return await client.CompletePromptAsync(context, memory, this.Prompts, cancellationToken); } } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/PredictedSayCommand.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/PredictedSayCommand.cs index 00199807f..586b0640a 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/PredictedSayCommand.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/PredictedSayCommand.cs @@ -33,7 +33,6 @@ public PredictedSayCommand(ChatMessage response) Response = response; } - /// /// Creates a new instance of the class. 
/// /// The response that the AI system should say. diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs index 55c8be66a..f8ef76a77 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs @@ -131,7 +131,11 @@ public PromptTemplate GetPrompt(string name) )); } - if (template.Configuration.Completion.IncludeInput) + if (template.Configuration.Completion.IncludeImages) + { + template.Prompt.AddSection(new UserInputMessageSection(Options.MaxInputTokens)); + } + else if (template.Configuration.Completion.IncludeInput) { template.Prompt.AddSection(new UserMessageSection( "{{$temp.input}}", diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptResponse.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptResponse.cs index d856b65d8..3b47ea89e 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptResponse.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptResponse.cs @@ -13,6 +13,11 @@ public class PromptResponse /// public PromptResponseStatus Status { get; set; } + /// + /// User input message sent to the model. null if no input was sent. + /// + public ChatMessage? Input { get; set; } + /// /// Message returned. /// diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs index 2c8df5b32..2c77d30fe 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs @@ -244,6 +244,14 @@ public class CompletionConfiguration [JsonPropertyOrder(11)] public double TopP { get; set; } = 0.0f; + /// + /// If true, the prompt will be augmented with any images uploaded by the user. + /// Defaults to false. + /// + [JsonPropertyName("include_images")] + [JsonPropertyOrder(12)] + public bool IncludeImages { get; set; } = false; + /// /// Additional data provided in the completion configuration. /// diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/ConversationHistorySection.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/ConversationHistorySection.cs index 821995d42..f51c20747 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/ConversationHistorySection.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/ConversationHistorySection.cs @@ -98,7 +98,6 @@ public override async Task>> RenderAsMes messages.Reverse(); // Populate history and stay under the token budget - int tokens = 0; int budget = this.Tokens > 1 ? Math.Min(this.Tokens, maxTokens) : maxTokens; List output = new(); @@ -107,6 +106,13 @@ public override async Task>> RenderAsMes { int length = tokenizer.Encode(this.GetMessageText(message)).Count; + // Add length of any image parts + // This accounts for low detail images but not high detail images. 
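A short sketch of how a prompt opts into the new image support. The flag serializes as "include_images" (per the attribute above), and when it is set PromptManager.GetPrompt appends the new UserInputMessageSection instead of the plain {{$temp.input}} user message section. The parameterless construction and the Prompts namespace are assumptions based on the file locations in this diff.

```csharp
using System;
using System.Text.Json;
using Microsoft.Teams.AI.AI.Prompts;   // namespace assumed from the file location

// The new flag round-trips through the completion configuration as "include_images".
CompletionConfiguration completion = new() { IncludeImages = true };
string json = JsonSerializer.Serialize(completion);
Console.WriteLine(json.Contains("\"include_images\":true"));  // True
```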
+ if (message.Content is IList contentParts) + { + length += contentParts.Where((part) => part is ImageContentPart).Count() * 85; + } + // Stop if we're over the token budget if (tokens + length > budget) { diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/PromptSection.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/PromptSection.cs index 438010b5d..e2a3e6900 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/PromptSection.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/PromptSection.cs @@ -173,7 +173,26 @@ protected RenderedPromptSection> TruncateMessages(Listthe parsed message text protected string GetMessageText(ChatMessage message) { - string text = message.Content ?? ""; + string text = string.Empty; + + if (message.Content is IEnumerable) + { + IList? contentParts = message.GetContent>(); + foreach (MessageContentParts part in contentParts) + { + if (part is TextContentPart textPart) + { + text += " " + textPart.Text; + } + + // Remove the leading " " + text = text.TrimStart(); + } + } + else if (message.Content is string) + { + text = message.Content.ToString(); + } if (message.FunctionCall != null) { diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/UserInputMessageSection.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/UserInputMessageSection.cs new file mode 100644 index 000000000..ea929de8e --- /dev/null +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/UserInputMessageSection.cs @@ -0,0 +1,89 @@ +using Microsoft.Bot.Builder; +using Microsoft.Teams.AI.AI.Models; +using Microsoft.Teams.AI.AI.Tokenizers; +using Microsoft.Teams.AI.State; +using Microsoft.Teams.AI.Application; + +namespace Microsoft.Teams.AI.AI.Prompts.Sections +{ + /// + /// The user input message section. + /// + public class UserInputMessageSection : PromptSection + { + private readonly string inputVariable; + private readonly string filesVariable; + + /// + /// Creates a UserInputMessageSection + /// + /// Number of tokens + /// Name of the input variable + /// Name of the files variable + public UserInputMessageSection(int tokens = -1, string inputVariable = "input", string filesVariable = "inputFiles") : base(tokens, true, "\n", "user: ") + { + this.inputVariable = inputVariable; + this.filesVariable = filesVariable; + } + + /// + public override Task>> RenderAsMessagesAsync(ITurnContext context, IMemory memory, IPromptFunctions> functions, ITokenizer tokenizer, int maxTokens, CancellationToken cancellationToken = default) + { + // Get input text and images + string inputText = memory.GetValue(this.inputVariable) as string ?? string.Empty; + List inputFiles = memory.GetValue(this.filesVariable) as List ?? new(); + + // Create message + List messageContents = new(); + ChatMessage message = new(ChatRole.User) + { + Content = messageContents + }; + + // Append text content part + int length = 0; + int budget = this.Tokens > 1 ? 
Math.Min(this.Tokens, maxTokens) : maxTokens; + if (inputText.Length > 0) + { + IEnumerable encoded = tokenizer.Encode(inputText); + if (encoded.Count() <= budget) + { + messageContents.Add(new TextContentPart { Text = inputText }); + length += encoded.Count(); + budget -= encoded.Count(); + } + else + { + messageContents.Add(new TextContentPart { Text = tokenizer.Decode(encoded.Take(budget)) }); + } + } + + // Append image content parts + IEnumerable images = inputFiles.Where((f) => f.ContentType.StartsWith("image/")); + + foreach (InputFile image in images) + { + // Check for budget to add image. + // TODO: This accounts for low detail images but not high detail images. + // Additional work is needed to accoutn for high detail images. + if (budget < 85) + { + break; + } + + // Add image + string url = $"data:{image.ContentType};base64,{Convert.ToBase64String(image.Content.ToArray())}"; + messageContents.Add(new ImageContentPart { ImageUrl = url }); + length += 85; + budget -= 85; + } + + List messages = new() { message }; + RenderedPromptSection> renderedSection = new(messages, length); + + // Return output + return Task.FromResult(renderedSection); + } + + } +} diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Validators/JSONResponseValidator.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Validators/JSONResponseValidator.cs index 8352ff359..bee99217c 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Validators/JSONResponseValidator.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Validators/JSONResponseValidator.cs @@ -45,7 +45,7 @@ public JsonResponseValidator(JsonSchema? schema = null, string? missingJsonFeedb /// public async Task ValidateResponseAsync(ITurnContext context, IMemory memory, ITokenizer tokenizer, PromptResponse response, int remainingAttempts, CancellationToken cancellationToken = default) { - string text = response.Message?.Content ?? ""; + string text = response.Message?.GetContent() ?? ""; List> jsonObjects = ResponseJsonParsers.ParseAllObjects(text); if (jsonObjects.Count == 0) diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/Application.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/Application.cs index 08a56fb28..28b8cd0cc 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/Application.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/Application.cs @@ -902,7 +902,7 @@ public void StartTypingTimer(ITurnContext turnContext) _typingTimer = new TypingTimer(_typingTimerDelay); } - if (_typingTimer.IsRunning() == false) + if (!_typingTimer.IsRunning()) { _typingTimer.Start(turnContext); } @@ -999,9 +999,21 @@ private async Task _OnTurnAsync(ITurnContext turnContext, CancellationToken canc // Populate {{$temp.input}} if ((turnState.Temp.Input == null || turnState.Temp.Input.Length == 0) && turnContext.Activity.Text != null) { + // Use the received activity text turnState.Temp.Input = turnContext.Activity.Text; } + // Download any input files + IList>? fileDownloaders = this.Options.FileDownloaders; + if (fileDownloaders != null && fileDownloaders.Count > 0) + { + foreach (IInputFileDownloader downloader in fileDownloaders) + { + List files = await downloader.DownloadFilesAsync(turnContext, turnState); + turnState.Temp.InputFiles.AddRange(files); + } + } + bool eventHandlerCalled = false; // Run any RouteSelectors in this._invokeRoutes first if the incoming Teams activity.type is "Invoke". 
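A sketch of how an application might wire up the file-download pipeline consumed by the loop above. IInputFileDownloader, TeamsAttachmentDownloader, and the FileDownloaders option are all introduced later in this diff; how the host obtains botAppId and the TeamsAdapter is left as an assumption. On each turn the application runs every registered downloader and appends the results to turnState.Temp.InputFiles, which UserInputMessageSection then renders as base64 data-URL image parts.

```csharp
using System.Collections.Generic;
using Microsoft.Teams.AI;
using Microsoft.Teams.AI.Application;
using Microsoft.Teams.AI.State;

public static class VisionAppSetup
{
    // botAppId and adapter are supplied by the host (configuration / DI) -- assumption.
    public static Application<TurnState> Build(string botAppId, TeamsAdapter adapter)
    {
        ApplicationOptions<TurnState> options = new()
        {
            FileDownloaders = new List<IInputFileDownloader<TurnState>>
            {
                // Downloads incoming Teams attachments with the bot's token and
                // surfaces them as InputFile objects on turnState.Temp.InputFiles.
                new TeamsAttachmentDownloader<TurnState>(
                    new TeamsAttachmentDownloaderOptions(botAppId, adapter))
            }
        };

        return new Application<TurnState>(options);
    }
}
```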
diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/ApplicationOptions.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/ApplicationOptions.cs index a8d069221..ebf1a1c87 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/ApplicationOptions.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/ApplicationOptions.cs @@ -2,6 +2,7 @@ using Microsoft.Teams.AI.State; using Microsoft.Extensions.Logging; using Microsoft.Bot.Builder; +using Microsoft.Teams.AI.Application; namespace Microsoft.Teams.AI { @@ -53,6 +54,11 @@ public class ApplicationOptions /// public Func? TurnStateFactory { get; set; } + /// + /// Optional. Array of input file download plugins to use. + /// + public IList>? FileDownloaders { get; set; } + /// /// Optional. Logger factory that will be used in this application. /// diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/IInputFileDownloader.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/IInputFileDownloader.cs new file mode 100644 index 000000000..543ffde0c --- /dev/null +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/IInputFileDownloader.cs @@ -0,0 +1,21 @@ +using Microsoft.Bot.Builder; +using Microsoft.Teams.AI.State; + +namespace Microsoft.Teams.AI.Application +{ + /// + /// A plugin responsible for downloading files relative to the current user's input. + /// + /// Type of application state. + public interface IInputFileDownloader where TState : TurnState, new() + { + /// + /// Download any files relative to the current user's input. + /// + /// The turn context. + /// The turn state. + /// The cancellation token. + /// A list of input files + public Task> DownloadFilesAsync(ITurnContext turnContext, TState turnState, CancellationToken cancellationToken = default); + } +} diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/InputFile.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/InputFile.cs new file mode 100644 index 000000000..01904cc1e --- /dev/null +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/InputFile.cs @@ -0,0 +1,35 @@ + +namespace Microsoft.Teams.AI.Application +{ + /// + /// Represents an upload file + /// + public class InputFile + { + /// + /// The downloaded content of the file + /// + public BinaryData Content { get; set; } + + /// + /// The content type of the file. + /// + public string ContentType { get; set; } + + /// + /// Optional. URL to the content of the file. + /// + public string? ContentUrl { get; set; } + + /// + /// The constructor. 
+ /// + /// The input file content + /// the input file content type + public InputFile(BinaryData content, string contentType) + { + Content = content; + ContentType = contentType; + } + } +} diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAdapter.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAdapter.cs index 971717db4..215902d22 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAdapter.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAdapter.cs @@ -1,6 +1,7 @@ using Microsoft.AspNetCore.Http; using Microsoft.Bot.Builder; using Microsoft.Bot.Builder.Integration.AspNet.Core; +using Microsoft.Bot.Connector.Authentication; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Logging; using System.Net.Http.Headers; @@ -20,6 +21,16 @@ public class TeamsAdapter : CloudAdapter /// public IHttpClientFactory HttpClientFactory { get; } + /// + /// The Configuration + /// + internal IConfiguration? Configuration { get; } + + /// + /// The Service Client Credentials Factory + /// + internal ServiceClientCredentialsFactory? CredentialsFactory { get; } + /// /// Initializes a new instance of the class. (Public cloud. No auth. For testing.) /// @@ -43,6 +54,8 @@ public TeamsAdapter( logger) { HttpClientFactory = new TeamsHttpClientFactory(httpClientFactory); + Configuration = configuration; + CredentialsFactory = new ConfigurationServiceClientCredentialFactory(configuration); } /// diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs new file mode 100644 index 000000000..559f27df6 --- /dev/null +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs @@ -0,0 +1,191 @@ +using Microsoft.Bot.Builder; +using Microsoft.Bot.Connector.Authentication; +using Microsoft.Bot.Schema; +using Microsoft.Rest; +using Microsoft.Teams.AI.State; +using Microsoft.Teams.AI.Utilities; +using Newtonsoft.Json.Linq; + +namespace Microsoft.Teams.AI.Application +{ + /// + /// Downloads attachments from Teams using the Bot access token. + /// + /// + public class TeamsAttachmentDownloader : IInputFileDownloader where TState : TurnState, new() + { + private TeamsAttachmentDownloaderOptions _options; + private HttpClient _httpClient; + + + /// + /// Creates the TeamsAttachmentDownloader + /// + /// The options + /// Optional. The http client + /// + public TeamsAttachmentDownloader(TeamsAttachmentDownloaderOptions options, HttpClient? httpClient = null) + { + this._options = options; + this._httpClient = httpClient ?? 
DefaultHttpClient.Instance; + + if (this._options.Adapter.CredentialsFactory == null) + { + throw new ArgumentException("The credentials factory is not set in the adapter"); + } + } + + /// + public async Task> DownloadFilesAsync(ITurnContext turnContext, TState turnState, CancellationToken cancellationToken = default) + { + // Filter out HTML attachments + IEnumerable attachments = turnContext.Activity.Attachments.Where((a) => !a.ContentType.StartsWith("text/html")); + if (!attachments.Any()) + { + return new List(); + } + + string accessToken = ""; + + bool authEnabled = !(await this._options.Adapter.CredentialsFactory!.IsAuthenticationDisabledAsync(cancellationToken)); + + // If authentication is enabled, get access token + if (authEnabled) + { + accessToken = await _GetAccessTokenAsync(); + } + + List files = new(); + + foreach (Attachment attachment in attachments) + { + InputFile? file = await _DownloadFile(attachment, accessToken); + if (file != null) + { + files.Add(file); + } + } + + return files; + } + + /// + /// + /// + /// + /// + /// + private async Task _DownloadFile(Attachment attachment, string accessToken) + { + if (attachment.ContentUrl != null && (attachment.ContentUrl.StartsWith("https://") || attachment.ContentUrl.StartsWith("http://localhost"))) + { + // Get downloadable content link + string? downloadUrl = (attachment.Content as JObject)?.Value("downloadUrl"); + if (downloadUrl == null) + { + downloadUrl = attachment.ContentUrl; + } + + using (HttpRequestMessage request = new(HttpMethod.Get, downloadUrl)) + { + request.Headers.Add("Authorization", $"Bearer {accessToken}"); + + HttpResponseMessage response = await _httpClient.SendAsync(request).ConfigureAwait(false); + + // Failed to download file + if (!response.IsSuccessStatusCode) + { + return null; + } + + // Convert to a buffer + byte[] content = await response.Content.ReadAsByteArrayAsync(); + + // Fixup content type + string contentType = response.Content.Headers.ContentType.MediaType; + if (contentType.StartsWith("image/")) + { + contentType = "image/png"; + } + + return new InputFile(new BinaryData(content), contentType) + { + ContentUrl = attachment.ContentUrl, + }; + } + } + else + { + return new InputFile(new BinaryData(attachment.Content), attachment.ContentType) + { + ContentUrl = attachment.ContentUrl, + }; + } + } + + + private async Task _GetAccessTokenAsync(CancellationToken cancellationToken = default) + { + // Normalize the ToChannelFromBotLoginUrlPrefix (and use a default value when it is undefined). + // If non-public (specific tenant) login URL is to be used, make sure the full url including tenant ID is provided to TeamsAdapter on setup. + string? toChannelFromBotLoginUrl = this._options.Adapter.Configuration?.GetSection("ToChannelFromBotLoginUrl")?.Value; + if (toChannelFromBotLoginUrl == null) + { + toChannelFromBotLoginUrl = string.Format(AuthenticationConstants.ToChannelFromBotLoginUrlTemplate, AuthenticationConstants.DefaultChannelAuthTenant); + } + + string? audience = this._options.Adapter.Configuration?.GetSection("ToChannelFromBotOAuthScope")?.Value; + + string ToChannelFromBotLoginUrlPrefix = "https://login.microsoftonline.com/"; + string ToChannelFromBotLoginUrlPrefixGov = "https://login.microsoftonline.us/"; + + // If there is no toChannelFromBotLoginUrl set on the provided configuration, or it starts with 'https://login.microsoftonline.com/', the bot is operating in Public Azure. + // So we use the Public Azure audience or the specified audience. 
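For sources that TeamsAttachmentDownloader does not cover, the IInputFileDownloader interface added earlier in this diff can be implemented directly. Below is a minimal sketch that fetches a file when the user's message text is just an https URL; the URL-probing logic and class name are illustrative, and only the interface shape and InputFile come from the diff.

```csharp
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Bot.Builder;
using Microsoft.Teams.AI.Application;
using Microsoft.Teams.AI.State;

public class UrlInputFileDownloader<TState> : IInputFileDownloader<TState>
    where TState : TurnState, new()
{
    private static readonly HttpClient _http = new();

    public async Task<List<InputFile>> DownloadFilesAsync(
        ITurnContext turnContext, TState turnState, CancellationToken cancellationToken = default)
    {
        List<InputFile> files = new();

        string? text = turnContext.Activity.Text;
        if (text != null
            && Uri.TryCreate(text.Trim(), UriKind.Absolute, out Uri? uri)
            && uri.Scheme == Uri.UriSchemeHttps)
        {
            HttpResponseMessage response = await _http.GetAsync(uri, cancellationToken);
            if (response.IsSuccessStatusCode)
            {
                byte[] bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken);
                string contentType = response.Content.Headers.ContentType?.MediaType ?? "application/octet-stream";

                // InputFile carries the bytes plus content type into turnState.Temp.InputFiles.
                files.Add(new InputFile(new BinaryData(bytes), contentType) { ContentUrl = uri.ToString() });
            }
        }

        return files;
    }
}
```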
+ if (toChannelFromBotLoginUrl.StartsWith(ToChannelFromBotLoginUrlPrefix)) + { + audience ??= AuthenticationConstants.ToChannelFromBotOAuthScope; + } + else if (toChannelFromBotLoginUrl.StartsWith(ToChannelFromBotLoginUrlPrefixGov)) + { + audience ??= GovernmentAuthenticationConstants.ToChannelFromBotOAuthScope; + } + + ServiceClientCredentials appCreds = await this._options.Adapter.CredentialsFactory!.CreateCredentialsAsync( + this._options.BotAppId, + audience, + toChannelFromBotLoginUrl, + true, + cancellationToken + ); + + return await ((AppCredentials)appCreds).GetTokenAsync(); + } + } + + /// + /// The TeamsAttachmentDownloader options + /// + public class TeamsAttachmentDownloaderOptions + { + /// + /// The bot app id. + /// + public string BotAppId { get; set; } = string.Empty; + + /// + /// The bot app password. + /// + public TeamsAdapter Adapter { get; set; } + + /// + /// The constructor. + /// + /// The bot's application id. + /// The teams adapter + public TeamsAttachmentDownloaderOptions(string botAppId, TeamsAdapter adapter) + { + BotAppId = botAppId; + Adapter = adapter; + } + } +} diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj index e0f243601..0ec7c40df 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj @@ -38,7 +38,7 @@ - + diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/State/TempState.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/State/TempState.cs index 71c963fad..7fdb62ffd 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/State/TempState.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/State/TempState.cs @@ -1,4 +1,6 @@  +using Microsoft.Teams.AI.Application; + namespace Microsoft.Teams.AI.State { /// @@ -29,12 +31,16 @@ public class TempState : Record /// public const string AuthTokenKey = "authTokens"; - /// /// Name of the duplicate token exchange property /// public const string DuplicateTokenExchangeKey = "duplicateTokenExchange"; + /// + /// Name of the input files key + /// + public const string InputFilesKey = "inputFiles"; + /// /// Creates a new instance of the class. 
/// @@ -45,6 +51,7 @@ public TempState() : base() this[ActionOutputsKey] = new Dictionary(); this[AuthTokenKey] = new Dictionary(); this[DuplicateTokenExchangeKey] = false; + this[InputFilesKey] = new List(); } /// @@ -92,5 +99,14 @@ public bool DuplicateTokenExchange get => Get(DuplicateTokenExchangeKey)!; set => Set(DuplicateTokenExchangeKey, value); } + + /// + /// Downloaded files passed by the user to the AI library + /// + public List InputFiles + { + get => Get>(InputFilesKey)!; + set => Set(InputFilesKey, value); + } } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Utilities/JsonConverters/ChatMessageJsonConverter.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Utilities/JsonConverters/ChatMessageJsonConverter.cs index 1797c2928..26500e45f 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Utilities/JsonConverters/ChatMessageJsonConverter.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Utilities/JsonConverters/ChatMessageJsonConverter.cs @@ -26,7 +26,7 @@ public override ChatMessage Read( public override void Write(Utf8JsonWriter writer, ChatMessage value, JsonSerializerOptions options) { - writer.WriteStringValue(value.Content); + writer.WriteStringValue(value.GetContent()); writer.Flush(); } } diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/.gitignore b/dotnet/samples/04.ai.f.vision.cardMaster/.gitignore new file mode 100644 index 000000000..30dcb3bf5 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/.gitignore @@ -0,0 +1,27 @@ +# TeamsFx files +build +appPackage/build +env/.env.*.user +env/.env.local +.deployment + +# User-specific files +*.user + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Notification local store +.notification.localstore.json + +# Visual Studio files +.vs/ \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/AdapterWithErrorHandler.cs b/dotnet/samples/04.ai.f.vision.cardMaster/AdapterWithErrorHandler.cs new file mode 100644 index 000000000..8d87e01d0 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/AdapterWithErrorHandler.cs @@ -0,0 +1,28 @@ +using Microsoft.Bot.Builder.TraceExtensions; +using Microsoft.Teams.AI; + +namespace CardGazer +{ + public class AdapterWithErrorHandler : TeamsAdapter + { + public AdapterWithErrorHandler(IConfiguration configuration, ILogger logger) + : base(configuration, null, logger) + { + OnTurnError = async (turnContext, exception) => + { + // Log any leaked exception from the application. + // NOTE: In production environment, you should consider logging this to + // Azure Application Insights. Visit https://aka.ms/bottelemetry to see how + // to add telemetry capture to your bot. 
+ logger.LogError(exception, $"[OnTurnError] unhandled error : {exception.Message}"); + + // Send a message to the user + await turnContext.SendActivityAsync($"The bot encountered an unhandled error: {exception.Message}"); + await turnContext.SendActivityAsync("To continue to run this bot, please fix the bot source code."); + + // Send a trace activity + await turnContext.TraceActivityAsync("OnTurnError Trace", exception.Message, "https://www.botframework.com/schemas/error", "TurnError"); + }; + } + } +} diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.csproj b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.csproj new file mode 100644 index 000000000..2006f835e --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.csproj @@ -0,0 +1,34 @@ + + + + net6.0 + enable + enable + + + + + + + + + + + + + + + + + + + + + + + + ..\..\packages\Microsoft.TeamsAI\Microsoft.TeamsAI\bin\Debug\netstandard2.0\Microsoft.Teams.AI.dll + + + + diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.sln b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.sln new file mode 100644 index 000000000..7abb31c47 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.sln @@ -0,0 +1,27 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.7.33906.173 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CardGazer", "CardGazer.csproj", "{D045C9A3-F421-4E8B-91D0-33A62C61DCCD}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {D045C9A3-F421-4E8B-91D0-33A62C61DCCD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D045C9A3-F421-4E8B-91D0-33A62C61DCCD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D045C9A3-F421-4E8B-91D0-33A62C61DCCD}.Debug|Any CPU.Deploy.0 = Debug|Any CPU + {D045C9A3-F421-4E8B-91D0-33A62C61DCCD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D045C9A3-F421-4E8B-91D0-33A62C61DCCD}.Release|Any CPU.Build.0 = Release|Any CPU + {D045C9A3-F421-4E8B-91D0-33A62C61DCCD}.Release|Any CPU.Deploy.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {1A3065E4-A54D-45EE-BDCB-1BADCD6EA7CA} + EndGlobalSection +EndGlobal diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/CardGazerBot.cs b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazerBot.cs new file mode 100644 index 000000000..185edce8b --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazerBot.cs @@ -0,0 +1,17 @@ +using CardGazer.Model; +using Microsoft.Teams.AI; + +namespace CardGazer +{ + /// + /// A bot that echo back the user's message. + /// + public class CardGazerBot : Application + { + public CardGazerBot(ApplicationOptions options) : base(options) + { + // Registering action handlers that will be hooked up to the planner. 
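+ // ImportActions registers each [Action]-attributed method on CardGazerBotActions (SendCard, ShowCardJSON, and the unknown-action fallback) with the AI system by name.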
+ AI.ImportActions(new CardGazerBotActions()); + } + } +} diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/CardGazerBotActions.cs b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazerBotActions.cs new file mode 100644 index 000000000..66695b0e8 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazerBotActions.cs @@ -0,0 +1,61 @@ +using CardGazer.Model; +using Microsoft.Bot.Builder; +using Microsoft.Teams.AI.AI.Action; +using Microsoft.Teams.AI.AI; +using AdaptiveCards; +using Microsoft.Bot.Schema; +using System.Text.Json; + +namespace CardGazer +{ + public class CardGazerBotActions + { + [Action("SendCard")] + public async Task SendCard([ActionTurnContext] ITurnContext turnContext, [ActionTurnState] AppState turnState, [ActionParameters] Dictionary args) + { + + if (args.TryGetValue("card", out object? cardObject) && cardObject is JsonElement cardJson) + { + // Deserialize the JSON string to an AdaptiveCard object + AdaptiveCardParseResult parseResult = AdaptiveCard.FromJson(cardJson.ToString()); + + // Get the AdaptiveCard object + AdaptiveCard adaptiveCard = parseResult.Card; + + Attachment card = new Attachment() + { + ContentType = AdaptiveCard.ContentType, + Content = adaptiveCard + }; + + await turnContext.SendActivityAsync(MessageFactory.Attachment(card)); + + return "card sent"; + } + + return "failed to parse card from action arguments"; + } + + [Action("ShowCardJSON")] + public async Task ShowCardJSON([ActionTurnContext] ITurnContext turnContext, [ActionTurnState] AppState turnState, [ActionParameters] Dictionary args) + { + if (args.TryGetValue("card", out object? cardObject) && cardObject is JsonElement cardJson) + { + string cardString = cardJson.ToString(); + + await turnContext.SendActivityAsync($"
{cardString}
"); + + return "card displayed"; + } + + return "failed to parsed card from action arguments"; + } + + [Action(AIConstants.UnknownActionName)] + public async Task UnknownAction([ActionTurnContext] TurnContext turnContext, [ActionName] string action) + { + await turnContext.SendActivityAsync(ResponseGenerator.UnknownAction(action ?? "Unknown")); + return "unknown action"; + } + } +} diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/Config.cs b/dotnet/samples/04.ai.f.vision.cardMaster/Config.cs new file mode 100644 index 000000000..40e45ab56 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/Config.cs @@ -0,0 +1,29 @@ +namespace CardGazer +{ + public class ConfigOptions + { + public string? BOT_ID { get; set; } + public string? BOT_PASSWORD { get; set; } + public OpenAIConfigOptions? OpenAI { get; set; } + public AzureConfigOptions? Azure { get; set; } + } + + /// + /// Options for Open AI + /// + public class OpenAIConfigOptions + { + public string? ApiKey { get; set; } + } + + /// + /// Options for Azure OpenAI and Azure Content Safety + /// + public class AzureConfigOptions + { + public string? OpenAIApiKey { get; set; } + public string? OpenAIEndpoint { get; set; } + public string? ContentSafetyApiKey { get; set; } + public string? ContentSafetyEndpoint { get; set; } + } +} diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/Controllers/BotController.cs b/dotnet/samples/04.ai.f.vision.cardMaster/Controllers/BotController.cs new file mode 100644 index 000000000..762c5db05 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/Controllers/BotController.cs @@ -0,0 +1,32 @@ +using Microsoft.AspNetCore.Mvc; +using Microsoft.Bot.Builder; +using Microsoft.Bot.Builder.Integration.AspNet.Core; + +namespace CardGazer.Controllers +{ + [Route("api/messages")] + [ApiController] + public class BotController : ControllerBase + { + private readonly IBotFrameworkHttpAdapter _adapter; + private readonly IBot _bot; + + public BotController(IBotFrameworkHttpAdapter adapter, IBot bot) + { + _adapter = adapter; + _bot = bot; + } + + [HttpPost] + public async Task PostAsync(CancellationToken cancellationToken = default) + { + await _adapter.ProcessAsync + ( + Request, + Response, + _bot, + cancellationToken + ); + } + } +} diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/Model/AppState.cs b/dotnet/samples/04.ai.f.vision.cardMaster/Model/AppState.cs new file mode 100644 index 000000000..390b104f5 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/Model/AppState.cs @@ -0,0 +1,9 @@ +using Microsoft.Teams.AI.State; + +namespace CardGazer.Model +{ + // Extend the turn state by configuring custom strongly typed state classes. 
+ public class AppState : TurnState + { + } +} diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/Program.cs b/dotnet/samples/04.ai.f.vision.cardMaster/Program.cs new file mode 100644 index 000000000..155f6c560 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/Program.cs @@ -0,0 +1,124 @@ +using CardGazer; +using CardGazer.Model; +using Microsoft.Bot.Builder; +using Microsoft.Bot.Builder.Integration.AspNet.Core; +using Microsoft.Bot.Connector.Authentication; +using Microsoft.Teams.AI; +using Microsoft.Teams.AI.AI.Models; +using Microsoft.Teams.AI.AI.Planners; +using Microsoft.Teams.AI.AI.Prompts; +using Microsoft.Teams.AI.Application; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddControllers(); +builder.Services.AddHttpClient("WebClient", client => client.Timeout = TimeSpan.FromSeconds(600)); +builder.Services.AddHttpContextAccessor(); +builder.Logging.AddConsole(); + +// Prepare Configuration for ConfigurationBotFrameworkAuthentication +var config = builder.Configuration.Get()!; +builder.Configuration["MicrosoftAppType"] = "MultiTenant"; +builder.Configuration["MicrosoftAppId"] = config.BOT_ID; +builder.Configuration["MicrosoftAppPassword"] = config.BOT_PASSWORD; + +// Create the Bot Framework Authentication to be used with the Bot Adapter. +builder.Services.AddSingleton(); + +// Create the Cloud Adapter with error handling enabled. +builder.Services.AddSingleton(); +builder.Services.AddSingleton(sp => sp.GetService()!); + +// Create singleton instances for bot application +builder.Services.AddSingleton(); + +// Create AI Model +if (!string.IsNullOrEmpty(config.OpenAI?.ApiKey)) +{ + builder.Services.AddSingleton(sp => new( + new OpenAIModelOptions(config.OpenAI.ApiKey, "gpt-4o") + { + LogRequests = true + }, + sp.GetService() + )); +} +else if (!string.IsNullOrEmpty(config.Azure?.OpenAIApiKey) && !string.IsNullOrEmpty(config.Azure.OpenAIEndpoint)) +{ + builder.Services.AddSingleton(sp => new( + new AzureOpenAIModelOptions( + config.Azure.OpenAIApiKey, + "gpt-4o", + config.Azure.OpenAIEndpoint + ) + { + LogRequests = true + }, + sp.GetService() + )); +} +else +{ + throw new Exception("please configure settings for either OpenAI or Azure"); +} + +// Create the bot as transient. In this case the ASP Controller is expecting an IBot. 
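+// The transient factory below wires up the prompt manager, the TeamsAttachmentDownloader used for image input, and the ActionPlanner before constructing CardGazerBot.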
+builder.Services.AddTransient(sp => +{ + // Create loggers + ILoggerFactory loggerFactory = sp.GetService()!; + + // Create Prompt Manager + PromptManager prompts = new(new() + { + PromptFolder = "./Prompts" + }); + + // Set up the attachment downloader + TeamsAdapter adapter = sp.GetService()!; + TeamsAttachmentDownloaderOptions options = new(config.BOT_ID!, adapter); + IInputFileDownloader downloader = new TeamsAttachmentDownloader(options); + + // Create ActionPlanner + ActionPlanner planner = new( + options: new( + model: sp.GetService()!, + prompts: prompts, + defaultPrompt: async (context, state, planner) => + { + PromptTemplate template = prompts.GetPrompt("sequence"); + return await Task.FromResult(template); + } + ) + { LogRepairs = true }, + loggerFactory: loggerFactory + ); + + return new CardGazerBot(new() + { + Storage = sp.GetService(), + AI = new(planner), + LoggerFactory = loggerFactory, + TurnStateFactory = () => + { + return new AppState(); + }, + FileDownloaders = new List> { downloader } + }); +}); + +var app = builder.Build(); + +if (app.Environment.IsDevelopment()) +{ + app.UseDeveloperExceptionPage(); +} + +app.UseStaticFiles(); +app.UseRouting(); +app.UseEndpoints(endpoints => +{ + endpoints.MapControllers(); +}); + +app.Run(); diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/Prompts/sequence/actions.json b/dotnet/samples/04.ai.f.vision.cardMaster/Prompts/sequence/actions.json new file mode 100644 index 000000000..bd5ebd9b0 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/Prompts/sequence/actions.json @@ -0,0 +1,34 @@ +[ + { + "name": "SendCard", + "description": "Sends an adaptive card to the user", + "parameters": { + "type": "object", + "properties": { + "card": { + "type": "object", + "description": "The adaptive card to send" + } + }, + "required": [ + "card" + ] + } + }, + { + "name": "ShowCardJSON", + "description": "Shows the user the JSON for an adaptive card", + "parameters": { + "type": "object", + "properties": { + "card": { + "type": "object", + "description": "The adaptive card JSON to show" + } + }, + "required": [ + "card" + ] + } + } +] \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/Prompts/sequence/config.json b/dotnet/samples/04.ai.f.vision.cardMaster/Prompts/sequence/config.json new file mode 100644 index 000000000..79722468f --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/Prompts/sequence/config.json @@ -0,0 +1,22 @@ +{ + "schema": 1.1, + "description": "Vision Bot", + "type": "completion", + "completion": { + "model": "gpt-4-vision-preview", + "completion_type": "chat", + "include_history": true, + "include_input": true, + "include_images": true, + "max_input_tokens": 2800, + "max_tokens": 1000, + "temperature": 0.2, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": [] + }, + "augmentation": { + "augmentation_type": "sequence" + } +} \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/Prompts/sequence/skprompt.txt b/dotnet/samples/04.ai.f.vision.cardMaster/Prompts/sequence/skprompt.txt new file mode 100644 index 000000000..f4d4af46f --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/Prompts/sequence/skprompt.txt @@ -0,0 +1,4 @@ +You are a friendly assistant for Microsoft Teams with vision support. +You are an expert on converting doodles and images to Adaptive Cards for Microsoft Teams. +When shown an image try to convert it to an Adaptive Card and send it using SendCard. 
+For Adaptive Cards with Image placeholders use ShowCardJSON instead.. \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/Properties/launchSettings.json b/dotnet/samples/04.ai.f.vision.cardMaster/Properties/launchSettings.json new file mode 100644 index 000000000..12c7d7d86 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/Properties/launchSettings.json @@ -0,0 +1,25 @@ +{ + "profiles": { + "Microsoft Teams (browser)": { + "commandName": "Project", + "launchBrowser": true, + "launchUrl": "https://teams.microsoft.com/", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + }, + "dotnetRunMessages": true, + "applicationUrl": "http://localhost:5130", + "hotReloadProfile": "aspnetcore" + }, + "WSL": { + "commandName": "WSL2", + "launchBrowser": true, + "launchUrl": "https://teams.microsoft.com/", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development", + "ASPNETCORE_URLS": "http://localhost:5130" + }, + "distributionName": "" + } + } +} \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/README.md b/dotnet/samples/04.ai.f.vision.cardMaster/README.md new file mode 100644 index 000000000..36bfceca2 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/README.md @@ -0,0 +1,72 @@ +# Card Gazer Bot + +This is a conversational bot for Microsoft Teams with AI Vision support that is able to generate Adaptive Cards from uploaded images using gpt-4-vision-preview. +This sample illustrates more complex conversational bot behavior in Microsoft Teams. The bot is built to allow GPT to facilitate the conversation on its behalf as well as manually defined responses, and maps user intents to user defined actions. + + +## Set up instructions + +All the samples for the C# .NET SDK can be set up in the same way: You can find step by step instructions here: + [Setup Instructions](../README.md). + +Note that, this sample requires AI service so you need one more pre-step before Local Debug (F5). + +1. Set your Azure OpenAI related settings to *appsettings.Development.json*. + + ```json + "Azure": { + "OpenAIApiKey": "", + "OpenAIEndpoint": "" + } + ``` + +## Interacting with the Bot + +You can interact with this bot by sending it a message with an image or a doodle. Be sure to add a message like "Turn this image into an Adaptive Card". As an example, you can use the image included in the `./assets`` folder. Large resolution images will not work due to the limitations of the AI model. + +## Deploy to Azure + +You can use Teams Toolkit for Visual Studio or CLI to host the bot in Azure. The sample includes Bicep templates in the `/infra` directory which are used by the tools to create resources in Azure. + +You can find deployment instructions [here](../README.md). + +Note that, this sample requires AI service so you need one more pre-step before deploy to Azure. To configure the Azure resources to have an environment variable for the Azure OpenAI Key and other settings: + +1. In `./env/.env.dev.user` file, paste your Azure OpenAI related variables. + + ```bash + SECRET_AZURE_OPENAI_API_KEY= + SECRET_AZURE_OPENAI_ENDPOINT= + ``` + +The SECRET_ prefix is a convention used by Teams Toolkit to mask the value in any logging output and is optional. + +## Use OpenAI + +Above steps use Azure OpenAI as AI service, optionally, you can also use OpenAI as AI service. + +**As prerequisites** + +1. Prepare your own OpenAI service. +1. 
Modify source code `Program.cs`, comment out the "*#Use Azure OpenAI*" part, and uncomment the "*#Use OpenAI*" part. + +**For Local Debug (F5) with Teams Toolkit for Visual Studio** + +1. Set your [OpenAI API Key](https://openai.com/api/) to *appsettings.Development.json*. + + ```json + "OpenAI": { + "ApiKey": "" + }, + ``` + +**For Deploy to Azure with Teams Toolkit for Visual Studio** + +To configure the Azure resources to have OpenAI environment variables: + +1. In `./env/.env.dev.user` file, paste your [OpenAI API Key](https://openai.com/api/) to the environment variable `SECRET_OPENAI_KEY=`. + +## Further reading + +- [Teams Toolkit overview](https://aka.ms/vs-teams-toolkit-getting-started) +- [How Microsoft Teams bots work](https://learn.microsoft.com/azure/bot-service/bot-builder-basics-teams?view=azure-bot-service-4.0&tabs=csharp) \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/ResponseGenerator.cs b/dotnet/samples/04.ai.f.vision.cardMaster/ResponseGenerator.cs new file mode 100644 index 000000000..2d165624d --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/ResponseGenerator.cs @@ -0,0 +1,22 @@ +namespace CardGazer +{ + public static class ResponseGenerator + { + // Returns a friendly response for an unknown action + public static string UnknownAction(string action) + { + return GetRandomResponse(new string[] + { + $"I'm sorry, I'm not sure how to {action}.", + $"I don't know the first thing about {action}.", + $"I'm not sure I'm the best person to help with {action}.", + $"I'm still learning about {action}, but I'll try my best.", + $"I'm afraid I'm not experienced enough with {action}." + }); + } + + // Returns a random response from an array of responses + private static string GetRandomResponse(string[] responses) => + responses[Random.Shared.Next(responses.Length)]; + } +} diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/appPackage/color.png b/dotnet/samples/04.ai.f.vision.cardMaster/appPackage/color.png new file mode 100644 index 000000000..4ab158588 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/appPackage/color.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67c7c063ba4dc41c977080c1f1fa17c897e1c72ec4a6412ed5e681b5d4cb9680 +size 1066 diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/appPackage/manifest.json b/dotnet/samples/04.ai.f.vision.cardMaster/appPackage/manifest.json new file mode 100644 index 000000000..b430067b7 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/appPackage/manifest.json @@ -0,0 +1,48 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/teams/v1.15/MicrosoftTeams.schema.json", + "version": "1.1.0", + "manifestVersion": "1.15", + "id": "${{TEAMS_APP_ID}}", + "packageName": "com.package.name", + "name": { + "short": "CardGazer-${{TEAMSFX_ENV}}", + "full": "Teams Card Gazer" + }, + "developer": { + "name": "CardGazer", + "mpnId": "", + "websiteUrl": "https://microsoft.com", + "privacyUrl": "https://privacy.microsoft.com/privacystatement", + "termsOfUseUrl": "https://www.microsoft.com/legal/terms-of-use" + }, + "description": { + "short": "A vision enabled bot capable of generating Adaptive Cards using uploaded images.", + "full": "A vision enabled bot capable of generating Adaptive Cards using uploaded images." 
+ }, + "icons": { + "outline": "outline.png", + "color": "color.png" + }, + "accentColor": "#FFFFFF", + "staticTabs": [ + { + "entityId": "conversations", + "scopes": [ "personal" ] + }, + { + "entityId": "about", + "scopes": [ "personal" ] + } + ], + "bots": [ + { + "botId": "${{BOT_ID}}", + "scopes": [ "personal", "team", "groupChat" ], + "isNotificationOnly": false, + "supportsCalling": false, + "supportsVideo": false, + "supportsFiles": false + } + ], + "validDomains": [] +} \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/appPackage/outline.png b/dotnet/samples/04.ai.f.vision.cardMaster/appPackage/outline.png new file mode 100644 index 000000000..458549f6d --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/appPackage/outline.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b1ddc76f79027d9c0300689721649ce1f1950271a5fc4ca50ae56545228fb566 +size 249 diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/appsettings.Development.json b/dotnet/samples/04.ai.f.vision.cardMaster/appsettings.Development.json new file mode 100644 index 000000000..94bdaee2e --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/appsettings.Development.json @@ -0,0 +1,20 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft": "Information", + "Microsoft.Hosting.Lifetime": "Information", + "Microsoft.Teams.AI": "Trace" + } + }, + "AllowedHosts": "*", + "BOT_ID": "", + "BOT_PASSWORD": "", + "Azure": { + "OpenAIApiKey": "", + "OpenAIEndpoint": "" + }, + "OpenAI": { + "ApiKey": "" + } +} \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/appsettings.json b/dotnet/samples/04.ai.f.vision.cardMaster/appsettings.json new file mode 100644 index 000000000..12a253487 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/appsettings.json @@ -0,0 +1,19 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + }, + "AllowedHosts": "*", + "BOT_ID": "$botId$", + "BOT_PASSWORD": "$bot-password$", + "Azure": { + "OpenAIApiKey": "", + "OpenAIEndpoint": "" + }, + "OpenAI": { + "ApiKey": "" + } +} \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/assets/card.png b/dotnet/samples/04.ai.f.vision.cardMaster/assets/card.png new file mode 100644 index 000000000..461e6e37d --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/assets/card.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2b7a6b14a10e669d1da3339061c2d4807894cd348b0a39cffe7a13001553696f +size 45664 diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/env/.env.dev b/dotnet/samples/04.ai.f.vision.cardMaster/env/.env.dev new file mode 100644 index 000000000..f6ae1bd30 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/env/.env.dev @@ -0,0 +1,16 @@ +# This file includes environment variables that will be committed to git by default. + +# Built-in environment variables +TEAMSFX_ENV=dev + +# Updating AZURE_SUBSCRIPTION_ID or AZURE_RESOURCE_GROUP_NAME after provision may also require an update to RESOURCE_SUFFIX, because some services require a globally unique name across subscriptions/resource groups. +AZURE_SUBSCRIPTION_ID= +AZURE_RESOURCE_GROUP_NAME= +RESOURCE_SUFFIX= + +# Generated during provision, you can also add your own variables. 
+BOT_ID= +TEAMS_APP_ID= +BOT_AZURE_APP_SERVICE_RESOURCE_ID= +BOT_DOMAIN= +TEAMS_APP_TENANT_ID= \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/infra/azure.bicep b/dotnet/samples/04.ai.f.vision.cardMaster/infra/azure.bicep new file mode 100644 index 000000000..b84c95e19 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/infra/azure.bicep @@ -0,0 +1,103 @@ +@maxLength(20) +@minLength(4) +@description('Used to generate names for all resources in this file') +param resourceBaseName string + +@description('Required when create Azure Bot service') +param botAadAppClientId string + +@secure() +@description('Required by Bot Framework package in your bot project') +param botAadAppClientSecret string + +@secure() +@description('The OpenAI API Key to be added to App Service Settings') +param openAIApiKey string + +@secure() +@description('The Azure OpenAI API Key to be added to App Service Settings') +param azureOpenAIApiKey string + +@secure() +@description('The Azure OpenAI Endpoint to be added to App Service Settings') +param azureOpenAIEndpoint string + +param webAppSKU string + +@maxLength(42) +param botDisplayName string + +param serverfarmsName string = resourceBaseName +param webAppName string = resourceBaseName +param location string = resourceGroup().location + +// Compute resources for your Web App +resource serverfarm 'Microsoft.Web/serverfarms@2021-02-01' = { + kind: 'app' + location: location + name: serverfarmsName + sku: { + name: webAppSKU + } +} + +// Web App that hosts your bot +resource webApp 'Microsoft.Web/sites@2021-02-01' = { + kind: 'app' + location: location + name: webAppName + properties: { + serverFarmId: serverfarm.id + httpsOnly: true + siteConfig: { + alwaysOn: true + appSettings: [ + { + name: 'WEBSITE_RUN_FROM_PACKAGE' + value: '1' // Run Azure APP Service from a package file + } + { + name: 'RUNNING_ON_AZURE' + value: '1' + } + { + name: 'BOT_ID' + value: botAadAppClientId + } + { + name: 'BOT_PASSWORD' + value: botAadAppClientSecret + } + // ASP.NET Core treats double underscore (__) as colon (:) to support hierarchical keys + { + name: 'OpenAI__ApiKey' + value: openAIApiKey + } + { + name: 'Azure__OpenAIApiKey' + value: azureOpenAIApiKey + } + { + name: 'Azure__OpenAIEndpoint' + value: azureOpenAIEndpoint + } + ] + ftpsState: 'FtpsOnly' + } + } +} + +// Register your web service as a bot with the Bot Framework +module azureBotRegistration './botRegistration/azurebot.bicep' = { + name: 'Azure-Bot-registration' + params: { + resourceBaseName: resourceBaseName + botAadAppClientId: botAadAppClientId + botAppDomain: webApp.properties.defaultHostName + botDisplayName: botDisplayName + } +} + +// The output will be persisted in .env.{envName}. Visit https://aka.ms/teamsfx-actions/arm-deploy for more details. 
+output BOT_AZURE_APP_SERVICE_RESOURCE_ID string = webApp.id +output BOT_DOMAIN string = webApp.properties.defaultHostName diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/infra/azure.parameters.json b/dotnet/samples/04.ai.f.vision.cardMaster/infra/azure.parameters.json new file mode 100644 index 000000000..16a96563b --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/infra/azure.parameters.json @@ -0,0 +1,30 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "resourceBaseName": { + "value": "bot${{RESOURCE_SUFFIX}}" + }, + "botAadAppClientId": { + "value": "${{BOT_ID}}" + }, + "botAadAppClientSecret": { + "value": "${{SECRET_BOT_PASSWORD}}" + }, + "webAppSKU": { + "value": "B1" + }, + "botDisplayName": { + "value": "CardGazerBot" + }, + "openAIApiKey": { + "value": "${{SECRET_OPENAI_API_KEY}}" + }, + "azureOpenAIApiKey": { + "value": "${{SECRET_AZURE_OPENAI_API_KEY}}" + }, + "azureOpenAIEndpoint": { + "value": "${{SECRET_AZURE_OPENAI_ENDPOINT}}" + } + } + } \ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/infra/botRegistration/azurebot.bicep b/dotnet/samples/04.ai.f.vision.cardMaster/infra/botRegistration/azurebot.bicep new file mode 100644 index 000000000..ab67c7a56 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/infra/botRegistration/azurebot.bicep @@ -0,0 +1,37 @@ +@maxLength(20) +@minLength(4) +@description('Used to generate names for all resources in this file') +param resourceBaseName string + +@maxLength(42) +param botDisplayName string + +param botServiceName string = resourceBaseName +param botServiceSku string = 'F0' +param botAadAppClientId string +param botAppDomain string + +// Register your web service as a bot with the Bot Framework +resource botService 'Microsoft.BotService/botServices@2021-03-01' = { + kind: 'azurebot' + location: 'global' + name: botServiceName + properties: { + displayName: botDisplayName + endpoint: 'https://${botAppDomain}/api/messages' + msaAppId: botAadAppClientId + } + sku: { + name: botServiceSku + } +} + +// Connect the bot service to Microsoft Teams +resource botServiceMsTeamsChannel 'Microsoft.BotService/botServices/channels@2021-03-01' = { + parent: botService + location: 'global' + name: 'MsTeamsChannel' + properties: { + channelName: 'MsTeamsChannel' + } +} diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/infra/botRegistration/readme.md b/dotnet/samples/04.ai.f.vision.cardMaster/infra/botRegistration/readme.md new file mode 100644 index 000000000..d5416243c --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/infra/botRegistration/readme.md @@ -0,0 +1 @@ +The `azurebot.bicep` module is provided to help you create Azure Bot service when you don't use Azure to host your app. If you use Azure as infrastrcture for your app, `azure.bicep` under infra folder already leverages this module to create Azure Bot service for you. You don't need to deploy `azurebot.bicep` again. 
\ No newline at end of file diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/teamsapp.local.yml b/dotnet/samples/04.ai.f.vision.cardMaster/teamsapp.local.yml new file mode 100644 index 000000000..4ad526618 --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/teamsapp.local.yml @@ -0,0 +1,86 @@ +# yaml-language-server: $schema=https://aka.ms/teams-toolkit/1.1.0/yaml.schema.json +# Visit https://aka.ms/teamsfx-v5.0-guide for details on this file +# Visit https://aka.ms/teamsfx-actions for details on actions +version: 1.1.0 + +provision: + # Creates a Teams app + - uses: teamsApp/create + with: + # Teams app name + name: CardGazerBot-${{TEAMSFX_ENV}} + # Write the information of created resources into environment file for + # the specified environment variable(s). + writeToEnvironmentFile: + teamsAppId: TEAMS_APP_ID + + # Create or reuse an existing Azure Active Directory application for bot. + - uses: botAadApp/create + with: + # The Azure Active Directory application's display name + name: CardGazerBot-${{TEAMSFX_ENV}} + writeToEnvironmentFile: + # The Azure Active Directory application's client id created for bot. + botId: BOT_ID + # The Azure Active Directory application's client secret created for bot. + botPassword: SECRET_BOT_PASSWORD + + # Generate runtime appsettings to JSON file + - uses: file/createOrUpdateJsonFile + with: + target: ./appsettings.Development.json + content: + BOT_ID: ${{BOT_ID}} + BOT_PASSWORD: ${{SECRET_BOT_PASSWORD}} + + # Create or update the bot registration on dev.botframework.com + - uses: botFramework/create + with: + botId: ${{BOT_ID}} + name: CardGazerBot + messagingEndpoint: ${{BOT_ENDPOINT}}/api/messages + description: "" + channels: + - name: msteams + + # Validate using manifest schema + - uses: teamsApp/validateManifest + with: + # Path to manifest template + manifestPath: ./appPackage/manifest.json + # Build Teams app package with latest env value + - uses: teamsApp/zipAppPackage + with: + # Path to manifest template + manifestPath: ./appPackage/manifest.json + outputZipPath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip + outputJsonPath: ./appPackage/build/manifest.${{TEAMSFX_ENV}}.json + # Validate app package using validation rules + - uses: teamsApp/validateAppPackage + with: + # Relative path to this file. This is the path for built zip file. + appPackagePath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip + + # Apply the Teams app manifest to an existing Teams app in + # Teams Developer Portal. + # Will use the app id in manifest file to determine which Teams app to update. + - uses: teamsApp/update + with: + # Relative path to this file. This is the path for built zip file. 
+ appPackagePath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip + + # Create or update debug profile in lauchsettings file + - uses: file/createOrUpdateJsonFile + with: + target: ./Properties/launchSettings.json + content: + profiles: + Microsoft Teams (browser): + commandName: "Project" + dotnetRunMessages: true + launchBrowser: true + launchUrl: "https://teams.microsoft.com/l/app/${{TEAMS_APP_ID}}?installAppPackage=true&webjoin=true&appTenantId=${{TEAMS_APP_TENANT_ID}}&login_hint=${{TEAMSFX_M365_USER_NAME}}" + applicationUrl: "http://localhost:5130" + environmentVariables: + ASPNETCORE_ENVIRONMENT: "Development" + hotReloadProfile: "aspnetcore" diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/teamsapp.yml b/dotnet/samples/04.ai.f.vision.cardMaster/teamsapp.yml new file mode 100644 index 000000000..3ad69f3bf --- /dev/null +++ b/dotnet/samples/04.ai.f.vision.cardMaster/teamsapp.yml @@ -0,0 +1,97 @@ +# yaml-language-server: $schema=https://aka.ms/teams-toolkit/1.1.0/yaml.schema.json +# Visit https://aka.ms/teamsfx-v5.0-guide for details on this file +# Visit https://aka.ms/teamsfx-actions for details on actions +version: 1.1.0 + +environmentFolderPath: ./env + +# Triggered when 'teamsfx provision' is executed +provision: + # Creates a Teams app + - uses: teamsApp/create + with: + # Teams app name + name: CardGazerBot-${{TEAMSFX_ENV}} + # Write the information of created resources into environment file for + # the specified environment variable(s). + writeToEnvironmentFile: + teamsAppId: TEAMS_APP_ID + + # Create or reuse an existing Azure Active Directory application for bot. + - uses: botAadApp/create + with: + # The Azure Active Directory application's display name + name: CardGazerBot-${{TEAMSFX_ENV}} + writeToEnvironmentFile: + # The Azure Active Directory application's client id created for bot. + botId: BOT_ID + # The Azure Active Directory application's client secret created for bot. + botPassword: SECRET_BOT_PASSWORD + + - uses: arm/deploy # Deploy given ARM templates parallelly. + with: + # AZURE_SUBSCRIPTION_ID is a built-in environment variable, + # if its value is empty, TeamsFx will prompt you to select a subscription. + # Referencing other environment variables with empty values + # will skip the subscription selection prompt. + subscriptionId: ${{AZURE_SUBSCRIPTION_ID}} + # AZURE_RESOURCE_GROUP_NAME is a built-in environment variable, + # if its value is empty, TeamsFx will prompt you to select or create one + # resource group. + # Referencing other environment variables with empty values + # will skip the resource group selection prompt. + resourceGroupName: ${{AZURE_RESOURCE_GROUP_NAME}} + templates: + - path: ./infra/azure.bicep # Relative path to this file + # Relative path to this yaml file. + # Placeholders will be replaced with corresponding environment + # variable before ARM deployment. + parameters: ./infra/azure.parameters.json + # Required when deploying ARM template + deploymentName: Create-resources-for-tab + # Teams Toolkit will download this bicep CLI version from github for you, + # will use bicep CLI in PATH if you remove this config. 
+ bicepCliVersion: v0.9.1 + + # Validate using manifest schema + - uses: teamsApp/validateManifest + with: + # Path to manifest template + manifestPath: ./appPackage/manifest.json + # Build Teams app package with latest env value + - uses: teamsApp/zipAppPackage + with: + # Path to manifest template + manifestPath: ./appPackage/manifest.json + outputZipPath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip + outputJsonPath: ./appPackage/build/manifest.${{TEAMSFX_ENV}}.json + # Validate app package using validation rules + - uses: teamsApp/validateAppPackage + with: + # Relative path to this file. This is the path for built zip file. + appPackagePath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip + # Apply the Teams app manifest to an existing Teams app in + # Teams Developer Portal. + # Will use the app id in manifest file to determine which Teams app to update. + - uses: teamsApp/update + with: + # Relative path to this file. This is the path for built zip file. + appPackagePath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip + +# Triggered when 'teamsfx deploy' is executed +deploy: + - uses: cli/runDotnetCommand + with: + args: publish --configuration Release --runtime win-x86 --self-contained + # Deploy your application to Azure App Service using the zip deploy feature. + # For additional details, refer to https://aka.ms/zip-deploy-to-app-services. + - uses: azureAppService/zipDeploy + with: + # deploy base folder + artifactFolder: bin/Release/net6.0/win-x86/publish + # The resource id of the cloud resource to be deployed to. + # This key will be generated by arm/deploy action automatically. + # You can replace it with your existing Azure Resource id + # or add it to your environment variable file. + resourceId: ${{BOT_AZURE_APP_SERVICE_RESOURCE_ID}} +projectId: bfb7ae21-7937-436f-9efd-c746df935101
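
Below is a minimal, hypothetical sketch (not part of this patch) of how a bot action could consume the files that TeamsAttachmentDownloader places into temp state via the new TempState.InputFiles property; the ImageDiagnosticsActions class, the InspectImages action name, and the InputFile element type are illustrative assumptions.

```csharp
using System.Collections.Generic;
using System.Threading.Tasks;
using CardGazer.Model;
using Microsoft.Bot.Builder;
using Microsoft.Teams.AI.AI.Action;
using Microsoft.Teams.AI.Application;

namespace CardGazer
{
    // Hypothetical action class for illustration; not included in this change.
    public class ImageDiagnosticsActions
    {
        [Action("InspectImages")]
        public async Task<string> InspectImages([ActionTurnContext] ITurnContext turnContext, [ActionTurnState] AppState turnState)
        {
            // TeamsAttachmentDownloader (registered through ApplicationOptions.FileDownloaders in Program.cs)
            // is expected to have populated this list before the planner ran.
            List<InputFile> files = turnState.Temp.InputFiles;

            await turnContext.SendActivityAsync($"This turn includes {files.Count} downloaded file(s).");
            return $"{files.Count} file(s) available";
        }
    }
}
```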