From 14ccd915f4ef72f4a5a8209d6a85254f50619c2c Mon Sep 17 00:00:00 2001 From: Lily Du Date: Wed, 18 Sep 2024 11:42:24 -0700 Subject: [PATCH] fix TestModel class --- js/packages/teams-ai/src/models/TestModel.ts | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/js/packages/teams-ai/src/models/TestModel.ts b/js/packages/teams-ai/src/models/TestModel.ts index 7bdc14718..9aad724cb 100644 --- a/js/packages/teams-ai/src/models/TestModel.ts +++ b/js/packages/teams-ai/src/models/TestModel.ts @@ -13,6 +13,7 @@ import { Tokenizer } from '../tokenizers'; import { TurnContext } from 'botbuilder'; import { Memory } from '../MemoryFork'; import EventEmitter from 'events'; +import { StreamingResponse } from '../StreamingResponse'; /** * A `PromptCompletionModel` used for testing. @@ -90,7 +91,8 @@ export class TestModel implements PromptCompletionModel { return new TestModel(async (model, context, memory, functions, tokenizer, template) => { model.events.emit('beforeCompletion', context, memory, functions, tokenizer, template, false); await new Promise((resolve) => setTimeout(resolve, delay)); - model.events.emit('responseReceived', context, memory, response); + const streamer = new StreamingResponse(context); + model.events.emit('responseReceived', context, memory, response, streamer); return response; }); } @@ -127,7 +129,8 @@ // Finalize the response. await new Promise((resolve) => setTimeout(resolve, delay)); const response: PromptResponse = { status: 'success', message: { role: 'assistant', content } }; - model.events.emit('responseReceived', context, memory, response); + const streamer = new StreamingResponse(context); + model.events.emit('responseReceived', context, memory, response, streamer); return response; }); }