From a404326ab81e8954e7d29b223efdc36b42e698f5 Mon Sep 17 00:00:00 2001 From: Sam Brenner Date: Thu, 13 Feb 2025 14:12:40 -0500 Subject: [PATCH] re-enable tests --- .../test/integration-test/client.spec.js | 8 ++--- .../test/integration-test/server.mjs | 14 ++++++-- .../test/integration-test/client.spec.js | 9 +++-- .../test/integration-test/server.mjs | 33 +++++++++++++------ 4 files changed, 45 insertions(+), 19 deletions(-) diff --git a/packages/datadog-plugin-langchain/test/integration-test/client.spec.js b/packages/datadog-plugin-langchain/test/integration-test/client.spec.js index bc505687115..7d8578210fc 100644 --- a/packages/datadog-plugin-langchain/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-langchain/test/integration-test/client.spec.js @@ -8,9 +8,7 @@ const { } = require('../../../../integration-tests/helpers') const { assert } = require('chai') -// there is currently an issue with langchain + esm loader hooks from IITM -// https://github.com/nodejs/import-in-the-middle/issues/163 -describe.skip('esm', () => { +describe('esm', () => { let agent let proc let sandbox @@ -47,7 +45,9 @@ describe.skip('esm', () => { assert.strictEqual(checkSpansForServiceName(payload, 'langchain.request'), true) }) - proc = await spawnPluginIntegrationTestProc(sandbox.folder, 'server.mjs', agent.port) + proc = await spawnPluginIntegrationTestProc(sandbox.folder, 'server.mjs', agent.port, null, { + NODE_OPTIONS: '--import dd-trace/register.js' + }) await res }).timeout(20000) diff --git a/packages/datadog-plugin-langchain/test/integration-test/server.mjs b/packages/datadog-plugin-langchain/test/integration-test/server.mjs index b929824b7dd..f0c1d92c5b0 100644 --- a/packages/datadog-plugin-langchain/test/integration-test/server.mjs +++ b/packages/datadog-plugin-langchain/test/integration-test/server.mjs @@ -1,11 +1,21 @@ import 'dd-trace/init.js' + import { OpenAI } from '@langchain/openai' import { StringOutputParser } from 
'@langchain/core/output_parsers' import nock from 'nock' nock('https://api.openai.com:443') .post('/v1/completions') - .reply(200, {}) + .reply(200, { + model: 'gpt-3.5-turbo-instruct', + choices: [{ + text: 'The answer is 4', + index: 0, + logprobs: null, + finish_reason: 'length' + }], + usage: { prompt_tokens: 8, completion_tokens: 12, total_tokens: 20 } + }) const llm = new OpenAI({ apiKey: '' @@ -15,4 +25,4 @@ const parser = new StringOutputParser() const chain = llm.pipe(parser) -await chain.invoke('a test') +await chain.invoke('what is 2 + 2?') diff --git a/packages/datadog-plugin-openai/test/integration-test/client.spec.js b/packages/datadog-plugin-openai/test/integration-test/client.spec.js index 41a55eaf09d..aca18d9c617 100644 --- a/packages/datadog-plugin-openai/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-openai/test/integration-test/client.spec.js @@ -15,8 +15,9 @@ describe('esm', () => { let sandbox // limit v4 tests while the IITM issue is resolved or a workaround is introduced + // this is only relevant for `openai` >=4.0 <=4.1 // issue link: https://github.com/DataDog/import-in-the-middle/issues/60 - withVersions('openai', 'openai', '>=3 <4', version => { + withVersions('openai', 'openai', '>=3 <4.0.0 || >4.1.0', version => { before(async function () { this.timeout(20000) sandbox = await createSandbox([`'openai@${version}'`, 'nock'], false, [ @@ -43,9 +44,11 @@ describe('esm', () => { assert.strictEqual(checkSpansForServiceName(payload, 'openai.request'), true) }) - proc = await spawnPluginIntegrationTestProc(sandbox.folder, 'server.mjs', agent.port) + proc = await spawnPluginIntegrationTestProc(sandbox.folder, 'server.mjs', agent.port, null, { + NODE_OPTIONS: '--import dd-trace/register.js' + }) await res - }).timeout(20000) + }).timeout(5000) }) }) diff --git a/packages/datadog-plugin-openai/test/integration-test/server.mjs b/packages/datadog-plugin-openai/test/integration-test/server.mjs index 0b47fb8cc82..2f8bbc03746 100644
--- a/packages/datadog-plugin-openai/test/integration-test/server.mjs +++ b/packages/datadog-plugin-openai/test/integration-test/server.mjs @@ -1,18 +1,31 @@ import 'dd-trace/init.js' -import openai from 'openai' +import OpenAI from 'openai' import nock from 'nock' nock('https://api.openai.com:443') .post('/v1/completions') .reply(200, {}) -const openaiApp = new openai.OpenAIApi(new openai.Configuration({ - apiKey: 'sk-DATADOG-ACCEPTANCE-TESTS' -})) +if (OpenAI.OpenAIApi) { + const openaiApp = new OpenAI.OpenAIApi(new OpenAI.Configuration({ + apiKey: 'sk-DATADOG-ACCEPTANCE-TESTS' + })) -await openaiApp.createCompletion({ - model: 'text-davinci-002', - prompt: 'Hello, ', - suffix: 'foo', - stream: true -}) + await openaiApp.createCompletion({ + model: 'text-davinci-002', + prompt: 'Hello, ', + suffix: 'foo', + stream: true + }) +} else { + const client = new OpenAI({ + apiKey: 'sk-DATADOG-ACCEPTANCE-TESTS' + }) + + await client.completions.create({ + model: 'text-davinci-002', + prompt: 'Hello, ', + suffix: 'foo', + stream: false + }) +}