diff --git a/e2e/scenarios/anthropic-instrumentation/scenario.anthropic-v0273.mjs b/e2e/scenarios/anthropic-instrumentation/scenario.anthropic-v0273.mjs index f33a4f98..65f94765 100644 --- a/e2e/scenarios/anthropic-instrumentation/scenario.anthropic-v0273.mjs +++ b/e2e/scenarios/anthropic-instrumentation/scenario.anthropic-v0273.mjs @@ -3,5 +3,8 @@ import { runMain } from "../../helpers/provider-runtime.mjs"; import { runAutoAnthropicInstrumentation } from "./scenario.impl.mjs"; runMain(async () => - runAutoAnthropicInstrumentation(Anthropic, { useBetaMessages: false }), + runAutoAnthropicInstrumentation(Anthropic, { + expectStreamWithResponse: false, + useBetaMessages: false, + }), ); diff --git a/e2e/scenarios/anthropic-instrumentation/scenario.anthropic-v0273.ts b/e2e/scenarios/anthropic-instrumentation/scenario.anthropic-v0273.ts index f6d3267e..4c75349d 100644 --- a/e2e/scenarios/anthropic-instrumentation/scenario.anthropic-v0273.ts +++ b/e2e/scenarios/anthropic-instrumentation/scenario.anthropic-v0273.ts @@ -3,5 +3,8 @@ import { runMain } from "../../helpers/scenario-runtime"; import { runWrappedAnthropicInstrumentation } from "./scenario.impl.mjs"; runMain(async () => - runWrappedAnthropicInstrumentation(Anthropic, { useBetaMessages: false }), + runWrappedAnthropicInstrumentation(Anthropic, { + expectStreamWithResponse: false, + useBetaMessages: false, + }), ); diff --git a/e2e/scenarios/anthropic-instrumentation/scenario.impl.mjs b/e2e/scenarios/anthropic-instrumentation/scenario.impl.mjs index a5563296..6d3eb714 100644 --- a/e2e/scenarios/anthropic-instrumentation/scenario.impl.mjs +++ b/e2e/scenarios/anthropic-instrumentation/scenario.impl.mjs @@ -28,8 +28,8 @@ async function runAnthropicInstrumentationScenario( Anthropic, { decorateClient, + expectStreamWithResponse = true, useBetaMessages = true, - useMessagesStreamHelper = true, } = {}, ) { const imageBase64 = ( @@ -123,33 +123,32 @@ async function runAnthropicInstrumentationScenario( 
"anthropic-stream-with-response-operation", "stream-with-response", async () => { - const stream = - useMessagesStreamHelper === false - ? await client.messages.create({ - model: ANTHROPIC_MODEL, - max_tokens: 32, - temperature: 0, - stream: true, - messages: [ - { - role: "user", - content: - "Count from 1 to 3 and include the words one two three.", - }, - ], - }) - : client.messages.stream({ - model: ANTHROPIC_MODEL, - max_tokens: 32, - temperature: 0, - messages: [ - { - role: "user", - content: - "Count from 1 to 3 and include the words one two three.", - }, - ], - }); + const stream = client.messages.stream({ + model: ANTHROPIC_MODEL, + max_tokens: 32, + temperature: 0, + messages: [ + { + role: "user", + content: + "Count from 1 to 3 and include the words one two three.", + }, + ], + }); + + if (expectStreamWithResponse) { + if (typeof stream.withResponse !== "function") { + throw new Error( + "Expected messages.stream() to expose withResponse()", + ); + } + await stream.withResponse(); + } else if (typeof stream.withResponse === "function") { + throw new Error( + "Expected messages.stream() to not expose withResponse()", + ); + } + await collectAsync(stream); }, ); @@ -251,7 +250,6 @@ export async function runWrappedAnthropicInstrumentation(Anthropic, options) { export async function runAutoAnthropicInstrumentation(Anthropic, options) { await runAnthropicInstrumentationScenario(Anthropic, { ...options, - useMessagesStreamHelper: false, }); } diff --git a/e2e/scenarios/openai-instrumentation/scenario.impl.mjs b/e2e/scenarios/openai-instrumentation/scenario.impl.mjs index 5383462a..58fb0577 100644 --- a/e2e/scenarios/openai-instrumentation/scenario.impl.mjs +++ b/e2e/scenarios/openai-instrumentation/scenario.impl.mjs @@ -34,16 +34,6 @@ async function collectOneAndReturn(stream) { } } -async function awaitMaybeWithResponse(request) { - if (typeof request?.withResponse === "function") { - return await request.withResponse(); - } - - return { - data: await 
request, - }; -} - export async function runOpenAIInstrumentationScenario(options) { const baseClient = new options.OpenAI({ apiKey: process.env.OPENAI_API_KEY, @@ -68,14 +58,14 @@ export async function runOpenAIInstrumentationScenario(options) { "openai-chat-with-response-operation", "chat-with-response", async () => { - await awaitMaybeWithResponse( - client.chat.completions.create({ + await client.chat.completions + .create({ model: OPENAI_MODEL, messages: [{ role: "user", content: "Reply with exactly FOUR." }], max_tokens: 8, temperature: 0, - }), - ); + }) + .withResponse(); }, ); @@ -97,8 +87,8 @@ export async function runOpenAIInstrumentationScenario(options) { "openai-stream-with-response-operation", "stream-with-response", async () => { - const { data: chatStream } = await awaitMaybeWithResponse( - client.chat.completions.create({ + const { data: chatStream } = await client.chat.completions + .create({ model: OPENAI_MODEL, messages: [ { @@ -112,8 +102,8 @@ export async function runOpenAIInstrumentationScenario(options) { stream_options: { include_usage: true, }, - }), - ); + }) + .withResponse(); await collectAsync(chatStream); }, ); @@ -210,13 +200,13 @@ export async function runOpenAIInstrumentationScenario(options) { "openai-responses-with-response-operation", "responses-with-response", async () => { - await awaitMaybeWithResponse( - client.responses.create({ + await client.responses + .create({ model: OPENAI_MODEL, input: "What is 2 + 2? 
Reply with just the number.", max_output_tokens: 16, - }), - ); + }) + .withResponse(); }, ); @@ -224,14 +214,14 @@ "openai-responses-create-stream-operation", "responses-create-stream", async () => { - const { data: responseStream } = await awaitMaybeWithResponse( - client.responses.create({ + const { data: responseStream } = await client.responses + .create({ model: OPENAI_MODEL, input: "Reply with exactly RESPONSE STREAM.", max_output_tokens: 16, stream: true, - }), - ); + }) + .withResponse(); await collectAsync(responseStream); }, ); diff --git a/js/src/auto-instrumentations/patch-tracing-channel.test.ts b/js/src/auto-instrumentations/patch-tracing-channel.test.ts index a78b9e08..897f8a76 100644 --- a/js/src/auto-instrumentations/patch-tracing-channel.test.ts +++ b/js/src/auto-instrumentations/patch-tracing-channel.test.ts @@ -201,6 +201,58 @@ describe("patchTracingChannel", () => { expect(withResponse.response.ok).toBe(true); }); + it("patched tracePromise preserves helper methods on augmented native Promise instances", async () => { + const FakeTCClass = makeUnpatchedTracingChannel(); + const channel = new FakeTCClass(); + patchTracingChannel(() => channel); + + const nativePromise = Promise.resolve("hello") as Promise<string> & { + withResponse: () => Promise<{ data: string; response: { ok: boolean } }>; + }; + nativePromise.withResponse = async () => ({ + data: await nativePromise, + response: { ok: true }, + }); + + const traced = channel.tracePromise(() => nativePromise, {}, null); + const withResponse = await traced.withResponse(); + + expect(traced).toBe(nativePromise); + expect(withResponse.data).toBe("hello"); + expect(withResponse.response.ok).toBe(true); + }); + + it("patched tracePromise preserves helper methods on prototype-augmented native Promise instances", async () => { + const FakeTCClass = makeUnpatchedTracingChannel(); + const channel = new FakeTCClass(); + patchTracingChannel(() => 
channel); + + const nativePromise = Promise.resolve("hello"); + const augmentedProto = Object.create( + Promise.prototype, + ) as Promise<string> & { + withResponse: () => Promise<{ data: string; response: { ok: boolean } }>; + }; + + augmentedProto.withResponse = async function () { + const data = await this; + return { data, response: { ok: true } }; + }; + + Object.setPrototypeOf(nativePromise, augmentedProto); + + const traced = channel.tracePromise( + () => nativePromise, + {}, + null, + ) as typeof nativePromise & typeof augmentedProto; + const withResponse = await traced.withResponse(); + + expect(traced).toBe(nativePromise); + expect(withResponse.data).toBe("hello"); + expect(withResponse.response.ok).toBe(true); + }); + it("patched tracePromise correctly handles plain async functions", async () => { const FakeTCClass = makeUnpatchedTracingChannel(); const channel = new FakeTCClass(); diff --git a/js/src/auto-instrumentations/patch-tracing-channel.ts b/js/src/auto-instrumentations/patch-tracing-channel.ts index b7f71893..df07d40c 100644 --- a/js/src/auto-instrumentations/patch-tracing-channel.ts +++ b/js/src/auto-instrumentations/patch-tracing-channel.ts @@ -10,6 +10,15 @@ * and in configureNode/configureBrowser for the bundler plugin path. */ +function isPlainNativePromiseWithoutHelpers(result: Promise<unknown>): boolean { + return ( + result.constructor === Promise && + Object.getPrototypeOf(result) === Promise.prototype && + Object.getOwnPropertyNames(result).length === 0 && + Object.getOwnPropertySymbols(result).length === 0 + ); +} + // eslint-disable-next-line @typescript-eslint/no-explicit-any export function patchTracingChannel( tracingChannelFn: (name: string) => any, @@ -77,7 +86,7 @@ export function patchTracingChannel( // established by bindStore — required for span context to propagate across awaits. 
// PATCHED: inside the callback, use duck-type thenable check instead of // PromisePrototypeThen, which triggers Symbol.species and breaks Promise subclasses - // like Anthropic's APIPromise that have non-standard constructors. + // like Anthropic's and OpenAI's APIPromise that have non-standard constructors. return start.runStores(context, () => { try { // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -89,13 +98,18 @@ (typeof result === "object" || typeof result === "function") && typeof result.then === "function" ) { - if (result.constructor === Promise) { + if ( + // Return the Promise chain only for plain native Promises. + // Promise subclasses and prototype-augmented Promises must be + // returned as-is so SDK helper methods stay intact. + isPlainNativePromiseWithoutHelpers(result) + ) { return result.then( - (res: unknown) => { + (res: unknown) => { publishResolved(res); return res; }, - (err) => { + (err: unknown) => { publishRejected(err); return Promise.reject(err); },