Skip to content

Commit f386ded

Browse files
committed
Fix more centrally
1 parent de64af6 commit f386ded

File tree

4 files changed

+63
-61
lines changed

4 files changed

+63
-61
lines changed

e2e/scenarios/anthropic-instrumentation/scenario.impl.mjs

Lines changed: 14 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -26,11 +26,7 @@ const WEATHER_TOOL = {
2626

2727
async function runAnthropicInstrumentationScenario(
2828
Anthropic,
29-
{
30-
decorateClient,
31-
useBetaMessages = true,
32-
useMessagesStreamHelper = true,
33-
} = {},
29+
{ decorateClient, useBetaMessages = true } = {},
3430
) {
3531
const imageBase64 = (
3632
await readFile(new URL("./test-image.png", import.meta.url))
@@ -123,33 +119,19 @@ async function runAnthropicInstrumentationScenario(
123119
"anthropic-stream-with-response-operation",
124120
"stream-with-response",
125121
async () => {
126-
const stream =
127-
useMessagesStreamHelper === false
128-
? await client.messages.create({
129-
model: ANTHROPIC_MODEL,
130-
max_tokens: 32,
131-
temperature: 0,
132-
stream: true,
133-
messages: [
134-
{
135-
role: "user",
136-
content:
137-
"Count from 1 to 3 and include the words one two three.",
138-
},
139-
],
140-
})
141-
: client.messages.stream({
142-
model: ANTHROPIC_MODEL,
143-
max_tokens: 32,
144-
temperature: 0,
145-
messages: [
146-
{
147-
role: "user",
148-
content:
149-
"Count from 1 to 3 and include the words one two three.",
150-
},
151-
],
152-
});
122+
const stream = client.messages.stream({
123+
model: ANTHROPIC_MODEL,
124+
max_tokens: 32,
125+
temperature: 0,
126+
messages: [
127+
{
128+
role: "user",
129+
content:
130+
"Count from 1 to 3 and include the words one two three.",
131+
},
132+
],
133+
});
134+
await stream.withResponse();
153135
await collectAsync(stream);
154136
},
155137
);
@@ -251,7 +233,6 @@ export async function runWrappedAnthropicInstrumentation(Anthropic, options) {
251233
export async function runAutoAnthropicInstrumentation(Anthropic, options) {
252234
await runAnthropicInstrumentationScenario(Anthropic, {
253235
...options,
254-
useMessagesStreamHelper: false,
255236
});
256237
}
257238

e2e/scenarios/openai-instrumentation/scenario.impl.mjs

Lines changed: 16 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -34,16 +34,6 @@ async function collectOneAndReturn(stream) {
3434
}
3535
}
3636

37-
async function awaitMaybeWithResponse(request) {
38-
if (typeof request?.withResponse === "function") {
39-
return await request.withResponse();
40-
}
41-
42-
return {
43-
data: await request,
44-
};
45-
}
46-
4737
export async function runOpenAIInstrumentationScenario(options) {
4838
const baseClient = new options.OpenAI({
4939
apiKey: process.env.OPENAI_API_KEY,
@@ -68,14 +58,14 @@ export async function runOpenAIInstrumentationScenario(options) {
6858
"openai-chat-with-response-operation",
6959
"chat-with-response",
7060
async () => {
71-
await awaitMaybeWithResponse(
72-
client.chat.completions.create({
61+
await client.chat.completions
62+
.create({
7363
model: OPENAI_MODEL,
7464
messages: [{ role: "user", content: "Reply with exactly FOUR." }],
7565
max_tokens: 8,
7666
temperature: 0,
77-
}),
78-
);
67+
})
68+
.withResponse();
7969
},
8070
);
8171

@@ -97,8 +87,8 @@ export async function runOpenAIInstrumentationScenario(options) {
9787
"openai-stream-with-response-operation",
9888
"stream-with-response",
9989
async () => {
100-
const { data: chatStream } = await awaitMaybeWithResponse(
101-
client.chat.completions.create({
90+
const { data: chatStream } = await client.chat.completions
91+
.create({
10292
model: OPENAI_MODEL,
10393
messages: [
10494
{
@@ -112,8 +102,8 @@ export async function runOpenAIInstrumentationScenario(options) {
112102
stream_options: {
113103
include_usage: true,
114104
},
115-
}),
116-
);
105+
})
106+
.withResponse();
117107
await collectAsync(chatStream);
118108
},
119109
);
@@ -210,28 +200,28 @@ export async function runOpenAIInstrumentationScenario(options) {
210200
"openai-responses-with-response-operation",
211201
"responses-with-response",
212202
async () => {
213-
await awaitMaybeWithResponse(
214-
client.responses.create({
203+
await client.responses
204+
.create({
215205
model: OPENAI_MODEL,
216206
input: "What is 2 + 2? Reply with just the number.",
217207
max_output_tokens: 16,
218-
}),
219-
);
208+
})
209+
.withResponse();
220210
},
221211
);
222212

223213
await runOperation(
224214
"openai-responses-create-stream-operation",
225215
"responses-create-stream",
226216
async () => {
227-
const { data: responseStream } = await awaitMaybeWithResponse(
228-
client.responses.create({
217+
const { data: responseStream } = await client.responses
218+
.create({
229219
model: OPENAI_MODEL,
230220
input: "Reply with exactly RESPONSE STREAM.",
231221
max_output_tokens: 16,
232222
stream: true,
233-
}),
234-
);
223+
})
224+
.withResponse();
235225
await collectAsync(responseStream);
236226
},
237227
);

js/src/auto-instrumentations/patch-tracing-channel.test.ts

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -201,6 +201,27 @@ describe("patchTracingChannel", () => {
201201
expect(withResponse.response.ok).toBe(true);
202202
});
203203

204+
it("patched tracePromise preserves helper methods on augmented native Promise instances", async () => {
205+
const FakeTCClass = makeUnpatchedTracingChannel();
206+
const channel = new FakeTCClass();
207+
patchTracingChannel(() => channel);
208+
209+
const nativePromise = Promise.resolve("hello") as Promise<string> & {
210+
withResponse: () => Promise<{ data: string; response: { ok: boolean } }>;
211+
};
212+
nativePromise.withResponse = async () => ({
213+
data: await nativePromise,
214+
response: { ok: true },
215+
});
216+
217+
const traced = channel.tracePromise(() => nativePromise, {}, null);
218+
const withResponse = await traced.withResponse();
219+
220+
expect(traced).toBe(nativePromise);
221+
expect(withResponse.data).toBe("hello");
222+
expect(withResponse.response.ok).toBe(true);
223+
});
224+
204225
it("patched tracePromise correctly handles plain async functions", async () => {
205226
const FakeTCClass = makeUnpatchedTracingChannel();
206227
const channel = new FakeTCClass();

js/src/auto-instrumentations/patch-tracing-channel.ts

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,10 @@
1010
* and in configureNode/configureBrowser for the bundler plugin path.
1111
*/
1212

13+
function hasOwnPromiseAugmentations(promise: Promise<unknown>): boolean {
14+
  // NOTE(review): body appears garbled in this rendering ("return;" alone cannot
  // satisfy the declared boolean return type). Reconstructed from the equivalent
  // inline check added later in this commit — verify against the actual commit.
  return (
    Object.getOwnPropertyNames(promise).length > 0 ||
    Object.getOwnPropertySymbols(promise).length > 0
  );
15+
}
16+
1317
// eslint-disable-next-line @typescript-eslint/no-explicit-any
1418
export function patchTracingChannel(
1519
tracingChannelFn: (name: string) => any,
@@ -77,7 +81,7 @@ export function patchTracingChannel(
7781
// established by bindStore — required for span context to propagate across awaits.
7882
// PATCHED: inside the callback, use duck-type thenable check instead of
7983
// PromisePrototypeThen, which triggers Symbol.species and breaks Promise subclasses
80-
// like Anthropic's APIPromise that have non-standard constructors.
84+
// like Anthropic's and OpenAI's APIPromise that have non-standard constructors.
8185
return start.runStores(context, () => {
8286
try {
8387
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -89,7 +93,13 @@ export function patchTracingChannel(
8993
(typeof result === "object" || typeof result === "function") &&
9094
typeof result.then === "function"
9195
) {
92-
if (result.constructor === Promise) {
96+
if (
97+
// We only want to return the Promise chain when it's an actual
98+
// promise and also doesn't have any additional fields
99+
result.constructor === Promise &&
100+
Object.getOwnPropertyNames(result).length === 0 &&
101+
Object.getOwnPropertySymbols(result).length === 0
102+
) {
93103
return result.then(
94104
(res) => {
95105
publishResolved(res);

0 commit comments

Comments
 (0)