Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .size-limit.js
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,7 @@ module.exports = [
path: createCDNPath('bundle.tracing.replay.logs.metrics.min.js'),
gzip: false,
brotli: false,
limit: '250 KB',
limit: '251 KB',
},
{
name: 'CDN Bundle (incl. Tracing, Replay, Feedback) - uncompressed',
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

// Instrumentation file for the "no truncation" Anthropic AI integration test:
// initializes the Node SDK with truncation of recorded gen_ai inputs disabled,
// so oversized inputs must appear verbatim on the emitted spans.
Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0, // capture every transaction produced by the scenario
  sendDefaultPii: false,
  transport: loggingTransport,
  integrations: [
    Sentry.anthropicAIIntegration({
      recordInputs: true,
      recordOutputs: true,
      enableTruncation: false, // under test: inputs are recorded without truncation
    }),
  ],
  beforeSendTransaction: event => {
    // Filter out mock express server transactions.
    // `transaction` is optional on the event, so guard with `?.` instead of
    // dereferencing it unconditionally (which would throw and drop the event).
    if (event.transaction?.includes('/anthropic/v1/')) {
      return null;
    }
    return event;
  },
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import { instrumentAnthropicAiClient } from '@sentry/core';
import * as Sentry from '@sentry/node';

/**
 * Minimal stand-in for the Anthropic SDK client.
 *
 * Exposes only `messages.create`, which resolves after a short delay with a
 * fixed assistant response that echoes back the requested model.
 */
class MockAnthropic {
  apiKey;
  messages;

  constructor(config) {
    this.apiKey = config.apiKey;
    this.messages = {
      create: this._messagesCreate.bind(this),
    };
  }

  /** Fake `messages.create`: waits 10ms, then resolves with a canned reply. */
  async _messagesCreate(params) {
    await new Promise(done => setTimeout(done, 10));

    const response = {
      id: 'msg-no-truncation-test',
      type: 'message',
      role: 'assistant',
      content: [{ type: 'text', text: 'Response' }],
      model: params.model,
      stop_reason: 'end_turn',
      stop_sequence: null,
      usage: { input_tokens: 10, output_tokens: 5 },
    };
    return response;
  }
}

/**
 * Exercises the instrumented mock client with oversized inputs.
 *
 * With `enableTruncation: false`, both calls below should have their full
 * 50k-character inputs recorded on the resulting spans (they would otherwise
 * be truncated by the integration).
 *
 * NOTE: the original scraped source had review-widget text ("Comment thread",
 * "…marked this conversation as resolved.", "Outdated") embedded inside the
 * first `messages.create` call, which is not valid code; it is removed here.
 */
async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const mockClient = new MockAnthropic({ apiKey: 'mock-api-key' });
    const client = instrumentAnthropicAiClient(mockClient, { enableTruncation: false, recordInputs: true });

    // Long array messages (would normally be truncated)
    const longContent = 'A'.repeat(50_000);
    await client.messages.create({
      model: 'claude-3-haiku-20240307',
      max_tokens: 100,
      messages: [{ role: 'user', content: longContent }],
    });

    // Long string input (messagesFromParams wraps it in an array)
    const longStringInput = 'B'.repeat(50_000);
    await client.messages.create({
      model: 'claude-3-haiku-20240307',
      max_tokens: 100,
      input: longStringInput,
    });
  });
}

run();
Original file line number Diff line number Diff line change
Expand Up @@ -802,4 +802,42 @@ describe('Anthropic integration', () => {
});
},
);

// Inputs mirror scenario-no-truncation.mjs: 50k characters each, far past the
// point where the integration would normally truncate recorded messages.
const longContent = 'A'.repeat(50_000);
const longStringInput = 'B'.repeat(50_000);

// Expected transaction when `enableTruncation: false`: the full 50k-character
// inputs appear verbatim in the gen_ai input-messages attribute, and the
// original-length attribute records the (single-element) message count.
const EXPECTED_TRANSACTION_NO_TRUNCATION = {
transaction: 'main',
spans: expect.arrayContaining([
// Long array messages should not be truncated
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_INPUT_MESSAGES_ATTRIBUTE]: JSON.stringify([{ role: 'user', content: longContent }]),
[GEN_AI_INPUT_MESSAGES_ORIGINAL_LENGTH_ATTRIBUTE]: 1,
}),
}),
// Long string input should not be truncated (messagesFromParams wraps it in an array)
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_INPUT_MESSAGES_ATTRIBUTE]: JSON.stringify([longStringInput]),
[GEN_AI_INPUT_MESSAGES_ORIGINAL_LENGTH_ATTRIBUTE]: 1,
}),
}),
]),
};

// Runs scenario-no-truncation.mjs (under both ESM and CJS) against the
// matching instrumentation file and asserts the untruncated span payloads.
createEsmAndCjsTests(
__dirname,
'scenario-no-truncation.mjs',
'instrument-no-truncation.mjs',
(createRunner, test) => {
test('does not truncate input messages when enableTruncation is false', async () => {
await createRunner()
// Ignore 'event' envelopes — only the transaction is asserted here.
.ignore('event')
.expect({ transaction: EXPECTED_TRANSACTION_NO_TRUNCATION })
.start()
.completed();
});
},
);
});
10 changes: 5 additions & 5 deletions packages/core/src/tracing/anthropic-ai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,9 +72,9 @@ function extractRequestAttributes(args: unknown[], methodPath: string, operation
* Add private request attributes to spans.
* This is only recorded if recordInputs is true.
*/
function addPrivateRequestAttributes(span: Span, params: Record<string, unknown>): void {
function addPrivateRequestAttributes(span: Span, params: Record<string, unknown>, enableTruncation: boolean): void {
const messages = messagesFromParams(params);
setMessagesAttribute(span, messages);
setMessagesAttribute(span, messages, enableTruncation);

if ('prompt' in params) {
span.setAttributes({ [GEN_AI_PROMPT_ATTRIBUTE]: JSON.stringify(params.prompt) });
Expand Down Expand Up @@ -206,7 +206,7 @@ function handleStreamingRequest<T extends unknown[], R>(
originalResult = originalMethod.apply(context, args) as Promise<R>;

if (options.recordInputs && params) {
addPrivateRequestAttributes(span, params);
addPrivateRequestAttributes(span, params, options.enableTruncation ?? true);
}

return (async () => {
Expand All @@ -228,7 +228,7 @@ function handleStreamingRequest<T extends unknown[], R>(
return startSpanManual(spanConfig, span => {
try {
if (options.recordInputs && params) {
addPrivateRequestAttributes(span, params);
addPrivateRequestAttributes(span, params, options.enableTruncation ?? true);
}
const messageStream = target.apply(context, args);
return instrumentMessageStream(messageStream, span, options.recordOutputs ?? false);
Expand Down Expand Up @@ -289,7 +289,7 @@ function instrumentMethod<T extends unknown[], R>(
originalResult = target.apply(context, args) as Promise<R>;

if (options.recordInputs && params) {
addPrivateRequestAttributes(span, params);
addPrivateRequestAttributes(span, params, options.enableTruncation ?? true);
}

return originalResult.then(
Expand Down
5 changes: 5 additions & 0 deletions packages/core/src/tracing/anthropic-ai/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,11 @@ export interface AnthropicAiOptions {
* Enable or disable output recording.
*/
recordOutputs?: boolean;
/**
* Enable or disable truncation of recorded input messages.
* Defaults to `true`.
*/
enableTruncation?: boolean;
}

export type Message = {
Expand Down
8 changes: 5 additions & 3 deletions packages/core/src/tracing/anthropic-ai/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,14 @@ import {
GEN_AI_INPUT_MESSAGES_ORIGINAL_LENGTH_ATTRIBUTE,
GEN_AI_SYSTEM_INSTRUCTIONS_ATTRIBUTE,
} from '../ai/gen-ai-attributes';
import { extractSystemInstructions, getTruncatedJsonString } from '../ai/utils';
import { extractSystemInstructions, getJsonString, getTruncatedJsonString } from '../ai/utils';
import type { AnthropicAiResponse } from './types';

/**
* Set the messages and messages original length attributes.
* Extracts system instructions before truncation.
*/
export function setMessagesAttribute(span: Span, messages: unknown): void {
export function setMessagesAttribute(span: Span, messages: unknown, enableTruncation: boolean): void {
if (Array.isArray(messages) && messages.length === 0) {
return;
}
Expand All @@ -29,7 +29,9 @@ export function setMessagesAttribute(span: Span, messages: unknown): void {

const filteredLength = Array.isArray(filteredMessages) ? filteredMessages.length : 1;
span.setAttributes({
[GEN_AI_INPUT_MESSAGES_ATTRIBUTE]: getTruncatedJsonString(filteredMessages),
[GEN_AI_INPUT_MESSAGES_ATTRIBUTE]: enableTruncation
? getTruncatedJsonString(filteredMessages)
: getJsonString(filteredMessages),
[GEN_AI_INPUT_MESSAGES_ORIGINAL_LENGTH_ATTRIBUTE]: filteredLength,
});
}
Expand Down
6 changes: 3 additions & 3 deletions packages/core/test/lib/utils/anthropic-utils.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ describe('anthropic-ai-utils', () => {

it('sets length along with truncated value', () => {
const content = 'A'.repeat(200_000);
setMessagesAttribute(span, [{ role: 'user', content }]);
setMessagesAttribute(span, [{ role: 'user', content }], true);
const result = [{ role: 'user', content: 'A'.repeat(19970) }];
expect(mock.attributes).toStrictEqual({
'sentry.sdk_meta.gen_ai.input.messages.original_length': 1,
Expand All @@ -107,15 +107,15 @@ describe('anthropic-ai-utils', () => {
});

it('sets length to 1 for non-array input', () => {
setMessagesAttribute(span, { content: 'hello, world' });
setMessagesAttribute(span, { content: 'hello, world' }, true);
expect(mock.attributes).toStrictEqual({
'sentry.sdk_meta.gen_ai.input.messages.original_length': 1,
'gen_ai.input.messages': '{"content":"hello, world"}',
});
});

it('ignores empty array', () => {
setMessagesAttribute(span, []);
setMessagesAttribute(span, [], true);
expect(mock.attributes).toStrictEqual({
'sentry.sdk_meta.gen_ai.input.messages.original_length': 1,
'gen_ai.input.messages': '{"content":"hello, world"}',
Expand Down
Loading