From 3c3701728dd071e0e850f1d72f83ba6fabcf6a04 Mon Sep 17 00:00:00 2001
From: RulaKhaled
Date: Fri, 5 Sep 2025 10:56:44 +0200
Subject: [PATCH 1/4] feat(core): Improve error handling for Anthropic AI
 instrumentation

---
 .../tracing/anthropic/scenario-errors.mjs     | 115 ++++++++++++
 .../anthropic/scenario-stream-errors.mjs      | 166 ++++++++++++++++++
 .../suites/tracing/anthropic/test.ts          |  97 ++++++++++
 packages/core/src/utils/anthropic-ai/index.ts | 146 +++++++++------
 .../core/src/utils/anthropic-ai/streaming.ts  |  10 +-
 packages/core/src/utils/anthropic-ai/types.ts |  15 +-
 6 files changed, 492 insertions(+), 57 deletions(-)
 create mode 100644 dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-errors.mjs
 create mode 100644 dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-errors.mjs

diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-errors.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-errors.mjs
new file mode 100644
index 000000000000..6417ca87fcbd
--- /dev/null
+++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-errors.mjs
@@ -0,0 +1,115 @@
+import { instrumentAnthropicAiClient } from '@sentry/core';
+import * as Sentry from '@sentry/node';
+
+class MockAnthropic {
+  constructor(config) {
+    this.apiKey = config.apiKey;
+    this.messages = {
+      create: this._messagesCreate.bind(this),
+    };
+    this.models = {
+      retrieve: this._modelsRetrieve.bind(this),
+    };
+  }
+
+  async _messagesCreate(params) {
+    await new Promise(resolve => setTimeout(resolve, 5));
+
+    // Case 1: Invalid tool format error
+    if (params.model === 'invalid-format') {
+      const error = new Error('Invalid format');
+      error.status = 400;
+      error.headers = { 'x-request-id': 'mock-invalid-tool-format-error' };
+      throw error;
+    }
+
+    // Default case (success) - return tool use for successful tool usage test
+    return {
+      id: 'msg_ok',
+      type: 'message',
+      model: params.model,
+      role: 'assistant',
+      content: [
+        {
+          type: 'tool_use',
+          id: 'tool_ok_1',
+          name: 'calculator',
+          input: { expression: '2+2' },
+        },
+      ],
+      stop_reason: 'tool_use',
+      usage: { input_tokens: 7, output_tokens: 9 },
+    };
+  }
+
+  async _modelsRetrieve(modelId) {
+    await new Promise(resolve => setTimeout(resolve, 5));
+
+    // Case for model retrieval error
+    if (modelId === 'nonexistent-model') {
+      const error = new Error('Model not found');
+      error.status = 404;
+      error.headers = { 'x-request-id': 'mock-model-retrieval-error' };
+      throw error;
+    }
+
+    return {
+      id: modelId,
+      name: modelId,
+      created_at: 1715145600,
+      model: modelId,
+    };
+  }
+}
+
+async function run() {
+  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
+    const mockClient = new MockAnthropic({ apiKey: 'mock-api-key' });
+    const client = instrumentAnthropicAiClient(mockClient);
+
+    // 1. Test invalid format error
+    // https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/implement-tool-use#handling-tool-use-and-tool-result-content-blocks
+    try {
+      await client.messages.create({
+        model: 'invalid-format',
+        messages: [
+          {
+            role: 'user',
+            content: [
+              { type: 'text', text: 'Here are the results:' }, // ❌ Text before tool_result
+              { type: 'tool_result', tool_use_id: 'toolu_01' },
+            ],
+          },
+        ],
+      });
+    } catch (e) {
+      // Error expected
+    }
+
+    // 2. Test model retrieval error
+    try {
+      await client.models.retrieve('nonexistent-model');
+    } catch (e) {
+      // Error expected
+    }
+
+    // 3. Test successful tool usage for comparison
+    await client.messages.create({
+      model: 'claude-3-haiku-20240307',
+      messages: [{ role: 'user', content: 'Calculate 2+2' }],
+      tools: [
+        {
+          name: 'calculator',
+          description: 'Perform calculations',
+          input_schema: {
+            type: 'object',
+            properties: { expression: { type: 'string' } },
+            required: ['expression'],
+          },
+        },
+      ],
+    });
+  });
+}
+
+run();
diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-errors.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-errors.mjs
new file mode 100644
index 000000000000..ead5146c492a
--- /dev/null
+++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-errors.mjs
@@ -0,0 +1,166 @@
+import { instrumentAnthropicAiClient } from '@sentry/core';
+import * as Sentry from '@sentry/node';
+
+// Generator for default fallback
+function createMockDefaultFallbackStream() {
+  async function* generator() {
+    yield {
+      type: 'content_block_start',
+      index: 0,
+    };
+    yield {
+      type: 'content_block_delta',
+      index: 0,
+      delta: { text: 'This stream will work fine.' },
+    };
+    yield {
+      type: 'content_block_stop',
+      index: 0,
+    };
+  }
+  return generator();
+}
+
+// Generator that errors midway through streaming
+function createMockMidwayErrorStream() {
+  async function* generator() {
+    // First yield some initial data to start the stream
+    yield {
+      type: 'content_block_start',
+      message: {
+        id: 'msg_error_stream_1',
+        type: 'message',
+        role: 'assistant',
+        model: 'claude-3-haiku-20240307',
+        content: [],
+        usage: { input_tokens: 5 },
+      },
+    };
+
+    // Yield one chunk of content
+    yield { type: 'content_block_delta', delta: { text: 'This stream will ' } };
+
+    // Then throw an error
+    await new Promise(resolve => setTimeout(resolve, 5));
+    throw new Error('Stream interrupted');
+  }
+
+  return generator();
+}
+
+class MockAnthropic {
+  constructor(config) {
+    this.apiKey = config.apiKey;
+
+    this.messages = {
+      create: this._messagesCreate.bind(this),
+      stream: this._messagesStream.bind(this),
+    };
+  }
+
+  // client.messages.create with stream: true
+  async _messagesCreate(params) {
+    await new Promise(resolve => setTimeout(resolve, 5));
+
+    // Error on initialization for 'error-stream-init' model
+    if (params.model === 'error-stream-init') {
+      if (params?.stream === true) {
+        throw new Error('Failed to initialize stream');
+      }
+    }
+
+    // Error midway for 'error-stream-midway' model
+    if (params.model === 'error-stream-midway') {
+      if (params?.stream === true) {
+        return createMockMidwayErrorStream();
+      }
+    }
+
+    // Default fallback
+    return {
+      id: 'msg_mock123',
+      type: 'message',
+      model: params.model,
+      role: 'assistant',
+      content: [{ type: 'text', text: 'Non-stream response' }],
+      usage: { input_tokens: 5, output_tokens: 7 },
+    };
+  }
+
+  // client.messages.stream
+  async _messagesStream(params) {
+    await new Promise(resolve => setTimeout(resolve, 5));
+
+    // Error on initialization for 'error-stream-init' model
+    if (params.model === 'error-stream-init') {
+      throw new Error('Failed to initialize stream');
+    }
+
+    // Error midway for 'error-stream-midway' model
+    if (params.model === 'error-stream-midway') {
+      return createMockMidwayErrorStream();
+    }
+
+    // Default fallback
+    return createMockDefaultFallbackStream();
+  }
+}
+
+async function run() {
+  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
+    const mockClient = new MockAnthropic({ apiKey: 'mock-api-key' });
+    const client = instrumentAnthropicAiClient(mockClient);
+
+    // 1) Error on stream initialization with messages.create
+    try {
+      await client.messages.create({
+        model: 'error-stream-init',
+        messages: [{ role: 'user', content: 'This will fail immediately' }],
+        stream: true,
+      });
+    } catch (e) {
+      // Error expected
+    }
+
+    // 2) Error on stream initialization with messages.stream
+    try {
+      await client.messages.stream({
+        model: 'error-stream-init',
+        messages: [{ role: 'user', content: 'This will also fail immediately' }],
+      });
+    } catch (e) {
+      // Error expected
+    }
+
+    // 3) Error midway through streaming with messages.create
+    try {
+      const stream = await client.messages.create({
+        model: 'error-stream-midway',
+        messages: [{ role: 'user', content: 'This will fail midway' }],
+        stream: true,
+      });
+
+      for await (const _ of stream) {
+        void _;
+      }
+    } catch (e) {
+      // Error expected
+    }
+
+    // 4) Error midway through streaming with messages.stream
+    try {
+      const stream = await client.messages.stream({
+        model: 'error-stream-midway',
+        messages: [{ role: 'user', content: 'This will also fail midway' }],
+      });
+
+      for await (const _ of stream) {
+        void _;
+      }
+    } catch (e) {
+      // Error expected
+    }
+  });
+}
+
+run();
diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts b/dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts
index 35252f574003..27a0a523b927 100644
--- a/dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts
+++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts
@@ -348,4 +348,101 @@ describe('Anthropic integration', () => {
         .completed();
     });
   });
+
+  // Additional error scenarios - Streaming errors
+  const EXPECTED_STREAM_ERROR_SPANS = {
+    transaction: 'main',
+    spans: expect.arrayContaining([
+      // Error with messages.create on stream initialization
+      expect.objectContaining({
+        description: 'messages error-stream-init stream-response',
+        op: 'gen_ai.messages',
+        status: 'internal_error', // Actual status coming from the instrumentation
+        data: expect.objectContaining({
+          'gen_ai.request.model': 'error-stream-init',
+          'gen_ai.request.stream': true,
+        }),
+      }),
+      // Error with messages.stream on stream initialization
+      expect.objectContaining({
+        description: 'messages error-stream-init stream-response',
+        op: 'gen_ai.messages',
+        status: 'internal_error', // Actual status coming from the instrumentation
+        data: expect.objectContaining({
+          'gen_ai.request.model': 'error-stream-init',
+        }),
+      }),
+      // Error midway with messages.create on streaming - note: The stream is started successfully
+      // so we get a successful span with the content that was streamed before the error
+      expect.objectContaining({
+        description: 'messages error-stream-midway stream-response',
+        op: 'gen_ai.messages',
+        status: 'ok',
+        data: expect.objectContaining({
+          'gen_ai.request.model': 'error-stream-midway',
+          'gen_ai.request.stream': true,
+          'gen_ai.response.streaming': true,
+          'gen_ai.response.text': 'This stream will ', // We received some data before error
+        }),
+      }),
+      // Error midway with messages.stream - same behavior, we get a span with the streamed data
+      expect.objectContaining({
+        description: 'messages error-stream-midway stream-response',
+        op: 'gen_ai.messages',
+        status: 'ok',
+        data: expect.objectContaining({
+          'gen_ai.request.model': 'error-stream-midway',
+          'gen_ai.response.streaming': true,
+          'gen_ai.response.text': 'This stream will ', // We received some data before error
+        }),
+      }),
+    ]),
+  };
+
+  createEsmAndCjsTests(__dirname, 'scenario-stream-errors.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
+    test('handles streaming errors correctly', async () => {
+      await createRunner().ignore('event').expect({ transaction: EXPECTED_STREAM_ERROR_SPANS }).start().completed();
+    });
+  });
+
+  // Additional error scenarios - Tool errors and model retrieval errors
+  const EXPECTED_ERROR_SPANS = {
+    transaction: 'main',
+    spans: expect.arrayContaining([
+      // Invalid tool format error
+      expect.objectContaining({
+        description: 'messages invalid-format',
+        op: 'gen_ai.messages',
+        status: 'unknown_error',
+        data: expect.objectContaining({
+          'gen_ai.request.model': 'invalid-format',
+        }),
+      }),
+      // Model retrieval error
+      expect.objectContaining({
+        description: 'models nonexistent-model',
+        op: 'gen_ai.models',
+        status: 'unknown_error',
+        data: expect.objectContaining({
+          'gen_ai.request.model': 'nonexistent-model',
+        }),
+      }),
+      // Successful tool usage (for comparison)
+      expect.objectContaining({
+        description: 'messages claude-3-haiku-20240307',
+        op: 'gen_ai.messages',
+        status: 'ok',
+        data: expect.objectContaining({
+          'gen_ai.request.model': 'claude-3-haiku-20240307',
+          'gen_ai.response.tool_calls': expect.stringContaining('tool_ok_1'),
+        }),
+      }),
+    ]),
+  };
+
+  createEsmAndCjsTests(__dirname, 'scenario-errors.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
+    test('handles tool errors and model retrieval errors correctly', async () => {
+      await createRunner().ignore('event').expect({ transaction: EXPECTED_ERROR_SPANS }).start().completed();
+    });
+  });
 });
diff --git a/packages/core/src/utils/anthropic-ai/index.ts b/packages/core/src/utils/anthropic-ai/index.ts
index c54fdc2a8a9c..c0f968aef8e2 100644
--- a/packages/core/src/utils/anthropic-ai/index.ts
+++ b/packages/core/src/utils/anthropic-ai/index.ts
@@ -90,71 +90,117 @@ function addPrivateRequestAttributes(span: Span, params: Record<string, unknown>): void {
 }
 
 /**
- * Add response attributes to spans
+ * Capture error information from the response
+ * @see https://docs.anthropic.com/en/api/errors#error-shapes
  */
-function addResponseAttributes(span: Span, response: AnthropicAiResponse, recordOutputs?: boolean): void {
-  if (!response || typeof response !== 'object') return;
+function handleResponseError(span: Span, response: AnthropicAiResponse): void {
+  // Check for standard error information in the response
+  if (response.error) {
+    const errorType = response.error.type || 'unknown_error';
+    span.setStatus({ code: SPAN_STATUS_ERROR, message: errorType });
 
-  // Private response attributes that are only recorded if recordOutputs is true.
-  if (recordOutputs) {
-    // Messages.create
-    if ('content' in response) {
-      if (Array.isArray(response.content)) {
-        span.setAttributes({
-          [GEN_AI_RESPONSE_TEXT_ATTRIBUTE]: response.content
-            .map((item: ContentBlock) => item.text)
-            .filter(text => !!text)
-            .join(''),
-        });
+    captureException(new Error(`anthropic_error: ${errorType}`), {
+      mechanism: {
+        handled: false,
+        type: 'auto.ai.anthropic',
+        data: {
+          function: 'anthropic_error',
+          error_message: response.error.message,
+        },
+      },
+    });
+  }
+}
 
-        const toolCalls: Array<ContentBlock> = [];
+/**
+ * Add content attributes when recordOutputs is enabled
+ */
+function addContentAttributes(span: Span, response: AnthropicAiResponse): void {
+  // Messages.create
+  if ('content' in response) {
+    if (Array.isArray(response.content)) {
+      span.setAttributes({
+        [GEN_AI_RESPONSE_TEXT_ATTRIBUTE]: response.content
+          .map((item: ContentBlock) => item.text)
+          .filter(text => !!text)
+          .join(''),
+      });
 
-        for (const item of response.content) {
-          if (item.type === 'tool_use' || item.type === 'server_tool_use') {
-            toolCalls.push(item);
-          }
-        }
-        if (toolCalls.length > 0) {
-          span.setAttributes({ [GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE]: JSON.stringify(toolCalls) });
+      const toolCalls: Array<ContentBlock> = [];
+
+      for (const item of response.content) {
+        if (item.type === 'tool_use' || item.type === 'server_tool_use') {
+          toolCalls.push(item);
         }
       }
+      if (toolCalls.length > 0) {
+        span.setAttributes({ [GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE]: JSON.stringify(toolCalls) });
+      }
     }
-    // Completions.create
-    if ('completion' in response) {
-      span.setAttributes({ [GEN_AI_RESPONSE_TEXT_ATTRIBUTE]: response.completion });
-    }
-    // Models.countTokens
-    if ('input_tokens' in response) {
-      span.setAttributes({ [GEN_AI_RESPONSE_TEXT_ATTRIBUTE]: JSON.stringify(response.input_tokens) });
-    }
   }
+  // Completions.create
+  if ('completion' in response) {
+    span.setAttributes({ [GEN_AI_RESPONSE_TEXT_ATTRIBUTE]: response.completion });
+  }
+  // Models.countTokens
+  if ('input_tokens' in response) {
+    span.setAttributes({ [GEN_AI_RESPONSE_TEXT_ATTRIBUTE]: JSON.stringify(response.input_tokens) });
+  }
+}
 
-  span.setAttributes({
-    [GEN_AI_RESPONSE_ID_ATTRIBUTE]: response.id,
-  });
-  span.setAttributes({
-    [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: response.model,
-  });
-  if ('created' in response && typeof response.created === 'number') {
+/**
+ * Add basic metadata attributes from the response
+ */
+function addMetadataAttributes(span: Span, response: AnthropicAiResponse): void {
+  if ('id' in response && 'model' in response) {
     span.setAttributes({
-      [ANTHROPIC_AI_RESPONSE_TIMESTAMP_ATTRIBUTE]: new Date(response.created * 1000).toISOString(),
+      [GEN_AI_RESPONSE_ID_ATTRIBUTE]: response.id,
+      [GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: response.model,
     });
+
+    if ('created' in response && typeof response.created === 'number') {
+      span.setAttributes({
+        [ANTHROPIC_AI_RESPONSE_TIMESTAMP_ATTRIBUTE]: new Date(response.created * 1000).toISOString(),
+      });
+    }
+    if ('created_at' in response && typeof response.created_at === 'number') {
+      span.setAttributes({
+        [ANTHROPIC_AI_RESPONSE_TIMESTAMP_ATTRIBUTE]: new Date(response.created_at * 1000).toISOString(),
+      });
+    }
+
+    if ('usage' in response && response.usage) {
+      setTokenUsageAttributes(
+        span,
+        response.usage.input_tokens,
+        response.usage.output_tokens,
+        response.usage.cache_creation_input_tokens,
+        response.usage.cache_read_input_tokens,
+      );
+    }
   }
-  if ('created_at' in response && typeof response.created_at === 'number') {
-    span.setAttributes({
-      [ANTHROPIC_AI_RESPONSE_TIMESTAMP_ATTRIBUTE]: new Date(response.created_at * 1000).toISOString(),
-    });
+}
+
+/**
+ * Add response attributes to spans
+ * @internal Exported for testing
+ */
+export function addResponseAttributes(span: Span, response: AnthropicAiResponse, recordOutputs?: boolean): void {
+  if (!response || typeof response !== 'object') return;
+
+  // capture error, do not add attributes if error (they shouldn't exist)
+  if ('type' in response && response.type === 'error') {
+    handleResponseError(span, response);
+    return;
   }
-  if (response.usage) {
-    setTokenUsageAttributes(
-      span,
-      response.usage.input_tokens,
-      response.usage.output_tokens,
-      response.usage.cache_creation_input_tokens,
-      response.usage.cache_read_input_tokens,
-    );
+  // Private response attributes that are only recorded if recordOutputs is true.
+  if (recordOutputs) {
+    addContentAttributes(span, response);
   }
+
+  // Add basic metadata attributes
+  addMetadataAttributes(span, response);
 }
 
 /**
diff --git a/packages/core/src/utils/anthropic-ai/streaming.ts b/packages/core/src/utils/anthropic-ai/streaming.ts
index c48dc8a6def7..c68c69898dd8 100644
--- a/packages/core/src/utils/anthropic-ai/streaming.ts
+++ b/packages/core/src/utils/anthropic-ai/streaming.ts
@@ -60,19 +60,17 @@ function isErrorEvent(event: AnthropicAiStreamingEvent, span: Span): boolean {
   // If the event is an error, set the span status and capture the error
   // These error events are not rejected by the API by default, but are sent as metadata of the response
   if (event.type === 'error') {
-    const message = event.error?.message ?? 'internal_error';
-    span.setStatus({ code: SPAN_STATUS_ERROR, message });
-    captureException(new Error(`anthropic_stream_error: ${message}`), {
+    const errorType = event.error?.type ?? 'unknown_error';
+    span.setStatus({ code: SPAN_STATUS_ERROR, message: errorType });
+    captureException(new Error(`anthropic_stream_error: ${errorType}`), {
       mechanism: {
         handled: false,
         type: 'auto.ai.anthropic',
         data: {
           function: 'anthropic_stream_error',
+          error_message: event.error?.message ?? 'internal_error',
         },
       },
-      data: {
-        function: 'anthropic_stream_error',
-      },
     });
     return true;
   }
diff --git a/packages/core/src/utils/anthropic-ai/types.ts b/packages/core/src/utils/anthropic-ai/types.ts
index 6ab2e790e651..7968ccf7d47e 100644
--- a/packages/core/src/utils/anthropic-ai/types.ts
+++ b/packages/core/src/utils/anthropic-ai/types.ts
@@ -27,7 +27,17 @@ export type ContentBlock = {
   tool_use_id?: string;
 };
 
-export type AnthropicAiResponse = {
+// @see https://docs.anthropic.com/en/api/errors#error-shapes
+export type MessageError = {
+  type: 'error';
+  error: {
+    type: string;
+    message: string;
+  };
+  request_id?: string;
+};
+
+type SuccessfulResponse = {
   [key: string]: unknown; // Allow for additional unknown properties
   id: string;
   model: string;
@@ -43,8 +53,11 @@ export type AnthropicAiResponse = {
     cache_creation_input_tokens: number;
     cache_read_input_tokens: number;
   };
+  error?: never; // This should help TypeScript infer the type correctly
 };
 
+export type AnthropicAiResponse = SuccessfulResponse | MessageError;
+
 /**
  * Basic interface for Anthropic AI client with only the instrumented methods
  * This provides type safety while being generic enough to work with different client implementations

From 4f26ed0701b0545df39f9f55c9317fc5fbe95088 Mon Sep 17 00:00:00 2001
From: RulaKhaled
Date: Fri, 5 Sep 2025 11:01:27 +0200
Subject: [PATCH 2/4] remove unnecessary comments

---
 packages/core/src/utils/anthropic-ai/index.ts | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/packages/core/src/utils/anthropic-ai/index.ts b/packages/core/src/utils/anthropic-ai/index.ts
index c0f968aef8e2..052282b53fce 100644
--- a/packages/core/src/utils/anthropic-ai/index.ts
+++ b/packages/core/src/utils/anthropic-ai/index.ts
@@ -94,7 +94,6 @@ function addPrivateRequestAttributes(span: Span, params: Record<string, unknown>): void {
  * @see https://docs.anthropic.com/en/api/errors#error-shapes
  */
 function handleResponseError(span: Span, response: AnthropicAiResponse): void {
-  // Check for standard error information in the response
   if (response.error) {
     const errorType = response.error.type || 'unknown_error';
     span.setStatus({ code: SPAN_STATUS_ERROR, message: errorType });
@@ -183,9 +182,8 @@ function addMetadataAttributes(span: Span, response: AnthropicAiResponse): void
 
 /**
  * Add response attributes to spans
- * @internal Exported for testing
  */
-export function addResponseAttributes(span: Span, response: AnthropicAiResponse, recordOutputs?: boolean): void {
+function addResponseAttributes(span: Span, response: AnthropicAiResponse, recordOutputs?: boolean): void {
   if (!response || typeof response !== 'object') return;
 
   // capture error, do not add attributes if error (they shouldn't exist)

From 19b599ab4836083b384ce6ce8055c6f46a036345 Mon Sep 17 00:00:00 2001
From: RulaKhaled
Date: Fri, 5 Sep 2025 13:17:51 +0200
Subject: [PATCH 3/4] resolve some comments

---
 .../suites/tracing/anthropic/scenario-errors.mjs        | 4 ++--
 .../suites/tracing/anthropic/scenario-stream-errors.mjs | 8 ++++----
 packages/core/src/utils/anthropic-ai/types.ts           | 2 +-
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-errors.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-errors.mjs
index 6417ca87fcbd..5501ed1a01ff 100644
--- a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-errors.mjs
+++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-errors.mjs
@@ -82,14 +82,14 @@ async function run() {
           },
         ],
       });
-    } catch (e) {
+    } catch {
       // Error expected
     }
 
     // 2. Test model retrieval error
     try {
       await client.models.retrieve('nonexistent-model');
-    } catch (e) {
+    } catch {
       // Error expected
     }
 
diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-errors.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-errors.mjs
index ead5146c492a..9112f96363ce 100644
--- a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-errors.mjs
+++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-errors.mjs
@@ -118,7 +118,7 @@ async function run() {
         messages: [{ role: 'user', content: 'This will fail immediately' }],
         stream: true,
       });
-    } catch (e) {
+    } catch {
       // Error expected
     }
 
@@ -128,7 +128,7 @@ async function run() {
         model: 'error-stream-init',
         messages: [{ role: 'user', content: 'This will also fail immediately' }],
       });
-    } catch (e) {
+    } catch {
       // Error expected
     }
 
@@ -143,7 +143,7 @@ async function run() {
       for await (const _ of stream) {
         void _;
       }
-    } catch (e) {
+    } catch {
       // Error expected
     }
 
@@ -157,7 +157,7 @@ async function run() {
       for await (const _ of stream) {
         void _;
       }
-    } catch (e) {
+    } catch {
       // Error expected
     }
   });
diff --git a/packages/core/src/utils/anthropic-ai/types.ts b/packages/core/src/utils/anthropic-ai/types.ts
index 7968ccf7d47e..124b7c7f73be 100644
--- a/packages/core/src/utils/anthropic-ai/types.ts
+++ b/packages/core/src/utils/anthropic-ai/types.ts
@@ -34,7 +34,7 @@ export type MessageError = {
     type: string;
     message: string;
   };
-  request_id?: string;
+  request_id: string;
 };
 
 type SuccessfulResponse = {

From a6d6ceb79e712ab0084559f6971f5e5d4dd311f4 Mon Sep 17 00:00:00 2001
From: RulaKhaled
Date: Fri, 5 Sep 2025 13:40:57 +0200
Subject: [PATCH 4/4] just pass error, no need to create new exception

---
 packages/core/src/utils/anthropic-ai/index.ts     | 11 +++--------
 packages/core/src/utils/anthropic-ai/streaming.ts | 11 +++--------
 2 files changed, 6 insertions(+), 16 deletions(-)

diff --git a/packages/core/src/utils/anthropic-ai/index.ts b/packages/core/src/utils/anthropic-ai/index.ts
index 052282b53fce..563724d98c5c 100644
--- a/packages/core/src/utils/anthropic-ai/index.ts
+++ b/packages/core/src/utils/anthropic-ai/index.ts
@@ -95,17 +95,12 @@ function addPrivateRequestAttributes(span: Span, params: Record<string, unknown>): void {
  */
 function handleResponseError(span: Span, response: AnthropicAiResponse): void {
   if (response.error) {
-    const errorType = response.error.type || 'unknown_error';
-    span.setStatus({ code: SPAN_STATUS_ERROR, message: errorType });
+    span.setStatus({ code: SPAN_STATUS_ERROR, message: response.error.type || 'unknown_error' });
 
-    captureException(new Error(`anthropic_error: ${errorType}`), {
+    captureException(response.error, {
       mechanism: {
         handled: false,
-        type: 'auto.ai.anthropic',
-        data: {
-          function: 'anthropic_error',
-          error_message: response.error.message,
-        },
+        type: 'auto.ai.anthropic.anthropic_error',
       },
     });
   }
diff --git a/packages/core/src/utils/anthropic-ai/streaming.ts b/packages/core/src/utils/anthropic-ai/streaming.ts
index c68c69898dd8..cd30d99ad09e 100644
--- a/packages/core/src/utils/anthropic-ai/streaming.ts
+++ b/packages/core/src/utils/anthropic-ai/streaming.ts
@@ -60,16 +60,11 @@ function isErrorEvent(event: AnthropicAiStreamingEvent, span: Span): boolean {
   // If the event is an error, set the span status and capture the error
   // These error events are not rejected by the API by default, but are sent as metadata of the response
   if (event.type === 'error') {
-    const errorType = event.error?.type ?? 'unknown_error';
-    span.setStatus({ code: SPAN_STATUS_ERROR, message: errorType });
-    captureException(new Error(`anthropic_stream_error: ${errorType}`), {
+    span.setStatus({ code: SPAN_STATUS_ERROR, message: event.error?.type ?? 'unknown_error' });
+    captureException(event.error, {
       mechanism: {
         handled: false,
-        type: 'auto.ai.anthropic',
-        data: {
-          function: 'anthropic_stream_error',
-          error_message: event.error?.message ?? 'internal_error',
-        },
+        type: 'auto.ai.anthropic.anthropic_error',
       },
     });
     return true;