Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import * as Sentry from '@sentry/node';
import { generateText } from 'ai';

// Mock model that leaves `modelId` unset until generation actually runs.
// This exercises the case where the Sentry op must still be assigned even
// though no model ID is available at span start; the span-name update
// (e.g. 'generate_text gpt-4') is skipped when the model ID is missing.
class LateModelIdMock {
  constructor() {
    this.specificationVersion = 'v1';
    this.provider = 'late-model-provider';
    // Intentionally undefined so the span starts without a model ID.
    this.modelId = undefined;
    this.defaultObjectGenerationMode = 'json';
  }

  async doGenerate() {
    // The model ID only becomes "known" here, during generation — after the
    // instrumentation has already started the span.
    this.modelId = 'late-mock-model-id';

    const result = {
      rawCall: { rawPrompt: null, rawSettings: {} },
      finishReason: 'stop',
      usage: { promptTokens: 5, completionTokens: 10 },
      text: 'Response from late model!',
    };
    return result;
  }
}

// Entry point: wraps a single generateText call in a root 'main' span so the
// instrumentation's child spans have a transaction to attach to.
async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, () =>
    // Returning the promise lets startSpan await it — equivalent to an
    // async callback that awaits generateText itself.
    generateText({
      model: new LateModelIdMock(),
      prompt: 'Test prompt for late model ID',
    }),
  );
}

run();
Original file line number Diff line number Diff line change
Expand Up @@ -699,4 +699,40 @@ describe('Vercel AI integration', () => {
expect(errorEvent!.contexts!.trace!.span_id).toBe(transactionEvent!.contexts!.trace!.span_id);
});
});

// Regression test for spans whose model ID is only set during generation:
// the op must be derived from the span name alone, so it is correct even
// when no model ID is available at span start.
createEsmAndCjsTests(__dirname, 'scenario-late-model-id.mjs', 'instrument.mjs', (createRunner, test) => {
  test('sets op correctly even when model ID is not available at span start', async () => {
    const expectedTransaction = {
      transaction: 'main',
      spans: expect.arrayContaining([
        // The generateText span should have the correct op even though model ID was not available at span start
        expect.objectContaining({
          description: 'generateText',
          op: 'gen_ai.invoke_agent',
          origin: 'auto.vercelai.otel',
          status: 'ok',
          data: expect.objectContaining({
            'sentry.op': 'gen_ai.invoke_agent',
            'sentry.origin': 'auto.vercelai.otel',
            'gen_ai.operation.name': 'ai.generateText',
          }),
        }),
        // The doGenerate span - name stays as 'generateText.doGenerate' since model ID is missing
        expect.objectContaining({
          description: 'generateText.doGenerate',
          op: 'gen_ai.generate_text',
          origin: 'auto.vercelai.otel',
          status: 'ok',
          data: expect.objectContaining({
            'sentry.op': 'gen_ai.generate_text',
            'sentry.origin': 'auto.vercelai.otel',
            'gen_ai.operation.name': 'ai.generateText.doGenerate',
          }),
        }),
      ]),
    };

    await createRunner().expect({ transaction: expectedTransaction }).start().completed();
  });
});
});
35 changes: 35 additions & 0 deletions packages/core/src/tracing/ai/gen-ai-attributes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,41 @@ export const GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE = 'gen_ai.usage.input_to
*/
export const GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE = 'gen_ai.invoke_agent';

/**
 * The `sentry.op` value for text-generation (`…doGenerate`) spans
 */
export const GEN_AI_GENERATE_TEXT_DO_GENERATE_OPERATION_ATTRIBUTE = 'gen_ai.generate_text';

/**
 * The `sentry.op` value for text-streaming (`…doStream`) spans
 */
export const GEN_AI_STREAM_TEXT_DO_STREAM_OPERATION_ATTRIBUTE = 'gen_ai.stream_text';

/**
 * The `sentry.op` value for object-generation (`…doGenerate`) spans
 */
export const GEN_AI_GENERATE_OBJECT_DO_GENERATE_OPERATION_ATTRIBUTE = 'gen_ai.generate_object';

/**
 * The `sentry.op` value for object-streaming (`…doStream`) spans
 */
export const GEN_AI_STREAM_OBJECT_DO_STREAM_OPERATION_ATTRIBUTE = 'gen_ai.stream_object';

/**
 * The `sentry.op` value for embedding (`…doEmbed`) spans
 */
export const GEN_AI_EMBED_DO_EMBED_OPERATION_ATTRIBUTE = 'gen_ai.embed';

/**
 * The `sentry.op` value for batch-embedding (`embedMany.doEmbed`) spans
 */
export const GEN_AI_EMBED_MANY_DO_EMBED_OPERATION_ATTRIBUTE = 'gen_ai.embed_many';

/**
 * The `sentry.op` value for tool-execution spans
 */
export const GEN_AI_EXECUTE_TOOL_OPERATION_ATTRIBUTE = 'gen_ai.execute_tool';

// =============================================================================
// OPENAI-SPECIFIC ATTRIBUTES
// =============================================================================
Expand Down
102 changes: 30 additions & 72 deletions packages/core/src/tracing/vercel-ai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import {
accumulateTokensForParent,
applyAccumulatedTokens,
convertAvailableToolsToJsonString,
getSpanOpFromName,
requestMessagesFromPrompt,
} from './utils';
import type { ProviderMetadata } from './vercel-ai-attributes';
Expand Down Expand Up @@ -64,10 +65,8 @@ function onVercelAiSpanStart(span: Span): void {
return;
}

// The AI model ID must be defined for generate, stream, and embed spans.
// The provider is optional and may not always be present.
const aiModelId = attributes[AI_MODEL_ID_ATTRIBUTE];
if (typeof aiModelId !== 'string' || !aiModelId) {
// Check if this is a Vercel AI span by name pattern.
if (!name.startsWith('ai.')) {
return;
}

Expand Down Expand Up @@ -225,76 +224,35 @@ function processGenerateSpan(span: Span, name: string, attributes: SpanAttribute
}
span.setAttribute('ai.streaming', name.includes('stream'));

// Generate Spans
if (name === 'ai.generateText') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
return;
}

if (name === 'ai.generateText.doGenerate') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.generate_text');
span.updateName(`generate_text ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
return;
}

if (name === 'ai.streamText') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
return;
}

if (name === 'ai.streamText.doStream') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.stream_text');
span.updateName(`stream_text ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
return;
}

if (name === 'ai.generateObject') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
return;
}

if (name === 'ai.generateObject.doGenerate') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.generate_object');
span.updateName(`generate_object ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
return;
}

if (name === 'ai.streamObject') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
return;
}

if (name === 'ai.streamObject.doStream') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.stream_object');
span.updateName(`stream_object ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
return;
}

if (name === 'ai.embed') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
return;
}

if (name === 'ai.embed.doEmbed') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.embed');
span.updateName(`embed ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
return;
// Set the op based on the span name
const op = getSpanOpFromName(name);
if (op) {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, op);
}

if (name === 'ai.embedMany') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
return;
}

if (name === 'ai.embedMany.doEmbed') {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.embed_many');
span.updateName(`embed_many ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
return;
}

if (name.startsWith('ai.stream')) {
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.run');
return;
// Update span names for .do* spans to include the model ID (only if model ID exists)
const modelId = attributes[AI_MODEL_ID_ATTRIBUTE];
if (modelId) {
switch (name) {
case 'ai.generateText.doGenerate':
span.updateName(`generate_text ${modelId}`);
break;
case 'ai.streamText.doStream':
span.updateName(`stream_text ${modelId}`);
break;
case 'ai.generateObject.doGenerate':
span.updateName(`generate_object ${modelId}`);
break;
case 'ai.streamObject.doStream':
span.updateName(`stream_object ${modelId}`);
break;
case 'ai.embed.doEmbed':
span.updateName(`embed ${modelId}`);
break;
case 'ai.embedMany.doEmbed':
span.updateName(`embed_many ${modelId}`);
break;
}
}
}

Expand Down
42 changes: 42 additions & 0 deletions packages/core/src/tracing/vercel-ai/utils.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,15 @@
import type { TraceContext } from '../../types-hoist/context';
import type { Span, SpanAttributes, SpanJSON } from '../../types-hoist/span';
import {
GEN_AI_EMBED_DO_EMBED_OPERATION_ATTRIBUTE,
GEN_AI_EMBED_MANY_DO_EMBED_OPERATION_ATTRIBUTE,
GEN_AI_EXECUTE_TOOL_OPERATION_ATTRIBUTE,
GEN_AI_GENERATE_OBJECT_DO_GENERATE_OPERATION_ATTRIBUTE,
GEN_AI_GENERATE_TEXT_DO_GENERATE_OPERATION_ATTRIBUTE,
GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
GEN_AI_REQUEST_MESSAGES_ATTRIBUTE,
GEN_AI_STREAM_OBJECT_DO_STREAM_OPERATION_ATTRIBUTE,
GEN_AI_STREAM_TEXT_DO_STREAM_OPERATION_ATTRIBUTE,
GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE,
GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE,
} from '../ai/gen-ai-attributes';
Expand Down Expand Up @@ -137,3 +145,37 @@ export function requestMessagesFromPrompt(span: Span, attributes: SpanAttributes
if (messages.length) span.setAttribute(GEN_AI_REQUEST_MESSAGES_ATTRIBUTE, getTruncatedJsonString(messages));
}
}

/**
 * Maps a Vercel AI span name to the corresponding Sentry op.
 *
 * Top-level spans (`ai.generateText`, `ai.embed`, …) map to the agent-invocation
 * op; their `.do*` sub-spans map to the operation-specific ops. Unrecognized
 * `ai.stream*` names fall back to the generic `'ai.run'` op; anything else
 * yields `undefined` (no op is assigned).
 */
export function getSpanOpFromName(name: string): string | undefined {
  const opsBySpanName: Record<string, string> = {
    'ai.generateText': GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
    'ai.streamText': GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
    'ai.generateObject': GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
    'ai.streamObject': GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
    'ai.embed': GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
    'ai.embedMany': GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
    'ai.generateText.doGenerate': GEN_AI_GENERATE_TEXT_DO_GENERATE_OPERATION_ATTRIBUTE,
    'ai.streamText.doStream': GEN_AI_STREAM_TEXT_DO_STREAM_OPERATION_ATTRIBUTE,
    'ai.generateObject.doGenerate': GEN_AI_GENERATE_OBJECT_DO_GENERATE_OPERATION_ATTRIBUTE,
    'ai.streamObject.doStream': GEN_AI_STREAM_OBJECT_DO_STREAM_OPERATION_ATTRIBUTE,
    'ai.embed.doEmbed': GEN_AI_EMBED_DO_EMBED_OPERATION_ATTRIBUTE,
    'ai.embedMany.doEmbed': GEN_AI_EMBED_MANY_DO_EMBED_OPERATION_ATTRIBUTE,
    'ai.toolCall': GEN_AI_EXECUTE_TOOL_OPERATION_ATTRIBUTE,
  };

  const mappedOp = opsBySpanName[name];
  if (mappedOp) {
    return mappedOp;
  }

  // Other streaming span names get a catch-all op; everything else gets none.
  return name.startsWith('ai.stream') ? 'ai.run' : undefined;
}
Loading