Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
## Unreleased

- "You miss 100 percent of the chances you don't take. — Wayne Gretzky" — Michael Scott
- feat(core): Support embedding APIs in google-genai ([#19797](https://github.com/getsentry/sentry-javascript/pull/19797))

Adds instrumentation for the Google GenAI [`embedContent`](https://ai.google.dev/gemini-api/docs/embeddings) API, creating `gen_ai.embeddings` spans.

## 10.46.0

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,24 @@ export class MockGoogleGenAI {
},
};
},
embedContent: async (...args) => {
const params = args[0];
await new Promise(resolve => setTimeout(resolve, 10));

if (params.model === 'error-model') {
const error = new Error('Model not found');
error.status = 404;
throw error;
}

return {
embeddings: [
{
values: [0.1, 0.2, 0.3, 0.4, 0.5],
},
],
};
},
generateContentStream: async () => {
// Return a promise that resolves to an async generator
return (async function* () {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,3 +30,11 @@ const response = await chat.sendMessage({
});

console.log('Received response', response);

// Test embedContent
const embedRequest = {
  model: 'text-embedding-004',
  contents: 'Hello world',
};
const embedResponse = await client.models.embedContent(embedRequest);

console.log('Received embed response', embedResponse);
Original file line number Diff line number Diff line change
Expand Up @@ -29,3 +29,26 @@ sentryTest('manual Google GenAI instrumentation sends gen_ai transactions', asyn
'gen_ai.request.model': 'gemini-1.5-pro',
});
});

sentryTest('manual Google GenAI instrumentation sends embeddings transactions', async ({ getLocalTestUrl, page }) => {
  // Wait for the transaction whose name contains the embeddings model.
  const pendingTransaction = waitForTransactionRequest(
    page,
    event => event.transaction?.includes('text-embedding-004') === true,
  );

  await page.goto(await getLocalTestUrl({ testDir: __dirname }));

  const transactionEvent = envelopeRequestParser(await pendingTransaction);

  // Verify it's a gen_ai embeddings transaction
  expect(transactionEvent.transaction).toBe('embeddings text-embedding-004');
  expect(transactionEvent.contexts?.trace?.op).toBe('gen_ai.embeddings');
  expect(transactionEvent.contexts?.trace?.origin).toBe('auto.ai.google_genai');
  expect(transactionEvent.contexts?.trace?.data).toMatchObject({
    'gen_ai.operation.name': 'embeddings',
    'gen_ai.system': 'google_genai',
    'gen_ai.request.model': 'text-embedding-004',
  });
});
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,13 @@ export default Sentry.withSentry(
],
});

return new Response(JSON.stringify({ chatResponse, modelResponse }));
// Test 3: models.embedContent
const embedResponse = await client.models.embedContent({
model: 'text-embedding-004',
contents: 'Hello world',
});

return new Response(JSON.stringify({ chatResponse, modelResponse, embedResponse }));
},
},
);
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ export class MockGoogleGenAI implements GoogleGenAIClient {
public models: {
generateContent: (...args: unknown[]) => Promise<GoogleGenAIResponse>;
generateContentStream: (...args: unknown[]) => Promise<AsyncGenerator<GoogleGenAIResponse, any, unknown>>;
embedContent: (...args: unknown[]) => Promise<{ embeddings: { values: number[] }[] }>;
};
public chats: {
create: (...args: unknown[]) => GoogleGenAIChat;
Expand Down Expand Up @@ -49,6 +50,20 @@ export class MockGoogleGenAI implements GoogleGenAIClient {
},
};
},
embedContent: async (...args: unknown[]) => {
  const [request] = args as [{ model: string; contents?: unknown }];
  // Simulate a small amount of network latency before responding.
  await new Promise(done => setTimeout(done, 10));

  // The special 'error-model' name deterministically triggers the 404 path.
  if (request.model === 'error-model') {
    const notFound = new Error('Model not found');
    (notFound as Error & { status?: number }).status = 404;
    throw notFound;
  }

  // Fixed five-dimensional embedding vector for deterministic assertions.
  return { embeddings: [{ values: [0.1, 0.2, 0.3, 0.4, 0.5] }] };
},
generateContentStream: async () => {
// Return a promise that resolves to an async generator
return (async function* (): AsyncGenerator<GoogleGenAIResponse, any, unknown> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,19 @@ it('traces Google GenAI chat creation and message sending', async () => {
op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
}),
// Fourth span - models.embedContent
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
}),
description: 'embeddings text-embedding-004',
op: 'gen_ai.embeddings',
origin: 'auto.ai.google_genai',
}),
]),
);
})
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
import { GoogleGenAI } from '@google/genai';
import * as Sentry from '@sentry/node';
import express from 'express';

/**
 * Starts an Express server that mimics the Google GenAI REST API for
 * embeddings. Resolves with the listening server (ephemeral port).
 *
 * NOTE(review): the scenario calls `models.embedContent`, while this mock
 * only registers the `:batchEmbedContents` route — presumably the
 * @google/genai SDK sends embedContent requests to that REST endpoint;
 * confirm against the SDK transport if this route ever stops matching.
 * The `\\:` escapes the colon so Express does not treat it as a
 * route-parameter marker.
 */
function startMockGoogleGenAIServer() {
  const app = express();
  app.use(express.json());

  app.post('/v1beta/models/:model\\:batchEmbedContents', (req, res) => {
    const model = req.params.model;

    // 'error-model' exercises the instrumentation's error path.
    if (model === 'error-model') {
      res.status(404).set('x-request-id', 'mock-request-123').end('Model not found');
      return;
    }

    // Deterministic five-dimensional embedding for assertions.
    res.send({
      embeddings: [
        {
          values: [0.1, 0.2, 0.3, 0.4, 0.5],
        },
      ],
    });
  });

  return new Promise((resolve, reject) => {
    const server = app.listen(0, () => {
      resolve(server);
    });
    // Reject instead of hanging the test run forever if the server
    // fails to bind (the original promise could never settle on error).
    server.once('error', reject);
  });
}

/**
 * Drives three embedContent calls (string contents, error model, array
 * contents) against the mock server inside a wrapping `main` span so the
 * instrumentation under test can attach child spans.
 */
async function run() {
  const server = await startMockGoogleGenAIServer();

  try {
    await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
      const client = new GoogleGenAI({
        apiKey: 'mock-api-key',
        httpOptions: { baseUrl: `http://localhost:${server.address().port}` },
      });

      // Test 1: Basic embedContent with string contents
      await client.models.embedContent({
        model: 'text-embedding-004',
        contents: 'What is the capital of France?',
      });

      // Test 2: Error handling
      try {
        await client.models.embedContent({
          model: 'error-model',
          contents: 'This will fail',
        });
      } catch {
        // Expected error
      }

      // Test 3: embedContent with array contents
      await client.models.embedContent({
        model: 'text-embedding-004',
        contents: [
          {
            role: 'user',
            parts: [{ text: 'First input text' }],
          },
          {
            role: 'user',
            parts: [{ text: 'Second input text' }],
          },
        ],
      });
    });
  } finally {
    // Always shut the mock server down — even if a request throws
    // unexpectedly — so the process can exit instead of hanging on the
    // open listener (the original leaked the server on any error).
    server.close();
  }
}

run();
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '@sentry/core';
import { afterAll, describe, expect } from 'vitest';
import {
GEN_AI_EMBEDDINGS_INPUT_ATTRIBUTE,
GEN_AI_INPUT_MESSAGES_ATTRIBUTE,
GEN_AI_INPUT_MESSAGES_ORIGINAL_LENGTH_ATTRIBUTE,
GEN_AI_OPERATION_NAME_ATTRIBUTE,
Expand Down Expand Up @@ -601,4 +602,124 @@ describe('Google GenAI integration', () => {
});
},
);

// Expected spans when sendDefaultPii is DISABLED: the embeddings input must
// NOT appear on span data. Each `data:` value is a plain object (not
// objectContaining), so vitest requires an exact match — any extra
// attribute, such as a leaked PII input, fails the test.
const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE_EMBEDDINGS = {
  transaction: 'main',
  spans: expect.arrayContaining([
    // First span - embedContent with string contents
    expect.objectContaining({
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
      },
      description: 'embeddings text-embedding-004',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'ok',
    }),
    // Second span - embedContent error model
    expect.objectContaining({
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'error-model',
      },
      description: 'embeddings error-model',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      // The mock server returns HTTP 404 for 'error-model'.
      status: 'internal_error',
    }),
    // Third span - embedContent with array contents
    expect.objectContaining({
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
      },
      description: 'embeddings text-embedding-004',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'ok',
    }),
  ]),
};

// Expected spans when sendDefaultPii is ENABLED: in addition to the base
// attributes, each span carries the request contents under
// GEN_AI_EMBEDDINGS_INPUT_ATTRIBUTE — verbatim for string contents,
// JSON-stringified for array contents. `data:` objects match exactly.
const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE_EMBEDDINGS = {
  transaction: 'main',
  spans: expect.arrayContaining([
    // First span - embedContent with PII
    expect.objectContaining({
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
        [GEN_AI_EMBEDDINGS_INPUT_ATTRIBUTE]: 'What is the capital of France?',
      },
      description: 'embeddings text-embedding-004',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'ok',
    }),
    // Second span - embedContent error model with PII
    expect.objectContaining({
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'error-model',
        // The input is captured even when the request itself fails.
        [GEN_AI_EMBEDDINGS_INPUT_ATTRIBUTE]: 'This will fail',
      },
      description: 'embeddings error-model',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'internal_error',
    }),
    // Third span - embedContent with array contents and PII
    expect.objectContaining({
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
        // Array contents are serialized to a JSON string.
        [GEN_AI_EMBEDDINGS_INPUT_ATTRIBUTE]:
          '[{"role":"user","parts":[{"text":"First input text"}]},{"role":"user","parts":[{"text":"Second input text"}]}]',
      },
      description: 'embeddings text-embedding-004',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'ok',
    }),
  ]),
};

createEsmAndCjsTests(__dirname, 'scenario-embeddings.mjs', 'instrument.mjs', (createRunner, test) => {
  test('creates google genai embeddings spans with sendDefaultPii: false', async () => {
    // Run the embeddings scenario without PII and assert the expected spans.
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE_EMBEDDINGS });
    await runner.start().completed();
  });
});

createEsmAndCjsTests(__dirname, 'scenario-embeddings.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
  test('creates google genai embeddings spans with sendDefaultPii: true', async () => {
    // Same scenario, but with PII enabled the input attributes must be present.
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE_EMBEDDINGS });
    await runner.start().completed();
  });
});
});
1 change: 1 addition & 0 deletions packages/core/src/tracing/google-genai/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ export const GOOGLE_GENAI_INTEGRATION_NAME = 'Google_GenAI';
export const GOOGLE_GENAI_METHOD_REGISTRY = {
'models.generateContent': { operation: 'generate_content' },
'models.generateContentStream': { operation: 'generate_content', streaming: true },
'models.embedContent': { operation: 'embeddings' },
'chats.create': { operation: 'chat' },
// chat.* paths are built by createDeepProxy when it proxies the chat instance with CHAT_PATH as base
'chat.sendMessage': { operation: 'chat' },
Expand Down
Loading
Loading