Skip to content
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import { ChatAnthropic } from '@langchain/anthropic';
import { HumanMessage, SystemMessage } from '@langchain/core/messages';
import { createReactAgent } from '@langchain/langgraph/prebuilt';
import * as Sentry from '@sentry/node';
import express from 'express';

/**
 * Spins up a throwaway express server that impersonates the Anthropic
 * Messages API. Every POST to /v1/messages gets the same canned single-text
 * completion, so the scenario never touches the network.
 *
 * @returns {Promise<import('http').Server>} resolves with the server once it
 *   is listening on an OS-assigned port.
 */
function startMockAnthropicServer() {
  const mockApi = express();
  mockApi.use(express.json());

  // Canned completion; the requested model is echoed back so the client's
  // response parsing sees a consistent payload.
  mockApi.post('/v1/messages', (request, response) => {
    response.json({
      id: 'msg_react_agent_123',
      type: 'message',
      role: 'assistant',
      content: [
        {
          type: 'text',
          text: 'Paris is the capital of France.',
        },
      ],
      model: request.body.model,
      stop_reason: 'end_turn',
      stop_sequence: null,
      usage: {
        input_tokens: 20,
        output_tokens: 10,
      },
    });
  });

  // Port 0 asks the OS for any free port; resolve with the handle so the
  // caller can read the actual address and close the server later.
  return new Promise(resolve => {
    const httpServer = mockApi.listen(0, () => resolve(httpServer));
  });
}

/**
 * Exercises a tool-less createReactAgent against the mock Anthropic backend
 * inside a root `main` span, then flushes Sentry and tears the server down.
 */
async function run() {
  const server = await startMockAnthropicServer();
  const { port } = server.address();
  const baseUrl = `http://localhost:${port}`;

  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    // Point the Anthropic client at the local mock instead of the real API.
    const llm = new ChatAnthropic({
      model: 'claude-3-5-sonnet-20241022',
      apiKey: 'mock-api-key',
      clientOptions: {
        baseURL: baseUrl,
      },
    });

    const agent = createReactAgent({ llm, tools: [], name: 'helpful_assistant' });

    const instructions = new SystemMessage('You are a helpful assistant.');
    const question = new HumanMessage('What is the capital of France?');
    await agent.invoke({ messages: [instructions, question] });
  });

  // Give buffered spans up to 2s to reach the transport before shutdown.
  await Sentry.flush(2000);
  server.close();
}

run();
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import { tool } from '@langchain/core/tools';
import { ChatAnthropic } from '@langchain/anthropic';
import { createReactAgent } from '@langchain/langgraph/prebuilt';
import { HumanMessage } from '@langchain/core/messages';
import * as Sentry from '@sentry/node';
import express from 'express';
import { z } from 'zod';

// Tracks how many completions the mock has served so it can walk the
// scripted tool-calling conversation: add -> multiply -> final answer.
let callCount = 0;

/**
 * Starts a mock Anthropic Messages API that scripts a two-step ReAct loop:
 * the first request returns a tool_use for "add", the second a tool_use for
 * "multiply", and every later request the final text answer.
 *
 * @returns {Promise<import('http').Server>} resolves with the server once it
 *   is listening on an OS-assigned port.
 */
function startMockAnthropicServer() {
  const app = express();
  app.use(express.json());

  app.post('/v1/messages', (req, res) => {
    callCount++;
    const model = req.body.model;

    if (callCount === 1) {
      // First call: model decides to call the "add" tool
      res.json({
        id: 'msg_1',
        type: 'message',
        role: 'assistant',
        content: [
          {
            type: 'tool_use',
            id: 'toolu_add_1',
            name: 'add',
            input: { a: 3, b: 5 },
          },
        ],
        model: model,
        stop_reason: 'tool_use',
        // Real Anthropic responses always carry stop_sequence; include it for
        // parity with the other mock servers in this suite.
        stop_sequence: null,
        usage: { input_tokens: 20, output_tokens: 10 },
      });
    } else if (callCount === 2) {
      // Second call: model sees add result=8, calls "multiply"
      res.json({
        id: 'msg_2',
        type: 'message',
        role: 'assistant',
        content: [
          {
            type: 'tool_use',
            id: 'toolu_mul_1',
            name: 'multiply',
            input: { a: 8, b: 4 },
          },
        ],
        model: model,
        stop_reason: 'tool_use',
        stop_sequence: null,
        usage: { input_tokens: 30, output_tokens: 10 },
      });
    } else {
      // Third call: model returns final answer
      res.json({
        id: 'msg_3',
        type: 'message',
        role: 'assistant',
        content: [{ type: 'text', text: 'The result is 32.' }],
        model: model,
        stop_reason: 'end_turn',
        stop_sequence: null,
        usage: { input_tokens: 40, output_tokens: 10 },
      });
    }
  });

  // Port 0 lets the OS pick a free port; resolve with the server handle.
  return new Promise(resolve => {
    const server = app.listen(0, () => resolve(server));
  });
}

/**
 * Runs a createReactAgent equipped with "add" and "multiply" tools against
 * the scripted mock backend, all inside a root `main` span, then flushes
 * Sentry and shuts the mock server down.
 */
async function run() {
  const server = await startMockAnthropicServer();
  const baseUrl = `http://localhost:${server.address().port}`;

  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const llm = new ChatAnthropic({
      model: 'claude-3-5-sonnet-20241022',
      apiKey: 'mock-api-key',
      clientOptions: { baseURL: baseUrl },
    });

    // Both tools accept the same { a, b } numeric payload.
    const binaryOperands = z.object({ a: z.number(), b: z.number() });

    const addTool = tool(async ({ a, b }) => String(a + b), {
      name: 'add',
      description: 'Add two numbers',
      schema: binaryOperands,
    });

    const multiplyTool = tool(async ({ a, b }) => String(a * b), {
      name: 'multiply',
      description: 'Multiply two numbers',
      schema: binaryOperands,
    });

    const agent = createReactAgent({
      llm,
      tools: [addTool, multiplyTool],
      name: 'math_assistant',
    });

    await agent.invoke({
      messages: [new HumanMessage('Calculate (3 + 5) * 4')],
    });
  });

  // Give buffered spans up to 2s to reach the transport before shutdown.
  await Sentry.flush(2000);
  server.close();
}

run();
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: true,
  transport: loggingTransport,
  // Drop transactions produced by the mock express server itself so the
  // test assertions only see the scenario's own `main` transaction.
  beforeSendTransaction: event => (event.transaction?.includes('/v1/messages') ? null : event),
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import { ChatAnthropic } from '@langchain/anthropic';
import { END, MessagesAnnotation, START, StateGraph } from '@langchain/langgraph';
import * as Sentry from '@sentry/node';
import express from 'express';

/**
 * Boots a minimal express stand-in for the Anthropic Messages API that
 * answers every POST /v1/messages with a fixed one-line completion.
 *
 * @returns {Promise<import('http').Server>} resolves with the server once it
 *   is listening on an OS-assigned port.
 */
function startMockAnthropicServer() {
  const app = express();
  app.use(express.json());

  app.post('/v1/messages', (req, res) => {
    const payload = {
      id: 'msg_stategraph_chat_1',
      type: 'message',
      role: 'assistant',
      content: [{ type: 'text', text: 'Hello from mock.' }],
      model: req.body.model,
      stop_reason: 'end_turn',
      usage: { input_tokens: 5, output_tokens: 3 },
    };
    res.json(payload);
  });

  // Port 0 lets the OS pick a free port; resolve with the server handle.
  return new Promise(resolve => {
    const server = app.listen(0, () => resolve(server));
  });
}

/**
 * Builds a bare StateGraph (no createReactAgent wrapper) with a single LLM
 * node and invokes it once inside a root `main` span, then flushes Sentry
 * and tears the mock server down.
 */
async function run() {
  const server = await startMockAnthropicServer();
  const baseUrl = `http://localhost:${server.address().port}`;

  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    const llm = new ChatAnthropic({
      model: 'claude-3-5-sonnet-20241022',
      apiKey: 'mock-api-key',
      clientOptions: { baseURL: baseUrl },
    });

    // Single graph node: forward the accumulated messages to the model and
    // append its reply to the state.
    async function callLlm(state) {
      const response = await llm.invoke(state.messages);
      return { messages: [response] };
    }

    const graph = new StateGraph(MessagesAnnotation)
      .addNode('agent', callLlm)
      .addEdge(START, 'agent')
      .addEdge('agent', END)
      .compile({ name: 'plain_assistant' });

    await graph.invoke({ messages: [{ role: 'user', content: 'Hi.' }] });
  });

  // Give buffered spans up to 2s to reach the transport before shutdown.
  await Sentry.flush(2000);
  server.close();
}

run();
108 changes: 108 additions & 0 deletions dev-packages/node-integration-tests/suites/tracing/langgraph/test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import {
GEN_AI_RESPONSE_TEXT_ATTRIBUTE,
GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE,
GEN_AI_SYSTEM_INSTRUCTIONS_ATTRIBUTE,
GEN_AI_TOOL_NAME_ATTRIBUTE,
GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE,
GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE,
GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE,
Expand Down Expand Up @@ -445,4 +446,111 @@ describe('LangGraph integration', () => {
});
},
);

// createReactAgent tests
// createReactAgent tests
// Expected span tree for a tool-less createReactAgent run: one invoke_agent
// span wrapping the mocked HTTP round trip plus a single chat span, all
// attributed to the `helpful_assistant` agent. The array order mirrors span
// emission order, so it is load-bearing for the matcher.
const EXPECTED_TRANSACTION_REACT_AGENT = {
  transaction: 'main',
  spans: [
    // Outer agent invocation span emitted by the LangGraph integration.
    expect.objectContaining({
      data: expect.objectContaining({
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'invoke_agent',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.langgraph',
        [GEN_AI_AGENT_NAME_ATTRIBUTE]: 'helpful_assistant',
        [GEN_AI_PIPELINE_NAME_ATTRIBUTE]: 'helpful_assistant',
      }),
      description: 'invoke_agent helpful_assistant',
      op: 'gen_ai.invoke_agent',
      origin: 'auto.ai.langgraph',
      status: 'ok',
    }),
    // Request to the mock Anthropic server.
    expect.objectContaining({ op: 'http.client' }),
    // Chat span must carry the agent attribution too.
    expect.objectContaining({
      data: expect.objectContaining({
        [GEN_AI_AGENT_NAME_ATTRIBUTE]: 'helpful_assistant',
      }),
      op: 'gen_ai.chat',
    }),
  ],
};

createEsmAndCjsTests(__dirname, 'agent-scenario.mjs', 'instrument-agent.mjs', (createRunner, test) => {
  test('should instrument createReactAgent with agent and chat spans', { timeout: 30000 }, async () => {
    // Non-transaction events are irrelevant here; only match the span tree.
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_REACT_AGENT });
    await runner.start().completed();
  });
});

// createReactAgent with tools - verifies tool execution spans
// createReactAgent with tools - verifies tool execution spans
// The scripted mock makes the agent loop three times (add -> multiply ->
// final answer), so the expected span order is: invoke_agent, then three
// rounds of http.client + chat with an execute_tool span after each of the
// first two rounds. Order is load-bearing for the matcher.
const EXPECTED_TRANSACTION_REACT_AGENT_TOOLS = {
  transaction: 'main',
  spans: [
    expect.objectContaining({
      data: expect.objectContaining({
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'invoke_agent',
        [GEN_AI_AGENT_NAME_ATTRIBUTE]: 'math_assistant',
      }),
      op: 'gen_ai.invoke_agent',
      status: 'ok',
    }),
    // Round 1: model requests the "add" tool.
    expect.objectContaining({ op: 'http.client' }),
    expect.objectContaining({ op: 'gen_ai.chat' }),
    expect.objectContaining({
      data: expect.objectContaining({
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'execute_tool',
        [GEN_AI_TOOL_NAME_ATTRIBUTE]: 'add',
        'gen_ai.tool.type': 'function',
      }),
      description: 'execute_tool add',
      op: 'gen_ai.execute_tool',
      status: 'ok',
    }),
    // Round 2: model requests the "multiply" tool.
    expect.objectContaining({ op: 'http.client' }),
    expect.objectContaining({ op: 'gen_ai.chat' }),
    expect.objectContaining({
      data: expect.objectContaining({
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'execute_tool',
        [GEN_AI_TOOL_NAME_ATTRIBUTE]: 'multiply',
        'gen_ai.tool.type': 'function',
      }),
      description: 'execute_tool multiply',
      op: 'gen_ai.execute_tool',
      status: 'ok',
    }),
    // Round 3: final text answer, no tool call.
    expect.objectContaining({ op: 'http.client' }),
    expect.objectContaining({ op: 'gen_ai.chat' }),
  ],
};

createEsmAndCjsTests(__dirname, 'agent-tools-scenario.mjs', 'instrument-agent.mjs', (createRunner, test) => {
  test('should create tool execution spans for createReactAgent with tools', { timeout: 30000 }, async () => {
    // Non-transaction events are irrelevant here; only match the span tree.
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_REACT_AGENT_TOOLS });
    await runner.start().completed();
  });
});
Comment thread
cursor[bot] marked this conversation as resolved.

createEsmAndCjsTests(__dirname, 'scenario-stategraph-chat.mjs', 'instrument-agent.mjs', (createRunner, test) => {
  test('auto-injects langchain handler for plain StateGraph and emits chat spans', { timeout: 30000 }, async () => {
    // The plain StateGraph scenario has no createReactAgent wrapper, so only
    // assert on the single chat span and its agent attribution.
    const assertSingleChatSpan = event => {
      const chatSpans = (event.spans ?? []).filter(span => span.op === 'gen_ai.chat');
      expect(chatSpans).toHaveLength(1);
      expect(chatSpans[0]?.data).toMatchObject({
        [GEN_AI_AGENT_NAME_ATTRIBUTE]: 'plain_assistant',
      });
    };

    await createRunner()
      .ignore('event')
      .expect({ transaction: assertSingleChatSpan })
      .start()
      .completed();
  });
});
});
1 change: 1 addition & 0 deletions packages/browser/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ export {
instrumentOpenAiClient,
instrumentGoogleGenAIClient,
instrumentLangGraph,
instrumentCreateReactAgent,
createLangChainCallbackHandler,
instrumentLangChainEmbeddings,
logger,
Expand Down
1 change: 1 addition & 0 deletions packages/cloudflare/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,7 @@ export {
withStreamedSpan,
spanStreamingIntegration,
instrumentLangGraph,
instrumentCreateReactAgent,
} from '@sentry/core';

export { withSentry } from './withSentry';
Expand Down
Loading
Loading