Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import * as Sentry from '@sentry/node';
import { loggingTransport } from '@sentry-internal/node-integration-tests';

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  release: '1.0',
  tracesSampleRate: 1.0,
  sendDefaultPii: true,
  transport: loggingTransport,
  integrations: [
    Sentry.langChainIntegration({
      // Disable truncation so oversized messages are recorded verbatim.
      enableTruncation: false,
      recordInputs: true,
      recordOutputs: true,
    }),
  ],
  beforeSendTransaction: event => {
    // Filter out mock express server transactions.
    // `transaction` is optional on the event, so guard with optional
    // chaining instead of dereferencing it unconditionally.
    if (event.transaction?.includes('/v1/messages') || event.transaction?.includes('/v1/embeddings')) {
      return null;
    }
    return event;
  },
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import { ChatAnthropic } from '@langchain/anthropic';
import * as Sentry from '@sentry/node';
import express from 'express';

/**
 * Boots an in-process Express server that mimics the Anthropic Messages API.
 * The single `/v1/messages` route always replies with a fixed, minimal
 * completion payload, echoing back the requested model name.
 *
 * Listens on an OS-assigned ephemeral port (0) and resolves with the
 * underlying HTTP server once it is accepting connections.
 */
function startMockAnthropicServer() {
  const mockApp = express();
  // Large body limit so the 50k-character test prompt is accepted.
  mockApp.use(express.json({ limit: '10mb' }));

  mockApp.post('/v1/messages', (request, response) => {
    const payload = {
      id: 'msg_no_truncation_test',
      type: 'message',
      role: 'assistant',
      content: [{ type: 'text', text: 'Response' }],
      model: request.body.model,
      stop_reason: 'end_turn',
      stop_sequence: null,
      usage: { input_tokens: 10, output_tokens: 5 },
    };
    response.json(payload);
  });

  return new Promise(onReady => {
    const httpServer = mockApp.listen(0, () => onReady(httpServer));
  });
}

/**
 * Drives the scenario: starts the mock Anthropic server, issues one chat
 * invocation containing an oversized user message (to exercise the
 * enableTruncation: false path), flushes Sentry, and shuts the server down.
 */
async function run() {
  const server = await startMockAnthropicServer();
  const baseUrl = `http://localhost:${server.address().port}`;

  try {
    await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
      const model = new ChatAnthropic({
        model: 'claude-3-5-sonnet-20241022',
        apiKey: 'mock-api-key',
        clientOptions: {
          baseURL: baseUrl,
        },
      });

      // Long content that would normally be truncated
      const longContent = 'A'.repeat(50_000);
      await model.invoke([
        { role: 'user', content: longContent },
        { role: 'assistant', content: 'Some reply' },
        { role: 'user', content: 'Follow-up question' },
      ]);
    });

    await Sentry.flush(2000);
  } finally {
    // Always release the listening socket, even when the invocation or the
    // flush rejects — otherwise the open handle keeps the process alive.
    server.close();
  }
}

run();
Original file line number Diff line number Diff line change
Expand Up @@ -549,4 +549,37 @@ describe('LangChain integration', () => {
.completed();
});
});

// Mirrors the 50k-char message built in scenario-no-truncation.mjs; the
// expectation below requires the span attribute to contain it verbatim.
const longContent = 'A'.repeat(50_000);

// Expected transaction when truncation is disabled: the full input-message
// JSON (including the untruncated 50k-char content) must be recorded, and
// the original message count (3) must be reported alongside it.
const EXPECTED_TRANSACTION_NO_TRUNCATION = {
  transaction: 'main',
  spans: expect.arrayContaining([
    expect.objectContaining({
      data: expect.objectContaining({
        [GEN_AI_INPUT_MESSAGES_ATTRIBUTE]: JSON.stringify([
          { role: 'user', content: longContent },
          { role: 'assistant', content: 'Some reply' },
          { role: 'user', content: 'Follow-up question' },
        ]),
        [GEN_AI_INPUT_MESSAGES_ORIGINAL_LENGTH_ATTRIBUTE]: 3,
      }),
    }),
  ]),
};

// Runs the scenario under both ESM and CJS; `.ignore('event')` skips
// non-transaction envelopes so only the transaction is matched.
createEsmAndCjsTests(
  __dirname,
  'scenario-no-truncation.mjs',
  'instrument-no-truncation.mjs',
  (createRunner, test) => {
    test('does not truncate input messages when enableTruncation is false', async () => {
      await createRunner()
        .ignore('event')
        .expect({ transaction: EXPECTED_TRANSACTION_NO_TRUNCATION })
        .start()
        .completed();
    });
  },
);
});
6 changes: 6 additions & 0 deletions packages/core/.oxlintrc.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,12 @@
"rules": {
"sdk/no-unsafe-random-apis": "off"
}
},
{
"files": ["src/tracing/langchain/utils.ts"],
"rules": {
"max-lines": "off"
}
}
],
"ignorePatterns": ["rollup.npm.config.mjs"]
Expand Down
3 changes: 3 additions & 0 deletions packages/core/src/tracing/langchain/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ import {
*/
export function createLangChainCallbackHandler(options: LangChainOptions = {}): LangChainCallbackHandler {
const { recordInputs, recordOutputs } = resolveAIRecordingOptions(options);
const enableTruncation = options.enableTruncation ?? true;

// Internal state - single instance tracks all spans
const spanMap = new Map<string, Span>();
Expand Down Expand Up @@ -89,6 +90,7 @@ export function createLangChainCallbackHandler(options: LangChainOptions = {}):
llm as LangChainSerialized,
prompts,
recordInputs,
enableTruncation,
invocationParams,
metadata,
);
Expand Down Expand Up @@ -127,6 +129,7 @@ export function createLangChainCallbackHandler(options: LangChainOptions = {}):
llm as LangChainSerialized,
messages as LangChainMessage[][],
recordInputs,
enableTruncation,
invocationParams,
metadata,
);
Expand Down
6 changes: 6 additions & 0 deletions packages/core/src/tracing/langchain/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,12 @@ export interface LangChainOptions {
* @default false (respects sendDefaultPii option)
*/
recordOutputs?: boolean;

/**
* Enable or disable truncation of recorded input messages.
* Defaults to `true`.
*/
enableTruncation?: boolean;
}

/**
Expand Down
18 changes: 13 additions & 5 deletions packages/core/src/tracing/langchain/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,7 @@ import {
GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE,
} from '../ai/gen-ai-attributes';
import { isContentMedia, stripInlineMediaFromSingleMessage } from '../ai/mediaStripping';
import { truncateGenAiMessages } from '../ai/messageTruncation';
import { extractSystemInstructions } from '../ai/utils';
import { extractSystemInstructions, getJsonString, getTruncatedJsonString } from '../ai/utils';
import { LANGCHAIN_ORIGIN, ROLE_MAP } from './constants';
import type { LangChainLLMResult, LangChainMessage, LangChainSerialized } from './types';

Expand Down Expand Up @@ -284,6 +283,7 @@ export function extractLLMRequestAttributes(
llm: LangChainSerialized,
prompts: string[],
recordInputs: boolean,
enableTruncation: boolean,
invocationParams?: Record<string, unknown>,
langSmithMetadata?: Record<string, unknown>,
): Record<string, SpanAttributeValue> {
Expand All @@ -295,7 +295,11 @@ export function extractLLMRequestAttributes(
if (recordInputs && Array.isArray(prompts) && prompts.length > 0) {
setIfDefined(attrs, GEN_AI_INPUT_MESSAGES_ORIGINAL_LENGTH_ATTRIBUTE, prompts.length);
const messages = prompts.map(p => ({ role: 'user', content: p }));
setIfDefined(attrs, GEN_AI_INPUT_MESSAGES_ATTRIBUTE, asString(messages));
setIfDefined(
attrs,
GEN_AI_INPUT_MESSAGES_ATTRIBUTE,
enableTruncation ? getTruncatedJsonString(messages) : getJsonString(messages),
);
}

return attrs;
Expand All @@ -314,6 +318,7 @@ export function extractChatModelRequestAttributes(
llm: LangChainSerialized,
langChainMessages: LangChainMessage[][],
recordInputs: boolean,
enableTruncation: boolean,
invocationParams?: Record<string, unknown>,
langSmithMetadata?: Record<string, unknown>,
): Record<string, SpanAttributeValue> {
Expand All @@ -334,8 +339,11 @@ export function extractChatModelRequestAttributes(
const filteredLength = Array.isArray(filteredMessages) ? filteredMessages.length : 0;
setIfDefined(attrs, GEN_AI_INPUT_MESSAGES_ORIGINAL_LENGTH_ATTRIBUTE, filteredLength);

const truncated = truncateGenAiMessages(filteredMessages as unknown[]);
setIfDefined(attrs, GEN_AI_INPUT_MESSAGES_ATTRIBUTE, asString(truncated));
setIfDefined(
attrs,
GEN_AI_INPUT_MESSAGES_ATTRIBUTE,
enableTruncation ? getTruncatedJsonString(filteredMessages) : getJsonString(filteredMessages),
);
}

return attrs;
Expand Down
2 changes: 1 addition & 1 deletion packages/core/test/lib/tracing/langchain-utils.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ describe('extractChatModelRequestAttributes with multimodal content', () => {
],
];

const attrs = extractChatModelRequestAttributes(serialized, messages, true);
const attrs = extractChatModelRequestAttributes(serialized, messages, true, true);
const inputMessages = attrs[GEN_AI_INPUT_MESSAGES_ATTRIBUTE] as string | undefined;

expect(inputMessages).toBeDefined();
Expand Down
Loading