Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion packages/datadog-instrumentations/src/anthropic.js
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ function wrapCreate (create) {
const options = arguments[0]
const stream = options.stream

const ctx = { options, resource: 'create' }
const ctx = { options, resource: 'create', baseUrl: this._client?.baseURL }

return anthropicTracingChannel.start.runStores(ctx, () => {
let apiPromise
Expand Down
1 change: 1 addition & 0 deletions packages/dd-trace/src/llmobs/constants/tags.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ module.exports = {

MODEL_NAME: '_ml_obs.meta.model_name',
MODEL_PROVIDER: '_ml_obs.meta.model_provider',
UNKNOWN_MODEL_PROVIDER: 'unknown',

INPUT_DOCUMENTS: '_ml_obs.meta.input.documents',
INPUT_MESSAGES: '_ml_obs.meta.input.messages',
Expand Down
13 changes: 11 additions & 2 deletions packages/dd-trace/src/llmobs/plugins/anthropic.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
'use strict'

const { UNKNOWN_MODEL_PROVIDER } = require('../constants/tags')
const LLMObsPlugin = require('./base')

const ALLOWED_METADATA_KEYS = new Set([
Expand Down Expand Up @@ -108,16 +109,24 @@ class AnthropicLLMObsPlugin extends LLMObsPlugin {
}

getLLMObsSpanRegisterOptions (ctx) {
const { options } = ctx
const { options, baseUrl } = ctx
const { model } = options
const modelProvider = this._getModelProvider(baseUrl)

return {
kind: 'llm',
modelName: model,
modelProvider: 'anthropic',
modelProvider,
}
}

_getModelProvider (baseUrl = '') {
if (baseUrl.includes('anthropic')) {
return 'anthropic'
}
return UNKNOWN_MODEL_PROVIDER
}

setLLMObsTags (ctx) {
const span = ctx.currentStore?.span
if (!span) return
Expand Down
5 changes: 4 additions & 1 deletion packages/dd-trace/src/llmobs/plugins/openai/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ const {
PROMPT_TRACKING_INSTRUMENTATION_METHOD,
PROMPT_MULTIMODAL,
INSTRUMENTATION_METHOD_AUTO,
UNKNOWN_MODEL_PROVIDER,
} = require('../../constants/tags')
const {
extractChatTemplateFromInstructions,
Expand Down Expand Up @@ -90,10 +91,12 @@ class OpenAiLLMObsPlugin extends LLMObsPlugin {
_getModelProviderAndClient (baseUrl = '') {
if (baseUrl.includes('azure')) {
return { modelProvider: 'azure_openai', client: 'AzureOpenAI' }
} else if (baseUrl.includes('openai')) {
return { modelProvider: 'openai', client: 'OpenAI' }
} else if (baseUrl.includes('deepseek')) {
return { modelProvider: 'deepseek', client: 'DeepSeek' }
}
return { modelProvider: 'openai', client: 'OpenAI' }
return { modelProvider: UNKNOWN_MODEL_PROVIDER, client: 'OpenAI' }
}

_extractMetrics (response) {
Expand Down
20 changes: 20 additions & 0 deletions packages/dd-trace/test/llmobs/plugins/anthropic/index.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,26 @@ describe('Plugin', () => {
assertLLMObsSpan(apmSpans, llmobsSpans)
})

// Verifies the provider fallback: an Anthropic client pointed at a
// non-Anthropic base URL must produce a span tagged with the 'unknown'
// model provider rather than 'anthropic'.
it('sets model_provider to unknown for unrecognized base URLs', async () => {
const { Anthropic } = require(`../../../../../../versions/@anthropic-ai/sdk@${version}`).get()
// Deliberately target a URL that does not reference Anthropic.
const customClient = new Anthropic({ baseURL: 'http://localhost:8000' })

try {
await customClient.messages.create({
model: 'claude-3-7-sonnet-20250219',
messages: [{ role: 'user', content: 'Hello, world!' }],
max_tokens: 100,
temperature: 0.5,
})
} catch {
// expected error — no server is running; the span is still emitted
}

const { llmobsSpans } = await getEvents()

assert.equal(llmobsSpans[0].meta.model_provider, 'unknown', 'Model provider does not match')
})

describe('stream', () => {
it('creates a span', async () => {
const stream = await client.messages.create({
Expand Down
25 changes: 25 additions & 0 deletions packages/dd-trace/test/llmobs/plugins/openai/openaiv4.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -575,6 +575,31 @@ describe('integrations', () => {
assert.equal(llmobsSpans[0].meta.model_provider, 'deepseek', 'Model provider does not match')
})

// Verifies the provider fallback: an OpenAI client pointed at a base URL
// matching none of the known providers (openai/azure/deepseek) must produce
// a span tagged with the 'unknown' model provider.
it('sets model_provider to unknown for unrecognized base URLs', async () => {
const OpenAI = require(moduleRequirePath).get()
// Deliberately target a URL that matches no known provider substring.
const customClient = new OpenAI({
apiKey: 'test',
baseURL: 'http://localhost:8000',
})

try {
await customClient.chat.completions.create({
model: 'gpt-3.5-turbo',
messages: [
{ role: 'user', content: 'Hello, OpenAI!' },
],
temperature: 0.5,
max_tokens: 100,
})
} catch {
// expected error — no server is running; the span is still emitted
}

const { llmobsSpans } = await getEvents()

assert.equal(llmobsSpans[0].meta.model_provider, 'unknown', 'Model provider does not match')
})

it('submits a chat completion span with cached token metrics', async () => {
const baseMessages = [{ role: 'system', content: 'You are an expert software engineer '.repeat(200) }]

Expand Down
Loading