diff --git a/packages/datadog-instrumentations/src/anthropic.js b/packages/datadog-instrumentations/src/anthropic.js
index b19dd538564..d03a0c983dd 100644
--- a/packages/datadog-instrumentations/src/anthropic.js
+++ b/packages/datadog-instrumentations/src/anthropic.js
@@ -41,7 +41,7 @@ function wrapCreate (create) {
     const options = arguments[0]
     const stream = options.stream
 
-    const ctx = { options, resource: 'create' }
+    const ctx = { options, resource: 'create', baseUrl: this._client?.baseURL }
 
     return anthropicTracingChannel.start.runStores(ctx, () => {
       let apiPromise
diff --git a/packages/dd-trace/src/llmobs/constants/tags.js b/packages/dd-trace/src/llmobs/constants/tags.js
index 2bf1ab08707..67558487eb7 100644
--- a/packages/dd-trace/src/llmobs/constants/tags.js
+++ b/packages/dd-trace/src/llmobs/constants/tags.js
@@ -23,6 +23,7 @@ module.exports = {
 
   MODEL_NAME: '_ml_obs.meta.model_name',
   MODEL_PROVIDER: '_ml_obs.meta.model_provider',
+  UNKNOWN_MODEL_PROVIDER: 'unknown',
 
   INPUT_DOCUMENTS: '_ml_obs.meta.input.documents',
   INPUT_MESSAGES: '_ml_obs.meta.input.messages',
diff --git a/packages/dd-trace/src/llmobs/plugins/anthropic.js b/packages/dd-trace/src/llmobs/plugins/anthropic.js
index 6089ac5c3a8..de4f731adaf 100644
--- a/packages/dd-trace/src/llmobs/plugins/anthropic.js
+++ b/packages/dd-trace/src/llmobs/plugins/anthropic.js
@@ -1,5 +1,6 @@
 'use strict'
 
+const { UNKNOWN_MODEL_PROVIDER } = require('../constants/tags')
 const LLMObsPlugin = require('./base')
 
 const ALLOWED_METADATA_KEYS = new Set([
@@ -108,16 +109,24 @@ class AnthropicLLMObsPlugin extends LLMObsPlugin {
   }
 
   getLLMObsSpanRegisterOptions (ctx) {
-    const { options } = ctx
+    const { options, baseUrl } = ctx
     const { model } = options
+    const modelProvider = this._getModelProvider(baseUrl)
 
     return {
       kind: 'llm',
       modelName: model,
-      modelProvider: 'anthropic',
+      modelProvider,
     }
   }
 
+  _getModelProvider (baseUrl = '') {
+    if (baseUrl.includes('anthropic')) {
+      return 'anthropic'
+    }
+    return UNKNOWN_MODEL_PROVIDER
+  }
+
   setLLMObsTags (ctx) {
     const span = ctx.currentStore?.span
     if (!span) return
diff --git a/packages/dd-trace/src/llmobs/plugins/openai/index.js b/packages/dd-trace/src/llmobs/plugins/openai/index.js
index f62e4da822c..fd0f4bf6e4b 100644
--- a/packages/dd-trace/src/llmobs/plugins/openai/index.js
+++ b/packages/dd-trace/src/llmobs/plugins/openai/index.js
@@ -5,6 +5,7 @@ const {
   PROMPT_TRACKING_INSTRUMENTATION_METHOD,
   PROMPT_MULTIMODAL,
   INSTRUMENTATION_METHOD_AUTO,
+  UNKNOWN_MODEL_PROVIDER,
 } = require('../../constants/tags')
 const {
   extractChatTemplateFromInstructions,
@@ -90,10 +91,12 @@ class OpenAiLLMObsPlugin extends LLMObsPlugin {
   _getModelProviderAndClient (baseUrl = '') {
     if (baseUrl.includes('azure')) {
       return { modelProvider: 'azure_openai', client: 'AzureOpenAI' }
+    } else if (baseUrl.includes('openai')) {
+      return { modelProvider: 'openai', client: 'OpenAI' }
     } else if (baseUrl.includes('deepseek')) {
       return { modelProvider: 'deepseek', client: 'DeepSeek' }
     }
-    return { modelProvider: 'openai', client: 'OpenAI' }
+    return { modelProvider: UNKNOWN_MODEL_PROVIDER, client: 'OpenAI' }
   }
 
   _extractMetrics (response) {
diff --git a/packages/dd-trace/test/llmobs/plugins/anthropic/index.spec.js b/packages/dd-trace/test/llmobs/plugins/anthropic/index.spec.js
index d693375d3e3..6028e3bf39d 100644
--- a/packages/dd-trace/test/llmobs/plugins/anthropic/index.spec.js
+++ b/packages/dd-trace/test/llmobs/plugins/anthropic/index.spec.js
@@ -69,6 +69,26 @@ describe('Plugin', () => {
         assertLLMObsSpan(apmSpans, llmobsSpans)
       })
 
+      it('sets model_provider to unknown for unrecognized base URLs', async () => {
+        const { Anthropic } = require(`../../../../../../versions/@anthropic-ai/sdk@${version}`).get()
+        const customClient = new Anthropic({ baseURL: 'http://localhost:8000' })
+
+        try {
+          await customClient.messages.create({
+            model: 'claude-3-7-sonnet-20250219',
+            messages: [{ role: 'user', content: 'Hello, world!' }],
+            max_tokens: 100,
+            temperature: 0.5,
+          })
+        } catch {
+          // expected error — no server is running
+        }
+
+        const { llmobsSpans } = await getEvents()
+
+        assert.equal(llmobsSpans[0].meta.model_provider, 'unknown', 'Model provider does not match')
+      })
+
       describe('stream', () => {
         it('creates a span', async () => {
           const stream = await client.messages.create({
diff --git a/packages/dd-trace/test/llmobs/plugins/openai/openaiv4.spec.js b/packages/dd-trace/test/llmobs/plugins/openai/openaiv4.spec.js
index 25e8caa6e0a..a21831ff5e7 100644
--- a/packages/dd-trace/test/llmobs/plugins/openai/openaiv4.spec.js
+++ b/packages/dd-trace/test/llmobs/plugins/openai/openaiv4.spec.js
@@ -575,6 +575,31 @@ describe('integrations', () => {
       assert.equal(llmobsSpans[0].meta.model_provider, 'deepseek', 'Model provider does not match')
     })
 
+    it('sets model_provider to unknown for unrecognized base URLs', async () => {
+      const OpenAI = require(moduleRequirePath).get()
+      const customClient = new OpenAI({
+        apiKey: 'test',
+        baseURL: 'http://localhost:8000',
+      })
+
+      try {
+        await customClient.chat.completions.create({
+          model: 'gpt-3.5-turbo',
+          messages: [
+            { role: 'user', content: 'Hello, OpenAI!' },
+          ],
+          temperature: 0.5,
+          max_tokens: 100,
+        })
+      } catch {
+        // expected error — no server is running
+      }
+
+      const { llmobsSpans } = await getEvents()
+
+      assert.equal(llmobsSpans[0].meta.model_provider, 'unknown', 'Model provider does not match')
+    })
+
     it('submits a chat completion span with cached token metrics', async () => {
       const baseMessages = [{ role: 'system', content: 'You are an expert software engineer '.repeat(200) }]