From bf6cdaf1cf7064a9b8c847cbdf72d93e5db79369 Mon Sep 17 00:00:00 2001 From: Ruben Bridgewater Date: Thu, 2 Apr 2026 01:40:01 +0200 Subject: [PATCH] refactor(config): generate runtime config from supported metadata Config model: Build the tracer runtime config from `supported-configurations.json`. The metadata now defines defaults, aliases, transforms, allowed values, and internal property mapping. Env vars, stable config, code options, remote config, and telemetry now go through the same parsing and normalization path instead of separate logic. Remote config and fallback: Normalize remote config into the same local option names before apply. When a remote value is removed, restore the earlier code value or the default for that one option, instead of keeping an old remote value. This also keeps config source tracking correct when remote config changes over time. Telemetry: Report the same normalized config view that the tracer really uses. Telemetry now keeps the source of each value, sends standard config names such as `DD_*` and `OTEL_*`, and serializes URLs, arrays, objects, functions, and rule sets in one consistent format for config change events and extended heartbeats. DD and OTEL precedence: Use one precedence model for Datadog and OpenTelemetry settings. Datadog config still wins when both are set. Generic `OTEL_EXPORTER_OTLP_*` endpoint, header, protocol, and timeout values now fill in logs and metrics config when the specific setting is not set. `b3` extraction keeps Datadog multi-header behavior when it comes from `DD_TRACE_PROPAGATION_STYLE`, and keeps single-header behavior when it comes from `OTEL_PROPAGATORS`. Config-specific updates: Parse and validate more individual settings through shared parsers. 
This now covers sample rates, propagation styles and behavior, `OTEL_RESOURCE_ATTRIBUTES`, `DD_TRACE_HEADER_TAGS` spacing, JSON sampling rules, AppSec blocked template file paths, and `DD_GRPC_CLIENT_ERROR_STATUSES` / `DD_GRPC_SERVER_ERROR_STATUSES` range values. Invalid values now warn the same way and fall back more predictably. Calculated behavior: Recalculate derived settings from the remembered source of each value. This keeps service and tag inference, socket and DogStatsD defaults, OTEL logs vs log injection, runtime metrics vs `OTEL_METRICS_EXPORTER=none`, AppSec-driven resource renaming, CI and serverless toggles, Lambda flush behavior, and agentless tracing overrides consistent when inputs change. Runtime consumers: Move profiling, logging, agent URL selection, session propagation, and other runtime code to read the resolved config instead of reparsing env vars. This also keeps proxy startup working when tracing is off but dynamic instrumentation or AppSec standalone still need runtime hooks, and makes profiling exporter and profiler settings follow the same config resolution path. 
--- benchmark/sirun/exporting-pipeline/index.js | 2 +- benchmark/sirun/statsd.js | 2 +- integration-tests/init.spec.js | 4 +- integration-tests/opentelemetry.spec.js | 145 +- integration-tests/package-guardrails.spec.js | 2 +- integration-tests/telemetry.spec.js | 8 +- packages/datadog-plugin-aws-sdk/src/base.js | 7 +- .../datadog-plugin-grpc/test/client.spec.js | 2 +- .../datadog-plugin-grpc/test/server.spec.js | 2 +- packages/dd-trace/src/agent/url.js | 4 +- .../evidence-redaction/sensitive-handler.js | 2 +- .../iast/vulnerabilities-formatter/utils.js | 2 +- packages/dd-trace/src/appsec/remote_config.js | 1 + packages/dd-trace/src/config/defaults.js | 448 ++-- packages/dd-trace/src/config/helper.js | 69 +- packages/dd-trace/src/config/index.js | 2069 +++++------------ packages/dd-trace/src/config/parsers.js | 254 ++ packages/dd-trace/src/config/remote_config.js | 61 +- .../src/config/supported-configurations.json | 771 +++--- packages/dd-trace/src/dogstatsd.js | 2 +- packages/dd-trace/src/index.js | 7 +- packages/dd-trace/src/llmobs/sdk.js | 18 +- packages/dd-trace/src/log/index.js | 81 +- packages/dd-trace/src/log/writer.js | 26 +- .../src/opentracing/propagation/text_map.js | 13 +- packages/dd-trace/src/profiling/config.js | 314 +-- .../dd-trace/src/profiling/exporter_cli.js | 5 +- .../profiling/exporters/event_serializer.js | 2 +- packages/dd-trace/src/profiling/profiler.js | 55 +- .../src/profiling/profilers/events.js | 5 +- packages/dd-trace/src/telemetry/index.js | 5 +- packages/dd-trace/src/telemetry/send-data.js | 24 +- .../src/telemetry/session-propagation.js | 63 +- packages/dd-trace/src/telemetry/telemetry.js | 197 +- packages/dd-trace/src/util.js | 9 - packages/dd-trace/test/agent/info.spec.js | 6 +- packages/dd-trace/test/agent/url.spec.js | 10 +- .../sensitive-handler.spec.js | 2 +- .../exporters/agent-proxy/agent-proxy.spec.js | 4 +- .../exporters/ci-visibility-exporter.spec.js | 4 +- .../config/generated-config-types.spec.js | 3 +- 
packages/dd-trace/test/config/helper.spec.js | 19 + packages/dd-trace/test/config/index.spec.js | 1250 ++++++---- .../test/config/remote_config.spec.js | 8 +- packages/dd-trace/test/dogstatsd.spec.js | 4 +- packages/dd-trace/test/helpers/config.js | 10 +- .../dd-trace/test/llmobs/sdk/index.spec.js | 4 +- packages/dd-trace/test/log.spec.js | 312 ++- .../test/opentelemetry/metrics.spec.js | 70 +- .../opentracing/propagation/text_map.spec.js | 78 +- .../dd-trace/test/profiling/config.spec.js | 592 +++-- .../dd-trace/test/profiling/profiler.spec.js | 34 +- .../test/profiling/profilers/events.spec.js | 12 +- packages/dd-trace/test/proxy.spec.js | 13 +- .../dd-trace/test/telemetry/index.spec.js | 128 +- .../dd-trace/test/telemetry/send-data.spec.js | 67 +- .../telemetry/session-propagation.spec.js | 320 +-- 57 files changed, 3574 insertions(+), 4057 deletions(-) create mode 100644 packages/dd-trace/src/config/parsers.js diff --git a/benchmark/sirun/exporting-pipeline/index.js b/benchmark/sirun/exporting-pipeline/index.js index b8588c62973..f3395667f00 100644 --- a/benchmark/sirun/exporting-pipeline/index.js +++ b/benchmark/sirun/exporting-pipeline/index.js @@ -7,7 +7,7 @@ const SpanProcessor = require('../../../packages/dd-trace/src/span_processor') const Exporter = require('../../../packages/dd-trace/src/exporters/agent/index') const PrioritySampler = require('../../../packages/dd-trace/src/priority_sampler') const id = require('../../../packages/dd-trace/src/id') -const defaults = require('../../../packages/dd-trace/src/config/defaults') +const { defaults } = require('../../../packages/dd-trace/src/config/defaults') const config = { url: `http://${defaults.hostname}:${defaults.port}`, diff --git a/benchmark/sirun/statsd.js b/benchmark/sirun/statsd.js index 462889874f1..dc71e6d71a3 100644 --- a/benchmark/sirun/statsd.js +++ b/benchmark/sirun/statsd.js @@ -1,7 +1,7 @@ 'use strict' const dgram = require('dgram') -const defaults = 
require('../../packages/dd-trace/src/config/defaults') +const { defaults } = require('../../packages/dd-trace/src/config/defaults') const port = process.env.SIRUN_STATSD_PORT || defaults['dogstatsd.port'] class StatsD { diff --git a/integration-tests/init.spec.js b/integration-tests/init.spec.js index 30674a338d8..79340120571 100644 --- a/integration-tests/init.spec.js +++ b/integration-tests/init.spec.js @@ -273,7 +273,9 @@ describe('init.js', () => { // or on 18.0.0 in particular. if (semver.satisfies(process.versions.node, '>=14.13.1')) { describe('initialize.mjs', () => { - setShouldKill(false) + // Node 20.0.0 can leave short-lived loader-based children alive after they + // print the expected output, so terminate them after a short grace period. + setShouldKill(process.versions.node === '20.0.0') useSandbox() stubTracerIfNeeded() diff --git a/integration-tests/opentelemetry.spec.js b/integration-tests/opentelemetry.spec.js index 2e4e93e1f84..b0feff6328e 100644 --- a/integration-tests/opentelemetry.spec.js +++ b/integration-tests/opentelemetry.spec.js @@ -50,10 +50,12 @@ function nearNow (ts, now = Date.now(), range = 1000) { return delta < range && delta >= 0 } -describe('opentelemetry', () => { - let agent +describe('opentelemetry', function () { + this.timeout(20000) + + let agent = /** @type {FakeAgent | null} */ (null) let proc - let cwd + let cwd = /** @type {string} */ ('') const timeout = 5000 const dependencies = [ '@opentelemetry/api@1.8.0', @@ -75,14 +77,14 @@ describe('opentelemetry', () => { after(async () => { await stopProc(proc) - await agent.stop() + await agent?.stop() }) it("should not capture telemetry DD and OTEL vars don't conflict", async () => { proc = fork(join(cwd, 'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', TIMEOUT: '1500', @@ -114,7 +116,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 
'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', TIMEOUT: '1500', @@ -147,42 +149,20 @@ describe('opentelemetry', () => { const otelHiding = metrics.series.filter(({ metric }) => metric === 'otel.env.hiding') const otelInvalid = metrics.series.filter(({ metric }) => metric === 'otel.env.invalid') - assert.strictEqual(otelHiding.length, 9) - assert.strictEqual(otelInvalid.length, 0) - - assert.deepStrictEqual(otelHiding[0].tags, [ - 'config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level', - ]) - assert.deepStrictEqual(otelHiding[1].tags, [ - 'config_datadog:dd_trace_propagation_style', 'config_opentelemetry:otel_propagators', - ]) - assert.deepStrictEqual(otelHiding[2].tags, [ - 'config_datadog:dd_service', 'config_opentelemetry:otel_service_name', - ]) - - assert.deepStrictEqual(otelHiding[3].tags, [ - 'config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler', - ]) - - assert.deepStrictEqual(otelHiding[4].tags, [ - 'config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler_arg', - ]) - - assert.deepStrictEqual(otelHiding[5].tags, [ - 'config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter', - ]) - - assert.deepStrictEqual(otelHiding[6].tags, [ - 'config_datadog:dd_runtime_metrics_enabled', 'config_opentelemetry:otel_metrics_exporter', - ]) - assert.deepStrictEqual(otelHiding[7].tags, [ - 'config_datadog:dd_tags', 'config_opentelemetry:otel_resource_attributes', - ]) + assert.deepStrictEqual(sortMetricTags(otelHiding), sortMetricTags([ + ['config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level'], + ['config_datadog:dd_trace_propagation_style', 'config_opentelemetry:otel_propagators'], + ['config_datadog:dd_service', 'config_opentelemetry:otel_service_name'], + ['config_datadog:dd_trace_sample_rate', 
'config_opentelemetry:otel_traces_sampler'], + ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler_arg'], + ['config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter'], + ['config_datadog:dd_runtime_metrics_enabled', 'config_opentelemetry:otel_metrics_exporter'], + ['config_datadog:dd_tags', 'config_opentelemetry:otel_resource_attributes'], + ['config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled'], + ])) - assert.deepStrictEqual(otelHiding[8].tags, [ - 'config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled', - ]) + assert.deepStrictEqual(sortMetricTags(otelInvalid), []) for (const metric of otelHiding) { assert.strictEqual(metric.points[0][1], 1) @@ -194,7 +174,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', TIMEOUT: '1500', @@ -221,47 +201,20 @@ describe('opentelemetry', () => { const otelHiding = metrics.series.filter(({ metric }) => metric === 'otel.env.hiding') const otelInvalid = metrics.series.filter(({ metric }) => metric === 'otel.env.invalid') - assert.strictEqual(otelHiding.length, 1) - assert.strictEqual(otelInvalid.length, 8) - - assert.deepStrictEqual(otelHiding[0].tags, [ - 'config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled', - ]) - - assert.deepStrictEqual(otelInvalid[0].tags, [ - 'config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level', - ]) - - assert.deepStrictEqual(otelInvalid[1].tags, [ - 'config_datadog:dd_trace_sample_rate', - 'config_opentelemetry:otel_traces_sampler', - ]) - - assert.deepStrictEqual(otelInvalid[2].tags, [ - 'config_datadog:dd_trace_sample_rate', - 'config_opentelemetry:otel_traces_sampler_arg', - ]) - assert.deepStrictEqual(otelInvalid[3].tags, [ - 'config_datadog:dd_trace_enabled', 
'config_opentelemetry:otel_traces_exporter', - ]) - - assert.deepStrictEqual(otelInvalid[4].tags, [ - 'config_datadog:dd_runtime_metrics_enabled', - 'config_opentelemetry:otel_metrics_exporter', - ]) - - assert.deepStrictEqual(otelInvalid[5].tags, [ - 'config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled', - ]) - - assert.deepStrictEqual(otelInvalid[6].tags, [ - 'config_opentelemetry:otel_logs_exporter', - ]) - - assert.deepStrictEqual(otelInvalid[7].tags, [ - 'config_datadog:dd_trace_propagation_style', - 'config_opentelemetry:otel_propagators', - ]) + assert.deepStrictEqual(sortMetricTags(otelHiding), sortMetricTags([ + ['config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled'], + ])) + + assert.deepStrictEqual(sortMetricTags(otelInvalid), sortMetricTags([ + ['config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level'], + ['config_datadog:dd_trace_propagation_style', 'config_opentelemetry:otel_propagators'], + ['config_opentelemetry:otel_logs_exporter'], + ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler'], + ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler_arg'], + ['config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter'], + ['config_datadog:dd_runtime_metrics_enabled', 'config_opentelemetry:otel_metrics_exporter'], + ['config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled'], + ])) for (const metric of otelInvalid) { assert.strictEqual(metric.points[0][1], 1) @@ -273,7 +226,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, }, }) await check(agent, proc, timeout, ({ payload }) => { @@ -292,7 +245,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, 
DD_TRACE_OTEL_ENABLED: '1', DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', TIMEOUT: '1500', @@ -334,7 +287,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/auto-instrumentation.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', SERVER_PORT, DD_TRACE_DISABLED_INSTRUMENTATIONS: 'http,dns,express,net', @@ -378,7 +331,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/server.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, }, }) await check(agent, proc, timeout, ({ payload }) => { @@ -407,7 +360,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/auto-instrumentation.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', SERVER_PORT, DD_TRACE_DISABLED_INSTRUMENTATIONS: 'http,dns,express,net', @@ -456,18 +409,12 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/env-var.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, }, }) await check(agent, proc, timeout, ({ payload }) => { - // Should have a single trace with a single span - assert.strictEqual(payload.length, 1) - const [trace] = payload - assert.strictEqual(trace.length, 1) - const [span] = trace - - // Should be the expected otel span - assert.strictEqual(span.name, 'otel-sub') + const trace = payload.find(trace => trace.length === 1 && trace[0].name === 'otel-sub') + assert.ok(trace) }) }) }) @@ -477,3 +424,9 @@ function isChildOf (childSpan, parentSpan) { assert.notStrictEqual(childSpan.span_id.toString(), parentSpan.span_id.toString()) assert.strictEqual(childSpan.parent_id.toString(), parentSpan.span_id.toString()) } + +function sortMetricTags (metrics) { + return metrics + .map(metric => Array.isArray(metric) ? 
metric : metric.tags) + .sort((a, b) => a.join(',').localeCompare(b.join(','))) +} diff --git a/integration-tests/package-guardrails.spec.js b/integration-tests/package-guardrails.spec.js index 7b8ec191930..db2db8b621e 100644 --- a/integration-tests/package-guardrails.spec.js +++ b/integration-tests/package-guardrails.spec.js @@ -13,7 +13,7 @@ const { const NODE_OPTIONS = '--require dd-trace/init.js' const DD_TRACE_DEBUG = 'true' const DD_INJECTION_ENABLED = 'tracing' -const DD_LOG_LEVEL = 'error' +const DD_LOG_LEVEL = 'info' const NODE_MAJOR = Number(process.versions.node.split('.')[0]) const FASTIFY_DEP = NODE_MAJOR < 20 ? 'fastify@4' : 'fastify' diff --git a/integration-tests/telemetry.spec.js b/integration-tests/telemetry.spec.js index 462b36bc777..617d45c3486 100644 --- a/integration-tests/telemetry.spec.js +++ b/integration-tests/telemetry.spec.js @@ -26,7 +26,7 @@ describe('telemetry', () => { proc = await spawnProc(startupTestFile, { cwd, env: { - AGENT_PORT: agent.port, + AGENT_PORT: String(agent.port), DD_LOGS_INJECTION: 'true', }, }) @@ -66,9 +66,9 @@ describe('telemetry', () => { await agent.assertTelemetryReceived(msg => { const { configuration } = msg.payload.payload assertObjectContains(configuration, [ - { name: 'DD_LOG_INJECTION', value: true, origin: 'default' }, - { name: 'DD_LOG_INJECTION', value: true, origin: 'env_var' }, - { name: 'DD_LOG_INJECTION', value: false, origin: 'code' }, + { name: 'DD_LOGS_INJECTION', value: true, origin: 'default' }, + { name: 'DD_LOGS_INJECTION', value: true, origin: 'env_var' }, + { name: 'DD_LOGS_INJECTION', value: false, origin: 'code' }, ]) }, 'app-started', 5_000, 1) }) diff --git a/packages/datadog-plugin-aws-sdk/src/base.js b/packages/datadog-plugin-aws-sdk/src/base.js index da8e40a56f1..b349ba99a80 100644 --- a/packages/datadog-plugin-aws-sdk/src/base.js +++ b/packages/datadog-plugin-aws-sdk/src/base.js @@ -23,12 +23,13 @@ class BaseAwsSdkPlugin extends ClientPlugin { return id } + /** @type 
{import('../../dd-trace/src/config/config-types').ConfigProperties['cloudPayloadTagging']} */ get cloudTaggingConfig () { return this._tracerConfig.cloudPayloadTagging } get payloadTaggingRules () { - return this.cloudTaggingConfig.rules.aws?.[this.constructor.id] + return this.cloudTaggingConfig.rules?.aws?.[this.constructor.id] } constructor (...args) { @@ -78,7 +79,7 @@ class BaseAwsSdkPlugin extends ClientPlugin { this.requestInject(span, request) }) - if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.requestsEnabled) { + if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.request) { const maxDepth = this.cloudTaggingConfig.maxDepth const requestTags = tagsFromRequest(this.payloadTaggingRules, request.params, { maxDepth }) span.addTags(requestTags) @@ -215,7 +216,7 @@ class BaseAwsSdkPlugin extends ClientPlugin { span.addTags(tags) - if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.responsesEnabled) { + if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.response) { const maxDepth = this.cloudTaggingConfig.maxDepth const responseBody = this.extractResponseBody(response) const responseTags = tagsFromResponse(this.payloadTaggingRules, responseBody, { maxDepth }) diff --git a/packages/datadog-plugin-grpc/test/client.spec.js b/packages/datadog-plugin-grpc/test/client.spec.js index 6c1eef00667..bad1443d9f4 100644 --- a/packages/datadog-plugin-grpc/test/client.spec.js +++ b/packages/datadog-plugin-grpc/test/client.spec.js @@ -12,7 +12,7 @@ const loader = require('../../../versions/@grpc/proto-loader').get() const { withNamingSchema, withPeerService, withVersions } = require('../../dd-trace/test/setup/mocha') const agent = require('../../dd-trace/test/plugins/agent') const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') -const defaults = require('../../dd-trace/src/config/defaults') +const { defaults } = require('../../dd-trace/src/config/defaults') const { NODE_MAJOR } = 
require('../../../version') const getService = require('./service') diff --git a/packages/datadog-plugin-grpc/test/server.spec.js b/packages/datadog-plugin-grpc/test/server.spec.js index 1c75183879d..f7c638d22fb 100644 --- a/packages/datadog-plugin-grpc/test/server.spec.js +++ b/packages/datadog-plugin-grpc/test/server.spec.js @@ -11,7 +11,7 @@ const { assertObjectContains } = require('../../../integration-tests/helpers') const { withNamingSchema, withVersions } = require('../../dd-trace/test/setup/mocha') const agent = require('../../dd-trace/test/plugins/agent') const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') -const defaults = require('../../dd-trace/src/config/defaults') +const { defaults } = require('../../dd-trace/src/config/defaults') const { NODE_MAJOR } = require('../../../version') const GRPC_SERVER_ERROR_STATUSES = defaults['grpc.server.error.statuses'] diff --git a/packages/dd-trace/src/agent/url.js b/packages/dd-trace/src/agent/url.js index 82f734d9a9a..f2460ce24d6 100644 --- a/packages/dd-trace/src/agent/url.js +++ b/packages/dd-trace/src/agent/url.js @@ -1,7 +1,7 @@ 'use strict' const { URL, format } = require('url') -const defaults = require('../config/defaults') +const { defaults } = require('../config/defaults') module.exports = { getAgentUrl } @@ -12,7 +12,7 @@ module.exports = { getAgentUrl } /** * Gets the agent URL from config, constructing it from hostname/port if needed - * @param {ReturnType} config - Tracer configuration object + * @param {Partial} config - Tracer configuration object * @returns {URL} The agent URL */ function getAgentUrl (config) { diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js index 5039f2bb544..1964e333d98 100644 --- 
a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js @@ -3,7 +3,7 @@ const log = require('../../../../log') const vulnerabilities = require('../../vulnerabilities') -const defaults = require('../../../../config/defaults') +const { defaults } = require('../../../../config/defaults') const { contains, intersects, remove } = require('./range-utils') diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js index 6e1c483a967..f9628c2673b 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js @@ -2,7 +2,7 @@ const crypto = require('crypto') -const defaults = require('../../../config/defaults') +const { defaults } = require('../../../config/defaults') const STRINGIFY_RANGE_KEY = 'DD_' + crypto.randomBytes(20).toString('hex') const STRINGIFY_SENSITIVE_KEY = STRINGIFY_RANGE_KEY + 'SENSITIVE' diff --git a/packages/dd-trace/src/appsec/remote_config.js b/packages/dd-trace/src/appsec/remote_config.js index a56465bd3a8..5db2d30f8d8 100644 --- a/packages/dd-trace/src/appsec/remote_config.js +++ b/packages/dd-trace/src/appsec/remote_config.js @@ -76,6 +76,7 @@ function enableOrDisableAppsec (action, rcConfig, config, appsec) { appsec.disable() } + // TODO: Use configWithOrigin /generateTelemetry instead of manually constructing the change. 
updateConfig([ { name: 'appsec.enabled', diff --git a/packages/dd-trace/src/config/defaults.js b/packages/dd-trace/src/config/defaults.js index 4c2af2c9bc2..c441e0e37bf 100644 --- a/packages/dd-trace/src/config/defaults.js +++ b/packages/dd-trace/src/config/defaults.js @@ -1,177 +1,333 @@ 'use strict' -const pkg = require('../pkg') -const { isFalse, isTrue } = require('../util') -const { DD_MAJOR } = require('../../../../version') -const { getEnvironmentVariable: getEnv } = require('./helper') +const dns = require('dns') +const util = require('util') +const { DD_MAJOR } = require('../../../../version') +const { parsers, transformers, telemetryTransformers, setWarnInvalidValue } = require('./parsers') const { supportedConfigurations, } = /** @type {import('./helper').SupportedConfigurationsJson} */ (require('./supported-configurations.json')) -const service = getEnv('AWS_LAMBDA_FUNCTION_NAME') || - getEnv('FUNCTION_NAME') || // Google Cloud Function Name set by deprecated runtimes - getEnv('K_SERVICE') || // Google Cloud Function Name set by newer runtimes - getEnv('WEBSITE_SITE_NAME') || // set by Azure Functions - pkg.name || - 'node' +let log +let seqId = 0 +const configWithOrigin = new Map() +const parseErrors = new Map() + +if (DD_MAJOR >= 6) { + // Programmatic configuration of DD_IAST_SECURITY_CONTROLS_CONFIGURATION is not supported + // in newer major versions. This is special handled here until a better solution is found. + // TODO: Remove the programmatic configuration from supported-configurations.json once v5 is not supported anymore. + supportedConfigurations.DD_IAST_SECURITY_CONTROLS_CONFIGURATION[0].internalPropertyName = + supportedConfigurations.DD_IAST_SECURITY_CONTROLS_CONFIGURATION[0].configurationNames?.[0] + delete supportedConfigurations.DD_IAST_SECURITY_CONTROLS_CONFIGURATION[0].configurationNames +} else { + // Default value for DD_TRACE_STARTUP_LOGS is 'false' in older major versions. 
+ // This is special handled here until a better solution is found. + // TODO: Remove this here once v5 is not supported anymore. + supportedConfigurations.DD_TRACE_STARTUP_LOGS[0].default = 'false' +} /** - * @param {string|null} raw - * @param {string} type - * @returns {string|number|boolean|Record|unknown[]|undefined} + * Warns about an invalid value for an option and adds the error to the last telemetry entry if it is not already set. + * Logging happens only if the error is not already set or the option name is different from the last telemetry entry. + * + * @param {unknown} value - The value that is invalid. + * @param {string} optionName - The name of the option. + * @param {string} source - The source of the value. + * @param {string} baseMessage - The base message to use for the warning. + * @param {Error} [error] - An error that was thrown while parsing the value. */ -function parseDefaultByType (raw, type) { - if (raw === null) { - return +function warnInvalidValue (value, optionName, source, baseMessage, error) { + const canonicalName = (optionsTable[optionName]?.canonicalName ?? optionName) + source + // Lazy load log module to avoid circular dependency + if (!parseErrors.has(canonicalName)) { + // TODO: Rephrase: It will fallback to former source (or default if not set) + let message = `${baseMessage}: ${util.inspect(value)} for ${optionName} (source: ${source}), picked default` + if (error) { + error.stack = error.toString() + message += `\n\n${util.inspect(error)}` + } + parseErrors.set(canonicalName, { message }) + log ??= require('../log') + const logLevel = error ? 
'error' : 'warn' + log[logLevel](message) } +} +setWarnInvalidValue(warnInvalidValue) + +/** @type {import('./config-types').ConfigDefaults} */ +const defaults = { + instrumentationSource: 'manual', + isServiceUserProvided: false, + isServiceNameInferred: true, + plugins: true, + isCiVisibility: false, + lookup: dns.lookup, + logger: undefined, +} + +for (const [name, value] of Object.entries(defaults)) { + configWithOrigin.set(`${name}default`, { + name, + value: value ?? null, + origin: 'default', + seq_id: seqId++, + }) +} - switch (type) { - case 'boolean': - if (isTrue(raw)) return true - if (isFalse(raw)) return false - // TODO: What should we do with these? - return - case 'int': - case 'decimal': { - return Number(raw) +/** + * @param {unknown} value + * @param {string} origin + * @param {string} optionName + */ +function generateTelemetry (value = null, origin, optionName) { + const { type, canonicalName = optionName } = configurationsTable[optionName] ?? { type: typeof value } + // TODO: Consider adding a preParser hook to the parsers object. + if (canonicalName === 'OTEL_RESOURCE_ATTRIBUTES') { + value = telemetryTransformers.MAP(value) + } + // TODO: Should we not send defaults to telemetry to reduce size? + // TODO: How to handle aliases/actual names in the future? Optional fields? Normalize the name at intake? + // TODO: Validate that space separated tags are parsed by the backend. Optimizations would be possible with that. + // TODO: How to handle telemetry reporting for aliases? + if (value !== null) { + if (telemetryTransformers[type]) { + value = telemetryTransformers[type](value) + } else if (typeof value === 'object' && value !== null) { + value = value instanceof URL + ? String(value) + : JSON.stringify(value) + } else if (typeof value === 'function') { + value = value.name || 'function' } - case 'array': { - if (!raw || raw.length === 0) return [] - // TODO: Make the parsing a helper that is reused. 
- return raw.split(',').map(item => { - const colonIndex = item.indexOf(':') - if (colonIndex === -1) { - return item.trim() + } + const telemetryEntry = { + name: canonicalName, + value, + origin, + seq_id: seqId++, + } + const error = parseErrors.get(`${canonicalName}${origin}`) + if (error) { + parseErrors.delete(`${canonicalName}${origin}`) + telemetryEntry.error = error + } + configWithOrigin.set(`${canonicalName}${origin}`, telemetryEntry) +} + +// Iterate over the object and always handle the leaf properties as lookup. +// Example entries: +// +// cloudPayloadTagging: { +// nestedProperties: [ +// 'rules', +// 'requestsEnabled', +// 'responses', +// ], +// option: { +// property: 'rules', +// parser: parsers.JSON, +// canonicalName: 'DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING', +// transformer: transformers.toCamelCase, +// }, +// }, +// 'cloudPayloadTagging.responses': { +// nestedProperties: [ +// 'enabled', +// ], +// }, +// 'cloudPayloadTagging.rules': {}, +// 'cloudPayloadTagging.requestsEnabled': {}, +// 'cloudPayloadTagging.responses.enabled': {} +const optionsTable = { + // Additional properties that are not supported by the supported-configurations.json file. 
+ lookup: { + transformer (value) { + if (typeof value === 'function') { + return value + } + }, + property: 'lookup', + }, + logger: { + transformer (object) { + if (typeof object === 'object' && + object !== null && + Object.values(object).some(value => typeof value === 'function')) { + return object + } + }, + property: 'logger', + }, + isCiVisibility: { + property: 'isCiVisibility', + }, + plugins: { + property: 'plugins', + }, +} + +const parser = (value, optionName, source) => { + const { type, canonicalName = optionName } = configurationsTable[optionName] + const parsed = parsers[type](value, canonicalName) + if (parsed === undefined) { + warnInvalidValue(value, optionName, source, `Invalid ${type} input`) + } + return parsed +} + +/** + * @template {import('./config-types').ConfigPath} TPath + * @type {Partial unknown, + * canonicalName?: string, + * transformer?: (value: unknown, optionName: string, source: string) => unknown, + * telemetryTransformer?: (value: unknown) => unknown + * }>>} ConfigurationsTable + */ +const configurationsTable = {} + +// One way aliases. Must be applied in apply calculated entries. +const fallbackConfigurations = new Map() + +const regExps = {} + +for (const [canonicalName, entries] of Object.entries(supportedConfigurations)) { + if (entries.length !== 1) { + // TODO: Determine if we really want to support multiple entries for a canonical name. + // This would be needed to show official support for multiple diverging implementations + // at a time with by checking for another configuration that is not the canonical name. + throw new Error( + `Multiple entries found for canonical name: ${canonicalName}. ` + + 'This is currently not supported and must be implemented, if needed.' + ) + } + for (const entry of entries) { + const configurationNames = entry.internalPropertyName ? [entry.internalPropertyName] : entry.configurationNames + const fullPropertyName = configurationNames?.[0] ?? 
canonicalName + const type = entry.type.toUpperCase() + + let transformer = transformers[entry.transform] + if (entry.allowed) { + regExps[entry.allowed] ??= new RegExp(`^(${entry.allowed})$`, 'i') + const allowed = regExps[entry.allowed] + const originalTransform = transformer + transformer = (value, optionName, source) => { + if (!allowed.test(value)) { + warnInvalidValue(value, optionName, source, 'Invalid value') + return } - const key = item.slice(0, colonIndex).trim() - const value = item.slice(colonIndex + 1).trim() - return `${key}:${value}` - }) + if (originalTransform) { + value = originalTransform(value) + } + return value + } } - case 'map': { - if (!raw || raw.length === 0) return {} - // TODO: Make the parsing a helper that is reused. - /** @type {Record} */ - const entries = {} - for (const item of raw.split(',')) { - const colonIndex = item.indexOf(':') - if (colonIndex === -1) { - const key = item.trim() - if (key.length > 0) { - entries[key] = '' - } + + const option = { parser, type } + + if (fullPropertyName !== canonicalName) { + option.property = fullPropertyName + option.canonicalName = canonicalName + configurationsTable[fullPropertyName] = option + } + if (transformer) { + option.transformer = transformer + } + if (entry.configurationNames) { + addOption(option, type, entry.configurationNames) + } + configurationsTable[canonicalName] = option + + if (entry.default === null) { + defaults[fullPropertyName] = undefined + } else { + let parsedDefault = parser(entry.default, fullPropertyName, 'default') + if (entry.transform) { + parsedDefault = transformer(parsedDefault, fullPropertyName, 'default') + } + defaults[fullPropertyName] = parsedDefault + } + generateTelemetry(defaults[fullPropertyName], 'default', fullPropertyName) + + if (entry.aliases) { + for (const alias of entry.aliases) { + if (!supportedConfigurations[alias]) { + // An actual alias has no matching entry continue } - const key = item.slice(0, colonIndex).trim() - const value = 
item.slice(colonIndex + 1).trim() - if (key.length > 0) { - entries[key] = value + if (!supportedConfigurations[alias].aliases?.includes(canonicalName)) { + // Alias will be replaced with the full property name of the alias, if it exists. + fallbackConfigurations.set(fullPropertyName, alias) } } - return entries } - default: - return raw } } -/** @type {Record} */ -const metadataDefaults = {} -for (const entries of Object.values(supportedConfigurations)) { - for (const entry of entries) { - // TODO: Replace $dynamic with method names that would be called and that - // are also called when the user passes through the value. That way the - // handling is unified and methods can be declared as default. - // The name of that method should be expressive for users. - // TODO: Add handling for all environment variable names. They should not - // need a configuration name for being listed with their default. - if (!Array.isArray(entry.configurationNames)) { - continue - } +// Replace the alias with the canonical property name. +for (const [fullPropertyName, alias] of fallbackConfigurations) { + if (configurationsTable[alias].property) { + fallbackConfigurations.set(fullPropertyName, configurationsTable[alias].property) + } +} + +function addOption (option, type, configurationNames) { + for (const name of configurationNames) { + let index = -1 + let lastNestedProperties + while (true) { + const nextIndex = name.indexOf('.', index + 1) + const intermediateName = nextIndex === -1 ? name : name.slice(0, nextIndex) + if (lastNestedProperties) { + lastNestedProperties.add(intermediateName.slice(index + 1)) + } - const parsedValue = parseDefaultByType(entry.default, entry.type) - for (const configurationName of entry.configurationNames) { - metadataDefaults[configurationName] = entry.default === null ? 
undefined : parsedValue + if (nextIndex === -1) { + if (optionsTable[name]) { + if (optionsTable[name].nestedProperties && !optionsTable[name].option) { + optionsTable[name].option = option + break + } + throw new Error(`Duplicate configuration name: ${name}`) + } + optionsTable[name] = option + break + } + + lastNestedProperties = new Set() + index = nextIndex + + if (!optionsTable[intermediateName]) { + optionsTable[intermediateName] = { + nestedProperties: lastNestedProperties, + } + } else if (optionsTable[intermediateName].nestedProperties) { + lastNestedProperties = optionsTable[intermediateName].nestedProperties + } else { + optionsTable[intermediateName] = { + nestedProperties: lastNestedProperties, + option: optionsTable[intermediateName], + } + } } } } -// Defaults required by JS config merge/applyCalculated that are not represented in supported-configurations. -const defaultsWithoutSupportedConfigurationEntry = { - 'cloudPayloadTagging.rules': [], - 'cloudPayloadTagging.requestsEnabled': false, - 'cloudPayloadTagging.responsesEnabled': false, - isAzureFunction: false, - isCiVisibility: false, - isGCPFunction: false, - instrumentationSource: 'manual', - isServiceUserProvided: false, - isServiceNameInferred: true, - lookup: undefined, - plugins: true, -} +module.exports = { + configurationsTable, -// These values are documented in supported-configurations as CI Visibility -// defaults. Keep startup baseline false and let #applyCalculated() switch them -// when CI Visibility is active. -// TODO: These entries should be removed. They are off by default -// because they rely on other configs. -const defaultsWithConditionalRuntimeBehavior = { - startupLogs: DD_MAJOR >= 6, - isGitUploadEnabled: false, - isImpactedTestsEnabled: false, - isIntelligentTestRunnerEnabled: false, - isManualApiEnabled: false, - isTestManagementEnabled: false, - // TODO: These are not conditional, they would just be of type number. 
- 'dogstatsd.port': '8125', - port: '8126', - // Override due to expecting numbers, not strings. TODO: Replace later. - 'grpc.client.error.statuses': [ - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - ], - 'grpc.server.error.statuses': [ - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - ], -} + defaults, -/** @type {Record} */ -const defaults = { - ...defaultsWithoutSupportedConfigurationEntry, - ...metadataDefaults, - ...defaultsWithConditionalRuntimeBehavior, - service, - version: pkg.version, -} + fallbackConfigurations, -module.exports = defaults + optionsTable, + + configWithOrigin, + + parseErrors, + + generateTelemetry, +} diff --git a/packages/dd-trace/src/config/helper.js b/packages/dd-trace/src/config/helper.js index 0ba7b197758..011fa0caa1a 100644 --- a/packages/dd-trace/src/config/helper.js +++ b/packages/dd-trace/src/config/helper.js @@ -9,6 +9,9 @@ * @property {string|number|boolean|null|object|unknown[]} default * @property {string[]} [aliases] * @property {string[]} [configurationNames] + * @property {string} [internalPropertyName] + * @property {string} [transform] + * @property {string} [allowed] * @property {string|boolean} [deprecated] */ @@ -57,6 +60,13 @@ for (const [canonical, configuration] of Object.entries(supportedConfigurations) const aliasToCanonical = {} for (const canonical of Object.keys(aliases)) { for (const alias of aliases[canonical]) { + if (supportedConfigurations[alias]) { + // Allow 'fallback' aliases to be used for other configurations. + // This is used to handle the case where an alias could be used for multiple configurations. + // For example, OTEL_EXPORTER_OTLP_ENDPOINT is used for OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + // and OTEL_EXPORTER_OTLP_METRICS_ENDPOINT. 
+ continue + } if (aliasToCanonical[alias]) { throw new Error(`The alias ${alias} is already used for ${aliasToCanonical[alias]}.`) } @@ -99,22 +109,37 @@ function loadStableConfig () { } function getValueFromSource (name, source) { - const value = source[name] + if (source[name] !== undefined) { + return source[name] + } - if (value === undefined && aliases[name]) { + if (aliases[name]) { for (const alias of aliases[name]) { if (source[alias] !== undefined) { return source[alias] } } } +} - return value +function getEnvNameFromSource (name, source) { + if (source[name] !== undefined) { + return name + } + + if (aliases[name]) { + for (const alias of aliases[name]) { + if (source[alias] !== undefined) { + return alias + } + } + } } function validateAccess (name) { - if ((name.startsWith('DD_') || name.startsWith('OTEL_') || aliasToCanonical[name]) && - !supportedConfigurations[name]) { + if ((name.startsWith('DD_') || name.startsWith('OTEL_')) && + !supportedConfigurations[name] && + !aliasToCanonical[name]) { throw new Error(`Missing ${name} env/configuration in "supported-configurations.json" file.`) } } @@ -144,10 +169,9 @@ module.exports = { * * @returns {TracerEnv} The environment variables */ - getEnvironmentVariables () { + getEnvironmentVariables (source = process.env, internalOnly = false) { const configs = {} - for (const [key, value] of Object.entries(process.env)) { - // TODO(BridgeAR): Handle telemetry reporting for aliases. 
+ for (const [key, value] of Object.entries(source)) { if (key.startsWith('DD_') || key.startsWith('OTEL_') || aliasToCanonical[key]) { if (supportedConfigurations[key]) { configs[key] = value @@ -155,7 +179,7 @@ module.exports = { // The alias should only be used if the actual configuration is not set // In case that more than a single alias exist, use the one defined first in our own order for (const alias of aliases[aliasToCanonical[key]]) { - if (process.env[alias] !== undefined) { + if (source[alias] !== undefined) { configs[aliasToCanonical[key]] = value break } @@ -165,9 +189,10 @@ module.exports = { // debug( // `Missing configuration ${env} in supported-configurations file. The environment variable is ignored.` // ) + // This could be moved inside the main config logic. } deprecationMethods[key]?.() - } else { + } else if (!internalOnly) { configs[key] = value } } @@ -211,4 +236,28 @@ module.exports = { return getValueFromSource(name, localStableConfig) } }, + + /** + * Returns the actual environment variable name used for a supported configuration + * from a specific environment-based source. 
+ * + * @param {string} name Environment variable name + * @returns {string|undefined} + */ + getConfiguredEnvName (name) { + validateAccess(name) + + if (!stableConfigLoaded) { + loadStableConfig() + } + + for (const source of [fleetStableConfig, process.env, localStableConfig]) { + if (source !== undefined) { + const fromSource = getEnvNameFromSource(name, source) + if (fromSource !== undefined) { + return fromSource + } + } + } + }, } diff --git a/packages/dd-trace/src/config/index.js b/packages/dd-trace/src/config/index.js index 797d983e644..2c6222e5622 100644 --- a/packages/dd-trace/src/config/index.js +++ b/packages/dd-trace/src/config/index.js @@ -5,1682 +5,745 @@ const os = require('node:os') const { URL } = require('node:url') const path = require('node:path') +const rfdc = require('../../../../vendor/dist/rfdc')({ proto: false, circles: false }) const uuid = require('../../../../vendor/dist/crypto-randomuuid') // we need to keep the old uuid dep because of cypress - const set = require('../../../datadog-core/src/utils/src/set') const { DD_MAJOR } = require('../../../../version') const log = require('../log') -const tagger = require('../tagger') -const { isTrue, isFalse, normalizeProfilingEnabledValue } = require('../util') +const pkg = require('../pkg') +const { isTrue } = require('../util') const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags') -const { updateConfig } = require('../telemetry') +const telemetry = require('../telemetry') const telemetryMetrics = require('../telemetry/metrics') const { IS_SERVERLESS, getIsGCPFunction, getIsAzureFunction, - enableGCPPubSubPushSubscription, } = require('../serverless') const { ORIGIN_KEY } = require('../constants') const { appendRules } = require('../payload-tagging/config') const { getGitMetadataFromGitProperties, removeUserSensitiveInfo, getRemoteOriginURL, resolveGitHeadSHA } = require('./git_properties') -const { getEnvironmentVariable: getEnv, getEnvironmentVariables, 
getStableConfigSources } = require('./helper') -const defaults = require('./defaults') - -const TELEMETRY_COUNTERS = new Map([ - ['otel.env.hiding', {}], - ['otel.env.invalid', {}], -]) -const OTEL_DD_ENV_MAPPING = new Map([ - ['OTEL_LOG_LEVEL', 'DD_TRACE_LOG_LEVEL'], - ['OTEL_PROPAGATORS', 'DD_TRACE_PROPAGATION_STYLE'], - ['OTEL_SERVICE_NAME', 'DD_SERVICE'], - ['OTEL_TRACES_SAMPLER', 'DD_TRACE_SAMPLE_RATE'], - ['OTEL_TRACES_SAMPLER_ARG', 'DD_TRACE_SAMPLE_RATE'], - ['OTEL_TRACES_EXPORTER', 'DD_TRACE_ENABLED'], - ['OTEL_METRICS_EXPORTER', 'DD_RUNTIME_METRICS_ENABLED'], - ['OTEL_RESOURCE_ATTRIBUTES', 'DD_TAGS'], - ['OTEL_SDK_DISABLED', 'DD_TRACE_OTEL_ENABLED'], - ['OTEL_LOGS_EXPORTER', undefined], -]) -const VALID_PROPAGATION_STYLES = new Set(['datadog', 'tracecontext', 'b3', 'b3 single header', 'none']) -const VALID_PROPAGATION_BEHAVIOR_EXTRACT = new Set(['continue', 'restart', 'ignore']) -const VALID_LOG_LEVELS = new Set(['debug', 'info', 'warn', 'error']) -const DEFAULT_OTLP_PORT = 4318 +const ConfigBase = require('./config-base') +const { + getEnvironmentVariable, + getEnvironmentVariables, + getStableConfigSources, + getValueFromEnvSources, +} = require('./helper') +const { + defaults, + fallbackConfigurations, + configurationsTable, + optionsTable, + configWithOrigin, + parseErrors, + generateTelemetry, +} = require('./defaults') +const { transformers } = require('./parsers') + const RUNTIME_ID = uuid() -// eslint-disable-next-line eslint-rules/eslint-process-env -- internal propagation, not user config -const ROOT_SESSION_ID = process.env.DD_ROOT_JS_SESSION_ID || RUNTIME_ID -const NAMING_VERSIONS = new Set(['v0', 'v1']) -const DEFAULT_NAMING_VERSION = 'v0' const tracerMetrics = telemetryMetrics.manager.namespace('tracers') -const changeTracker = {} +/** + * @typedef {'default' + * | 'code' + * | 'remote_config' + * | 'calculated' + * | 'env_var' + * | 'local_stable_config' + * | 'fleet_stable_config'} TelemetrySource + * @typedef {'remote_config' | 
'calculated'} RevertibleTelemetrySource + * @typedef {import('../../../../index').TracerOptions} TracerOptions + * @typedef {import('./config-types').ConfigKey} ConfigKey + * @typedef {import('./config-types').ConfigPath} ConfigPath + * @typedef {{ + * value: import('./config-types').ConfigPathValue, + * source: TelemetrySource + * }} TrackedConfigEntry + * @typedef {{ + * baseValuesByPath: Partial>, + * remote_config: Set, + * calculated: Set, + * }} ChangeTracker + */ + +/** @type {Config | null} */ let configInstance = null +// An entry that is undefined means it is the default value. +/** @type {Map} */ +const trackedConfigOrigins = new Map() + +// ChangeTracker tracks the changes to the config up to programmatic options (code). +/** @type {ChangeTracker} */ +const changeTracker = { + baseValuesByPath: {}, + remote_config: new Set(), + calculated: new Set(), +} + +/** + * @param {Config} config + * @param {RevertibleTelemetrySource} source + */ +function undo (config, source) { + for (const name of changeTracker[source]) { + const entry = changeTracker.baseValuesByPath[name] ?? { source: 'default', value: defaults[name] } + setAndTrack(config, name, entry.value, undefined, entry.source) + } +} + +function get (object, path) { + // Fast path for simple property access. 
+ if (object[path] !== undefined) { + return object[path] + } + let index = 0 + while (true) { + const nextIndex = path.indexOf('.', index) + if (nextIndex === -1) { + return object[path.slice(index)] + } + object = object[path.slice(index, nextIndex)] + index = nextIndex + 1 + } +} + +/** + * @param {Config} config + * @template {ConfigPath} TPath + * @param {TPath} name + * @param {import('./config-types').ConfigPathValue} value + * @param {unknown} [rawValue] + * @param {TelemetrySource} [source] + */ +function setAndTrack (config, name, value, rawValue = value, source = 'calculated') { + // envs can not be undefined + if (value == null) { + // TODO: This works as before while ignoring undefined programmatic options is not ideal. + if (source !== 'default') { + return + } + } else if (source === 'calculated' || source === 'remote_config') { + if (source === 'calculated' && value === get(config, name)) { + return + } + changeTracker[source].add(name) + } else { + const copy = typeof value === 'object' && value !== null ? 
rfdc(value) : value + changeTracker.baseValuesByPath[name] = { value: copy, source } + } + set(config, name, value) + + generateTelemetry(rawValue, source, name) + if (source === 'default') { + trackedConfigOrigins.delete(name) + } else { + trackedConfigOrigins.set(name, source) + } +} + module.exports = getConfig -class Config { +// We extend from ConfigBase to make our types work +class Config extends ConfigBase { /** * parsed DD_TAGS, usable as a standalone tag set across products * @type {Record} */ - #parsedDdTags = {} - #envUnprocessed = {} - #optsUnprocessed = {} - #remoteUnprocessed = {} - #env = {} - #options = {} - #remote = {} - #defaults = {} - #optionsArg = {} - #localStableConfig = {} - #fleetStableConfig = {} - #calculated = {} + #parsedDdTags + /** + * @type {Record} + */ + get parsedDdTags () { + return this.#parsedDdTags + } + + /** + * @param {TracerOptions} [options={}] + */ constructor (options = {}) { - if (!IS_SERVERLESS) { - const configEnvSources = getStableConfigSources() - this.stableConfig = { - fleetEntries: configEnvSources.fleetStableConfig, - localEntries: configEnvSources.localStableConfig, - warnings: configEnvSources.stableConfigWarnings, - } - } + super() - options = { - ...options, - // TODO(BridgeAR): Remove the experimental prefix once we have a major version. - // That also applies to index.d.ts - appsec: options.appsec == null ? options.experimental?.appsec : options.appsec, - iast: options.iast == null ? options.experimental?.iast : options.iast, + const configEnvSources = getStableConfigSources() + this.stableConfig = { + fleetEntries: configEnvSources.fleetStableConfig ?? {}, + localEntries: configEnvSources.localStableConfig ?? 
{}, + warnings: configEnvSources.stableConfigWarnings, } // Configure the logger first so it can be used to warn about other configs - const logConfig = log.getConfig() - this.debug = log.isEnabled( - this.stableConfig?.fleetEntries?.DD_TRACE_DEBUG, - this.stableConfig?.localEntries?.DD_TRACE_DEBUG - ) - this.logger = options.logger ?? logConfig.logger - this.logLevel = log.getLogLevel( - options.logLevel, - this.stableConfig?.fleetEntries?.DD_TRACE_LOG_LEVEL, - this.stableConfig?.localEntries?.DD_TRACE_LOG_LEVEL - ) - log.use(this.logger) - log.toggle(this.debug, this.logLevel) + // TODO: Implement auto buffering of inside of log module before first + // configure call. That way the logger is always available and the + // application doesn't need to configure it first and the configuration + // happens inside of config instead of inside of log module. If the logger + // is not deactivated, the buffered logs would be discarded. That way stable + // config warnings can also be logged directly and do not need special + // handling. + this.debug = log.configure(options) // Process stable config warnings, if any for (const warning of this.stableConfig?.warnings ?? []) { log.warn(warning) } - checkIfBothOtelAndDdEnvVarSet() - - if (typeof options.appsec === 'boolean') { - options.appsec = { - enabled: options.appsec, - } - } - - if (typeof options.runtimeMetrics === 'boolean') { - options.runtimeMetrics = { - enabled: options.runtimeMetrics, - } - } - - this.#defaults = defaults this.#applyDefaults() - this.#applyStableConfig(this.stableConfig?.localEntries ?? {}, this.#localStableConfig) - this.#applyEnvironment() - this.#applyStableConfig(this.stableConfig?.fleetEntries ?? {}, this.#fleetStableConfig) - this.#applyOptions(options) + // TODO: Update origin documentation to list all valid sources. Add local_stable_config and fleet_stable_config. 
+ this.#applyEnvs(getEnvironmentVariables(this.stableConfig.localEntries, true), 'local_stable_config') + this.#applyEnvs(getEnvironmentVariables(undefined, true), 'env_var') + this.#applyEnvs(getEnvironmentVariables(this.stableConfig.fleetEntries, true), 'fleet_stable_config') + + // Experimental options are applied first, so they can be overridden by non-experimental options. + // TODO: When using programmatic options, check if there is a higher + // priority name in the same options object. Use the highest priority name. + const { experimental, ...rest } = options + if (experimental) { + // @ts-expect-error - Difficult to type this correctly. + this.#applyOptions(experimental, 'code', 'experimental') + } + this.#applyOptions(rest, 'code') this.#applyCalculated() - this.#merge() - tagger.add(this.tags, { - service: this.service, - env: this.env, - version: this.version, - 'runtime-id': RUNTIME_ID, - }) + warnWrongOtelSettings() + + if (this.gitMetadataEnabled) { + this.#loadGitMetadata() + } - this.rootSessionId = ROOT_SESSION_ID + parseErrors.clear() + } - if (this.isCiVisibility) { - tagger.add(this.tags, { - [ORIGIN_KEY]: 'ciapp-test', - }) + #applyDefaults () { + for (const [name, value] of Object.entries(defaults)) { + set(this, name, value) } + } - if (this.gitMetadataEnabled) { - this.#loadGitMetadata() + /** + * @param {import('./helper').TracerEnv} envs + * @param {'env_var' | 'local_stable_config' | 'fleet_stable_config'} source + */ + #applyEnvs (envs, source) { + for (const [name, value] of Object.entries(envs)) { + const entry = configurationsTable[name] + // TracePropagationStyle is a special case. It is a single option that is used to set both inject and extract. 
+ // TODO: Consider what to do with this later + if (name === 'DD_TRACE_PROPAGATION_STYLE') { + if ( + getValueFromEnvSources('DD_TRACE_PROPAGATION_STYLE_INJECT') !== undefined || + getValueFromEnvSources('DD_TRACE_PROPAGATION_STYLE_EXTRACT') !== undefined + ) { + log.warn( + // eslint-disable-next-line @stylistic/max-len + 'Use either DD_TRACE_PROPAGATION_STYLE or separate DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables' + ) + continue + } + this.#applyEnvs({ DD_TRACE_PROPAGATION_STYLE_INJECT: value, DD_TRACE_PROPAGATION_STYLE_EXTRACT: value }, source) + continue + } + const parsed = entry.parser(value, name, source) + const transformed = parsed !== undefined && entry.transformer ? entry.transformer(parsed, name, source) : parsed + const rawValue = transformed !== null && typeof transformed === 'object' ? value : parsed + setAndTrack(this, entry.property ?? name, transformed, rawValue, source) } } - get parsedDdTags () { - return this.#parsedDdTags + /** + * @param {TracerOptions} options + * @param {'code' | 'remote_config'} source + * @param {string} [root] + */ + #applyOptions (options, source, root = '') { + for (const [name, value] of Object.entries(options)) { + const fullName = root ? `${root}.${name}` : name + let entry = optionsTable[fullName] + if (!entry) { + // TODO: Fix this by by changing remote config to use env styles. + if (name !== 'tracing' || source !== 'remote_config') { + log.warn('Unknown option %s with value %o', fullName, value) + continue + } + // @ts-expect-error - The entry is defined in the configurationsTable. + entry = configurationsTable.tracing + } + + if (entry.nestedProperties) { + let matched = false + if (typeof value === 'object' && value !== null) { + for (const nestedProperty of entry.nestedProperties) { + // WARNING: if the property name might be part of the value we look at, this could conflict! 
+ // Defining an option that receives an object as value may not contain a property that is also + // potentially a nested property! + if (Object.hasOwn(value, nestedProperty)) { + this.#applyOptions(value, source, fullName) + matched = true + break + } + } + } + if (matched) { + continue + } + if (entry.option) { + entry = entry.option + } else { + if (fullName === 'tracePropagationStyle') { + // TracePropagationStyle is special. It is a single option that is used to set both inject and extract. + // @ts-expect-error - Difficult to type this correctly. + this.#applyOptions({ inject: value, extract: value }, source, 'tracePropagationStyle') + } else { + log.warn('Unknown option %s with value %o', fullName, value) + } + continue + } + } + // TODO: Coerce mismatched types to the expected type, if possible. E.g., strings <> numbers + const transformed = value !== undefined && entry.transformer ? entry.transformer(value, fullName, source) : value + setAndTrack(this, entry.property, transformed, value, source) + } } /** * Set the configuration with remote config settings. * Applies remote configuration, recalculates derived values, and merges all configuration sources. * - * @param {import('./remote_config').RemoteConfigOptions|null} options - Configurations received via Remote + * @param {TracerOptions|null} options - Configurations received via Remote * Config or null to reset all remote configuration */ setRemoteConfig (options) { // Clear all RC-managed fields to ensure previous values don't persist. // State is instead managed by the `RCClientLibConfigManager` class - this.#remote = {} - this.#remoteUnprocessed = {} + undo(this, 'remote_config') // Special case: if options is null, nothing to apply // This happens when all remote configs are removed if (options !== null) { - this.#applyRemoteConfig(options) + this.#applyOptions(options, 'remote_config') } this.#applyCalculated() - this.#merge() } - // TODO: Remove the `updateOptions` method. 
We don't want to support updating the config this way /** - * Updates the configuration with new programmatic options. - * - * @deprecated This method should not be used and will be removed in a future version. - * @param {object} options - Configuration options to apply (same format as tracer init options) + * @param {ConfigPath} name */ - updateOptions (options) { - this.#applyOptions(options) - this.#applyCalculated() - this.#merge() - } - getOrigin (name) { - for (const { container, origin } of this.#getSourcesInOrder()) { - const value = container[name] - if (value != null || container === this.#defaults) { - return origin - } - } - } - - #getSourcesInOrder () { - return [ - { container: this.#remote, origin: 'remote_config', unprocessed: this.#remoteUnprocessed }, - { container: this.#options, origin: 'code', unprocessed: this.#optsUnprocessed }, - { container: this.#fleetStableConfig, origin: 'fleet_stable_config' }, - { container: this.#env, origin: 'env_var', unprocessed: this.#envUnprocessed }, - { container: this.#localStableConfig, origin: 'local_stable_config' }, - { container: this.#calculated, origin: 'calculated' }, - { container: this.#defaults, origin: 'default' }, - ] - } - - #applyStableConfig (config, obj) { - this.#applyConfigValues(config, obj, {}) + return trackedConfigOrigins.get(name) ?? 
'default' } - // Set environment-dependent defaults that can be overridden by users - #applyDefaults () { - const defaults = this.#defaults - - if (IS_SERVERLESS) { - setBoolean(defaults, 'crashtracking.enabled', false) - setString(defaults, 'profiling.enabled', 'false') - setBoolean(defaults, 'telemetry.enabled', false) - setBoolean(defaults, 'remoteConfig.enabled', false) - } else { - setBoolean(defaults, 'crashtracking.enabled', true) + // Handles values calculated from a mixture of options and env vars + #applyCalculated () { + undo(this, 'calculated') + + if (this.DD_CIVISIBILITY_AGENTLESS_URL || + this.url || + os.type() !== 'Windows_NT' && + !trackedConfigOrigins.has('hostname') && + !trackedConfigOrigins.has('port') && + !this.DD_CIVISIBILITY_AGENTLESS_ENABLED && + fs.existsSync('/var/run/datadog/apm.socket')) { + setAndTrack( + this, + 'url', + new URL(this.DD_CIVISIBILITY_AGENTLESS_URL || this.url || 'unix:///var/run/datadog/apm.socket') + ) } - if (getEnv('JEST_WORKER_ID')) { - setBoolean(defaults, 'telemetry.enabled', false) + if (this.isCiVisibility) { + setAndTrack(this, 'isServiceUserProvided', trackedConfigOrigins.has('service')) + this.tags[ORIGIN_KEY] = 'ciapp-test' } - } - - #applyEnvironment () { - this.#applyConfigValues(getEnvironmentVariables(), this.#env, this.#envUnprocessed) - } + // Compute OTLP logs and metrics URLs to send payloads to the active Datadog Agent + const agentHostname = this.hostname || /** @type {URL} */ (this.url).hostname - #applyConfigValues (source, target, unprocessedTarget) { - const { - AWS_LAMBDA_FUNCTION_NAME, - DD_AGENT_HOST, - DD_AI_GUARD_ENABLED, - DD_AI_GUARD_ENDPOINT, - DD_AI_GUARD_MAX_CONTENT_SIZE, - DD_AI_GUARD_MAX_MESSAGES_LENGTH, - DD_AI_GUARD_TIMEOUT, - DD_API_KEY, - DD_API_SECURITY_ENABLED, - DD_API_SECURITY_SAMPLE_DELAY, - DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED, - DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT, - DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE, - 
DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS, - DD_APM_TRACING_ENABLED, - DD_APP_KEY, - DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE, - DD_APPSEC_COLLECT_ALL_HEADERS, - DD_APPSEC_ENABLED, - DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON, - DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED, - DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML, - DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON, - DD_APPSEC_MAX_COLLECTED_HEADERS, - DD_APPSEC_MAX_STACK_TRACES, - DD_APPSEC_MAX_STACK_TRACE_DEPTH, - DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP, - DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP, - DD_APPSEC_RULES, - DD_APPSEC_SCA_ENABLED, - DD_APPSEC_STACK_TRACE_ENABLED, - DD_APPSEC_RASP_ENABLED, - DD_APPSEC_RASP_COLLECT_REQUEST_BODY, - DD_APPSEC_TRACE_RATE_LIMIT, - DD_APPSEC_WAF_TIMEOUT, - DD_CRASHTRACKING_ENABLED, - DD_CODE_ORIGIN_FOR_SPANS_ENABLED, - DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED, - DD_DATA_STREAMS_ENABLED, - DD_DBM_PROPAGATION_MODE, - DD_DBM_INJECT_SQL_BASEHASH, - DD_DOGSTATSD_HOST, - DD_DOGSTATSD_PORT, - DD_DYNAMIC_INSTRUMENTATION_CAPTURE_TIMEOUT_MS, - DD_DYNAMIC_INSTRUMENTATION_ENABLED, - DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE, - DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS, - DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS, - DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS, - DD_ENV, - DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED, - DD_EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED, - DD_PROFILING_ENABLED, - DD_GRPC_CLIENT_ERROR_STATUSES, - DD_GRPC_SERVER_ERROR_STATUSES, - DD_HEAP_SNAPSHOT_COUNT, - DD_HEAP_SNAPSHOT_DESTINATION, - DD_HEAP_SNAPSHOT_INTERVAL, - DD_IAST_DB_ROWS_TO_TAINT, - DD_IAST_DEDUPLICATION_ENABLED, - DD_IAST_ENABLED, - DD_IAST_MAX_CONCURRENT_REQUESTS, - DD_IAST_MAX_CONTEXT_OPERATIONS, - DD_IAST_REDACTION_ENABLED, - DD_IAST_REDACTION_NAME_PATTERN, - DD_IAST_REDACTION_VALUE_PATTERN, - DD_IAST_REQUEST_SAMPLING, - DD_IAST_SECURITY_CONTROLS_CONFIGURATION, - DD_IAST_TELEMETRY_VERBOSITY, - DD_IAST_STACK_TRACE_ENABLED, - DD_INJECTION_ENABLED, - 
DD_INJECT_FORCE, - DD_ENABLE_NX_SERVICE_NAME, - DD_INSTRUMENTATION_TELEMETRY_ENABLED, - DD_INSTRUMENTATION_CONFIG_ID, - DD_LOGS_INJECTION, - DD_LOGS_OTEL_ENABLED, - DD_METRICS_OTEL_ENABLED, - DD_LANGCHAIN_SPAN_CHAR_LIMIT, - DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE, - DD_LLMOBS_AGENTLESS_ENABLED, - DD_LLMOBS_ENABLED, - DD_LLMOBS_ML_APP, - DD_OPENAI_LOGS_ENABLED, - DD_OPENAI_SPAN_CHAR_LIMIT, - DD_PROFILING_EXPORTERS, - DD_PROFILING_SOURCE_MAP, - DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD, - DD_INSTRUMENTATION_INSTALL_ID, - DD_INSTRUMENTATION_INSTALL_TIME, - DD_INSTRUMENTATION_INSTALL_TYPE, - DD_REMOTE_CONFIGURATION_ENABLED, - DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS, - DD_RUNTIME_METRICS_ENABLED, - DD_RUNTIME_METRICS_EVENT_LOOP_ENABLED, - DD_RUNTIME_METRICS_GC_ENABLED, - DD_SERVICE, - DD_SERVICE_MAPPING, - DD_SITE, - DD_SPAN_SAMPLING_RULES, - DD_SPAN_SAMPLING_RULES_FILE, - DD_TAGS, - DD_TELEMETRY_DEBUG, - DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED, - DD_TELEMETRY_HEARTBEAT_INTERVAL, - DD_TELEMETRY_LOG_COLLECTION_ENABLED, - DD_TELEMETRY_METRICS_ENABLED, - DD_TEST_TIA_KEEP_COV_CONFIG, - DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED, - DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED, - DD_TRACE_AGENT_PORT, - DD_TRACE_AGENT_PROTOCOL_VERSION, - DD_TRACE_AWS_ADD_SPAN_POINTERS, - DD_TRACE_BAGGAGE_MAX_BYTES, - DD_TRACE_BAGGAGE_MAX_ITEMS, - DD_TRACE_BAGGAGE_TAG_KEYS, - DD_TRACE_CLIENT_IP_ENABLED, - DD_TRACE_CLIENT_IP_HEADER, - DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING, - DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING, - DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH, - DD_TRACE_DYNAMODB_TABLE_PRIMARY_KEYS, - DD_TRACE_ENABLED, - DD_TRACE_EXPERIMENTAL_EXPORTER, - DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED, - DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED, - DD_TRACE_GIT_METADATA_ENABLED, - DD_TRACE_GRAPHQL_ERROR_EXTENSIONS, - DD_TRACE_HEADER_TAGS, - DD_TRACE_LEGACY_BAGGAGE_ENABLED, - DD_TRACE_MEMCACHED_COMMAND_ENABLED, - DD_TRACE_MIDDLEWARE_TRACING_ENABLED, - DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP, - 
DD_TRACE_PARTIAL_FLUSH_MIN_SPANS, - DD_TRACE_FLUSH_INTERVAL, - DD_TRACE_PEER_SERVICE_MAPPING, - DD_TRACE_PROPAGATION_EXTRACT_FIRST, - DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT, - DD_TRACE_PROPAGATION_STYLE, - DD_TRACE_PROPAGATION_STYLE_INJECT, - DD_TRACE_PROPAGATION_STYLE_EXTRACT, - DD_TRACE_RATE_LIMIT, - DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED, - DD_TRACE_REPORT_HOSTNAME, - DD_TRACE_RESOURCE_RENAMING_ENABLED, - DD_TRACE_SAMPLE_RATE, - DD_TRACE_SAMPLING_RULES, - DD_TRACE_SCOPE, - DD_TRACE_SPAN_ATTRIBUTE_SCHEMA, - DD_TRACE_SPAN_LEAK_DEBUG, - DD_TRACE_STARTUP_LOGS, - DD_TRACE_TAGS, - DD_TRACE_WEBSOCKET_MESSAGES_ENABLED, - DD_TRACE_WEBSOCKET_MESSAGES_INHERIT_SAMPLING, - DD_TRACE_WEBSOCKET_MESSAGES_SEPARATE_TRACES, - DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH, - DD_TRACING_ENABLED, - DD_VERSION, - DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE, - DD_VERTEXAI_SPAN_CHAR_LIMIT, - DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED, - DD_TRACE_NATIVE_SPAN_EVENTS, - OTEL_METRICS_EXPORTER, - OTEL_PROPAGATORS, - OTEL_RESOURCE_ATTRIBUTES, - OTEL_SERVICE_NAME, - OTEL_TRACES_SAMPLER, - OTEL_TRACES_SAMPLER_ARG, - DD_EXPERIMENTAL_FLAGGING_PROVIDER_ENABLED, - DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS, - OTEL_EXPORTER_OTLP_LOGS_ENDPOINT, - OTEL_EXPORTER_OTLP_LOGS_HEADERS, - OTEL_EXPORTER_OTLP_LOGS_PROTOCOL, - OTEL_EXPORTER_OTLP_LOGS_TIMEOUT, - OTEL_EXPORTER_OTLP_METRICS_ENDPOINT, - OTEL_EXPORTER_OTLP_METRICS_HEADERS, - OTEL_EXPORTER_OTLP_METRICS_PROTOCOL, - OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, - OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE, - OTEL_METRIC_EXPORT_TIMEOUT, - OTEL_EXPORTER_OTLP_PROTOCOL, - OTEL_EXPORTER_OTLP_ENDPOINT, - OTEL_EXPORTER_OTLP_HEADERS, - OTEL_EXPORTER_OTLP_TIMEOUT, - OTEL_BSP_SCHEDULE_DELAY, - OTEL_BSP_MAX_EXPORT_BATCH_SIZE, - OTEL_BSP_MAX_QUEUE_SIZE, - OTEL_METRIC_EXPORT_INTERVAL, - NX_TASK_TARGET_PROJECT, - } = source - - const tags = {} - - tagger.add(tags, parseSpaceSeparatedTags(handleOtel(OTEL_RESOURCE_ATTRIBUTES))) - tagger.add(tags, 
parseSpaceSeparatedTags(DD_TAGS)) - tagger.add(tags, DD_TRACE_TAGS) - - Object.assign(this.#parsedDdTags, tags) - - setString(target, 'apiKey', DD_API_KEY) - setBoolean(target, 'otelLogsEnabled', DD_LOGS_OTEL_ENABLED) - // Set OpenTelemetry logs configuration with specific _LOGS_ vars taking precedence over generic _EXPORTERS_ vars - if (OTEL_EXPORTER_OTLP_ENDPOINT) { - // Only set if there's a custom URL, otherwise let calc phase handle the default - setString(target, 'otelUrl', OTEL_EXPORTER_OTLP_ENDPOINT) + if (!trackedConfigOrigins.has('dogstatsd.hostname')) { + setAndTrack(this, 'dogstatsd.hostname', agentHostname) } - if (OTEL_EXPORTER_OTLP_ENDPOINT || OTEL_EXPORTER_OTLP_LOGS_ENDPOINT) { - setString(target, 'otelLogsUrl', OTEL_EXPORTER_OTLP_LOGS_ENDPOINT || target.otelUrl) + // Disable log injection when OTEL logs are enabled + // OTEL logs and DD log injection are mutually exclusive + if (this.otelLogsEnabled) { + setAndTrack(this, 'logInjection', false) } - setString(target, 'otelHeaders', OTEL_EXPORTER_OTLP_HEADERS) - setString(target, 'otelLogsHeaders', OTEL_EXPORTER_OTLP_LOGS_HEADERS || target.otelHeaders) - setString(target, 'otelProtocol', OTEL_EXPORTER_OTLP_PROTOCOL) - setString(target, 'otelLogsProtocol', OTEL_EXPORTER_OTLP_LOGS_PROTOCOL || target.otelProtocol) - const otelTimeout = nonNegInt(OTEL_EXPORTER_OTLP_TIMEOUT, 'OTEL_EXPORTER_OTLP_TIMEOUT') - if (otelTimeout !== undefined) { - target.otelTimeout = otelTimeout + if (this.otelMetricsEnabled && + trackedConfigOrigins.has('OTEL_METRICS_EXPORTER') && + this.OTEL_METRICS_EXPORTER === 'none') { + setAndTrack(this, 'otelMetricsEnabled', false) } - const otelLogsTimeout = nonNegInt(OTEL_EXPORTER_OTLP_LOGS_TIMEOUT, 'OTEL_EXPORTER_OTLP_LOGS_TIMEOUT') - target.otelLogsTimeout = otelLogsTimeout === undefined ? 
target.otelTimeout : otelLogsTimeout - const otelBatchTimeout = nonNegInt(OTEL_BSP_SCHEDULE_DELAY, 'OTEL_BSP_SCHEDULE_DELAY', false) - if (otelBatchTimeout !== undefined) { - target.otelBatchTimeout = otelBatchTimeout + + if (this.telemetry.heartbeatInterval) { + setAndTrack(this, 'telemetry.heartbeatInterval', Math.floor(this.telemetry.heartbeatInterval * 1000)) } - target.otelMaxExportBatchSize = nonNegInt(OTEL_BSP_MAX_EXPORT_BATCH_SIZE, 'OTEL_BSP_MAX_EXPORT_BATCH_SIZE', false) - target.otelMaxQueueSize = nonNegInt(OTEL_BSP_MAX_QUEUE_SIZE, 'OTEL_BSP_MAX_QUEUE_SIZE', false) - - const otelMetricsExporterEnabled = OTEL_METRICS_EXPORTER?.toLowerCase() !== 'none' - setBoolean( - target, - 'otelMetricsEnabled', - DD_METRICS_OTEL_ENABLED && isTrue(DD_METRICS_OTEL_ENABLED) && otelMetricsExporterEnabled - ) - // Set OpenTelemetry metrics configuration with specific _METRICS_ vars - // taking precedence over generic _EXPORTERS_ vars - if (OTEL_EXPORTER_OTLP_ENDPOINT || OTEL_EXPORTER_OTLP_METRICS_ENDPOINT) { - setString(target, 'otelMetricsUrl', OTEL_EXPORTER_OTLP_METRICS_ENDPOINT || target.otelUrl) + + // Enable resourceRenamingEnabled when appsec is enabled and only + // if DD_TRACE_RESOURCE_RENAMING_ENABLED is not explicitly set + if (!trackedConfigOrigins.has('resourceRenamingEnabled')) { + setAndTrack(this, 'resourceRenamingEnabled', this.appsec.enabled ?? false) } - setString(target, 'otelMetricsHeaders', OTEL_EXPORTER_OTLP_METRICS_HEADERS || target.otelHeaders) - setString(target, 'otelMetricsProtocol', OTEL_EXPORTER_OTLP_METRICS_PROTOCOL || target.otelProtocol) - const otelMetricsTimeout = nonNegInt(OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, 'OTEL_EXPORTER_OTLP_METRICS_TIMEOUT') - target.otelMetricsTimeout = otelMetricsTimeout === undefined ? 
target.otelTimeout : otelMetricsTimeout - target.otelMetricsExportTimeout = nonNegInt(OTEL_METRIC_EXPORT_TIMEOUT, 'OTEL_METRIC_EXPORT_TIMEOUT') - target.otelMetricsExportInterval = nonNegInt(OTEL_METRIC_EXPORT_INTERVAL, 'OTEL_METRIC_EXPORT_INTERVAL', false) - - // Parse temporality preference (default to DELTA for Datadog) - if (OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE) { - const temporalityPref = OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE.toUpperCase() - if (['DELTA', 'CUMULATIVE', 'LOWMEMORY'].includes(temporalityPref)) { - setString(target, 'otelMetricsTemporalityPreference', temporalityPref) - } + + if (!trackedConfigOrigins.has('spanComputePeerService') && this.spanAttributeSchema !== 'v0') { + setAndTrack(this, 'spanComputePeerService', true) + } + + if (!this.apmTracingEnabled) { + setAndTrack(this, 'stats.enabled', false) + } else if (!trackedConfigOrigins.has('stats.enabled')) { + setAndTrack(this, 'stats.enabled', getIsGCPFunction() || getIsAzureFunction()) } - setBoolean( - target, - 'apmTracingEnabled', - DD_APM_TRACING_ENABLED ?? 
- (DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED && isFalse(DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED)) - ) - setBoolean(target, 'propagateProcessTags.enabled', DD_EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED) - setString(target, 'appKey', DD_APP_KEY) - setBoolean(target, 'appsec.apiSecurity.enabled', DD_API_SECURITY_ENABLED && isTrue(DD_API_SECURITY_ENABLED)) - target['appsec.apiSecurity.sampleDelay'] = maybeFloat(DD_API_SECURITY_SAMPLE_DELAY) - setBoolean(target, 'appsec.apiSecurity.endpointCollectionEnabled', - DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED) - target['appsec.apiSecurity.endpointCollectionMessageLimit'] = - maybeInt(DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT) - target['appsec.blockedTemplateGraphql'] = maybeFile(DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON) - target['appsec.blockedTemplateHtml'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML) - unprocessedTarget['appsec.blockedTemplateHtml'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML - target['appsec.blockedTemplateJson'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON) - unprocessedTarget['appsec.blockedTemplateJson'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON - setBoolean(target, 'appsec.enabled', DD_APPSEC_ENABLED) - setString(target, 'appsec.eventTracking.mode', DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE) - // TODO appsec.extendedHeadersCollection are deprecated, to delete in a major - setBoolean(target, 'appsec.extendedHeadersCollection.enabled', DD_APPSEC_COLLECT_ALL_HEADERS) - setBoolean( - target, - 'appsec.extendedHeadersCollection.redaction', - DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED - ) - target['appsec.extendedHeadersCollection.maxHeaders'] = maybeInt(DD_APPSEC_MAX_COLLECTED_HEADERS) - unprocessedTarget['appsec.extendedHeadersCollection.maxHeaders'] = DD_APPSEC_MAX_COLLECTED_HEADERS - setString(target, 'appsec.obfuscatorKeyRegex', DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP) - setString(target, 'appsec.obfuscatorValueRegex', DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP) - setBoolean(target, 
'appsec.rasp.enabled', DD_APPSEC_RASP_ENABLED) - // TODO Deprecated, to delete in a major - setBoolean(target, 'appsec.rasp.bodyCollection', DD_APPSEC_RASP_COLLECT_REQUEST_BODY) - target['appsec.rateLimit'] = maybeInt(DD_APPSEC_TRACE_RATE_LIMIT) - unprocessedTarget['appsec.rateLimit'] = DD_APPSEC_TRACE_RATE_LIMIT - setString(target, 'appsec.rules', DD_APPSEC_RULES) - // DD_APPSEC_SCA_ENABLED is never used locally, but only sent to the backend - setBoolean(target, 'appsec.sca.enabled', DD_APPSEC_SCA_ENABLED) - setBoolean(target, 'appsec.stackTrace.enabled', DD_APPSEC_STACK_TRACE_ENABLED) - target['appsec.stackTrace.maxDepth'] = maybeInt(DD_APPSEC_MAX_STACK_TRACE_DEPTH) - unprocessedTarget['appsec.stackTrace.maxDepth'] = DD_APPSEC_MAX_STACK_TRACE_DEPTH - target['appsec.stackTrace.maxStackTraces'] = maybeInt(DD_APPSEC_MAX_STACK_TRACES) - unprocessedTarget['appsec.stackTrace.maxStackTraces'] = DD_APPSEC_MAX_STACK_TRACES - target['appsec.wafTimeout'] = maybeInt(DD_APPSEC_WAF_TIMEOUT) - unprocessedTarget['appsec.wafTimeout'] = DD_APPSEC_WAF_TIMEOUT - target['appsec.apiSecurity.downstreamBodyAnalysisSampleRate'] = - maybeFloat(DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE) - target['appsec.apiSecurity.maxDownstreamRequestBodyAnalysis'] = - maybeInt(DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS) - target.baggageMaxBytes = DD_TRACE_BAGGAGE_MAX_BYTES - target.baggageMaxItems = DD_TRACE_BAGGAGE_MAX_ITEMS - setArray(target, 'baggageTagKeys', DD_TRACE_BAGGAGE_TAG_KEYS) - setBoolean(target, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED) - setString(target, 'clientIpHeader', DD_TRACE_CLIENT_IP_HEADER?.toLowerCase()) - if (DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING || DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) { - if (DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING) { - setBoolean(target, 'cloudPayloadTagging.requestsEnabled', true) + + // TODO: Remove the experimental env vars as a major or deprecate the option? 
+ if (this.experimental?.b3) { + if (!this.tracePropagationStyle.inject.includes('b3')) { + this.tracePropagationStyle.inject.push('b3') } - if (DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) { - setBoolean(target, 'cloudPayloadTagging.responsesEnabled', true) + if (!this.tracePropagationStyle.extract.includes('b3')) { + this.tracePropagationStyle.extract.push('b3') } - target['cloudPayloadTagging.rules'] = appendRules( - splitJSONPathRules(DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING), - splitJSONPathRules(DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) - ) - } - if (DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH) { - target['cloudPayloadTagging.maxDepth'] = maybeInt(DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH) - } - setBoolean(target, 'crashtracking.enabled', DD_CRASHTRACKING_ENABLED) - setBoolean(target, 'codeOriginForSpans.enabled', DD_CODE_ORIGIN_FOR_SPANS_ENABLED) - setBoolean( - target, - 'codeOriginForSpans.experimental.exit_spans.enabled', - DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED - ) - setString(target, 'dbmPropagationMode', DD_DBM_PROPAGATION_MODE) - setBoolean(target, 'dbm.injectSqlBaseHash', DD_DBM_INJECT_SQL_BASEHASH) - setString(target, 'dogstatsd.hostname', DD_DOGSTATSD_HOST) - setString(target, 'dogstatsd.port', DD_DOGSTATSD_PORT) - setBoolean(target, 'dsmEnabled', DD_DATA_STREAMS_ENABLED) - target['dynamicInstrumentation.captureTimeoutMs'] = maybeInt(DD_DYNAMIC_INSTRUMENTATION_CAPTURE_TIMEOUT_MS) - unprocessedTarget['dynamicInstrumentation.captureTimeoutMs'] = DD_DYNAMIC_INSTRUMENTATION_CAPTURE_TIMEOUT_MS - setBoolean(target, 'dynamicInstrumentation.enabled', DD_DYNAMIC_INSTRUMENTATION_ENABLED) - setString(target, 'dynamicInstrumentation.probeFile', DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE) - setArray(target, 'dynamicInstrumentation.redactedIdentifiers', - DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS) - setArray( - target, - 'dynamicInstrumentation.redactionExcludedIdentifiers', - DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS - ) - 
target['dynamicInstrumentation.uploadIntervalSeconds'] = - maybeFloat(DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS) - unprocessedTarget['dynamicInstrumentation.uploadInterval'] = DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS - setString(target, 'env', DD_ENV || tags.env) - setBoolean( - target, - 'experimental.flaggingProvider.enabled', - DD_EXPERIMENTAL_FLAGGING_PROVIDER_ENABLED - ) - if (DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS != null) { - target['experimental.flaggingProvider.initializationTimeoutMs'] = - maybeInt(DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS) + if (!this.tracePropagationStyle.inject.includes('b3 single header')) { + this.tracePropagationStyle.inject.push('b3 single header') + } + if (!this.tracePropagationStyle.extract.includes('b3 single header')) { + this.tracePropagationStyle.extract.push('b3 single header') + } + setAndTrack(this, 'tracePropagationStyle.inject', this.tracePropagationStyle.inject) + setAndTrack(this, 'tracePropagationStyle.extract', this.tracePropagationStyle.extract) } - setBoolean(target, 'traceEnabled', DD_TRACE_ENABLED) - setBoolean(target, 'experimental.aiguard.enabled', DD_AI_GUARD_ENABLED) - setString(target, 'experimental.aiguard.endpoint', DD_AI_GUARD_ENDPOINT) - target['experimental.aiguard.maxContentSize'] = maybeInt(DD_AI_GUARD_MAX_CONTENT_SIZE) - unprocessedTarget['experimental.aiguard.maxContentSize'] = DD_AI_GUARD_MAX_CONTENT_SIZE - target['experimental.aiguard.maxMessagesLength'] = maybeInt(DD_AI_GUARD_MAX_MESSAGES_LENGTH) - unprocessedTarget['experimental.aiguard.maxMessagesLength'] = DD_AI_GUARD_MAX_MESSAGES_LENGTH - target['experimental.aiguard.timeout'] = maybeInt(DD_AI_GUARD_TIMEOUT) - unprocessedTarget['experimental.aiguard.timeout'] = DD_AI_GUARD_TIMEOUT - setBoolean(target, 'experimental.enableGetRumData', DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED) - setString(target, 'experimental.exporter', DD_TRACE_EXPERIMENTAL_EXPORTER) - if (AWS_LAMBDA_FUNCTION_NAME) 
{ - target.flushInterval = 0 - } else if (DD_TRACE_FLUSH_INTERVAL) { - target.flushInterval = maybeInt(DD_TRACE_FLUSH_INTERVAL) + + if (getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME')) { + setAndTrack(this, 'flushInterval', 0) } - target.flushMinSpans = maybeInt(DD_TRACE_PARTIAL_FLUSH_MIN_SPANS) - unprocessedTarget.flushMinSpans = DD_TRACE_PARTIAL_FLUSH_MIN_SPANS - setBoolean(target, 'gitMetadataEnabled', DD_TRACE_GIT_METADATA_ENABLED) - setIntegerRangeSet(target, 'grpc.client.error.statuses', DD_GRPC_CLIENT_ERROR_STATUSES) - setIntegerRangeSet(target, 'grpc.server.error.statuses', DD_GRPC_SERVER_ERROR_STATUSES) - setArray(target, 'headerTags', DD_TRACE_HEADER_TAGS) - target['heapSnapshot.count'] = maybeInt(DD_HEAP_SNAPSHOT_COUNT) - setString(target, 'heapSnapshot.destination', DD_HEAP_SNAPSHOT_DESTINATION) - target['heapSnapshot.interval'] = maybeInt(DD_HEAP_SNAPSHOT_INTERVAL) - setString(target, 'hostname', DD_AGENT_HOST) - target['iast.dbRowsToTaint'] = maybeInt(DD_IAST_DB_ROWS_TO_TAINT) - setBoolean(target, 'iast.deduplicationEnabled', DD_IAST_DEDUPLICATION_ENABLED) - setBoolean(target, 'iast.enabled', DD_IAST_ENABLED) - target['iast.maxConcurrentRequests'] = maybeInt(DD_IAST_MAX_CONCURRENT_REQUESTS) - unprocessedTarget['iast.maxConcurrentRequests'] = DD_IAST_MAX_CONCURRENT_REQUESTS - target['iast.maxContextOperations'] = maybeInt(DD_IAST_MAX_CONTEXT_OPERATIONS) - unprocessedTarget['iast.maxContextOperations'] = DD_IAST_MAX_CONTEXT_OPERATIONS - setBoolean(target, 'iast.redactionEnabled', DD_IAST_REDACTION_ENABLED && !isFalse(DD_IAST_REDACTION_ENABLED)) - setString(target, 'iast.redactionNamePattern', DD_IAST_REDACTION_NAME_PATTERN) - setString(target, 'iast.redactionValuePattern', DD_IAST_REDACTION_VALUE_PATTERN) - const iastRequestSampling = maybeInt(DD_IAST_REQUEST_SAMPLING) - if (iastRequestSampling !== undefined && iastRequestSampling > -1 && iastRequestSampling < 101) { - target['iast.requestSampling'] = iastRequestSampling + + if 
(!trackedConfigOrigins.has('apmTracingEnabled') && + trackedConfigOrigins.has('experimental.appsec.standalone.enabled')) { + setAndTrack(this, 'apmTracingEnabled', !this.experimental.appsec.standalone.enabled) } - unprocessedTarget['iast.requestSampling'] = DD_IAST_REQUEST_SAMPLING - setString(target, 'iast.securityControlsConfiguration', DD_IAST_SECURITY_CONTROLS_CONFIGURATION) - setString(target, 'iast.telemetryVerbosity', DD_IAST_TELEMETRY_VERBOSITY) - setBoolean(target, 'iast.stackTrace.enabled', DD_IAST_STACK_TRACE_ENABLED) - setString(target, 'installSignature.id', DD_INSTRUMENTATION_INSTALL_ID) - setString(target, 'installSignature.time', DD_INSTRUMENTATION_INSTALL_TIME) - setString(target, 'installSignature.type', DD_INSTRUMENTATION_INSTALL_TYPE) - // TODO: Why is DD_INJECTION_ENABLED a comma separated list? - setArray(target, 'injectionEnabled', DD_INJECTION_ENABLED) - if (DD_INJECTION_ENABLED !== undefined) { - setString(target, 'instrumentationSource', DD_INJECTION_ENABLED ? 'ssi' : 'manual') + + if (this.cloudPayloadTagging?.request || this.cloudPayloadTagging?.response) { + setAndTrack(this, 'cloudPayloadTagging.rules', appendRules( + this.cloudPayloadTagging.request, + this.cloudPayloadTagging.response + )) } - setBoolean(target, 'injectForce', DD_INJECT_FORCE) - setBoolean(target, 'isAzureFunction', getIsAzureFunction()) - setBoolean(target, 'isGCPFunction', getIsGCPFunction()) - setBoolean(target, 'gcpPubSubPushSubscriptionEnabled', enableGCPPubSubPushSubscription()) - target['langchain.spanCharLimit'] = maybeInt(DD_LANGCHAIN_SPAN_CHAR_LIMIT) - target['langchain.spanPromptCompletionSampleRate'] = maybeFloat(DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE) - setBoolean(target, 'legacyBaggageEnabled', DD_TRACE_LEGACY_BAGGAGE_ENABLED) - setBoolean(target, 'llmobs.agentlessEnabled', DD_LLMOBS_AGENTLESS_ENABLED) - setBoolean(target, 'llmobs.enabled', DD_LLMOBS_ENABLED) - setString(target, 'llmobs.mlApp', DD_LLMOBS_ML_APP) - setBoolean(target, 
'logInjection', DD_LOGS_INJECTION) - // Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent - setBoolean(target, 'memcachedCommandEnabled', DD_TRACE_MEMCACHED_COMMAND_ENABLED) - setBoolean(target, 'middlewareTracingEnabled', DD_TRACE_MIDDLEWARE_TRACING_ENABLED) - setBoolean(target, 'openAiLogsEnabled', DD_OPENAI_LOGS_ENABLED) - target['openai.spanCharLimit'] = maybeInt(DD_OPENAI_SPAN_CHAR_LIMIT) - unprocessedTarget.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT - if (DD_TRACE_PEER_SERVICE_MAPPING) { - target.peerServiceMapping = Object.fromEntries( - DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':')) - ) - unprocessedTarget.peerServiceMapping = DD_TRACE_PEER_SERVICE_MAPPING + + if (this.injectionEnabled) { + setAndTrack(this, 'instrumentationSource', 'ssi') } - setString(target, 'port', DD_TRACE_AGENT_PORT) - const profilingEnabled = normalizeProfilingEnabledValue(DD_PROFILING_ENABLED) - setString(target, 'profiling.enabled', profilingEnabled) - setString(target, 'profiling.exporters', DD_PROFILING_EXPORTERS) - setBoolean(target, 'profiling.sourceMap', DD_PROFILING_SOURCE_MAP && !isFalse(DD_PROFILING_SOURCE_MAP)) - if (DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) { - // This is only used in testing to not have to wait 30s - target['profiling.longLivedThreshold'] = Number(DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) + + if (!trackedConfigOrigins.has('runtimeMetrics.enabled') && this.OTEL_METRICS_EXPORTER === 'none') { + setAndTrack(this, 'runtimeMetrics.enabled', false) } - setString(target, 'protocolVersion', DD_TRACE_AGENT_PROTOCOL_VERSION) - setString(target, 'queryStringObfuscation', DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP) - setBoolean(target, 'remoteConfig.enabled', DD_REMOTE_CONFIGURATION_ENABLED) - target['remoteConfig.pollInterval'] = maybeFloat(DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS) - unprocessedTarget['remoteConfig.pollInterval'] = DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS - setBoolean(target, 
'reportHostname', DD_TRACE_REPORT_HOSTNAME) - if (DD_TRACE_RESOURCE_RENAMING_ENABLED !== undefined) { - setBoolean(target, 'resourceRenamingEnabled', DD_TRACE_RESOURCE_RENAMING_ENABLED) + if (!trackedConfigOrigins.has('sampleRate') && trackedConfigOrigins.has('OTEL_TRACES_SAMPLER')) { + setAndTrack(this, 'sampleRate', getFromOtelSamplerMap(this.OTEL_TRACES_SAMPLER, this.OTEL_TRACES_SAMPLER_ARG)) } - // only used to explicitly set runtimeMetrics to false - const otelSetRuntimeMetrics = String(OTEL_METRICS_EXPORTER).toLowerCase() === 'none' - ? false - : undefined - setBoolean(target, 'runtimeMetrics.enabled', DD_RUNTIME_METRICS_ENABLED || - otelSetRuntimeMetrics) - setBoolean(target, 'runtimeMetrics.eventLoop', DD_RUNTIME_METRICS_EVENT_LOOP_ENABLED) - setBoolean(target, 'runtimeMetrics.gc', DD_RUNTIME_METRICS_GC_ENABLED) - setBoolean(target, 'runtimeMetricsRuntimeId', DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED) - setArray(target, 'sampler.spanSamplingRules', reformatSpanSamplingRules( - maybeJsonFile(DD_SPAN_SAMPLING_RULES_FILE) ?? 
- safeJsonParse(DD_SPAN_SAMPLING_RULES) - )) - setUnit( - target, - 'sampleRate', - DD_TRACE_SAMPLE_RATE || getFromOtelSamplerMap(OTEL_TRACES_SAMPLER, OTEL_TRACES_SAMPLER_ARG) - ) - target['sampler.rateLimit'] = DD_TRACE_RATE_LIMIT - setSamplingRule(target, 'sampler.rules', safeJsonParse(DD_TRACE_SAMPLING_RULES)) - unprocessedTarget['sampler.rules'] = DD_TRACE_SAMPLING_RULES - setString(target, 'scope', DD_TRACE_SCOPE) - // Priority: - // DD_SERVICE > tags.service > OTEL_SERVICE_NAME > NX_TASK_TARGET_PROJECT (if DD_ENABLE_NX_SERVICE_NAME) > default - let serviceName = DD_SERVICE || tags.service || OTEL_SERVICE_NAME - let isServiceNameInferred - if (!serviceName && NX_TASK_TARGET_PROJECT) { - if (isTrue(DD_ENABLE_NX_SERVICE_NAME)) { - isServiceNameInferred = true - serviceName = NX_TASK_TARGET_PROJECT - } else if (DD_MAJOR < 6) { - // Warn about v6 behavior change for Nx projects - log.warn( - // eslint-disable-next-line @stylistic/max-len - 'NX_TASK_TARGET_PROJECT is set but no service name was configured. In v6, NX_TASK_TARGET_PROJECT will be used as the default service name. Set DD_ENABLE_NX_SERVICE_NAME=true to opt-in to this behavior now, or set a service name explicitly.' - ) + + if (this.DD_SPAN_SAMPLING_RULES_FILE) { + try { + // TODO: Should we log a warning in case this is defined next to spanSamplingRules? + setAndTrack(this, 'spanSamplingRules', transformers.toCamelCase(JSON.parse(this.DD_SPAN_SAMPLING_RULES_FILE))) + } catch (error) { + log.warn('Error reading span sampling rules file %s; %o', this.DD_SPAN_SAMPLING_RULES_FILE, error) } } - setString(target, 'service', serviceName) - if (serviceName) setBoolean(target, 'isServiceNameInferred', isServiceNameInferred ?? 
false) - if (DD_SERVICE_MAPPING) { - target.serviceMapping = Object.fromEntries( - DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':')) - ) - } - setString(target, 'site', DD_SITE) - if (DD_TRACE_SPAN_ATTRIBUTE_SCHEMA) { - setString(target, 'spanAttributeSchema', validateNamingVersion(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA)) - unprocessedTarget.spanAttributeSchema = DD_TRACE_SPAN_ATTRIBUTE_SCHEMA - } - // 0: disabled, 1: logging, 2: garbage collection + logging - target.spanLeakDebug = maybeInt(DD_TRACE_SPAN_LEAK_DEBUG) - setBoolean(target, 'spanRemoveIntegrationFromService', DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED) - setBoolean(target, 'startupLogs', DD_TRACE_STARTUP_LOGS) - setTags(target, 'tags', tags) - target.tagsHeaderMaxLength = DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH - setBoolean(target, 'telemetry.enabled', DD_INSTRUMENTATION_TELEMETRY_ENABLED) - setString(target, 'instrumentation_config_id', DD_INSTRUMENTATION_CONFIG_ID) - setBoolean(target, 'telemetry.debug', DD_TELEMETRY_DEBUG) - setBoolean(target, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED) - target['telemetry.heartbeatInterval'] = maybeInt(Math.floor(DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000)) - unprocessedTarget['telemetry.heartbeatInterval'] = DD_TELEMETRY_HEARTBEAT_INTERVAL - setBoolean(target, 'telemetry.logCollection', DD_TELEMETRY_LOG_COLLECTION_ENABLED) - setBoolean(target, 'telemetry.metrics', DD_TELEMETRY_METRICS_ENABLED) - setBoolean(target, 'isKeepingCoverageConfiguration', DD_TEST_TIA_KEEP_COV_CONFIG) - setBoolean(target, 'traceId128BitGenerationEnabled', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED) - setBoolean(target, 'traceId128BitLoggingEnabled', DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED) - warnIfPropagationStyleConflict( - DD_TRACE_PROPAGATION_STYLE, - DD_TRACE_PROPAGATION_STYLE_INJECT, - DD_TRACE_PROPAGATION_STYLE_EXTRACT - ) - if (DD_TRACE_PROPAGATION_STYLE !== undefined) { - setArray(target, 'tracePropagationStyle.inject', 
normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE)) - setArray(target, 'tracePropagationStyle.extract', normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE)) - } - if (DD_TRACE_PROPAGATION_STYLE_INJECT !== undefined) { - setArray(target, 'tracePropagationStyle.inject', - normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE_INJECT)) + + // All sampler options are tracked as individual values. No need to track the sampler object as a whole. + this.sampler = { + rules: this.samplingRules, + rateLimit: this.rateLimit, + sampleRate: this.sampleRate, + spanSamplingRules: this.spanSamplingRules, } - if (DD_TRACE_PROPAGATION_STYLE_EXTRACT !== undefined) { - setArray(target, 'tracePropagationStyle.extract', - normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE_EXTRACT)) + + // For LLMObs, we want to auto enable it when other llmobs options are defined. + if (!this.llmobs.enabled && + !trackedConfigOrigins.has('llmobs.enabled') && + (trackedConfigOrigins.has('llmobs.agentlessEnabled') || + trackedConfigOrigins.has('llmobs.mlApp'))) { + setAndTrack(this, 'llmobs.enabled', true) } - setBoolean(target, 'tracePropagationExtractFirst', DD_TRACE_PROPAGATION_EXTRACT_FIRST) - if (DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT !== undefined) { - const stringPropagationBehaviorExtract = String(DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT) - target.tracePropagationBehaviorExtract = - VALID_PROPAGATION_BEHAVIOR_EXTRACT.has(stringPropagationBehaviorExtract) - ? stringPropagationBehaviorExtract - : 'continue' + + if (this.OTEL_RESOURCE_ATTRIBUTES) { + for (const [key, value] of Object.entries(this.OTEL_RESOURCE_ATTRIBUTES)) { + // Not replacing existing tags keeps the order of the tags as before. 
+ if (!this.tags[key]) { + this.tags[key] = value + } + } } - if (DD_TRACE_PROPAGATION_STYLE !== undefined || - DD_TRACE_PROPAGATION_STYLE_INJECT !== undefined || - DD_TRACE_PROPAGATION_STYLE_EXTRACT !== undefined || - OTEL_PROPAGATORS !== undefined) { - // At least one var is defined, calculate value using truthy logic - const useDdStyle = DD_TRACE_PROPAGATION_STYLE || - DD_TRACE_PROPAGATION_STYLE_INJECT || - DD_TRACE_PROPAGATION_STYLE_EXTRACT - setBoolean(target, 'tracePropagationStyle.otelPropagators', - useDdStyle ? false : !!OTEL_PROPAGATORS) - - // Use OTEL_PROPAGATORS if no DD-specific vars are set - if (!useDdStyle && OTEL_PROPAGATORS) { - const otelStyles = normalizePropagationStyle(OTEL_PROPAGATORS) - // Validate OTEL propagators - for (const style of otelStyles || []) { - if (!VALID_PROPAGATION_STYLES.has(style)) { - log.warn('unexpected value %s for OTEL_PROPAGATORS environment variable', style) - getCounter('otel.env.invalid', 'DD_TRACE_PROPAGATION_STYLE', 'OTEL_PROPAGATORS').inc() + if (this.DD_TRACE_TAGS) { + // TODO: This is a hack to keep the order of the tags as before. + // That hack is not sufficient, since it does not handle other cases where the tags are set by the user. + if (trackedConfigOrigins.get('tags') === 'code') { + for (const [key, value] of Object.entries(this.DD_TRACE_TAGS)) { + // Not replacing existing tags keeps the order of the tags as before. 
+ if (!this.tags[key]) { + this.tags[key] = value } } - // Set inject/extract from OTEL_PROPAGATORS - if (otelStyles) { - setArray(target, 'tracePropagationStyle.inject', otelStyles) - setArray(target, 'tracePropagationStyle.extract', otelStyles) - } + } else { + Object.assign(this.tags, this.DD_TRACE_TAGS) } } - setBoolean(target, 'traceWebsocketMessagesEnabled', DD_TRACE_WEBSOCKET_MESSAGES_ENABLED) - setBoolean(target, 'traceWebsocketMessagesInheritSampling', DD_TRACE_WEBSOCKET_MESSAGES_INHERIT_SAMPLING) - setBoolean(target, 'traceWebsocketMessagesSeparateTraces', DD_TRACE_WEBSOCKET_MESSAGES_SEPARATE_TRACES) - setBoolean(target, 'tracing', DD_TRACING_ENABLED) - setString(target, 'version', DD_VERSION || tags.version) - setBoolean(target, 'inferredProxyServicesEnabled', DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED) - setBoolean(target, 'trace.aws.addSpanPointers', DD_TRACE_AWS_ADD_SPAN_POINTERS) - setString(target, 'trace.dynamoDb.tablePrimaryKeys', DD_TRACE_DYNAMODB_TABLE_PRIMARY_KEYS) - setArray(target, 'graphqlErrorExtensions', DD_TRACE_GRAPHQL_ERROR_EXTENSIONS) - setBoolean(target, 'trace.nativeSpanEvents', DD_TRACE_NATIVE_SPAN_EVENTS) - target['vertexai.spanPromptCompletionSampleRate'] = maybeFloat(DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE) - target['vertexai.spanCharLimit'] = maybeInt(DD_VERTEXAI_SPAN_CHAR_LIMIT) - } - #applyOptions (options) { - const opts = this.#options - const tags = {} - - options = this.#optionsArg = { ingestion: {}, ...options, ...opts } - - tagger.add(tags, options.tags) - - setBoolean(opts, 'apmTracingEnabled', options.apmTracingEnabled ?? 
- (options.experimental?.appsec?.standalone && !options.experimental.appsec.standalone.enabled) - ) - setBoolean(opts, 'appsec.apiSecurity.enabled', options.appsec?.apiSecurity?.enabled) - setBoolean(opts, 'appsec.apiSecurity.endpointCollectionEnabled', - options.appsec?.apiSecurity?.endpointCollectionEnabled) - opts['appsec.apiSecurity.endpointCollectionMessageLimit'] = - maybeInt(options.appsec?.apiSecurity?.endpointCollectionMessageLimit) - opts['appsec.blockedTemplateGraphql'] = maybeFile(options.appsec?.blockedTemplateGraphql) - opts['appsec.blockedTemplateHtml'] = maybeFile(options.appsec?.blockedTemplateHtml) - this.#optsUnprocessed['appsec.blockedTemplateHtml'] = options.appsec?.blockedTemplateHtml - opts['appsec.blockedTemplateJson'] = maybeFile(options.appsec?.blockedTemplateJson) - this.#optsUnprocessed['appsec.blockedTemplateJson'] = options.appsec?.blockedTemplateJson - setBoolean(opts, 'appsec.enabled', options.appsec?.enabled) - setString(opts, 'appsec.eventTracking.mode', options.appsec?.eventTracking?.mode) - setBoolean( - opts, - 'appsec.extendedHeadersCollection.enabled', - options.appsec?.extendedHeadersCollection?.enabled - ) - setBoolean( - opts, - 'appsec.extendedHeadersCollection.redaction', - options.appsec?.extendedHeadersCollection?.redaction - ) - opts['appsec.extendedHeadersCollection.maxHeaders'] = options.appsec?.extendedHeadersCollection?.maxHeaders - setString(opts, 'appsec.obfuscatorKeyRegex', options.appsec?.obfuscatorKeyRegex) - setString(opts, 'appsec.obfuscatorValueRegex', options.appsec?.obfuscatorValueRegex) - setBoolean(opts, 'appsec.rasp.enabled', options.appsec?.rasp?.enabled) - setBoolean(opts, 'appsec.rasp.bodyCollection', options.appsec?.rasp?.bodyCollection) - opts['appsec.rateLimit'] = maybeInt(options.appsec?.rateLimit) - this.#optsUnprocessed['appsec.rateLimit'] = options.appsec?.rateLimit - setString(opts, 'appsec.rules', options.appsec?.rules) - setBoolean(opts, 'appsec.stackTrace.enabled', 
options.appsec?.stackTrace?.enabled) - opts['appsec.stackTrace.maxDepth'] = maybeInt(options.appsec?.stackTrace?.maxDepth) - this.#optsUnprocessed['appsec.stackTrace.maxDepth'] = options.appsec?.stackTrace?.maxDepth - opts['appsec.stackTrace.maxStackTraces'] = maybeInt(options.appsec?.stackTrace?.maxStackTraces) - this.#optsUnprocessed['appsec.stackTrace.maxStackTraces'] = options.appsec?.stackTrace?.maxStackTraces - opts['appsec.wafTimeout'] = maybeInt(options.appsec?.wafTimeout) - this.#optsUnprocessed['appsec.wafTimeout'] = options.appsec?.wafTimeout - setBoolean(opts, 'clientIpEnabled', options.clientIpEnabled) - setString(opts, 'clientIpHeader', options.clientIpHeader?.toLowerCase()) - if (options.cloudPayloadTagging?.request || options.cloudPayloadTagging?.response) { - if (options.cloudPayloadTagging.request) { - setBoolean(opts, 'cloudPayloadTagging.requestsEnabled', true) - } - if (options.cloudPayloadTagging.response) { - setBoolean(opts, 'cloudPayloadTagging.responsesEnabled', true) - } - opts['cloudPayloadTagging.rules'] = appendRules( - splitJSONPathRules(options.cloudPayloadTagging.request), - splitJSONPathRules(options.cloudPayloadTagging.response) - ) - } - if (options.cloudPayloadTagging?.requestsEnabled !== undefined) { - setBoolean(opts, 'cloudPayloadTagging.requestsEnabled', options.cloudPayloadTagging.requestsEnabled) - } - if (options.cloudPayloadTagging?.responsesEnabled !== undefined) { - setBoolean(opts, 'cloudPayloadTagging.responsesEnabled', options.cloudPayloadTagging.responsesEnabled) + if (!this.#parsedDdTags) { + this.#parsedDdTags = rfdc(this.tags) } - opts['cloudPayloadTagging.maxDepth'] = maybeInt(options.cloudPayloadTagging?.maxDepth) - opts.baggageMaxBytes = options.baggageMaxBytes - opts.baggageMaxItems = options.baggageMaxItems - setArray(opts, 'baggageTagKeys', options.baggageTagKeys) - setBoolean(opts, 'codeOriginForSpans.enabled', options.codeOriginForSpans?.enabled) - setBoolean( - opts, - 
'codeOriginForSpans.experimental.exit_spans.enabled', - options.codeOriginForSpans?.experimental?.exit_spans?.enabled - ) - setString(opts, 'dbmPropagationMode', options.dbmPropagationMode) - setBoolean(opts, 'dbm.injectSqlBaseHash', options.dbm?.injectSqlBaseHash) - if (options.dogstatsd) { - setString(opts, 'dogstatsd.hostname', options.dogstatsd.hostname) - setString(opts, 'dogstatsd.port', options.dogstatsd.port) - } - setBoolean(opts, 'dsmEnabled', options.dsmEnabled) - opts['dynamicInstrumentation.captureTimeoutMs'] = maybeInt(options.dynamicInstrumentation?.captureTimeoutMs) - this.#optsUnprocessed['dynamicInstrumentation.captureTimeoutMs'] = options.dynamicInstrumentation?.captureTimeoutMs - setBoolean(opts, 'dynamicInstrumentation.enabled', options.dynamicInstrumentation?.enabled) - setString(opts, 'dynamicInstrumentation.probeFile', options.dynamicInstrumentation?.probeFile) - setArray( - opts, - 'dynamicInstrumentation.redactedIdentifiers', - options.dynamicInstrumentation?.redactedIdentifiers - ) - setArray( - opts, - 'dynamicInstrumentation.redactionExcludedIdentifiers', - options.dynamicInstrumentation?.redactionExcludedIdentifiers - ) - opts['dynamicInstrumentation.uploadIntervalSeconds'] = - maybeFloat(options.dynamicInstrumentation?.uploadIntervalSeconds) - this.#optsUnprocessed['dynamicInstrumentation.uploadIntervalSeconds'] = - options.dynamicInstrumentation?.uploadIntervalSeconds - setString(opts, 'env', options.env || tags.env) - setBoolean(opts, 'experimental.aiguard.enabled', options.experimental?.aiguard?.enabled) - setString(opts, 'experimental.aiguard.endpoint', options.experimental?.aiguard?.endpoint) - opts['experimental.aiguard.maxMessagesLength'] = maybeInt(options.experimental?.aiguard?.maxMessagesLength) - this.#optsUnprocessed['experimental.aiguard.maxMessagesLength'] = options.experimental?.aiguard?.maxMessagesLength - opts['experimental.aiguard.maxContentSize'] = maybeInt(options.experimental?.aiguard?.maxContentSize) - 
this.#optsUnprocessed['experimental.aiguard.maxContentSize'] = options.experimental?.aiguard?.maxContentSize - opts['experimental.aiguard.timeout'] = maybeInt(options.experimental?.aiguard?.timeout) - this.#optsUnprocessed['experimental.aiguard.timeout'] = options.experimental?.aiguard?.timeout - setBoolean(opts, 'experimental.enableGetRumData', options.experimental?.enableGetRumData) - setString(opts, 'experimental.exporter', options.experimental?.exporter) - setBoolean(opts, 'experimental.flaggingProvider.enabled', options.experimental?.flaggingProvider?.enabled) - opts['experimental.flaggingProvider.initializationTimeoutMs'] = maybeInt( - options.experimental?.flaggingProvider?.initializationTimeoutMs - ) - this.#optsUnprocessed['experimental.flaggingProvider.initializationTimeoutMs'] = - options.experimental?.flaggingProvider?.initializationTimeoutMs - opts.flushInterval = maybeInt(options.flushInterval) - this.#optsUnprocessed.flushInterval = options.flushInterval - opts.flushMinSpans = maybeInt(options.flushMinSpans) - this.#optsUnprocessed.flushMinSpans = options.flushMinSpans - setArray(opts, 'headerTags', options.headerTags) - setString(opts, 'hostname', options.hostname) - opts['iast.dbRowsToTaint'] = maybeInt(options.iast?.dbRowsToTaint) - setBoolean(opts, 'iast.deduplicationEnabled', options.iast && options.iast.deduplicationEnabled) - setBoolean(opts, 'iast.enabled', - options.iast && (options.iast === true || options.iast.enabled === true)) - opts['iast.maxConcurrentRequests'] = maybeInt(options.iast?.maxConcurrentRequests) - this.#optsUnprocessed['iast.maxConcurrentRequests'] = options.iast?.maxConcurrentRequests - opts['iast.maxContextOperations'] = maybeInt(options.iast?.maxContextOperations) - this.#optsUnprocessed['iast.maxContextOperations'] = options.iast?.maxContextOperations - setBoolean(opts, 'iast.redactionEnabled', options.iast?.redactionEnabled) - setString(opts, 'iast.redactionNamePattern', options.iast?.redactionNamePattern) - 
setString(opts, 'iast.redactionValuePattern', options.iast?.redactionValuePattern) - const iastRequestSampling = maybeInt(options.iast?.requestSampling) - if (iastRequestSampling !== undefined && iastRequestSampling > -1 && iastRequestSampling < 101) { - opts['iast.requestSampling'] = iastRequestSampling - this.#optsUnprocessed['iast.requestSampling'] = options.iast?.requestSampling - } - if (DD_MAJOR < 6) { - opts['iast.securityControlsConfiguration'] = options.iast?.securityControlsConfiguration + + if (!this.env && this.tags.env !== undefined) { + setAndTrack(this, 'env', this.tags.env) } - setBoolean(opts, 'iast.stackTrace.enabled', options.iast?.stackTrace?.enabled) - setString(opts, 'iast.telemetryVerbosity', options.iast && options.iast.telemetryVerbosity) - setBoolean(opts, 'isCiVisibility', options.isCiVisibility) - setBoolean(opts, 'legacyBaggageEnabled', options.legacyBaggageEnabled) - setBoolean(opts, 'llmobs.agentlessEnabled', options.llmobs?.agentlessEnabled) - setString(opts, 'llmobs.mlApp', options.llmobs?.mlApp) - setBoolean(opts, 'logInjection', options.logInjection) - opts.lookup = options.lookup - setBoolean(opts, 'middlewareTracingEnabled', options.middlewareTracingEnabled) - setBoolean(opts, 'openAiLogsEnabled', options.openAiLogsEnabled) - opts.peerServiceMapping = options.peerServiceMapping - setBoolean(opts, 'plugins', options.plugins) - setString(opts, 'port', options.port) - const strProfiling = String(options.profiling) - if (['true', 'false', 'auto'].includes(strProfiling)) { - setString(opts, 'profiling.enabled', strProfiling) + + if (!this.version) { + setAndTrack(this, 'version', this.tags.version || pkg.version) + this.tags.version ??= pkg.version } - setString(opts, 'protocolVersion', options.protocolVersion) - if (options.remoteConfig) { - opts['remoteConfig.pollInterval'] = maybeFloat(options.remoteConfig.pollInterval) - this.#optsUnprocessed['remoteConfig.pollInterval'] = options.remoteConfig.pollInterval + + let 
isServiceNameInferred = false + if (!trackedConfigOrigins.has('service')) { + if (this.tags.service) { + setAndTrack(this, 'service', this.tags.service) + } else { + const NX_TASK_TARGET_PROJECT = getEnvironmentVariable('NX_TASK_TARGET_PROJECT') + if (NX_TASK_TARGET_PROJECT) { + if (this.DD_ENABLE_NX_SERVICE_NAME) { + setAndTrack(this, 'service', NX_TASK_TARGET_PROJECT) + isServiceNameInferred = true + } else if (DD_MAJOR < 6) { + log.warn( + // eslint-disable-next-line eslint-rules/eslint-log-printf-style + 'NX_TASK_TARGET_PROJECT is set but no service name was configured. In v6, NX_TASK_TARGET_PROJECT will ' + + 'be used as the default service name. Set DD_ENABLE_NX_SERVICE_NAME=true to opt-in to this behavior ' + + 'now, or set a service name explicitly.' + ) + } + } + } + + if (!this.service) { + const serverlessName = IS_SERVERLESS + ? ( + getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') || + getEnvironmentVariable('FUNCTION_NAME') || // Google Cloud Function Name set by deprecated runtimes + getEnvironmentVariable('K_SERVICE') || // Google Cloud Function Name set by newer runtimes + getEnvironmentVariable('WEBSITE_SITE_NAME') // set by Azure Functions + ) + : undefined + + setAndTrack(this, 'service', serverlessName || pkg.name || 'node') + this.tags.service ??= /** @type {string} */ (this.service) + isServiceNameInferred = true + } } - setBoolean(opts, 'reportHostname', options.reportHostname) - setBoolean(opts, 'runtimeMetrics.enabled', options.runtimeMetrics?.enabled) - setBoolean(opts, 'runtimeMetrics.eventLoop', options.runtimeMetrics?.eventLoop) - setBoolean(opts, 'runtimeMetrics.gc', options.runtimeMetrics?.gc) - setBoolean(opts, 'runtimeMetricsRuntimeId', options.runtimeMetricsRuntimeId) - setArray(opts, 'sampler.spanSamplingRules', reformatSpanSamplingRules(options.spanSamplingRules)) - setUnit(opts, 'sampleRate', options.sampleRate ?? options.ingestion.sampleRate) - opts['sampler.rateLimit'] = maybeInt(options.rateLimit ?? 
options.ingestion.rateLimit) - setSamplingRule(opts, 'sampler.rules', options.samplingRules) - const optService = options.service || tags.service - setString(opts, 'service', optService) - if (optService) { - setBoolean(opts, 'isServiceNameInferred', false) + setAndTrack(this, 'isServiceNameInferred', isServiceNameInferred) + + // Add missing tags, in case they are defined otherwise. + if (this.service) { + this.tags.service = this.service } - opts.serviceMapping = options.serviceMapping - setString(opts, 'site', options.site) - if (options.spanAttributeSchema) { - setString(opts, 'spanAttributeSchema', validateNamingVersion(options.spanAttributeSchema)) - this.#optsUnprocessed.spanAttributeSchema = options.spanAttributeSchema + if (this.env) { + this.tags.env = this.env } - setBoolean(opts, 'spanRemoveIntegrationFromService', options.spanRemoveIntegrationFromService) - setBoolean(opts, 'startupLogs', options.startupLogs) - setTags(opts, 'tags', tags) - setBoolean(opts, 'traceId128BitGenerationEnabled', options.traceId128BitGenerationEnabled) - setBoolean(opts, 'traceId128BitLoggingEnabled', options.traceId128BitLoggingEnabled) - setBoolean(opts, 'traceWebsocketMessagesEnabled', options.traceWebsocketMessagesEnabled) - setBoolean(opts, 'traceWebsocketMessagesInheritSampling', options.traceWebsocketMessagesInheritSampling) - setBoolean(opts, 'traceWebsocketMessagesSeparateTraces', options.traceWebsocketMessagesSeparateTraces) - setString(opts, 'version', options.version || tags.version) - setBoolean(opts, 'inferredProxyServicesEnabled', options.inferredProxyServicesEnabled) - setBoolean(opts, 'graphqlErrorExtensions', options.graphqlErrorExtensions) - setBoolean(opts, 'trace.nativeSpanEvents', options.trace?.nativeSpanEvents) - if (options.tracePropagationStyle) { - setArray(opts, 'tracePropagationStyle.inject', - normalizePropagationStyle(options.tracePropagationStyle.inject ?? 
options.tracePropagationStyle)) - setArray(opts, 'tracePropagationStyle.extract', - normalizePropagationStyle(options.tracePropagationStyle.extract ?? options.tracePropagationStyle)) + if (this.version) { + this.tags.version = this.version } + this.tags['runtime-id'] = RUNTIME_ID - // For LLMObs, we want the environment variable to take precedence over the options. - // This is reliant on environment config being set before options. - // This is to make sure the origins of each value are tracked appropriately for telemetry. - // We'll only set `llmobs.enabled` on the opts when it's not set on the environment, and options.llmobs is provided. - if (this.#env['llmobs.enabled'] == null && options.llmobs) { - setBoolean(opts, 'llmobs.enabled', true) + if (IS_SERVERLESS) { + setAndTrack(this, 'telemetry.enabled', false) + setAndTrack(this, 'crashtracking.enabled', false) + setAndTrack(this, 'remoteConfig.enabled', false) } - } - - #isCiVisibility () { - return this.#optionsArg.isCiVisibility ?? this.#defaults.isCiVisibility - } - - #getHostname () { - const DD_CIVISIBILITY_AGENTLESS_URL = getEnv('DD_CIVISIBILITY_AGENTLESS_URL') - const url = DD_CIVISIBILITY_AGENTLESS_URL - ? new URL(DD_CIVISIBILITY_AGENTLESS_URL) - : getAgentUrl(this.#getTraceAgentUrl(), this.#optionsArg) - const DD_AGENT_HOST = this.#optionsArg.hostname ?? - getEnv('DD_AGENT_HOST') ?? - defaults.hostname - return DD_AGENT_HOST || url?.hostname - } - - #getSpanComputePeerService () { - const DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = validateNamingVersion( - this.#optionsArg.spanAttributeSchema ?? - getEnv('DD_TRACE_SPAN_ATTRIBUTE_SCHEMA') - ) - - const peerServiceSet = ( - this.#optionsArg.hasOwnProperty('spanComputePeerService') || - getEnv('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED') !== undefined - ) - const peerServiceValue = this.#optionsArg.spanComputePeerService ?? 
- getEnv('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED') - - const spanComputePeerService = ( - DD_TRACE_SPAN_ATTRIBUTE_SCHEMA === 'v0' - // In v0, peer service is computed only if it is explicitly set to true - ? peerServiceSet && isTrue(peerServiceValue) - // In >v0, peer service is false only if it is explicitly set to false - : (peerServiceSet ? !isFalse(peerServiceValue) : true) - ) - - return spanComputePeerService - } - - #isTraceStatsComputationEnabled () { - const apmTracingEnabled = this.#options.apmTracingEnabled !== false && - this.#env.apmTracingEnabled !== false - - return apmTracingEnabled && ( - this.#optionsArg.stats ?? - getEnv('DD_TRACE_STATS_COMPUTATION_ENABLED') ?? - (getIsGCPFunction() || getIsAzureFunction()) - ) - } - - #getTraceAgentUrl () { - return this.#optionsArg.url ?? - getEnv('DD_TRACE_AGENT_URL') ?? - null - } - - // handles values calculated from a mixture of options and env vars - #applyCalculated () { - const calc = this.#calculated - const DD_CIVISIBILITY_AGENTLESS_URL = getEnv('DD_CIVISIBILITY_AGENTLESS_URL') - - calc.url = DD_CIVISIBILITY_AGENTLESS_URL - ? new URL(DD_CIVISIBILITY_AGENTLESS_URL) - : getAgentUrl(this.#getTraceAgentUrl(), this.#optionsArg) + // TODO: Should this unconditionally be disabled? 
+ if (getEnvironmentVariable('JEST_WORKER_ID') && !trackedConfigOrigins.has('telemetry.enabled')) { + setAndTrack(this, 'telemetry.enabled', false) + } // Experimental agentless APM span intake // When enabled, sends spans directly to Datadog intake without an agent - const agentlessEnabled = isTrue(getEnv('_DD_APM_TRACING_AGENTLESS_ENABLED')) + // TODO: Replace this with a proper configuration + const agentlessEnabled = isTrue(getEnvironmentVariable('_DD_APM_TRACING_AGENTLESS_ENABLED')) if (agentlessEnabled) { - setString(calc, 'experimental.exporter', 'agentless') - // Disable rate limiting - server-side sampling will be used - calc['sampler.rateLimit'] = -1 + setAndTrack(this, 'experimental.exporter', 'agentless') // Disable client-side stats computation - setBoolean(calc, 'stats.enabled', false) + setAndTrack(this, 'stats.enabled', false) // Enable hostname reporting - setBoolean(calc, 'reportHostname', true) + setAndTrack(this, 'reportHostname', true) + // Disable rate limiting - server-side sampling will be used + setAndTrack(this, 'sampler.rateLimit', -1) // Clear sampling rules - server-side sampling handles this - calc['sampler.rules'] = [] + setAndTrack(this, 'sampler.rules', []) // Agentless intake only accepts 64-bit trace IDs; disable 128-bit generation - setBoolean(calc, 'traceId128BitGenerationEnabled', false) - } - - if (this.#isCiVisibility()) { - setBoolean(calc, 'isEarlyFlakeDetectionEnabled', - getEnv('DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED') ?? true) - setBoolean(calc, 'isFlakyTestRetriesEnabled', getEnv('DD_CIVISIBILITY_FLAKY_RETRY_ENABLED') ?? true) - calc.flakyTestRetriesCount = maybeInt(getEnv('DD_CIVISIBILITY_FLAKY_RETRY_COUNT')) ?? 
5 - setBoolean(calc, 'isIntelligentTestRunnerEnabled', isTrue(isCiVisibilityItrEnabled())) - setBoolean(calc, 'isManualApiEnabled', !isFalse(getEnv('DD_CIVISIBILITY_MANUAL_API_ENABLED'))) - setString(calc, 'ciVisibilityTestSessionName', getEnv('DD_TEST_SESSION_NAME')) - setBoolean(calc, 'ciVisAgentlessLogSubmissionEnabled', - isTrue(getEnv('DD_AGENTLESS_LOG_SUBMISSION_ENABLED'))) - setBoolean(calc, 'isTestDynamicInstrumentationEnabled', - !isFalse(getEnv('DD_TEST_FAILED_TEST_REPLAY_ENABLED'))) - setBoolean(calc, 'isServiceUserProvided', !!this.#env.service) - setBoolean(calc, 'isTestManagementEnabled', !isFalse(getEnv('DD_TEST_MANAGEMENT_ENABLED'))) - calc.testManagementAttemptToFixRetries = maybeInt(getEnv('DD_TEST_MANAGEMENT_ATTEMPT_TO_FIX_RETRIES')) ?? 20 - setBoolean(calc, 'isImpactedTestsEnabled', - !isFalse(getEnv('DD_CIVISIBILITY_IMPACTED_TESTS_DETECTION_ENABLED'))) - } - - // Disable log injection when OTEL logs are enabled - // OTEL logs and DD log injection are mutually exclusive - if (this.#env.otelLogsEnabled) { - setBoolean(calc, 'logInjection', false) + if (!trackedConfigOrigins.has('traceId128BitGenerationEnabled')) { + setAndTrack(this, 'traceId128BitGenerationEnabled', false) + } } - calc['dogstatsd.hostname'] = this.#getHostname() - - // Compute OTLP logs and metrics URLs to send payloads to the active Datadog Agent - const agentHostname = this.#getHostname() - calc.otelLogsUrl = `http://${agentHostname}:${DEFAULT_OTLP_PORT}` - calc.otelMetricsUrl = `http://${agentHostname}:${DEFAULT_OTLP_PORT}/v1/metrics` - calc.otelUrl = `http://${agentHostname}:${DEFAULT_OTLP_PORT}` - calc['telemetry.heartbeatInterval'] = maybeInt(Math.floor(this.#defaults['telemetry.heartbeatInterval'] * 1000)) - - setBoolean(calc, 'isGitUploadEnabled', - calc.isIntelligentTestRunnerEnabled && !isFalse(getEnv('DD_CIVISIBILITY_GIT_UPLOAD_ENABLED'))) - - // Enable resourceRenamingEnabled when appsec is enabled and only - // if DD_TRACE_RESOURCE_RENAMING_ENABLED is not explicitly 
set - if (this.#env.resourceRenamingEnabled === undefined) { - const appsecEnabled = this.#options['appsec.enabled'] ?? this.#env['appsec.enabled'] - if (appsecEnabled) { - setBoolean(calc, 'resourceRenamingEnabled', true) + // Apply all fallbacks to the calculated config. + for (const [configName, alias] of fallbackConfigurations) { + if (!trackedConfigOrigins.has(configName) && trackedConfigOrigins.has(alias)) { + setAndTrack(this, configName, this[alias]) } } - setBoolean(calc, 'spanComputePeerService', this.#getSpanComputePeerService()) - setBoolean(calc, 'stats.enabled', this.#isTraceStatsComputationEnabled()) - const defaultPropagationStyle = getDefaultPropagationStyle(this.#optionsArg) - if (defaultPropagationStyle.length > 2) { - // b3 was added, so update defaults to include it - // This will only be used if no other source (options, env, stable config) set the value - calc['tracePropagationStyle.inject'] = defaultPropagationStyle - calc['tracePropagationStyle.extract'] = defaultPropagationStyle + const DEFAULT_OTLP_PORT = '4318' + if (!this.otelLogsUrl) { + setAndTrack(this, 'otelLogsUrl', `http://${agentHostname}:${DEFAULT_OTLP_PORT}`) } - } - - /** - * Applies remote configuration options from APM_TRACING configs. 
- * - * @param {import('./remote_config').RemoteConfigOptions} options - Configurations received via Remote Config - */ - #applyRemoteConfig (options) { - const opts = this.#remote - - setBoolean(opts, 'dynamicInstrumentation.enabled', options.dynamic_instrumentation_enabled) - setBoolean(opts, 'codeOriginForSpans.enabled', options.code_origin_enabled) - setUnit(opts, 'sampleRate', options.tracing_sampling_rate) - setBoolean(opts, 'logInjection', options.log_injection_enabled) - setBoolean(opts, 'tracing', options.tracing_enabled) - this.#remoteUnprocessed['sampler.rules'] = options.tracing_sampling_rules - setSamplingRule(opts, 'sampler.rules', reformatTagsFromRC(options.tracing_sampling_rules)) - - opts.headerTags = options.tracing_header_tags?.map(tag => { - return tag.tag_name ? `${tag.header}:${tag.tag_name}` : tag.header - }) - - const tags = {} - tagger.add(tags, options.tracing_tags) - if (Object.keys(tags).length) { - tags['runtime-id'] = RUNTIME_ID + if (!this.otelMetricsUrl) { + setAndTrack(this, 'otelMetricsUrl', `http://${agentHostname}:${DEFAULT_OTLP_PORT}/v1/metrics`) } - setTags(opts, 'tags', tags) - } - - #setAndTrackChange ({ name, value, origin, unprocessedValue, changes }) { - set(this, name, value) - if (!changeTracker[name]) { - changeTracker[name] = {} + if (process.platform === 'win32') { + // OOM monitoring does not work properly on Windows, so it will be disabled. + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED') + // Profiler sampling contexts are not available on Windows, so features + // depending on those (code hotspots and endpoint collection) need to be disabled on Windows. 
+ deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_CODEHOTSPOTS_ENABLED') + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_ENDPOINT_COLLECTION_ENABLED') + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_CPU_ENABLED') + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_TIMELINE_ENABLED') + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED') } - const originExists = origin in changeTracker[name] - const oldValue = changeTracker[name][origin] + // Single tags update is tracked as a calculated value. + setAndTrack(this, 'tags', this.tags) - if (!originExists || oldValue !== value) { - changeTracker[name][origin] = value - changes.push({ - name, - value: unprocessedValue || value, - origin, - }) - } - } - - // TODO: Report origin changes and errors to telemetry. - // TODO: Deeply merge configurations. - // TODO: Move change tracking to telemetry. - // for telemetry reporting, `name`s in `containers` need to be keys from: - // https://github.com/DataDog/dd-go/blob/prod/trace/apps/tracer-telemetry-intake/telemetry-payload/static/config_norm_rules.json - #merge () { - const changes = [] - const sources = this.#getSourcesInOrder() - - for (const name of Object.keys(this.#defaults)) { - // Use reverse order for merge (lowest priority first) - for (let i = sources.length - 1; i >= 0; i--) { - const { container, origin, unprocessed } = sources[i] - const value = container[name] - if (value != null || container === this.#defaults) { - this.#setAndTrackChange({ - name, - value, - origin, - unprocessedValue: unprocessed?.[name], - changes, - }) - } - } - } - this.sampler.sampleRate = this.sampleRate - updateConfig(changes, this) + telemetry.updateConfig([...configWithOrigin.values()], this) } + // TODO: Move outside of config. This is unrelated to the config system. 
#loadGitMetadata () { - // try to read Git metadata from the environment variables - this.repositoryUrl = removeUserSensitiveInfo( - getEnv('DD_GIT_REPOSITORY_URL') ?? this.tags[GIT_REPOSITORY_URL] - ) - this.commitSHA = getEnv('DD_GIT_COMMIT_SHA') ?? this.tags[GIT_COMMIT_SHA] + // Try to read Git metadata from the environment variables + this.repositoryUrl = removeUserSensitiveInfo(this.DD_GIT_REPOSITORY_URL ?? this.tags[GIT_REPOSITORY_URL]) + this.commitSHA = this.DD_GIT_COMMIT_SHA ?? this.tags[GIT_COMMIT_SHA] - // otherwise, try to read Git metadata from the git.properties file + // Otherwise, try to read Git metadata from the git.properties file if (!this.repositoryUrl || !this.commitSHA) { - const DD_GIT_PROPERTIES_FILE = getEnv('DD_GIT_PROPERTIES_FILE') + const DD_GIT_PROPERTIES_FILE = this.DD_GIT_PROPERTIES_FILE const gitPropertiesFile = DD_GIT_PROPERTIES_FILE ?? `${process.cwd()}/git.properties` - let gitPropertiesString try { - gitPropertiesString = fs.readFileSync(gitPropertiesFile, 'utf8') - } catch (e) { + const gitPropertiesString = fs.readFileSync(gitPropertiesFile, 'utf8') + const { commitSHA, repositoryUrl } = getGitMetadataFromGitProperties(gitPropertiesString) + this.commitSHA ??= commitSHA + this.repositoryUrl ??= repositoryUrl + } catch (error) { // Only log error if the user has set a git.properties path if (DD_GIT_PROPERTIES_FILE) { - log.error('Error reading DD_GIT_PROPERTIES_FILE: %s', gitPropertiesFile, e) + log.error('Error reading DD_GIT_PROPERTIES_FILE: %s', gitPropertiesFile, error) } } - if (gitPropertiesString) { - const { commitSHA, repositoryUrl } = getGitMetadataFromGitProperties(gitPropertiesString) - this.commitSHA = this.commitSHA || commitSHA - this.repositoryUrl = this.repositoryUrl || repositoryUrl - } } - // otherwise, try to read Git metadata from the .git/ folder - if (!this.repositoryUrl || !this.commitSHA) { - const DD_GIT_FOLDER_PATH = getEnv('DD_GIT_FOLDER_PATH') - const gitFolderPath = DD_GIT_FOLDER_PATH ?? 
path.join(process.cwd(), '.git') - if (!this.repositoryUrl) { - // try to read git config (repository URL) - const gitConfigPath = path.join(gitFolderPath, 'config') - try { - const gitConfigContent = fs.readFileSync(gitConfigPath, 'utf8') - if (gitConfigContent) { - this.repositoryUrl = getRemoteOriginURL(gitConfigContent) - } - } catch (e) { - // Only log error if the user has set a .git/ path - if (DD_GIT_FOLDER_PATH) { - log.error('Error reading git config: %s', gitConfigPath, e) - } + + // Otherwise, try to read Git metadata from the .git/ folder + const DD_GIT_FOLDER_PATH = this.DD_GIT_FOLDER_PATH + const gitFolderPath = DD_GIT_FOLDER_PATH ?? path.join(process.cwd(), '.git') + + if (!this.repositoryUrl) { + // Try to read git config (repository URL) + const gitConfigPath = path.join(gitFolderPath, 'config') + try { + const gitConfigContent = fs.readFileSync(gitConfigPath, 'utf8') + if (gitConfigContent) { + this.repositoryUrl = getRemoteOriginURL(gitConfigContent) } - } - if (!this.commitSHA) { - // try to read git HEAD (commit SHA) - const gitHeadSha = resolveGitHeadSHA(gitFolderPath) - if (gitHeadSha) { - this.commitSHA = gitHeadSha + } catch (error) { + // Only log error if the user has set a .git/ path + if (DD_GIT_FOLDER_PATH) { + log.error('Error reading git config: %s', gitConfigPath, error) } } } + // Try to read git HEAD (commit SHA) + this.commitSHA ??= resolveGitHeadSHA(gitFolderPath) } } -function getCounter (event, ddVar, otelVar) { - const counters = TELEMETRY_COUNTERS.get(event) - const tags = [] - const ddVarPrefix = 'config_datadog:' - const otelVarPrefix = 'config_opentelemetry:' - if (ddVar) { - ddVar = ddVarPrefix + ddVar.toLowerCase() - tags.push(ddVar) - } - if (otelVar) { - otelVar = otelVarPrefix + otelVar.toLowerCase() - tags.push(otelVar) - } - - if (!(otelVar in counters)) counters[otelVar] = {} - - const counter = tracerMetrics.count(event, tags) - counters[otelVar][ddVar] = counter - return counter -} - -function 
getFromOtelSamplerMap (otelTracesSampler, otelTracesSamplerArg) { - const OTEL_TRACES_SAMPLER_MAPPING = { - always_on: '1.0', - always_off: '0.0', - traceidratio: otelTracesSamplerArg, - parentbased_always_on: '1.0', - parentbased_always_off: '0.0', - parentbased_traceidratio: otelTracesSamplerArg, - } - return OTEL_TRACES_SAMPLER_MAPPING[otelTracesSampler] -} - /** - * Validate the type of an environment variable - * @param {string} envVar - The name of the environment variable - * @param {string} [value] - The value of the environment variable - * @returns {boolean} - True if the value is valid, false otherwise + * @param {Config} config + * @param {ConfigKey} envVar */ -function isInvalidOtelEnvironmentVariable (envVar, value) { - // Skip validation if the value is undefined (it was not set as environment variable) - if (value === undefined) return false - - switch (envVar) { - case 'OTEL_LOG_LEVEL': - return !VALID_LOG_LEVELS.has(value) - case 'OTEL_PROPAGATORS': - case 'OTEL_RESOURCE_ATTRIBUTES': - case 'OTEL_SERVICE_NAME': - return typeof value !== 'string' - case 'OTEL_TRACES_SAMPLER': - return getFromOtelSamplerMap(value, getEnv('OTEL_TRACES_SAMPLER_ARG')) === undefined - case 'OTEL_TRACES_SAMPLER_ARG': - return Number.isNaN(Number.parseFloat(value)) - case 'OTEL_SDK_DISABLED': - return value.toLowerCase() !== 'true' && value.toLowerCase() !== 'false' - case 'OTEL_TRACES_EXPORTER': - case 'OTEL_METRICS_EXPORTER': - case 'OTEL_LOGS_EXPORTER': - return value.toLowerCase() !== 'none' - default: - return true - } -} - -function checkIfBothOtelAndDdEnvVarSet () { - for (const [otelEnvVar, ddEnvVar] of OTEL_DD_ENV_MAPPING) { - const otelValue = getEnv(otelEnvVar) - - if (ddEnvVar && getEnv(ddEnvVar) && otelValue) { - log.warn('both %s and %s environment variables are set', ddEnvVar, otelEnvVar) - getCounter('otel.env.hiding', ddEnvVar, otelEnvVar).inc() - } - - if (isInvalidOtelEnvironmentVariable(otelEnvVar, otelValue)) { - log.warn('unexpected value %s for %s 
environment variable', otelValue, otelEnvVar) - getCounter('otel.env.invalid', ddEnvVar, otelEnvVar).inc() +function deactivateIfEnabledAndWarnOnWindows (config, envVar) { + if (config[envVar]) { + const source = trackedConfigOrigins.get(envVar) + setAndTrack(config, envVar, false) + // TODO: Should we log even for default values? + if (source) { + log.warn('%s is not supported on Windows. Deactivating. (source: %s)', envVar, source) } } } -function maybeFile (filepath) { - if (!filepath) return - try { - return fs.readFileSync(filepath, 'utf8') - } catch (e) { - log.error('Error reading file %s', filepath, e) - } -} - -function maybeJsonFile (filepath) { - const file = maybeFile(filepath) - if (!file) return - try { - return JSON.parse(file) - } catch (e) { - log.error('Error parsing JSON file %s', filepath, e) - } -} - -function safeJsonParse (input) { - try { - return JSON.parse(input) - } catch {} -} - -function validateNamingVersion (versionString) { - if (!versionString) { - return DEFAULT_NAMING_VERSION - } - if (!NAMING_VERSIONS.has(versionString)) { - log.warn('Unexpected input for config.spanAttributeSchema, picked default', DEFAULT_NAMING_VERSION) - return DEFAULT_NAMING_VERSION - } - return versionString -} - -/** - * Given a string of comma-separated paths, return the array of paths. - * If a blank path is provided a null is returned to signal that the feature is disabled. - * An empty array means the feature is enabled but that no rules need to be applied. - * - * @param {string | string[]} input - */ -function splitJSONPathRules (input) { - if (!input || input === '$') return - if (Array.isArray(input)) return input - if (input === 'all') return [] - return input.split(',') -} - -// Shallow clone with property name remapping -function remapify (input, mappings) { - if (!input) return - const output = {} - for (const [key, value] of Object.entries(input)) { - output[key in mappings ? 
mappings[key] : key] = value +function increaseCounter (event, ddVar, otelVar) { + const tags = [] + if (ddVar) { + tags.push(`config_datadog:${ddVar.toLowerCase()}`) } - return output + tags.push(`config_opentelemetry:${otelVar.toLowerCase()}`) + tracerMetrics.count(event, tags).inc() } -/** - * Normalizes propagation style values to a lowercase array. - * Handles both string (comma-separated) and array inputs. - */ -function normalizePropagationStyle (value) { - if (Array.isArray(value)) { - return value.map(v => v.toLowerCase()) - } - if (typeof value === 'string') { - return value.split(',') - .filter(v => v !== '') - .map(v => v.trim().toLowerCase()) - } - if (value !== undefined) { - log.warn('Unexpected input for config.tracePropagationStyle') +function getFromOtelSamplerMap (otelTracesSampler, otelTracesSamplerArg) { + const OTEL_TRACES_SAMPLER_MAPPING = { + always_on: 1, + always_off: 0, + parentbased_always_on: 1, + parentbased_always_off: 0, } -} -/** - * Warns if both DD_TRACE_PROPAGATION_STYLE and specific inject/extract vars are set. - */ -function warnIfPropagationStyleConflict (general, inject, extract) { - if (general && (inject || extract)) { - log.warn( - // eslint-disable-next-line @stylistic/max-len - 'Use either the DD_TRACE_PROPAGATION_STYLE environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables' - ) + const result = OTEL_TRACES_SAMPLER_MAPPING[otelTracesSampler] ?? otelTracesSamplerArg + if (result === undefined) { + increaseCounter('otel.env.invalid', 'DD_TRACE_SAMPLE_RATE', 'OTEL_TRACES_SAMPLER') } + return result } -function reformatSpanSamplingRules (rules) { - if (!rules) return rules - return rules.map(rule => { - return remapify(rule, { - sample_rate: 'sampleRate', - max_per_second: 'maxPerSecond', - }) - }) -} - -function getDefaultPropagationStyle (options) { - // TODO: Remove the experimental env vars as a major? 
- const DD_TRACE_B3_ENABLED = options.experimental?.b3 ?? - getEnv('DD_TRACE_EXPERIMENTAL_B3_ENABLED') - const defaultPropagationStyle = ['datadog', 'tracecontext'] - if (isTrue(DD_TRACE_B3_ENABLED)) { - defaultPropagationStyle.push('b3', 'b3 single header') - } - return defaultPropagationStyle -} - -function isCiVisibilityItrEnabled () { - return getEnv('DD_CIVISIBILITY_ITR_ENABLED') ?? true -} - -function reformatTagsFromRC (samplingRules) { - for (const rule of (samplingRules || [])) { - if (rule.tags) { - const reformattedTags = {} - for (const tag of rule.tags) { - reformattedTags[tag.key] = tag.value_glob +function warnWrongOtelSettings () { + // This mostly works for non-aliased environment variables only. + // TODO: Adjust this to work across all sources. + for (const [otelEnvVar, ddEnvVar, key] of [ + // eslint-disable-next-line eslint-rules/eslint-env-aliases + ['OTEL_LOG_LEVEL', 'DD_TRACE_LOG_LEVEL'], + // eslint-disable-next-line eslint-rules/eslint-env-aliases + ['OTEL_PROPAGATORS', 'DD_TRACE_PROPAGATION_STYLE'], + // eslint-disable-next-line eslint-rules/eslint-env-aliases + ['OTEL_SERVICE_NAME', 'DD_SERVICE', 'service'], + ['OTEL_TRACES_SAMPLER', 'DD_TRACE_SAMPLE_RATE'], + ['OTEL_TRACES_SAMPLER_ARG', 'DD_TRACE_SAMPLE_RATE'], + ['OTEL_TRACES_EXPORTER', 'DD_TRACE_ENABLED'], + ['OTEL_METRICS_EXPORTER', 'DD_RUNTIME_METRICS_ENABLED'], + ['OTEL_RESOURCE_ATTRIBUTES', 'DD_TAGS'], + ['OTEL_SDK_DISABLED', 'DD_TRACE_OTEL_ENABLED'], + ['OTEL_LOGS_EXPORTER'], + ]) { + // eslint-disable-next-line eslint-rules/eslint-process-env + const envs = process.env + const otelSource = trackedConfigOrigins.get(/** @type {ConfigPath} */ (key ?? 
otelEnvVar)) + const otelEnvValue = envs[otelEnvVar] + if (otelEnvValue) { + if (envs[ddEnvVar]) { + log.warn('Conflicting %s and %s environment variables are set for %s', ddEnvVar, otelEnvVar, otelSource) + increaseCounter('otel.env.hiding', ddEnvVar, otelEnvVar) } - rule.tags = reformattedTags - } - } - return samplingRules -} - -function setBoolean (obj, name, value) { - if (value === undefined || value === null) { - obj[name] = value - } else if (isTrue(value)) { - obj[name] = true - } else if (isFalse(value)) { - obj[name] = false - } -} - -function setUnit (obj, name, value) { - if (value === null || value === undefined) { - obj[name] = value - return - } - - value = Number.parseFloat(value) - - if (!Number.isNaN(value)) { - // TODO: Ignore out of range values instead of normalizing them. - obj[name] = Math.min(Math.max(value, 0), 1) - } -} - -function setArray (obj, name, value) { - if (value == null) { - obj[name] = null - return - } - - if (typeof value === 'string') { - value = value.split(',').map(item => { - // Trim each item and remove whitespace around the colon - const [key, val] = item.split(':').map(part => part.trim()) - return val === undefined ? key : `${key}:${val}` - }) - } - if (Array.isArray(value)) { - obj[name] = value - } -} - -function setIntegerRangeSet (obj, name, value) { - if (value == null) { - obj[name] = null - return - } - value = value.split(',') - const result = [] - - for (const val of value) { - if (val.includes('-')) { - const [start, end] = val.split('-').map(Number) - for (let i = start; i <= end; i++) { - result.push(i) + // eslint-disable-next-line eslint-rules/eslint-env-aliases + const invalidOtelValue = otelEnvVar === 'OTEL_PROPAGATORS' + ? 
trackedConfigOrigins.get(/** @type {ConfigPath} */ ('tracePropagationStyle.inject')) !== otelSource && + !envs[ddEnvVar] + : !otelSource + if (invalidOtelValue) { + increaseCounter('otel.env.invalid', ddEnvVar, otelEnvVar) } - } else { - result.push(Number(val)) } } - obj[name] = result -} - -function setSamplingRule (obj, name, value) { - if (value == null) { - obj[name] = null - return - } - - if (typeof value === 'string') { - value = value.split(',') - } - - if (Array.isArray(value)) { - value = value.map(rule => { - return remapify(rule, { - sample_rate: 'sampleRate', - }) - }) - obj[name] = value - } -} - -function setString (obj, name, value) { - obj[name] = value ? String(value) : undefined // unset for empty strings -} - -function setTags (obj, name, value) { - if (!value || Object.keys(value).length === 0) { - obj[name] = null - return - } - - obj[name] = value -} - -function handleOtel (tagString) { - return tagString - ?.replace(/(^|,)deployment\.environment=/, '$1env:') - .replace(/(^|,)service\.name=/, '$1service:') - .replace(/(^|,)service\.version=/, '$1version:') - .replaceAll('=', ':') -} - -function parseSpaceSeparatedTags (tagString) { - if (tagString && !tagString.includes(',')) { - tagString = tagString.replaceAll(/\s+/g, ',') - } - return tagString -} - -function maybeInt (number) { - const parsed = Number.parseInt(number) - return Number.isNaN(parsed) ? undefined : parsed -} - -function maybeFloat (number) { - const parsed = Number.parseFloat(number) - return Number.isNaN(parsed) ? undefined : parsed -} - -function nonNegInt (value, envVarName, allowZero = true) { - if (value === undefined) return - const parsed = Number.parseInt(value) - if (Number.isNaN(parsed) || parsed < 0 || (parsed === 0 && !allowZero)) { - log.warn('Invalid value %d for %s. 
Using default value.', parsed, envVarName) - return - } - return parsed -} - -function getAgentUrl (url, options) { - if (url) return new URL(url) - - if (os.type() === 'Windows_NT') return - - if ( - !options.hostname && - !options.port && - !getEnv('DD_AGENT_HOST') && - !getEnv('DD_TRACE_AGENT_PORT') && - !isTrue(getEnv('DD_CIVISIBILITY_AGENTLESS_ENABLED')) && - fs.existsSync('/var/run/datadog/apm.socket') - ) { - return new URL('unix:///var/run/datadog/apm.socket') - } } +/** + * @param {TracerOptions} [options] + */ function getConfig (options) { if (!configInstance) { configInstance = new Config(options) diff --git a/packages/dd-trace/src/config/parsers.js b/packages/dd-trace/src/config/parsers.js new file mode 100644 index 00000000000..fbaa78accdd --- /dev/null +++ b/packages/dd-trace/src/config/parsers.js @@ -0,0 +1,254 @@ +'use strict' + +const fs = require('fs') + +const tagger = require('../tagger') + +let warnInvalidValue +function setWarnInvalidValue (fn) { + warnInvalidValue = fn +} + +const VALID_PROPAGATION_STYLES = new Set([ + 'datadog', 'tracecontext', 'b3', 'b3 single header', 'b3multi', 'baggage', 'none', +]) + +function toCase (value, methodName) { + if (Array.isArray(value)) { + return value.map(item => { + return transformers[methodName](item) + }) + } + return value[methodName]() +} + +const transformers = { + setIntegerRangeSet (value) { + if (value == null) { + return + } + value = value.split(',') + const result = [] + + for (const val of value) { + if (val.includes('-')) { + const [start, end] = val.split('-').map(Number) + for (let i = start; i <= end; i++) { + result.push(i) + } + } else { + result.push(Number(val)) + } + } + return result + }, + toLowerCase (value) { + return toCase(value, 'toLowerCase') + }, + toUpperCase (value) { + return toCase(value, 'toUpperCase') + }, + toCamelCase (value) { + if (Array.isArray(value)) { + return value.map(item => { + return transformers.toCamelCase(item) + }) + } + if (typeof value === 'object' 
&& value !== null) { + const result = {} + for (const [key, innerValue] of Object.entries(value)) { + const camelCaseKey = key.replaceAll(/_(\w)/g, (_, letter) => letter.toUpperCase()) + result[camelCaseKey] = transformers.toCamelCase(innerValue) + } + return result + } + return value + }, + parseOtelTags (value, optionName) { + return parsers.MAP(value + ?.replace(/(^|,)deployment\.environment=/, '$1env:') + .replace(/(^|,)service\.name=/, '$1service:') + .replace(/(^|,)service\.version=/, '$1version:') + .replaceAll('=', ':'), optionName) + }, + normalizeProfilingEnabled (configValue) { + if (configValue == null) { + return + } + if (configValue === 'true' || configValue === '1') { + return 'true' + } + if (configValue === 'false' || configValue === '0') { + return 'false' + } + const lowercased = String(configValue).toLowerCase() + if (lowercased !== configValue) { + return transformers.normalizeProfilingEnabled(lowercased) + } + return configValue + }, + sampleRate (value, optionName, source) { + const number = Number(value) + if (Number.isNaN(number) || value === null) { + warnInvalidValue(value, optionName, source, 'Sample rate invalid') + return + } + const clamped = Math.min(Math.max(number, 0), 1) + if (clamped !== number) { + warnInvalidValue(value, optionName, source, 'Sample rate out of range between 0 and 1') + return clamped + } + return number + }, + readFilePath (raw, optionName, source) { + const { stackTraceLimit } = Error + Error.stackTraceLimit = 0 + try { + return fs.readFileSync(raw, 'utf8') + } catch (error) { + warnInvalidValue(raw, optionName, source, 'Error reading path', error) + } finally { + Error.stackTraceLimit = stackTraceLimit + } + }, + /** + * Given a string of comma-separated paths, return the array of paths. + * If a blank path is provided a null is returned to signal that the feature is disabled. + * An empty array means the feature is enabled but that no rules need to be applied. 
+ * + * @param {string | string[]} input + */ + splitJSONPathRules (input) { + if (!input || input === '$') return + if (Array.isArray(input)) return input + if (input === 'all') return [] + return input.split(',') + }, + stripColonWhitespace (value) { + if (Array.isArray(value)) { + return value.map(item => { + return transformers.stripColonWhitespace(item) + }) + } + return value.replaceAll(/\s*:\s*/g, ':') + }, + validatePropagationStyles (value, optionName) { + value = transformers.toLowerCase(value) + for (const propagator of value) { + if (!VALID_PROPAGATION_STYLES.has(propagator)) { + warnInvalidValue(propagator, optionName, optionName, 'Invalid propagator') + return + } + } + return value + }, +} + +const telemetryTransformers = { + JSON (object) { + return (typeof object !== 'object' || object === null) ? object : JSON.stringify(object) + }, + MAP (object) { + if (typeof object !== 'object' || object === null) { + return object + } + let result = '' + for (const [key, value] of Object.entries(object)) { + result += `${key}:${value},` + } + return result.slice(0, -1) + }, + ARRAY (array) { + return Array.isArray(array) ? array.join(',') : array + }, +} + +const parsers = { + BOOLEAN (raw) { + if (raw === 'true' || raw === '1') { + return true + } + if (raw === 'false' || raw === '0') { + return false + } + const lowercased = raw.toLowerCase() + if (lowercased !== raw) { + return parsers.BOOLEAN(lowercased) + } + }, + INT (raw) { + const parsed = Math.trunc(raw) + if (Number.isNaN(parsed)) { + return + } + return parsed + }, + DECIMAL (raw) { + const parsed = Number(raw) + if (Number.isNaN(parsed)) { + return + } + return parsed + }, + ARRAY (raw) { + // TODO: Make the parsing a helper that is reused everywhere. + const result = [] + if (!raw) { + return result + } + let valueStart = 0 + for (let i = 0; i < raw.length; i++) { + const char = raw[i] + if (char === ',') { + const value = raw.slice(valueStart, i).trim() + // Auto filter empty entries. 
+ if (value.length > 0) { + result.push(value) + } + valueStart = i + 1 + } + } + if (valueStart < raw.length) { + const value = raw.slice(valueStart).trim() + // Auto filter empty entries. + if (value.length > 0) { + result.push(value) + } + } + return result + }, + MAP (raw, optionName) { + /** @type {Record} */ + const entries = {} + if (!raw) { + return entries + } + // DD_TAGS is a special case. It may be a map of key-value pairs separated by spaces. + if (optionName === 'DD_TAGS' && !raw.includes(',')) { + raw = raw.replaceAll(/\s+/g, ',') + } + tagger.add(entries, raw) + return entries + }, + JSON (raw) { + const { stackTraceLimit } = Error + Error.stackTraceLimit = 0 + try { + return JSON.parse(raw) + } catch { + // ignore + } finally { + Error.stackTraceLimit = stackTraceLimit + } + }, + STRING (raw) { + return raw + }, +} + +module.exports = { + parsers, + transformers, + telemetryTransformers, + setWarnInvalidValue, +} diff --git a/packages/dd-trace/src/config/remote_config.js b/packages/dd-trace/src/config/remote_config.js index 54f4b3067b4..f981dd37fef 100644 --- a/packages/dd-trace/src/config/remote_config.js +++ b/packages/dd-trace/src/config/remote_config.js @@ -2,6 +2,7 @@ const RemoteConfigCapabilities = require('../remote_config/capabilities') const log = require('../log') +const tagger = require('../tagger') module.exports = { enable, @@ -194,10 +195,66 @@ function enable (rc, config, onConfigUpdated) { transaction.ack(item.path) } - // Get merged config and apply it - const mergedLibConfig = rcClientLibConfigManager.getMergedLibConfig() + /** @type {import('../config').TracerOptions|null|RemoteConfigOptions} */ + let mergedLibConfig = rcClientLibConfigManager.getMergedLibConfig() + + if (mergedLibConfig) { + mergedLibConfig = transformRemoteConfigToLocalOption(mergedLibConfig) + } + config.setRemoteConfig(mergedLibConfig) onConfigUpdated() }) } + +/** + * @param {RemoteConfigOptions} libConfig + * @returns {import('../config').TracerOptions} + 
*/ +function transformRemoteConfigToLocalOption (libConfig) { + const normalizedConfig = {} + for (const [name, value] of Object.entries(libConfig)) { + if (value !== null) { + normalizedConfig[optionLookupTable[name] ?? name] = transformers[name]?.(value) ?? value + } + } + return normalizedConfig +} + +// This is an intermediate solution until remote config is reworked to handle all known entries with proper names +const optionLookupTable = { + dynamic_instrumentation_enabled: 'dynamicInstrumentation.enabled', + code_origin_enabled: 'codeOriginForSpans.enabled', + tracing_sampling_rate: 'sampleRate', + log_injection_enabled: 'logInjection', + tracing_enabled: 'tracing', + tracing_sampling_rules: 'samplingRules', + tracing_header_tags: 'headerTags', + tracing_tags: 'tags', +} + +const transformers = { + tracing_sampling_rules (samplingRules) { + for (const rule of (samplingRules || [])) { + if (rule.tags) { + const reformattedTags = {} + for (const tag of rule.tags) { + reformattedTags[tag.key] = tag.value_glob + } + rule.tags = reformattedTags + } + } + return samplingRules + }, + tracing_header_tags (headerTags) { + return headerTags?.map(tag => { + return tag.tag_name ?
`${tag.header}:${tag.tag_name}` : tag.header + }) + }, + tracing_tags (tags) { + const normalizedTags = {} + tagger.add(normalizedTags, tags) + return normalizedTags + }, +} diff --git a/packages/dd-trace/src/config/supported-configurations.json b/packages/dd-trace/src/config/supported-configurations.json index 5bca7736dc9..e20da7b9703 100644 --- a/packages/dd-trace/src/config/supported-configurations.json +++ b/packages/dd-trace/src/config/supported-configurations.json @@ -13,9 +13,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "ciVisAgentlessLogSubmissionEnabled" - ] + "internalPropertyName": "ciVisAgentlessLogSubmissionEnabled" } ], "DD_AGENTLESS_LOG_SUBMISSION_URL": [ @@ -104,9 +102,7 @@ "aliases": [ "DATADOG_API_KEY" ], - "configurationNames": [ - "apiKey" - ] + "internalPropertyName": "apiKey" } ], "DD_API_SECURITY_ENABLED": [ @@ -114,7 +110,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.apiSecurity.enabled" + "appsec.apiSecurity.enabled", + "experimental.appsec.apiSecurity.enabled" ], "default": "true", "aliases": [ @@ -127,7 +124,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.apiSecurity.endpointCollectionEnabled" + "appsec.apiSecurity.endpointCollectionEnabled", + "experimental.appsec.apiSecurity.endpointCollectionEnabled" ], "default": "true" } @@ -137,7 +135,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.apiSecurity.endpointCollectionMessageLimit" + "appsec.apiSecurity.endpointCollectionMessageLimit", + "experimental.appsec.apiSecurity.endpointCollectionMessageLimit" ], "default": "300" } @@ -146,9 +145,7 @@ { "implementation": "A", "type": "decimal", - "configurationNames": [ - "appsec.apiSecurity.downstreamBodyAnalysisSampleRate" - ], + "internalPropertyName": "appsec.apiSecurity.downstreamBodyAnalysisSampleRate", "default": "0.5" } ], @@ -156,9 +153,7 @@ { "implementation": "A", "type": "int", - 
"configurationNames": [ - "appsec.apiSecurity.maxDownstreamRequestBodyAnalysis" - ], + "internalPropertyName": "appsec.apiSecurity.maxDownstreamRequestBodyAnalysis", "default": "1" } ], @@ -167,9 +162,7 @@ "implementation": "A", "type": "decimal", "default": "30", - "configurationNames": [ - "appsec.apiSecurity.sampleDelay" - ] + "internalPropertyName": "appsec.apiSecurity.sampleDelay" } ], "DD_APM_FLUSH_DEADLINE_MILLISECONDS": [ @@ -194,7 +187,8 @@ "implementation": "E", "type": "string", "configurationNames": [ - "appsec.eventTracking.mode" + "appsec.eventTracking.mode", + "experimental.appsec.eventTracking.mode" ], "default": "identification", "aliases": [ @@ -207,7 +201,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.extendedHeadersCollection.enabled" + "appsec.extendedHeadersCollection.enabled", + "experimental.appsec.extendedHeadersCollection.enabled" ], "default": "false", "deprecated": true @@ -218,7 +213,10 @@ "implementation": "C", "type": "boolean", "configurationNames": [ - "appsec.enabled" + "appsec.enabled", + "appsec", + "experimental.appsec.enabled", + "experimental.appsec" ], "default": null } @@ -228,9 +226,11 @@ "implementation": "A", "type": "string", "configurationNames": [ - "appsec.blockedTemplateGraphql" + "appsec.blockedTemplateGraphql", + "experimental.appsec.blockedTemplateGraphql" ], - "default": null + "default": null, + "transform": "readFilePath" } ], "DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED": [ @@ -238,7 +238,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.extendedHeadersCollection.redaction" + "appsec.extendedHeadersCollection.redaction", + "experimental.appsec.extendedHeadersCollection.redaction" ], "default": "true" } @@ -248,9 +249,11 @@ "implementation": "B", "type": "string", "configurationNames": [ - "appsec.blockedTemplateHtml" + "appsec.blockedTemplateHtml", + "experimental.appsec.blockedTemplateHtml" ], - "default": null + "default": null, + 
"transform": "readFilePath" } ], "DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON": [ @@ -258,9 +261,11 @@ "implementation": "B", "type": "string", "configurationNames": [ - "appsec.blockedTemplateJson" + "appsec.blockedTemplateJson", + "experimental.appsec.blockedTemplateJson" ], - "default": null + "default": null, + "transform": "readFilePath" } ], "DD_APPSEC_MAX_COLLECTED_HEADERS": [ @@ -268,7 +273,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.extendedHeadersCollection.maxHeaders" + "appsec.extendedHeadersCollection.maxHeaders", + "experimental.appsec.extendedHeadersCollection.maxHeaders" ], "default": "50" } @@ -278,7 +284,11 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.stackTrace.maxStackTraces" + "appsec.stackTrace.maxStackTraces", + "experimental.appsec.stackTrace.maxStackTraces" + ], + "aliases": [ + "DD_APPSEC_MAX_STACKTRACES" ], "default": "2" } @@ -288,7 +298,11 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.stackTrace.maxDepth" + "appsec.stackTrace.maxDepth", + "experimental.appsec.stackTrace.maxDepth" + ], + "aliases": [ + "DD_APPSEC_MAX_STACKTRACE_DEPTH" ], "default": "32" } @@ -298,7 +312,8 @@ "implementation": "B", "type": "string", "configurationNames": [ - "appsec.obfuscatorKeyRegex" + "appsec.obfuscatorKeyRegex", + "experimental.appsec.obfuscatorKeyRegex" ], "default": "(?i)pass|pw(?:or)?d|secret|(?:api|private|public|access)[_-]?key|token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization|jsessionid|phpsessid|asp\\.net[_-]sessionid|sid|jwt" } @@ -308,7 +323,8 @@ "implementation": "G", "type": "string", "configurationNames": [ - "appsec.obfuscatorValueRegex" + "appsec.obfuscatorValueRegex", + "experimental.appsec.obfuscatorValueRegex" ], "default": 
"(?i)(?:p(?:ass)?w(?:or)?d|pass(?:[_-]?phrase)?|secret(?:[_-]?key)?|(?:(?:api|private|public|access)[_-]?)key(?:[_-]?id)?|(?:(?:auth|access|id|refresh)[_-]?)?token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|jsessionid|phpsessid|asp\\.net(?:[_-]|-)sessionid|sid|jwt)(?:\\s*=([^;&]+)|\"\\s*:\\s*(\"[^\"]+\"|\\d+))|bearer\\s+([a-z0-9\\._\\-]+)|token\\s*:\\s*([a-z0-9]{13})|gh[opsu]_([0-9a-zA-Z]{36})|ey[I-L][\\w=-]+\\.(ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?)|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}([^\\-]+)[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*([a-z0-9\\/\\.+]{100,})" } @@ -318,7 +334,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.rasp.bodyCollection" + "appsec.rasp.bodyCollection", + "experimental.appsec.rasp.bodyCollection" ], "default": "false", "deprecated": true @@ -329,7 +346,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.rasp.enabled" + "appsec.rasp.enabled", + "experimental.appsec.rasp.enabled" ], "default": "true" } @@ -339,7 +357,8 @@ "implementation": "B", "type": "string", "configurationNames": [ - "appsec.rules" + "appsec.rules", + "experimental.appsec.rules" ], "default": null } @@ -349,9 +368,7 @@ "implementation": "B", "type": "boolean", "default": null, - "configurationNames": [ - "appsec.sca.enabled" - ] + "internalPropertyName": "appsec.sca.enabled" } ], "DD_APPSEC_STACK_TRACE_ENABLED": [ @@ -359,7 +376,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.stackTrace.enabled" + "appsec.stackTrace.enabled", + "experimental.appsec.stackTrace.enabled" ], "default": "true" } @@ -369,7 +387,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.rateLimit" + "appsec.rateLimit", + "experimental.appsec.rateLimit" ], "default": "100" } @@ -379,7 +398,8 @@ "implementation": "E", "type": "int", "configurationNames": [ - "appsec.wafTimeout" + "appsec.wafTimeout", + "experimental.appsec.wafTimeout" ], 
"default": "5000" } @@ -389,9 +409,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "appKey" - ] + "internalPropertyName": "appKey" } ], "DD_AZURE_RESOURCE_GROUP": [ @@ -441,9 +459,7 @@ "implementation": "B", "type": "boolean", "default": "true", - "configurationNames": [ - "isEarlyFlakeDetectionEnabled" - ] + "internalPropertyName": "isEarlyFlakeDetectionEnabled" } ], "DD_CIVISIBILITY_ENABLED": [ @@ -458,9 +474,7 @@ "implementation": "A", "type": "int", "default": "5", - "configurationNames": [ - "flakyTestRetriesCount" - ] + "internalPropertyName": "flakyTestRetriesCount" } ], "DD_CIVISIBILITY_FLAKY_RETRY_ENABLED": [ @@ -468,9 +482,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isFlakyTestRetriesEnabled" - ] + "internalPropertyName": "isFlakyTestRetriesEnabled" } ], "DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED": [ @@ -485,9 +497,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isGitUploadEnabled" - ] + "internalPropertyName": "isGitUploadEnabled" } ], "DD_CIVISIBILITY_IMPACTED_TESTS_DETECTION_ENABLED": [ @@ -495,9 +505,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isImpactedTestsEnabled" - ] + "internalPropertyName": "isImpactedTestsEnabled" } ], "DD_CIVISIBILITY_ITR_ENABLED": [ @@ -505,9 +513,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isIntelligentTestRunnerEnabled" - ] + "internalPropertyName": "isIntelligentTestRunnerEnabled" } ], "DD_CIVISIBILITY_MANUAL_API_ENABLED": [ @@ -515,9 +521,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isManualApiEnabled" - ] + "internalPropertyName": "isManualApiEnabled" } ], "DD_CIVISIBILITY_RUM_FLUSH_WAIT_MILLIS": [ @@ -573,9 +577,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "crashtracking.enabled" - ] + 
"internalPropertyName": "crashtracking.enabled" } ], "DD_CUSTOM_TRACE_ID": [ @@ -690,19 +692,19 @@ ], "DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS": [ { - "implementation": "A", - "type": "int", + "implementation": "C", + "type": "decimal", "configurationNames": [ "dynamicInstrumentation.uploadIntervalSeconds" ], - "default": "1" + "default": "1.0" } ], "DD_ENABLE_NX_SERVICE_NAME": [ { - "implementation": "A", - "type": "string", - "default": null + "implementation": "B", + "type": "boolean", + "default": "false" } ], "DD_ENV": [ @@ -717,9 +719,12 @@ ], "DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED": [ { - "implementation": "A", + "implementation": "B", "type": "boolean", - "default": "true" + "default": "false", + "configurationNames": [ + "experimental.appsec.standalone.enabled" + ] } ], "DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS": [ @@ -747,9 +752,7 @@ "implementation": "B", "type": "boolean", "default": "true", - "configurationNames": [ - "propagateProcessTags.enabled" - ] + "internalPropertyName": "propagateProcessTags.enabled" } ], "DD_EXPERIMENTAL_TEST_OPT_SETTINGS_CACHE": [ @@ -894,23 +897,20 @@ ], "DD_GRPC_CLIENT_ERROR_STATUSES": [ { - "implementation": "A", - "type": "array", - "default": "1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16", - "configurationNames": [ - "grpc.client.error.statuses" - ], - "handler": "GRPC_HANDLER" + "implementation": "C", + "type": "string", + "default": "1-16", + "internalPropertyName": "grpc.client.error.statuses", + "transform": "setIntegerRangeSet" } ], "DD_GRPC_SERVER_ERROR_STATUSES": [ { - "implementation": "A", - "type": "array", - "default": "2,3,4,5,6,7,8,9,10,11,12,13,14,15,16", - "configurationNames": [ - "grpc.server.error.statuses" - ] + "implementation": "C", + "type": "string", + "default": "2-16", + "internalPropertyName": "grpc.server.error.statuses", + "transform": "setIntegerRangeSet" } ], "DD_HEAP_SNAPSHOT_COUNT": [ @@ -918,9 +918,7 @@ "implementation": "A", "type": "int", "default": "0", - 
"configurationNames": [ - "heapSnapshot.count" - ] + "internalPropertyName": "heapSnapshot.count" } ], "DD_HEAP_SNAPSHOT_DESTINATION": [ @@ -928,9 +926,7 @@ "implementation": "A", "type": "string", "default": "", - "configurationNames": [ - "heapSnapshot.destination" - ] + "internalPropertyName": "heapSnapshot.destination" } ], "DD_HEAP_SNAPSHOT_INTERVAL": [ @@ -938,9 +934,7 @@ "implementation": "A", "type": "int", "default": "3600", - "configurationNames": [ - "heapSnapshot.interval" - ] + "internalPropertyName": "heapSnapshot.interval" } ], "DD_IAST_DB_ROWS_TO_TAINT": [ @@ -948,7 +942,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "iast.dbRowsToTaint" + "iast.dbRowsToTaint", + "experimental.iast.dbRowsToTaint" ], "default": "1" } @@ -958,7 +953,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "iast.deduplicationEnabled" + "iast.deduplicationEnabled", + "experimental.iast.deduplicationEnabled" ], "default": "true" } @@ -968,7 +964,10 @@ "implementation": "B", "type": "boolean", "configurationNames": [ - "iast.enabled" + "iast.enabled", + "iast", + "experimental.iast.enabled", + "experimental.iast" ], "default": "false" } @@ -978,7 +977,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "iast.maxConcurrentRequests" + "iast.maxConcurrentRequests", + "experimental.iast.maxConcurrentRequests" ], "default": "2" } @@ -988,7 +988,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "iast.maxContextOperations" + "iast.maxContextOperations", + "experimental.iast.maxContextOperations" ], "default": "2" } @@ -998,7 +999,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "iast.redactionEnabled" + "iast.redactionEnabled", + "experimental.iast.redactionEnabled" ], "default": "true" } @@ -1008,7 +1010,8 @@ "implementation": "A", "type": "string", "configurationNames": [ - "iast.redactionNamePattern" + "iast.redactionNamePattern", + "experimental.iast.redactionNamePattern" ], 
"default": "(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|(?:sur|last)name|user(?:name)?|address|e?mail)" } @@ -1018,7 +1021,8 @@ "implementation": "A", "type": "string", "configurationNames": [ - "iast.redactionValuePattern" + "iast.redactionValuePattern", + "experimental.iast.redactionValuePattern" ], "default": "(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,})|[\\w\\.-]+@[a-zA-Z\\d\\.-]+\\.[a-zA-Z]{2,})" } @@ -1028,9 +1032,12 @@ "implementation": "A", "type": "int", "configurationNames": [ - "iast.requestSampling" + "iast.requestSampling", + "experimental.iast.requestSampling" ], - "default": "30" + "default": "30", + "allowed": "100|[1-9]?\\d", + "transform": "iastRequestSampling" } ], "DD_IAST_SECURITY_CONTROLS_CONFIGURATION": [ @@ -1038,7 +1045,8 @@ "implementation": "B", "type": "string", "configurationNames": [ - "iast.securityControlsConfiguration" + "iast.securityControlsConfiguration", + "experimental.iast.securityControlsConfiguration" ], "default": null } @@ -1048,7 +1056,8 @@ "implementation": "B", "type": "boolean", "configurationNames": [ - "iast.stackTrace.enabled" + "iast.stackTrace.enabled", + "experimental.iast.stackTrace.enabled" ], "default": "true" } @@ -1058,19 +1067,18 @@ "implementation": "B", "type": "string", "configurationNames": [ - "iast.telemetryVerbosity" + "iast.telemetryVerbosity", + "experimental.iast.telemetryVerbosity" ], "default": "INFORMATION" } ], "DD_INJECTION_ENABLED": [ { - "implementation": "A", - "type": "array", - "default": "", - "configurationNames": [ - "injectionEnabled" - ] + "implementation": "C", + "type": "string", + "default": null, + "internalPropertyName": 
"injectionEnabled" } ], "DD_INJECT_FORCE": [ @@ -1078,9 +1086,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "injectForce" - ] + "internalPropertyName": "injectForce" } ], "DD_INSTRUMENTATION_CONFIG_ID": [ @@ -1088,9 +1094,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "instrumentation_config_id" - ] + "internalPropertyName": "instrumentation_config_id" } ], "DD_INSTRUMENTATION_INSTALL_ID": [ @@ -1098,9 +1102,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "installSignature.id" - ] + "internalPropertyName": "installSignature.id" } ], "DD_INSTRUMENTATION_INSTALL_TIME": [ @@ -1108,9 +1110,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "installSignature.time" - ] + "internalPropertyName": "installSignature.time" } ], "DD_INSTRUMENTATION_INSTALL_TYPE": [ @@ -1118,9 +1118,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "installSignature.type" - ] + "internalPropertyName": "installSignature.type" } ], "DD_INSTRUMENTATION_TELEMETRY_ENABLED": [ @@ -1131,9 +1129,7 @@ "aliases": [ "DD_TRACE_TELEMETRY_ENABLED" ], - "configurationNames": [ - "telemetry.enabled" - ] + "internalPropertyName": "telemetry.enabled" } ], "DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD": [ @@ -1141,9 +1137,7 @@ "implementation": "A", "type": "int", "default": "30000", - "configurationNames": [ - "profiling.longLivedThreshold" - ] + "internalPropertyName": "profiling.longLivedThreshold" } ], "DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED": [ @@ -1165,9 +1159,7 @@ "implementation": "A", "type": "int", "default": "128", - "configurationNames": [ - "langchain.spanCharLimit" - ] + "internalPropertyName": "langchain.spanCharLimit" } ], "DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE": [ @@ -1175,9 +1167,7 @@ "implementation": "A", "type": "decimal", "default": "1", - "configurationNames": 
[ - "langchain.spanPromptCompletionSampleRate" - ] + "internalPropertyName": "langchain.spanPromptCompletionSampleRate" } ], "DD_LLMOBS_AGENTLESS_ENABLED": [ @@ -1195,9 +1185,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "llmobs.enabled" - ] + "internalPropertyName": "llmobs.enabled" } ], "DD_LLMOBS_ML_APP": [ @@ -1225,16 +1213,22 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "otelLogsEnabled" - ] + "internalPropertyName": "otelLogsEnabled" } ], - "DD_LOG_LEVEL": [ + "DD_TRACE_LOG_LEVEL": [ { - "implementation": "B", + "implementation": "C", "type": "string", - "default": null + "default": "debug", + "configurationNames": [ + "logLevel" + ], + "aliases": [ + "DD_LOG_LEVEL", + "OTEL_LOG_LEVEL" + ], + "allowed": "debug|info|warn|error" } ], "DD_METRICS_OTEL_ENABLED": [ @@ -1242,9 +1236,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "otelMetricsEnabled" - ] + "internalPropertyName": "otelMetricsEnabled" } ], "DD_MINI_AGENT_PATH": [ @@ -1269,9 +1261,7 @@ "implementation": "A", "type": "int", "default": "128", - "configurationNames": [ - "openai.spanCharLimit" - ] + "internalPropertyName": "openai.spanCharLimit" } ], "DD_PIPELINE_EXECUTION_ID": [ @@ -1346,38 +1336,30 @@ ], "DD_PROFILING_DEBUG_UPLOAD_COMPRESSION": [ { - "implementation": "A", + "implementation": "B", "type": "string", - "default": "zstd" + "default": "on", + "allowed": "on|off|(gzip|zstd)(-[1-9][0-9]?)?", + "transform": "toLowerCase" } ], "DD_PROFILING_ENABLED": [ { - "implementation": "A", - "type": "boolean", + "implementation": "B", + "type": "string", + "internalPropertyName": "profiling.enabled", "configurationNames": [ - "profiling.enabled" + "profiling" ], + "allowed": "false|true|auto|1|0", + "transform": "normalizeProfilingEnabled", "default": "false", + "__TODO__": "The alias is deprecated and should log. 
This needs an re-implementation.", "aliases": [ "DD_EXPERIMENTAL_PROFILING_ENABLED" ] } ], - "DD_EXPERIMENTAL_PROFILING_ENABLED": [ - { - "implementation": "A", - "type": "boolean", - "configurationNames": [ - "profiling.enabled" - ], - "default": "false", - "aliases": [ - "DD_PROFILING_ENABLED" - ], - "deprecated": true - } - ], "DD_PROFILING_ENDPOINT_COLLECTION_ENABLED": [ { "implementation": "A", @@ -1401,8 +1383,8 @@ ], "DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES": [ { - "implementation": "A", - "type": "string", + "implementation": "B", + "type": "array", "default": "process" } ], @@ -1429,19 +1411,16 @@ ], "DD_PROFILING_EXPORTERS": [ { - "implementation": "A", - "type": "string", - "default": "agent", - "configurationNames": [ - "profiling.exporters" - ] + "implementation": "B", + "type": "array", + "default": "agent" } ], "DD_PROFILING_HEAP_ENABLED": [ { - "implementation": "A", + "implementation": "B", "type": "boolean", - "default": "false" + "default": null } ], "DD_PROFILING_HEAP_SAMPLING_INTERVAL": [ @@ -1460,8 +1439,8 @@ ], "DD_PROFILING_PROFILERS": [ { - "implementation": "A", - "type": "string", + "implementation": "B", + "type": "array", "default": "space,wall" } ], @@ -1469,10 +1448,7 @@ { "implementation": "A", "type": "boolean", - "default": "true", - "configurationNames": [ - "profiling.sourceMap" - ] + "default": "true" } ], "DD_PROFILING_TIMELINE_ENABLED": [ @@ -1519,9 +1495,9 @@ ], "DD_PROFILING_WALLTIME_ENABLED": [ { - "implementation": "B", + "implementation": "A", "type": "boolean", - "default": "true" + "default": null } ], "DD_REMOTE_CONFIGURATION_ENABLED": [ @@ -1532,9 +1508,7 @@ "aliases": [ "DD_REMOTE_CONFIG_ENABLED" ], - "configurationNames": [ - "remoteConfig.enabled" - ] + "internalPropertyName": "remoteConfig.enabled" } ], "DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS": [ @@ -1552,7 +1526,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "runtimeMetrics.enabled" + "runtimeMetrics.enabled", + "runtimeMetrics" 
], "default": "false" } @@ -1597,6 +1572,14 @@ ] } ], + "DD_ROOT_JS_SESSION_ID": [ + { + "implementation": "A", + "type": "string", + "default": null, + "internal": true + } + ], "DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED": [ { "implementation": "B", @@ -1617,8 +1600,10 @@ ], "default": null, "aliases": [ - "DD_SERVICE_NAME" - ] + "DD_SERVICE_NAME", + "OTEL_SERVICE_NAME" + ], + "allowed": ".+" } ], "DD_SERVICE_MAPPING": [ @@ -1643,20 +1628,21 @@ ], "DD_SPAN_SAMPLING_RULES": [ { - "implementation": "C", - "type": "array", + "implementation": "D", + "type": "json", "configurationNames": [ - "spanSamplingRules", - "sampler.spanSamplingRules" + "spanSamplingRules" ], - "default": null + "default": null, + "transform": "toCamelCase" } ], "DD_SPAN_SAMPLING_RULES_FILE": [ { - "implementation": "B", + "implementation": "A", "type": "string", - "default": "" + "default": null, + "transform": "readFilePath" } ], "DD_TAGS": [ @@ -1674,9 +1660,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "telemetry.debug" - ] + "internalPropertyName": "telemetry.debug" } ], "DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED": [ @@ -1684,9 +1668,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "telemetry.dependencyCollection" - ] + "internalPropertyName": "telemetry.dependencyCollection" } ], "DD_TELEMETRY_FORWARDER_PATH": [ @@ -1701,9 +1683,7 @@ "implementation": "B", "type": "decimal", "default": "60.0", - "configurationNames": [ - "telemetry.heartbeatInterval" - ] + "internalPropertyName": "telemetry.heartbeatInterval" } ], "DD_TELEMETRY_LOG_COLLECTION_ENABLED": [ @@ -1711,9 +1691,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "telemetry.logCollection" - ] + "internalPropertyName": "telemetry.logCollection" } ], "DD_TELEMETRY_METRICS_ENABLED": [ @@ -1721,9 +1699,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - 
"telemetry.metrics" - ] + "internalPropertyName": "telemetry.metrics" } ], "DD_TEST_FAILED_TEST_REPLAY_ENABLED": [ @@ -1731,9 +1707,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isTestDynamicInstrumentationEnabled" - ] + "internalPropertyName": "isTestDynamicInstrumentationEnabled" } ], "DD_TEST_FLEET_CONFIG_PATH": [ @@ -1755,9 +1729,7 @@ "implementation": "C", "type": "int", "default": "20", - "configurationNames": [ - "testManagementAttemptToFixRetries" - ] + "internalPropertyName": "testManagementAttemptToFixRetries" } ], "DD_TEST_MANAGEMENT_ENABLED": [ @@ -1765,9 +1737,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isTestManagementEnabled" - ] + "internalPropertyName": "isTestManagementEnabled" } ], "DD_TEST_TIA_KEEP_COV_CONFIG": [ @@ -1775,9 +1745,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "isKeepingCoverageConfiguration" - ] + "internalPropertyName": "isKeepingCoverageConfiguration" } ], "DD_TEST_SESSION_NAME": [ @@ -1785,9 +1753,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "ciVisibilityTestSessionName" - ] + "internalPropertyName": "ciVisibilityTestSessionName" } ], "DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED": [ @@ -1939,9 +1905,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "trace.aws.addSpanPointers" - ] + "internalPropertyName": "trace.aws.addSpanPointers" } ], "DD_TRACE_AWS_SDK_AWS_BATCH_PROPAGATION_ENABLED": [ @@ -2327,7 +2291,8 @@ "configurationNames": [ "clientIpHeader" ], - "default": null + "default": null, + "transform": "toLowerCase" } ], "DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH": [ @@ -2337,27 +2302,30 @@ "configurationNames": [ "cloudPayloadTagging.maxDepth" ], - "default": "10" + "default": "10", + "allowed": "\\d+" } ], "DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING": [ { - "implementation": "A", - "type": 
"array", + "implementation": "B", + "type": "string", "configurationNames": [ "cloudPayloadTagging.request" ], - "default": null + "default": null, + "transform": "splitJSONPathRules" } ], "DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING": [ { - "implementation": "A", - "type": "array", + "implementation": "B", + "type": "string", "configurationNames": [ "cloudPayloadTagging.response" ], - "default": null + "default": null, + "transform": "splitJSONPathRules" } ], "DD_TRACE_COLLECTIONS_ENABLED": [ @@ -2470,9 +2438,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "trace.dynamoDb.tablePrimaryKeys" - ] + "internalPropertyName": "trace.dynamoDb.tablePrimaryKeys" } ], "DD_TRACE_ELASTICSEARCH_ENABLED": [ @@ -2501,8 +2467,9 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "traceEnabled" + "internalPropertyName": "tracing", + "aliases": [ + "DD_TRACING_ENABLED" ] } ], @@ -2620,10 +2587,7 @@ { "implementation": "A", "type": "boolean", - "default": "true", - "configurationNames": [ - "isGCPPubSubPushSubscriptionEnabled" - ] + "default": "true" } ], "DD_TRACE_GENERIC_POOL_ENABLED": [ @@ -2638,9 +2602,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "gitMetadataEnabled" - ] + "internalPropertyName": "gitMetadataEnabled" } ], "DD_TRACE_GOOGLE_CLOUD_PUBSUB_ENABLED": [ @@ -2683,9 +2645,7 @@ "implementation": "A", "type": "array", "default": "", - "configurationNames": [ - "graphqlErrorExtensions" - ] + "internalPropertyName": "graphqlErrorExtensions" } ], "DD_TRACE_GRAPHQL_TAG_ENABLED": [ @@ -2772,7 +2732,8 @@ "default": "", "configurationNames": [ "headerTags" - ] + ], + "transform": "stripColonWhitespace" } ], "DD_TRACE_HONO_ENABLED": [ @@ -3040,13 +3001,6 @@ "default": "true" } ], - "DD_TRACE_LOG_LEVEL": [ - { - "implementation": "C", - "type": "string", - "default": "debug" - } - ], "DD_TRACE_LOOPBACK_ENABLED": [ { "implementation": "A", @@ -3066,9 +3020,7 
@@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "memcachedCommandEnabled" - ] + "internalPropertyName": "memcachedCommandEnabled" } ], "DD_TRACE_MEMCACHED_ENABLED": [ @@ -3184,9 +3136,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "trace.nativeSpanEvents" - ] + "internalPropertyName": "trace.nativeSpanEvents" } ], "DD_TRACE_NET_ENABLED": [ @@ -3236,9 +3186,7 @@ "implementation": "F", "type": "string", "default": "(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:(?:\\s|%20)*(?:=|%3D)[^&]+|(?:\"|%22)(?:\\s|%20)*(?::|%3A)(?:\\s|%20)*(?:\"|%22)(?:%2[^2]|%[^2]|[^\"%])+(?:\"|%22))|bearer(?:\\s|%20)+[a-z0-9\\._\\-]+|token(?::|%3A)[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L](?:[\\w=-]|%3D)+\\.ey[I-L](?:[\\w=-]|%3D)+(?:\\.(?:[\\w.+\\/=-]|%3D|%2F|%2B)+)?|[\\-]{5}BEGIN(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY[\\-]{5}[^\\-]+[\\-]{5}END(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY|ssh-rsa(?:\\s|%20)*(?:[a-z0-9\\/\\.+]|%2F|%5C|%2B){100,}", - "configurationNames": [ - "queryStringObfuscation" - ] + "internalPropertyName": "queryStringObfuscation" } ], "DD_TRACE_OPENAI_ENABLED": [ @@ -3429,10 +3377,10 @@ { "implementation": "B", "type": "string", + "allowed": "continue|restart|ignore", + "transform": "toLowerCase", "default": "continue", - "configurationNames": [ - "tracePropagationBehaviorExtract" - ] + "internalPropertyName": "tracePropagationBehaviorExtract" } ], "DD_TRACE_PROPAGATION_EXTRACT_FIRST": [ @@ -3440,19 +3388,18 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "tracePropagationExtractFirst" - ] + "internalPropertyName": "tracePropagationExtractFirst" } ], "DD_TRACE_PROPAGATION_STYLE": [ { "implementation": "D", "type": "array", - "configurationNames": [ - "tracePropagationStyle" - ], - "default": 
"datadog,tracecontext,baggage" + "default": "datadog,tracecontext,baggage", + "transform": "validatePropagationStyles", + "aliases": [ + "OTEL_PROPAGATORS" + ] } ], "DD_TRACE_PROPAGATION_STYLE_EXTRACT": [ @@ -3462,7 +3409,8 @@ "configurationNames": [ "tracePropagationStyle.extract" ], - "default": "datadog, tracecontext, baggage" + "default": "datadog, tracecontext, baggage", + "transform": "toLowerCase" } ], "DD_TRACE_PROPAGATION_STYLE_INJECT": [ @@ -3472,7 +3420,8 @@ "configurationNames": [ "tracePropagationStyle.inject" ], - "default": "datadog, tracecontext, baggage" + "default": "datadog, tracecontext, baggage", + "transform": "toLowerCase" } ], "DD_TRACE_PROTOBUFJS_ENABLED": [ @@ -3501,8 +3450,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "ingestion.rateLimit", - "sampler.rateLimit" + "rateLimit", + "ingestion.rateLimit" ], "default": "100" } @@ -3567,9 +3516,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "resourceRenamingEnabled" - ] + "internalPropertyName": "resourceRenamingEnabled" } ], "DD_TRACE_RESTIFY_ENABLED": [ @@ -3598,20 +3545,22 @@ "implementation": "B", "type": "decimal", "configurationNames": [ + "sampleRate", "ingestion.sampleRate" ], - "default": null + "default": null, + "transform": "sampleRate" } ], "DD_TRACE_SAMPLING_RULES": [ { - "implementation": "A", - "type": "array", + "implementation": "E", + "type": "json", "configurationNames": [ - "samplingRules", - "sampler.rules" + "samplingRules" ], - "default": "" + "default": "[]", + "transform": "toCamelCase" } ], "DD_TRACE_SCOPE": [ @@ -3619,9 +3568,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "scope" - ] + "internalPropertyName": "scope" } ], "DD_TRACE_SELENIUM_ENABLED": [ @@ -3663,6 +3610,8 @@ { "implementation": "B", "type": "string", + "allowed": "v0|v1", + "transform": "toLowerCase", "configurationNames": [ "spanAttributeSchema" ], @@ -3673,9 +3622,7 @@ { 
"implementation": "A", "type": "int", - "configurationNames": [ - "spanLeakDebug" - ], + "internalPropertyName": "spanLeakDebug", "default": "0" } ], @@ -3688,20 +3635,21 @@ ], "DD_TRACE_STARTUP_LOGS": [ { - "implementation": "D", + "implementation": "C", "type": "boolean", "configurationNames": [ "startupLogs" ], - "default": "false" + "default": "true" } ], "DD_TRACE_STATS_COMPUTATION_ENABLED": [ { "implementation": "A", "type": "boolean", + "internalPropertyName": "stats.enabled", "configurationNames": [ - "stats.enabled" + "stats" ], "default": "false" } @@ -3828,19 +3776,7 @@ "implementation": "A", "type": "int", "default": "512", - "configurationNames": [ - "tagsHeaderMaxLength" - ] - } - ], - "DD_TRACING_ENABLED": [ - { - "implementation": "A", - "type": "boolean", - "default": "true", - "configurationNames": [ - "tracing" - ] + "internalPropertyName": "tagsHeaderMaxLength" } ], "DD_VERSION": [ @@ -3858,9 +3794,7 @@ "implementation": "A", "type": "int", "default": "128", - "configurationNames": [ - "vertexai.spanCharLimit" - ] + "internalPropertyName": "vertexai.spanCharLimit" } ], "DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE": [ @@ -3868,9 +3802,7 @@ "implementation": "A", "type": "decimal", "default": "1", - "configurationNames": [ - "vertexai.spanPromptCompletionSampleRate" - ] + "internalPropertyName": "vertexai.spanPromptCompletionSampleRate" } ], "DD_VITEST_WORKER": [ @@ -3884,50 +3816,42 @@ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelMaxExportBatchSize" - ], - "default": "512" + "internalPropertyName": "otelMaxExportBatchSize", + "default": "512", + "allowed": "[1-9]\\d*" } ], "OTEL_BSP_MAX_QUEUE_SIZE": [ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelMaxQueueSize" - ], - "default": "2048" + "internalPropertyName": "otelMaxQueueSize", + "default": "2048", + "allowed": "[1-9]\\d*" } ], "OTEL_BSP_SCHEDULE_DELAY": [ { "implementation": "A", "type": "int", - "configurationNames": [ - 
"otelBatchTimeout" - ], - "default": "5000" + "internalPropertyName": "otelBatchTimeout", + "default": "5000", + "allowed": "[1-9]\\d*" } ], "OTEL_EXPORTER_OTLP_ENDPOINT": [ { "implementation": "A", "type": "string", - "default": null, - "configurationNames": [ - "otelUrl" - ] + "default": null } ], "OTEL_EXPORTER_OTLP_HEADERS": [ { - "implementation": "B", - "type": "map", + "implementation": "C", + "type": "string", "default": null, - "configurationNames": [ - "otelHeaders" - ] + "internalPropertyName": "otelHeaders" } ], "OTEL_EXPORTER_OTLP_LOGS_ENDPOINT": [ @@ -3935,18 +3859,20 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "otelLogsUrl" + "internalPropertyName": "otelLogsUrl", + "aliases": [ + "OTEL_EXPORTER_OTLP_ENDPOINT" ] } ], "OTEL_EXPORTER_OTLP_LOGS_HEADERS": [ { - "implementation": "B", - "type": "map", + "implementation": "A", + "type": "string", "default": null, - "configurationNames": [ - "otelLogsHeaders" + "internalPropertyName": "otelLogsHeaders", + "aliases": [ + "OTEL_EXPORTER_OTLP_HEADERS" ] } ], @@ -3955,8 +3881,9 @@ "implementation": "D", "type": "string", "default": "http/protobuf", - "configurationNames": [ - "otelLogsProtocol" + "internalPropertyName": "otelLogsProtocol", + "aliases": [ + "OTEL_EXPORTER_OTLP_PROTOCOL" ] } ], @@ -3964,10 +3891,12 @@ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelLogsTimeout" - ], - "default": "10000" + "internalPropertyName": "otelLogsTimeout", + "default": "10000", + "allowed": "[1-9]\\d*", + "aliases": [ + "OTEL_EXPORTER_OTLP_TIMEOUT" + ] } ], "OTEL_EXPORTER_OTLP_METRICS_ENDPOINT": [ @@ -3975,18 +3904,20 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "otelMetricsUrl" + "internalPropertyName": "otelMetricsUrl", + "aliases": [ + "OTEL_EXPORTER_OTLP_ENDPOINT" ] } ], "OTEL_EXPORTER_OTLP_METRICS_HEADERS": [ { - "implementation": "A", - "type": "map", + "implementation": "B", + "type": "string", 
"default": null, - "configurationNames": [ - "otelMetricsHeaders" + "internalPropertyName": "otelMetricsHeaders", + "aliases": [ + "OTEL_EXPORTER_OTLP_HEADERS" ] } ], @@ -3995,8 +3926,9 @@ "implementation": "B", "type": "string", "default": "http/protobuf", - "configurationNames": [ - "otelMetricsProtocol" + "internalPropertyName": "otelMetricsProtocol", + "aliases": [ + "OTEL_EXPORTER_OTLP_PROTOCOL" ] } ], @@ -4004,9 +3936,9 @@ { "implementation": "A", "type": "string", - "configurationNames": [ - "otelMetricsTemporalityPreference" - ], + "allowed": "Delta|Cumulative|LowMemory", + "transform": "toUpperCase", + "internalPropertyName": "otelMetricsTemporalityPreference", "default": "delta" } ], @@ -4014,10 +3946,12 @@ { "implementation": "B", "type": "int", - "configurationNames": [ - "otelMetricsTimeout" - ], - "default": "10000" + "allowed": "[1-9]\\d*", + "internalPropertyName": "otelMetricsTimeout", + "default": "10000", + "aliases": [ + "OTEL_EXPORTER_OTLP_TIMEOUT" + ] } ], "OTEL_EXPORTER_OTLP_PROTOCOL": [ @@ -4025,18 +3959,15 @@ "implementation": "A", "type": "string", "default": "http/protobuf", - "configurationNames": [ - "otelProtocol" - ] + "internalPropertyName": "otelProtocol" } ], "OTEL_EXPORTER_OTLP_TIMEOUT": [ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelTimeout" - ], + "allowed": "[1-9]\\d*", + "internalPropertyName": "otelTimeout", "default": "10000" } ], @@ -4044,30 +3975,34 @@ { "implementation": "A", "type": "string", - "default": null + "default": null, + "allowed": "none|otlp", + "transform": "toLowerCase" } ], "OTEL_LOG_LEVEL": [ { "implementation": "C", "type": "string", - "default": null + "default": null, + "allowed": "debug|info|warn|error" } ], "OTEL_METRICS_EXPORTER": [ { "implementation": "C", "type": "string", - "default": null + "default": null, + "allowed": "none|otlp", + "transform": "toLowerCase" } ], "OTEL_METRIC_EXPORT_INTERVAL": [ { "implementation": "A", "type": "int", - "configurationNames": [ - 
"otelMetricsExportInterval" - ], + "allowed": "[1-9]\\d*", + "internalPropertyName": "otelMetricsExportInterval", "default": "10000" } ], @@ -4075,27 +4010,17 @@ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelMetricsExportTimeout" - ], + "allowed": "[1-9]\\d*", + "internalPropertyName": "otelMetricsExportTimeout", "default": "7500" } ], - "OTEL_PROPAGATORS": [ - { - "implementation": "A", - "type": "array", - "default": "", - "configurationNames": [ - "tracePropagationStyle.otelPropagators" - ] - } - ], "OTEL_RESOURCE_ATTRIBUTES": [ { "implementation": "B", "type": "string", - "default": "" + "default": "", + "transform": "parseOtelTags" } ], "OTEL_SDK_DISABLED": [ @@ -4105,38 +4030,30 @@ "default": "true" } ], - "OTEL_SERVICE_NAME": [ - { - "implementation": "B", - "type": "string", - "configurationNames": [ - "service" - ], - "default": null - } - ], "OTEL_TRACES_EXPORTER": [ { "implementation": "F", "type": "string", - "default": "otlp" + "default": "otlp", + "allowed": "none|otlp", + "transform": "toLowerCase" } ], "OTEL_TRACES_SAMPLER": [ { "implementation": "E", "type": "string", - "default": "parentbased_always_on" + "default": "parentbased_always_on", + "allowed": "always_on|always_off|traceidratio|parentbased_always_on|parentbased_always_off|parentbased_traceidratio", + "transform": "toLowerCase" } ], "OTEL_TRACES_SAMPLER_ARG": [ { "implementation": "D", "type": "decimal", - "configurationNames": [ - "sampleRate" - ], - "default": null + "default": null, + "allowed": "\\d+(\\.\\d+)?" 
} ] } diff --git a/packages/dd-trace/src/dogstatsd.js b/packages/dd-trace/src/dogstatsd.js index b9f1491febc..af4fa9d972f 100644 --- a/packages/dd-trace/src/dogstatsd.js +++ b/packages/dd-trace/src/dogstatsd.js @@ -7,7 +7,7 @@ const isIP = require('net').isIP const request = require('./exporters/common/request') const log = require('./log') const Histogram = require('./histogram') -const defaults = require('./config/defaults') +const { defaults } = require('./config/defaults') const { getAgentUrl } = require('./agent/url') const { entityId } = require('./exporters/common/docker') diff --git a/packages/dd-trace/src/index.js b/packages/dd-trace/src/index.js index 582511ab6a9..0366f023e4c 100644 --- a/packages/dd-trace/src/index.js +++ b/packages/dd-trace/src/index.js @@ -1,7 +1,7 @@ 'use strict' const { getValueFromEnvSources } = require('./config/helper') -const { isFalse } = require('./util') +const { isFalse, isTrue } = require('./util') // Global `jest` is only present in Jest workers. const inJestWorker = typeof jest !== 'undefined' @@ -9,7 +9,10 @@ const inJestWorker = typeof jest !== 'undefined' const ddTraceDisabled = getValueFromEnvSources('DD_TRACE_ENABLED') ? isFalse(getValueFromEnvSources('DD_TRACE_ENABLED')) : String(getValueFromEnvSources('OTEL_TRACES_EXPORTER')).toLowerCase() === 'none' +const shouldUseProxyWhenTracingDisabled = + isTrue(getValueFromEnvSources('DD_DYNAMIC_INSTRUMENTATION_ENABLED')) || + isTrue(getValueFromEnvSources('DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED')) -module.exports = ddTraceDisabled || inJestWorker +module.exports = (ddTraceDisabled && !shouldUseProxyWhenTracingDisabled) || inJestWorker ? 
require('./noop/proxy') : require('./proxy') diff --git a/packages/dd-trace/src/llmobs/sdk.js b/packages/dd-trace/src/llmobs/sdk.js index 6e06027953c..3429a95a27e 100644 --- a/packages/dd-trace/src/llmobs/sdk.js +++ b/packages/dd-trace/src/llmobs/sdk.js @@ -29,6 +29,11 @@ class LLMObs extends NoopLLMObs { */ #hasUserSpanProcessor = false + /** + * @param {import('../tracer')} tracer - Tracer instance + * @param {import('./index')} llmobsModule - LLMObs module instance + * @param {import('../config/config-base')} config - Tracer configuration + */ constructor (tracer, llmobsModule, config) { super(tracer) @@ -38,7 +43,7 @@ class LLMObs extends NoopLLMObs { } get enabled () { - return this._config.llmobs.enabled + return this._config.llmobs.enabled ?? false } enable (options = {}) { @@ -56,13 +61,10 @@ class LLMObs extends NoopLLMObs { return } - const llmobs = { - mlApp: options.mlApp, - agentlessEnabled: options.agentlessEnabled, - } - // TODO: This will update config telemetry with the origin 'code', which is not ideal when `enable()` is called - // based on `APM_TRACING` RC product updates. - this._config.updateOptions({ llmobs }) + // TODO: These configs should be passed through directly at construction time instead. 
+ this._config.llmobs.enabled = true + this._config.llmobs.mlApp = options.mlApp + this._config.llmobs.agentlessEnabled = options.agentlessEnabled // configure writers and channel subscribers this._llmobsModule.enable(this._config) diff --git a/packages/dd-trace/src/log/index.js b/packages/dd-trace/src/log/index.js index 7ff2a82fe99..a237325d899 100644 --- a/packages/dd-trace/src/log/index.js +++ b/packages/dd-trace/src/log/index.js @@ -1,5 +1,8 @@ 'use strict' + const { inspect } = require('util') + +const { defaults } = require('../config/defaults') const { isTrue } = require('../util') const { getValueFromEnvSources } = require('../config/helper') const { traceChannel, debugChannel, infoChannel, warnChannel, errorChannel } = require('./channels') @@ -8,12 +11,17 @@ const { Log, LogConfig, NoTransmitError } = require('./log') const { memoize } = require('./utils') const config = { - enabled: false, + enabled: defaults.DD_TRACE_DEBUG, logger: undefined, - logLevel: 'debug', + logLevel: defaults.logLevel, } -// in most places where we know we want to mute a log we use log.error() directly +const deprecate = memoize((code, message) => { + publishFormatted(errorChannel, null, message) + return true +}) + +// In most places where we know we want to mute a log we use log.error() directly const NO_TRANSMIT = new LogConfig(false) const log = { @@ -21,36 +29,6 @@ const log = { NO_TRANSMIT, NoTransmitError, - /** - * @returns Read-only version of logging config. 
To modify config, call `log.use` and `log.toggle` - */ - getConfig () { - return { ...config } - }, - - use (logger) { - config.logger = logger - logWriter.use(logger) - return log - }, - - toggle (enabled, logLevel) { - config.enabled = enabled - config.logLevel = logLevel - logWriter.toggle(enabled, logLevel) - return log - }, - - reset () { - logWriter.reset() - log._deprecate = memoize((code, message) => { - publishFormatted(errorChannel, null, message) - return true - }) - - return log - }, - trace (...args) { if (traceChannel.hasSubscribers) { const logRecord = {} @@ -66,6 +44,8 @@ const log = { publishFormatted(traceChannel, null, stack.join('\n')) } + // TODO: Why do we allow chaining here? This is likely not used anywhere. + // If it is used, that seems like a mistake. return log }, @@ -103,30 +83,23 @@ const log = { }, deprecate (code, message) { - return log._deprecate(code, message) + return deprecate(code, message) }, - isEnabled (fleetStableConfigValue, localStableConfigValue) { - return isTrue( - fleetStableConfigValue ?? + configure (options) { + config.logger = options.logger + config.logLevel = options.logLevel ?? + getValueFromEnvSources('DD_TRACE_LOG_LEVEL') ?? + config.logLevel + config.enabled = isTrue( getValueFromEnvSources('DD_TRACE_DEBUG') ?? - (getValueFromEnvSources('OTEL_LOG_LEVEL') === 'debug' || undefined) ?? - localStableConfigValue ?? - config.enabled + // TODO: Handle this by adding a log buffer so that configure may be called with the actual configurations. + // eslint-disable-next-line eslint-rules/eslint-process-env + (process.env.OTEL_LOG_LEVEL === 'debug' || config.enabled) ) - }, + logWriter.configure(config.enabled, config.logLevel, options.logger) - getLogLevel ( - optionsValue, - fleetStableConfigValue, - localStableConfigValue - ) { - return optionsValue ?? - fleetStableConfigValue ?? - getValueFromEnvSources('DD_TRACE_LOG_LEVEL') ?? - getValueFromEnvSources('OTEL_LOG_LEVEL') ?? - localStableConfigValue ?? 
- config.logLevel + return config.enabled }, } @@ -150,8 +123,6 @@ function getErrorLog (err) { return err } -log.reset() - -log.toggle(log.isEnabled(), log.getLogLevel()) +log.configure({}) module.exports = log diff --git a/packages/dd-trace/src/log/writer.js b/packages/dd-trace/src/log/writer.js index 13ce84d92d7..358a3b680fe 100644 --- a/packages/dd-trace/src/log/writer.js +++ b/packages/dd-trace/src/log/writer.js @@ -2,6 +2,7 @@ const { storage } = require('../../../datadog-core') const { LogChannel } = require('./channels') + const defaultLogger = { debug: msg => console.debug(msg), /* eslint-disable-line no-console */ info: msg => console.info(msg), /* eslint-disable-line no-console */ @@ -17,12 +18,8 @@ function withNoop (fn) { storage('legacy').run({ noop: true }, fn) } -function unsubscribeAll () { - logChannel.unsubscribe({ trace, debug, info, warn, error }) -} - function toggleSubscription (enable, level) { - unsubscribeAll() + logChannel.unsubscribe({ trace, debug, info, warn, error }) if (enable) { logChannel = new LogChannel(level) @@ -30,23 +27,14 @@ function toggleSubscription (enable, level) { } } -function toggle (enable, level) { +function configure (enable, level, newLogger) { enabled = enable + logger = typeof newLogger?.debug === 'function' && typeof newLogger.error === 'function' + ? 
newLogger + : defaultLogger toggleSubscription(enabled, level) } -function use (newLogger) { - if (typeof newLogger?.debug === 'function' && typeof newLogger.error === 'function') { - logger = newLogger - } -} - -function reset () { - logger = defaultLogger - enabled = false - toggleSubscription(false) -} - function error (err) { withNoop(() => logger.error(err)) } @@ -69,4 +57,4 @@ function trace (log) { withNoop(() => logger.debug(log)) } -module.exports = { use, toggle, reset, error, warn, info, debug, trace } +module.exports = { configure, error, warn, info, debug, trace } diff --git a/packages/dd-trace/src/opentracing/propagation/text_map.js b/packages/dd-trace/src/opentracing/propagation/text_map.js index 3c7b65eefb0..50efb42c9e0 100644 --- a/packages/dd-trace/src/opentracing/propagation/text_map.js +++ b/packages/dd-trace/src/opentracing/propagation/text_map.js @@ -6,6 +6,7 @@ const id = require('../../id') const DatadogSpanContext = require('../span_context') const log = require('../../log') const tags = require('../../../../../ext/tags') +const { getConfiguredEnvName } = require('../../config/helper') const { setBaggageItem, getAllBaggageItems, removeAllBaggageItems } = require('../../baggage') const telemetryMetrics = require('../../telemetry/metrics') @@ -65,8 +66,15 @@ const zeroTraceId = '0000000000000000' const hex16 = /^[0-9A-Fa-f]{16}$/ class TextMapPropagator { + #extractB3Context + constructor (config) { this._config = config + + // TODO: should match "b3 single header" in next major + const envName = getConfiguredEnvName('DD_TRACE_PROPAGATION_STYLE') + // eslint-disable-next-line eslint-rules/eslint-env-aliases + this.#extractB3Context = envName === 'OTEL_PROPAGATORS' ? 
this._extractB3SingleContext : this._extractB3MultiContext } inject (spanContext, carrier) { @@ -363,10 +371,7 @@ class TextMapPropagator { extractedContext = this._extractB3SingleContext(carrier) break case 'b3': - extractedContext = this._config.tracePropagationStyle.otelPropagators - // TODO: should match "b3 single header" in next major - ? this._extractB3SingleContext(carrier) - : this._extractB3MultiContext(carrier) + extractedContext = this.#extractB3Context(carrier) break case 'b3multi': extractedContext = this._extractB3MultiContext(carrier) diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 040618c5b58..f34135284af 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -6,9 +6,8 @@ const { pathToFileURL } = require('url') const satisfies = require('../../../../vendor/dist/semifies') const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags') const { getIsAzureFunction } = require('../serverless') -const { isFalse, isTrue } = require('../util') const { getAzureTagsFromMetadata, getAzureAppMetadata, getAzureFunctionMetadata } = require('../azure_metadata') -const { getEnvironmentVariable, getValueFromEnvSources } = require('../config/helper') +const { getEnvironmentVariable } = require('../config/helper') const { getAgentUrl } = require('../agent/url') const { isACFActive } = require('../../../datadog-core/src/storage') @@ -22,59 +21,22 @@ const { oomExportStrategies, snapshotKinds } = require('./constants') const { tagger } = require('./tagger') class Config { - constructor (options = {}) { - // TODO: Remove entries that were already resolved in config. - // For the others, move them over to config. 
+ constructor (options) { const AWS_LAMBDA_FUNCTION_NAME = getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') - // TODO: Move initialization of these values to packages/dd-trace/src/config/index.js, and just read from config - const { - DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED, - DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED, - DD_PROFILING_CODEHOTSPOTS_ENABLED, - DD_PROFILING_CPU_ENABLED, - DD_PROFILING_DEBUG_SOURCE_MAPS, - DD_PROFILING_DEBUG_UPLOAD_COMPRESSION, - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED, - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, - DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE, - DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT, - DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED, - DD_PROFILING_HEAP_ENABLED, - DD_PROFILING_HEAP_SAMPLING_INTERVAL, - DD_PROFILING_PPROF_PREFIX, - DD_PROFILING_PROFILERS, - DD_PROFILING_TIMELINE_ENABLED, - DD_PROFILING_UPLOAD_PERIOD, - DD_PROFILING_UPLOAD_TIMEOUT, - DD_PROFILING_V8_PROFILER_BUG_WORKAROUND, - DD_PROFILING_WALLTIME_ENABLED, - DD_TAGS, - } = getProfilingEnvValues() - - // Must be longer than one minute so pad with five seconds - const flushInterval = options.interval ?? (Number(DD_PROFILING_UPLOAD_PERIOD) * 1000 || 65 * 1000) - const uploadTimeout = options.uploadTimeout ?? (Number(DD_PROFILING_UPLOAD_TIMEOUT) || 60 * 1000) - const pprofPrefix = options.pprofPrefix ?? DD_PROFILING_PPROF_PREFIX ?? '' - - // TODO: Remove the fallback. Just use the value from the config. - this.service = options.service || 'node' + this.version = options.version + this.service = options.service this.env = options.env this.functionname = AWS_LAMBDA_FUNCTION_NAME - this.version = options.version - this.tags = Object.assign( - tagger.parse(DD_TAGS), - tagger.parse(options.tags), - tagger.parse({ - env: options.env, + this.tags = { + ...options.tags, + ...tagger.parse({ host: options.reportHostname ? 
require('os').hostname() : undefined, - service: this.service, - version: this.version, functionname: AWS_LAMBDA_FUNCTION_NAME, }), - getAzureTagsFromMetadata(getIsAzureFunction() ? getAzureFunctionMetadata() : getAzureAppMetadata()) - ) + ...getAzureTagsFromMetadata(getIsAzureFunction() ? getAzureFunctionMetadata() : getAzureAppMetadata()), + } // Add source code integration tags if available if (options.repositoryUrl && options.commitSHA) { @@ -82,58 +44,35 @@ class Config { this.tags[GIT_COMMIT_SHA] = options.commitSHA } - this.logger = ensureLogger(options.logger) - // Profiler sampling contexts are not available on Windows, so features - // depending on those (code hotspots and endpoint collection) need to default - // to false on Windows. - const samplingContextsAvailable = process.platform !== 'win32' - function checkOptionAllowed (option, description, condition) { - if (option && !condition) { - // injection hardening: all of these can only happen if user explicitly - // sets an environment variable to its non-default value on the platform. - // In practical terms, it'd require someone explicitly turning on OOM - // monitoring, code hotspots, endpoint profiling, or CPU profiling on - // Windows, where it is not supported. - throw new Error(`${description} not supported on ${process.platform}.`) - } - } - function checkOptionWithSamplingContextAllowed (option, description) { - checkOptionAllowed(option, description, samplingContextsAvailable) - } + // Normalize from seconds to milliseconds. Default must be longer than a minute. 
+ this.flushInterval = options.DD_PROFILING_UPLOAD_PERIOD * 1000 + this.uploadTimeout = options.DD_PROFILING_UPLOAD_TIMEOUT + this.sourceMap = options.DD_PROFILING_SOURCE_MAP + this.debugSourceMaps = options.DD_PROFILING_DEBUG_SOURCE_MAPS + this.endpointCollectionEnabled = options.DD_PROFILING_ENDPOINT_COLLECTION_ENABLED + this.pprofPrefix = options.DD_PROFILING_PPROF_PREFIX + this.v8ProfilerBugWorkaroundEnabled = options.DD_PROFILING_V8_PROFILER_BUG_WORKAROUND - this.flushInterval = flushInterval - this.uploadTimeout = uploadTimeout - this.sourceMap = options.sourceMap - this.debugSourceMaps = isTrue(options.debugSourceMaps ?? DD_PROFILING_DEBUG_SOURCE_MAPS) - this.endpointCollectionEnabled = isTrue(options.endpointCollection ?? - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED ?? samplingContextsAvailable) - checkOptionWithSamplingContextAllowed(this.endpointCollectionEnabled, 'Endpoint collection') - - this.pprofPrefix = pprofPrefix - this.v8ProfilerBugWorkaroundEnabled = isTrue(options.v8ProfilerBugWorkaround ?? - DD_PROFILING_V8_PROFILER_BUG_WORKAROUND ?? true) + this.logger = ensureLogger(options.logger) this.url = getAgentUrl(options) - this.libraryInjected = options.libraryInjected - this.activation = options.activation - this.exporters = ensureExporters(options.exporters || [ - new AgentExporter(this), - ], this) + this.libraryInjected = !!options.DD_INJECTION_ENABLED - // OOM monitoring does not work well on Windows, so it is disabled by default. - const oomMonitoringSupported = process.platform !== 'win32' + let activation + if (options.profiling.enabled === 'auto') { + activation = 'auto' + } else if (options.profiling.enabled === 'true') { + activation = 'manual' + } // else activation = undefined - const oomMonitoringEnabled = isTrue(options.oomMonitoring ?? - DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED ?? 
oomMonitoringSupported) - checkOptionAllowed(oomMonitoringEnabled, 'OOM monitoring', oomMonitoringSupported) + this.activation = activation + this.exporters = ensureExporters(options.DD_PROFILING_EXPORTERS, this) - const heapLimitExtensionSize = options.oomHeapLimitExtensionSize ?? - (Number(DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE) || 0) - const maxHeapExtensionCount = options.oomMaxHeapExtensionCount ?? - (Number(DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT) || 0) + const oomMonitoringEnabled = options.DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED + const heapLimitExtensionSize = options.DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE + const maxHeapExtensionCount = options.DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT const exportStrategies = oomMonitoringEnabled - ? ensureOOMExportStrategies(options.oomExportStrategies ?? DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES ?? - [oomExportStrategies.PROCESS], this) + ? ensureOOMExportStrategies(options.DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, this) : [] const exportCommand = oomMonitoringEnabled ? buildExportCommand(this) : undefined this.oomMonitoring = { @@ -144,61 +83,26 @@ class Config { exportCommand, } - const profilers = options.profilers || getProfilers({ - DD_PROFILING_HEAP_ENABLED, - DD_PROFILING_WALLTIME_ENABLED, - DD_PROFILING_PROFILERS, - }) + const profilers = getProfilers(options) - this.timelineEnabled = isTrue( - options.timelineEnabled ?? DD_PROFILING_TIMELINE_ENABLED ?? samplingContextsAvailable - ) - checkOptionWithSamplingContextAllowed(this.timelineEnabled, 'Timeline view') - this.timelineSamplingEnabled = isTrue( - options.timelineSamplingEnabled ?? DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED ?? 
true - ) + this.timelineEnabled = options.DD_PROFILING_TIMELINE_ENABLED + this.timelineSamplingEnabled = options.DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED + this.codeHotspotsEnabled = options.DD_PROFILING_CODEHOTSPOTS_ENABLED + this.cpuProfilingEnabled = options.DD_PROFILING_CPU_ENABLED + this.heapSamplingInterval = options.DD_PROFILING_HEAP_SAMPLING_INTERVAL - this.codeHotspotsEnabled = isTrue( - options.codeHotspotsEnabled ?? DD_PROFILING_CODEHOTSPOTS_ENABLED ?? samplingContextsAvailable - ) - checkOptionWithSamplingContextAllowed(this.codeHotspotsEnabled, 'Code hotspots') - - this.cpuProfilingEnabled = isTrue( - options.cpuProfilingEnabled ?? DD_PROFILING_CPU_ENABLED ?? samplingContextsAvailable - ) - checkOptionWithSamplingContextAllowed(this.cpuProfilingEnabled, 'CPU profiling') - - this.samplingInterval = options.samplingInterval || 1e3 / 99 // 99hz in millis - - this.heapSamplingInterval = options.heapSamplingInterval ?? - (Number(DD_PROFILING_HEAP_SAMPLING_INTERVAL) || 512 * 1024) + this.samplingInterval = 1e3 / 99 // 99hz in milliseconds const isAtLeast24 = satisfies(process.versions.node, '>=24.0.0') - const uploadCompression0 = options.uploadCompression ?? DD_PROFILING_DEBUG_UPLOAD_COMPRESSION ?? 'on' + const uploadCompression0 = options.DD_PROFILING_DEBUG_UPLOAD_COMPRESSION let [uploadCompression, level0] = uploadCompression0.split('-') - if (!['on', 'off', 'gzip', 'zstd'].includes(uploadCompression)) { - this.logger.warn(`Invalid profile upload compression method "${uploadCompression0}". Will use "on".`) - uploadCompression = 'on' - } let level = level0 ? Number.parseInt(level0, 10) : undefined if (level !== undefined) { - if (['on', 'off'].includes(uploadCompression)) { - this.logger.warn(`Compression levels are not supported for "${uploadCompression}".`) - level = undefined - } else if (Number.isNaN(level)) { - this.logger.warn( - `Invalid compression level "${level0}". 
Will use default level.`) - level = undefined - } else if (level < 1) { - this.logger.warn(`Invalid compression level ${level}. Will use 1.`) - level = 1 - } else { - const maxLevel = { gzip: 9, zstd: 22 }[uploadCompression] - if (level > maxLevel) { - this.logger.warn(`Invalid compression level ${level}. Will use ${maxLevel}.`) - level = maxLevel - } + const maxLevel = { gzip: 9, zstd: 22 }[uploadCompression] + if (level > maxLevel) { + this.logger.warn(`Invalid compression level ${level}. Will use ${maxLevel}.`) + level = maxLevel } } @@ -219,13 +123,9 @@ class Config { that.asyncContextFrameEnabled = false } - const canUseAsyncContextFrame = samplingContextsAvailable && isACFActive - - this.asyncContextFrameEnabled = isTrue(DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED ?? canUseAsyncContextFrame) - if (this.asyncContextFrameEnabled && !canUseAsyncContextFrame) { - if (!samplingContextsAvailable) { - turnOffAsyncContextFrame(`on ${process.platform}`) - } else if (isAtLeast24) { + this.asyncContextFrameEnabled = options.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED ?? 
isACFActive + if (this.asyncContextFrameEnabled && !isACFActive) { + if (isAtLeast24) { turnOffAsyncContextFrame('with --no-async-context-frame') } else if (satisfies(process.versions.node, '>=22.9.0')) { turnOffAsyncContextFrame('without --experimental-async-context-frame') @@ -234,7 +134,7 @@ class Config { } } - this.heartbeatInterval = options.heartbeatInterval || 60 * 1000 // 1 minute + this.heartbeatInterval = options.telemetry.heartbeatInterval this.profilers = ensureProfilers(profilers, this) } @@ -248,7 +148,7 @@ class Config { endpointCollectionEnabled: this.endpointCollectionEnabled, heapSamplingInterval: this.heapSamplingInterval, oomMonitoring: { ...this.oomMonitoring }, - profilerTypes: this.profilers.map(p => p.type), + profilerTypes: this.profilers.map(profiler => profiler.type), sourceMap: this.sourceMap, timelineEnabled: this.timelineEnabled, timelineSamplingEnabled: this.timelineSamplingEnabled, @@ -263,7 +163,9 @@ class Config { module.exports = { Config } function getProfilers ({ - DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS, + DD_PROFILING_HEAP_ENABLED, + DD_PROFILING_WALLTIME_ENABLED, + DD_PROFILING_PROFILERS, }) { // First consider "legacy" DD_PROFILING_PROFILERS env variable, defaulting to space + wall // Use a Set to avoid duplicates @@ -272,26 +174,26 @@ function getProfilers ({ // snapshots the space profile won't include memory taken by profiles created // before it in the sequence. That memory is ultimately transient and will be // released when all profiles are subsequently encoded. - const profilers = new Set((DD_PROFILING_PROFILERS ?? 
'space,wall').split(',')) + const profilers = new Set(DD_PROFILING_PROFILERS) let spaceExplicitlyEnabled = false // Add/remove space depending on the value of DD_PROFILING_HEAP_ENABLED - if (DD_PROFILING_HEAP_ENABLED != null) { - if (isTrue(DD_PROFILING_HEAP_ENABLED)) { + if (DD_PROFILING_HEAP_ENABLED !== undefined) { + if (DD_PROFILING_HEAP_ENABLED) { if (!profilers.has('space')) { profilers.add('space') spaceExplicitlyEnabled = true } - } else if (isFalse(DD_PROFILING_HEAP_ENABLED)) { + } else { profilers.delete('space') } } // Add/remove wall depending on the value of DD_PROFILING_WALLTIME_ENABLED - if (DD_PROFILING_WALLTIME_ENABLED != null) { - if (isTrue(DD_PROFILING_WALLTIME_ENABLED)) { + if (DD_PROFILING_WALLTIME_ENABLED !== undefined) { + if (DD_PROFILING_WALLTIME_ENABLED) { profilers.add('wall') - } else if (isFalse(DD_PROFILING_WALLTIME_ENABLED)) { + } else { profilers.delete('wall') profilers.delete('cpu') // remove alias too } @@ -321,22 +223,12 @@ function getExportStrategy (name, options) { } function ensureOOMExportStrategies (strategies, options) { - if (!strategies) { - return [] + const set = new Set() + for (const strategy of strategies) { + set.add(getExportStrategy(strategy, options)) } - if (typeof strategies === 'string') { - strategies = strategies.split(',') - } - - for (let i = 0; i < strategies.length; i++) { - const strategy = strategies[i] - if (typeof strategy === 'string') { - strategies[i] = getExportStrategy(strategy, options) - } - } - - return [...new Set(strategies)] + return [...set] } function getExporter (name, options) { @@ -345,22 +237,13 @@ function getExporter (name, options) { return new AgentExporter(options) case 'file': return new FileExporter(options) + default: + options.logger.error(`Unknown exporter "${name}"`) } } function ensureExporters (exporters, options) { - if (typeof exporters === 'string') { - exporters = exporters.split(',') - } - - for (let i = 0; i < exporters.length; i++) { - const exporter = 
exporters[i] - if (typeof exporter === 'string') { - exporters[i] = getExporter(exporter, options) - } - } - - return exporters + return exporters.map((exporter) => getExporter(exporter, options)) } function getProfiler (name, options) { @@ -376,30 +259,26 @@ function getProfiler (name, options) { } function ensureProfilers (profilers, options) { - if (typeof profilers === 'string') { - profilers = profilers.split(',') - } + const filteredProfilers = [] for (let i = 0; i < profilers.length; i++) { - const profiler = profilers[i] - if (typeof profiler === 'string') { - profilers[i] = getProfiler(profiler, options) + const profiler = getProfiler(profilers[i], options) + if (profiler !== undefined) { + filteredProfilers.push(profiler) } } // Events profiler is a profiler that produces timeline events. It is only // added if timeline is enabled and there's a wall profiler. - if (options.timelineEnabled && profilers.some(p => p instanceof WallProfiler)) { - profilers.push(new EventsProfiler(options)) + if (options.timelineEnabled && filteredProfilers.some(profiler => profiler instanceof WallProfiler)) { + filteredProfilers.push(new EventsProfiler(options)) } - // Filter out any invalid profilers - return profilers.filter(Boolean) + return filteredProfilers } function ensureLogger (logger) { - if (typeof logger !== 'object' || - typeof logger.debug !== 'function' || + if (typeof logger?.debug !== 'function' || typeof logger.info !== 'function' || typeof logger.warn !== 'function' || typeof logger.error !== 'function') { @@ -424,50 +303,3 @@ function buildExportCommand (options) { path.join(__dirname, 'exporter_cli.js'), urls.join(','), tags, 'space'] } - -function getProfilingEnvValues () { - return { - DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED: - getValueFromEnvSources('DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED'), - DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED: - getValueFromEnvSources('DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED'), - 
DD_PROFILING_CODEHOTSPOTS_ENABLED: - getValueFromEnvSources('DD_PROFILING_CODEHOTSPOTS_ENABLED'), - DD_PROFILING_CPU_ENABLED: - getValueFromEnvSources('DD_PROFILING_CPU_ENABLED'), - DD_PROFILING_DEBUG_SOURCE_MAPS: - getValueFromEnvSources('DD_PROFILING_DEBUG_SOURCE_MAPS'), - DD_PROFILING_DEBUG_UPLOAD_COMPRESSION: - getValueFromEnvSources('DD_PROFILING_DEBUG_UPLOAD_COMPRESSION'), - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: - getValueFromEnvSources('DD_PROFILING_ENDPOINT_COLLECTION_ENABLED'), - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: - getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES'), - DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: - getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE'), - DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: - getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT'), - DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED: - getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED'), - DD_PROFILING_HEAP_ENABLED: - getValueFromEnvSources('DD_PROFILING_HEAP_ENABLED'), - DD_PROFILING_HEAP_SAMPLING_INTERVAL: - getValueFromEnvSources('DD_PROFILING_HEAP_SAMPLING_INTERVAL'), - DD_PROFILING_PPROF_PREFIX: - getValueFromEnvSources('DD_PROFILING_PPROF_PREFIX'), - DD_PROFILING_PROFILERS: - getValueFromEnvSources('DD_PROFILING_PROFILERS'), - DD_PROFILING_TIMELINE_ENABLED: - getValueFromEnvSources('DD_PROFILING_TIMELINE_ENABLED'), - DD_PROFILING_UPLOAD_PERIOD: - getValueFromEnvSources('DD_PROFILING_UPLOAD_PERIOD'), - DD_PROFILING_UPLOAD_TIMEOUT: - getValueFromEnvSources('DD_PROFILING_UPLOAD_TIMEOUT'), - DD_PROFILING_V8_PROFILER_BUG_WORKAROUND: - getValueFromEnvSources('DD_PROFILING_V8_PROFILER_BUG_WORKAROUND'), - DD_PROFILING_WALLTIME_ENABLED: - getValueFromEnvSources('DD_PROFILING_WALLTIME_ENABLED'), - DD_TAGS: - getValueFromEnvSources('DD_TAGS'), - } -} diff --git a/packages/dd-trace/src/profiling/exporter_cli.js 
b/packages/dd-trace/src/profiling/exporter_cli.js index cba3d6349b1..a122a334664 100644 --- a/packages/dd-trace/src/profiling/exporter_cli.js +++ b/packages/dd-trace/src/profiling/exporter_cli.js @@ -17,9 +17,6 @@ function exporterFromURL (url) { if (url.protocol === 'file:') { return new FileExporter({ pprofPrefix: fileURLToPath(url) }) } - // TODO: Why is DD_INJECTION_ENABLED a comma separated list? - const injectionEnabled = (getValueFromEnvSources('DD_INJECTION_ENABLED') ?? '').split(',') - const libraryInjected = injectionEnabled.length > 0 const profilingEnabled = (getValueFromEnvSources('DD_PROFILING_ENABLED') ?? '').toLowerCase() const activation = ['true', '1'].includes(profilingEnabled) ? 'manual' @@ -30,7 +27,7 @@ function exporterFromURL (url) { url, logger, uploadTimeout: timeoutMs, - libraryInjected, + libraryInjected: !!getValueFromEnvSources('DD_INJECTION_ENABLED'), activation, }) } diff --git a/packages/dd-trace/src/profiling/exporters/event_serializer.js b/packages/dd-trace/src/profiling/exporters/event_serializer.js index a7bd652f9e9..586c91409b1 100644 --- a/packages/dd-trace/src/profiling/exporters/event_serializer.js +++ b/packages/dd-trace/src/profiling/exporters/event_serializer.js @@ -14,7 +14,7 @@ class EventSerializer { this._host = host this._service = service this._appVersion = version - this._libraryInjected = !!libraryInjected + this._libraryInjected = libraryInjected this._activation = activation || 'unknown' } diff --git a/packages/dd-trace/src/profiling/profiler.js b/packages/dd-trace/src/profiling/profiler.js index c107fc82750..6ecffadabfb 100644 --- a/packages/dd-trace/src/profiling/profiler.js +++ b/packages/dd-trace/src/profiling/profiler.js @@ -70,56 +70,22 @@ class Profiler extends EventEmitter { return this.#config?.flushInterval } + /** + * @param {import('../config/config-base')} config - Tracer configuration + */ start (config) { - const { - service, - version, - env, - url, - hostname, - port, - tags, - repositoryUrl, - 
commitSHA, - injectionEnabled, - reportHostname, - } = config - const { enabled, sourceMap, exporters } = config.profiling - const { heartbeatInterval } = config.telemetry - // TODO: Unify with main logger and rewrite template strings to use printf formatting. const logger = { - debug (message) { log.debug(message) }, - info (message) { log.info(message) }, - warn (message) { log.warn(message) }, - error (...args) { log.error(...args) }, + debug: log.debug.bind(log), + info: log.info.bind(log), + warn: log.warn.bind(log), + error: log.error.bind(log), } - const libraryInjected = injectionEnabled.length > 0 - let activation - if (enabled === 'auto') { - activation = 'auto' - } else if (enabled === 'true') { - activation = 'manual' - } // else activation = undefined - + // TODO: Rewrite this to not need to copy the config. const options = { - service, - version, - env, + ...config, logger, - sourceMap, - exporters, - url, - hostname, - port, - tags, - repositoryUrl, - commitSHA, - libraryInjected, - activation, - heartbeatInterval, - reportHostname, } try { @@ -182,6 +148,9 @@ class Profiler extends EventEmitter { return this.#compressionFn } + /** + * @param {import('../config/config-base')} options - Tracer configuration + */ _start (options) { if (this.enabled) return true diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index c2e5aa02fd9..eddeef7fab1 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -51,8 +51,7 @@ function labelFromStrStr (stringTable, keyStr, valStr) { } function getMaxSamples (options) { - const flushInterval = options.flushInterval || 65 * 1e3 // 65 seconds - const maxCpuSamples = flushInterval / options.samplingInterval + const maxCpuSamples = options.flushInterval / options.samplingInterval // The lesser of max parallelism and libuv thread pool size, plus one so we can detect // oversubscription on 
libuv thread pool, plus another one for GC. @@ -403,7 +402,7 @@ class EventsProfiler { get type () { return 'events' } - constructor (options = {}) { + constructor (options) { this.#maxSamples = getMaxSamples(options) this.#timelineSamplingEnabled = !!options.timelineSamplingEnabled this.#eventSerializer = new EventSerializer(this.#maxSamples) diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js index 05a7ec8b96b..43e884b026f 100644 --- a/packages/dd-trace/src/telemetry/index.js +++ b/packages/dd-trace/src/telemetry/index.js @@ -5,15 +5,14 @@ let telemetry // Lazy load the telemetry module to avoid the performance impact of loading it unconditionally module.exports = { start (config, ...args) { + if (!config.telemetry.enabled) return telemetry ??= require('./telemetry') telemetry.start(config, ...args) }, - stop () { - telemetry?.stop() - }, // This might be called before `start` so we have to trigger loading the // underlying module here as well. 
updateConfig (changes, config, ...args) { + if (!config.telemetry.enabled) return telemetry ??= require('./telemetry') telemetry.updateConfig(changes, config, ...args) }, diff --git a/packages/dd-trace/src/telemetry/send-data.js b/packages/dd-trace/src/telemetry/send-data.js index fb7af48e64d..ef0d86634df 100644 --- a/packages/dd-trace/src/telemetry/send-data.js +++ b/packages/dd-trace/src/telemetry/send-data.js @@ -62,19 +62,6 @@ const { getValueFromEnvSources } = require('../config/helper') * kernel_name?: string * } & Record} TelemetryHost */ -/** - * @typedef {{ - * hostname?: string, - * port?: string | number, - * url?: string | URL, - * site?: string, - * apiKey?: string, - * isCiVisibility?: boolean, - * spanAttributeSchema?: string, - * tags: Record, - * telemetry?: { debug?: boolean } - * }} TelemetryConfig - */ /** * @callback SendDataCallback * @param {Error | null | undefined} error @@ -85,23 +72,22 @@ const { getValueFromEnvSources } = require('../config/helper') let agentTelemetry = true /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config * @param {TelemetryApplication} application * @param {TelemetryRequestType} reqType * @returns {Record} */ function getHeaders (config, application, reqType) { - const sessionId = config.tags['runtime-id'] const headers = { 'content-type': 'application/json', 'dd-telemetry-api-version': 'v2', 'dd-telemetry-request-type': reqType, 'dd-client-library-language': application.language_name, 'dd-client-library-version': application.tracer_version, - 'dd-session-id': sessionId, + 'dd-session-id': config.tags['runtime-id'], } - if (config.rootSessionId && config.rootSessionId !== sessionId) { - headers['dd-root-session-id'] = config.rootSessionId + if (config.DD_ROOT_JS_SESSION_ID) { + headers['dd-root-session-id'] = config.DD_ROOT_JS_SESSION_ID } const debug = config.telemetry && config.telemetry.debug if (debug) { @@ -141,7 +127,7 @@ function getPayload (payload) { // 
TODO(BridgeAR): Simplify this code. A lot does not need to be recalculated on every call. /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config * @param {TelemetryApplication} application * @param {TelemetryHost} host * @param {TelemetryRequestType} reqType diff --git a/packages/dd-trace/src/telemetry/session-propagation.js b/packages/dd-trace/src/telemetry/session-propagation.js index 0af4968db52..7f191f02d7a 100644 --- a/packages/dd-trace/src/telemetry/session-propagation.js +++ b/packages/dd-trace/src/telemetry/session-propagation.js @@ -1,53 +1,37 @@ 'use strict' -const dc = require('dc-polyfill') - +const dc = /** @type {typeof import('diagnostics_channel')} */ (require('dc-polyfill')) const childProcessChannel = dc.tracingChannel('datadog:child_process:execution') let subscribed = false -let rootSessionId let runtimeId -function injectSessionEnv (existingEnv) { - // eslint-disable-next-line eslint-rules/eslint-process-env -- not in supported-configurations.json - const base = existingEnv == null ? process.env : existingEnv - return { - ...base, - DD_ROOT_JS_SESSION_ID: rootSessionId, - DD_PARENT_JS_SESSION_ID: runtimeId, - } +function isOptionsObject (value) { + return value != null && typeof value === 'object' && !Array.isArray(value) && value } -function findOptionsIndex (args, shell) { - if (Array.isArray(args[1])) { - return { index: 2, exists: args[2] != null && typeof args[2] === 'object' } - } - if (args[1] != null && typeof args[1] === 'object') { - return { index: 1, exists: true } - } - if (!shell && args[2] != null && typeof args[2] === 'object') { - return { index: 2, exists: true } - } - return { index: shell ? 1 : 2, exists: false } +function getEnvWithRuntimeId (env) { + // eslint-disable-next-line eslint-rules/eslint-process-env + return { ...(env ?? 
process.env), DD_ROOT_JS_SESSION_ID: runtimeId } } function onChildProcessStart (context) { - if (!context.callArgs) return - const args = context.callArgs - const { index, exists } = findOptionsIndex(args, context.shell) + if (!args) return - if (exists) { - args[index] = { ...args[index], env: injectSessionEnv(args[index].env) } + const index = Array.isArray(args[1]) || (!context.shell && !isOptionsObject(args[1])) ? 2 : 1 + const options = isOptionsObject(args[index]) ? args[index] : undefined + + if (options) { + args[index] = { ...options, env: getEnvWithRuntimeId(options.env) } return } - const opts = { env: injectSessionEnv(null) } - - if (!context.shell && !Array.isArray(args[1])) { + if (index === 2 && !Array.isArray(args[1])) { args.splice(1, 0, []) } + const opts = { env: getEnvWithRuntimeId() } if (typeof args[index] === 'function') { args.splice(index, 0, opts) } else { @@ -55,24 +39,15 @@ function onChildProcessStart (context) { } } -const handler = { start: onChildProcessStart } - function start (config) { if (!config.telemetry?.enabled || subscribed) return subscribed = true - rootSessionId = config.rootSessionId - runtimeId = config.tags['runtime-id'] - - childProcessChannel.subscribe(handler) -} + runtimeId = config.DD_ROOT_JS_SESSION_ID || config.tags['runtime-id'] -function stop () { - if (!subscribed) return - childProcessChannel.unsubscribe(handler) - subscribed = false - rootSessionId = undefined - runtimeId = undefined + childProcessChannel.subscribe( + /** @type {import('diagnostics_channel').TracingChannelSubscribers} */ ({ start: onChildProcessStart }) + ) } -module.exports = { start, stop, _onChildProcessStart: onChildProcessStart } +module.exports = { start } diff --git a/packages/dd-trace/src/telemetry/telemetry.js b/packages/dd-trace/src/telemetry/telemetry.js index e113bb9e077..b59475da4df 100644 --- a/packages/dd-trace/src/telemetry/telemetry.js +++ b/packages/dd-trace/src/telemetry/telemetry.js @@ -18,15 +18,17 @@ const 
sessionPropagation = require('./session-propagation') * @typedef {Record} TelemetryPayloadObject */ /** - * @typedef {string | number | boolean | null | undefined | URL | Record | unknown[]} ConfigValue + * @typedef {string | number | boolean | null | URL | Record | unknown[] | Function} ConfigValue + */ +/** + * @typedef {{ [K in keyof processTags]: typeof processTags.tagsObject[K] }} ProcessTags */ /** * @typedef {{ * name: string, * enabled: boolean, * auto_enabled: boolean, - * process_tags: typeof processTags.tagsObject - * }} Integration + * } & Partial} Integration */ /** * @typedef {{ _enabled: boolean }} Plugin @@ -56,41 +58,11 @@ const sessionPropagation = require('./session-propagation') * kernel_name?: string * }} TelemetryHost */ -/** - * @typedef {{ - * telemetry: { - * enabled: boolean, - * heartbeatInterval: number, - * debug?: boolean, - * dependencyCollection?: boolean, - * logCollection?: boolean - * }, - * service: string | undefined, - * env: string | undefined, - * version: string | undefined, - * tags: Record, - * url?: string | URL, - * hostname?: string, - * port?: string | number, - * site?: string, - * apiKey?: string, - * isCiVisibility?: boolean, - * spanAttributeSchema?: string, - * installSignature?: { id?: string, time?: string, type?: string }, - * sca?: { enabled?: boolean }, - * appsec: { enabled: boolean, apiSecurity?: { - * endpointCollectionEnabled?: boolean, - * endpointCollectionMessageLimit?: number - * } }, - * profiling: { enabled: boolean | 'true' | 'false' | 'auto' } - * }} TelemetryConfig - */ const telemetryStartChannel = dc.channel('datadog:telemetry:start') -const telemetryStopChannel = dc.channel('datadog:telemetry:stop') const telemetryAppClosingChannel = dc.channel('datadog:telemetry:app-closing') -/** @type {TelemetryConfig | undefined} */ +/** @type {import('../config/config-base') | undefined} */ let config /** @type {PluginManager} */ @@ -102,18 +74,9 @@ let application /** @type {TelemetryHost} */ const host 
= createHostObject() -/** @type {ReturnType | undefined} */ -let heartbeatInterval - -/** @type {ReturnType | undefined} */ -let extendedInterval - /** @type {Integration[]} */ let integrations -/** @type {Map} */ -const configWithOrigin = new Map() - /** * Retry information that `telemetry.js` keeps in-memory to be merged into the next payload. * @@ -130,8 +93,6 @@ let heartbeatFailedDependencies = [] const sentIntegrations = new Set() -let seqId = 0 - function getRetryData () { return retryData } @@ -184,7 +145,7 @@ function getIntegrations () { } /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config */ function getProducts (config) { return { @@ -199,7 +160,7 @@ function getProducts (config) { } /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config */ function getInstallSignature (config) { const { installSignature: sig } = config @@ -212,13 +173,11 @@ function getInstallSignature (config) { } } -/** - * @param {TelemetryConfig} config - */ +/** @param {import('../config/config-base')} config */ function appStarted (config) { const app = { products: getProducts(config), - configuration: [...configWithOrigin.values()], + configuration: latestConfiguration, } const installSignature = getInstallSignature(config) if (installSignature) { @@ -245,7 +204,7 @@ function appClosing () { } /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config * @returns {TelemetryApplication} */ function createAppObject (config) { @@ -320,11 +279,11 @@ function createPayload (currReqType, currPayload = {}) { } /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config * @param {TelemetryApplication} application */ function heartbeat (config, application) { - heartbeatInterval = setInterval(() => { + setInterval(() => { metricsManager.send(config, application, host) telemetryLogger.send(config, application, host) @@ -333,11 +292,9 @@ function 
heartbeat (config, application) { }, config.telemetry.heartbeatInterval).unref() } -/** - * @param {TelemetryConfig} config - */ +/** @param {import('../config/config-base')} config */ function extendedHeartbeat (config) { - extendedInterval = setInterval(() => { + setInterval(() => { const appPayload = appStarted(config) if (heartbeatFailedIntegrations.length > 0) { appPayload.integrations = heartbeatFailedIntegrations @@ -352,12 +309,12 @@ function extendedHeartbeat (config) { } /** - * @param {TelemetryConfig} aConfig + * @param {import('../config/config-base')} aConfig * @param {PluginManager} thePluginManager */ function start (aConfig, thePluginManager) { if (!aConfig.telemetry.enabled) { - if (aConfig.sca?.enabled) { + if (aConfig.appsec.sca.enabled) { logger.warn('DD_APPSEC_SCA_ENABLED requires enabling telemetry to work.') } @@ -376,8 +333,7 @@ function start (aConfig, thePluginManager) { sendData(config, application, host, 'app-started', appStarted(config)) if (integrations.length > 0) { - sendData(config, application, host, 'app-integrations-change', - { integrations }, updateRetryData) + sendData(config, application, host, 'app-integrations-change', { integrations }, updateRetryData) } heartbeat(config, application) @@ -388,21 +344,6 @@ function start (aConfig, thePluginManager) { telemetryStartChannel.publish(getTelemetryData()) } -function stop () { - if (!config) { - return - } - clearInterval(extendedInterval) - clearInterval(heartbeatInterval) - globalThis[Symbol.for('dd-trace')].beforeExitHandlers.delete(appClosing) - - telemetryStopChannel.publish(getTelemetryData()) - - endpoints.stop() - sessionPropagation.stop() - config = undefined -} - function updateIntegrations () { if (!config?.telemetry.enabled) { return @@ -417,121 +358,37 @@ function updateIntegrations () { sendData(config, application, host, reqType, payload, updateRetryData) } -/** - * @param {Record | null | undefined} map - */ -function formatMapForTelemetry (map) { - // format 
from an object to a string map in order for - // telemetry intake to accept the configuration - return map - ? Object.entries(map).map(([key, value]) => `${key}:${value}`).join(',') - : '' -} - -const nameMapping = { - sampleRate: 'DD_TRACE_SAMPLE_RATE', - logInjection: 'DD_LOG_INJECTION', - headerTags: 'DD_TRACE_HEADER_TAGS', - tags: 'DD_TAGS', - 'sampler.rules': 'DD_TRACE_SAMPLING_RULES', - traceEnabled: 'DD_TRACE_ENABLED', - url: 'DD_TRACE_AGENT_URL', - 'sampler.rateLimit': 'DD_TRACE_RATE_LIMIT', - queryStringObfuscation: 'DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP', - version: 'DD_VERSION', - env: 'DD_ENV', - service: 'DD_SERVICE', - clientIpHeader: 'DD_TRACE_CLIENT_IP_HEADER', - 'grpc.client.error.statuses': 'DD_GRPC_CLIENT_ERROR_STATUSES', - 'grpc.server.error.statuses': 'DD_GRPC_SERVER_ERROR_STATUSES', - traceId128BitLoggingEnabled: 'DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED', - instrumentationSource: 'instrumentation_source', - injectionEnabled: 'ssi_injection_enabled', - injectForce: 'ssi_forced_injection_enabled', - 'runtimeMetrics.enabled': 'runtimeMetrics', - otelLogsEnabled: 'DD_LOGS_OTEL_ENABLED', - otelUrl: 'OTEL_EXPORTER_OTLP_ENDPOINT', - otelEndpoint: 'OTEL_EXPORTER_OTLP_ENDPOINT', - otelHeaders: 'OTEL_EXPORTER_OTLP_HEADERS', - otelProtocol: 'OTEL_EXPORTER_OTLP_PROTOCOL', - otelTimeout: 'OTEL_EXPORTER_OTLP_TIMEOUT', - otelLogsHeaders: 'OTEL_EXPORTER_OTLP_LOGS_HEADERS', - otelLogsProtocol: 'OTEL_EXPORTER_OTLP_LOGS_PROTOCOL', - otelLogsTimeout: 'OTEL_EXPORTER_OTLP_LOGS_TIMEOUT', - otelLogsUrl: 'OTEL_EXPORTER_OTLP_LOGS_ENDPOINT', - otelBatchTimeout: 'OTEL_BSP_SCHEDULE_DELAY', - otelMaxExportBatchSize: 'OTEL_BSP_MAX_EXPORT_BATCH_SIZE', - otelMaxQueueSize: 'OTEL_BSP_MAX_QUEUE_SIZE', - otelMetricsEnabled: 'DD_METRICS_OTEL_ENABLED', - otelMetricsHeaders: 'OTEL_EXPORTER_OTLP_METRICS_HEADERS', - otelMetricsProtocol: 'OTEL_EXPORTER_OTLP_METRICS_PROTOCOL', - otelMetricsTimeout: 'OTEL_EXPORTER_OTLP_METRICS_TIMEOUT', - otelMetricsExportTimeout: 
'OTEL_METRIC_EXPORT_TIMEOUT', - otelMetricsUrl: 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT', - otelMetricsExportInterval: 'OTEL_METRIC_EXPORT_INTERVAL', - otelMetricsTemporalityPreference: 'OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE', -} - -const namesNeedFormatting = new Set(['DD_TAGS', 'peerServiceMapping', 'serviceMapping']) +let latestConfiguration = [] /** - * @param {{ name: string, value: ConfigValue, origin: string }[]} changes - * @param {TelemetryConfig} config + * @param {{ name: string, value: ConfigValue, origin: string, seq_id: number }[]} configuration + * @param {import('../config/config-base')} config */ -function updateConfig (changes, config) { +function updateConfig (configuration, config) { if (!config.telemetry.enabled) return - if (changes.length === 0) return - logger.trace(changes) + logger.trace(configuration) const application = createAppObject(config) - const changed = configWithOrigin.size > 0 - - for (const change of changes) { - const name = nameMapping[change.name] || change.name - const { origin, value } = change - const entry = { name, value, origin, seq_id: seqId++ } - - if (namesNeedFormatting.has(name)) { - // @ts-expect-error entry.value is known to be a map for these config names - entry.value = formatMapForTelemetry(value) - } else if (name === 'url') { - if (value) { - entry.value = value.toString() - } - } else if (name === 'DD_TRACE_SAMPLING_RULES') { - entry.value = JSON.stringify(value) - } else if (Array.isArray(value)) { - entry.value = value.join(',') - } - - // Use composite key to support multiple origins for same config name - configWithOrigin.set(`${name}|${origin}`, entry) - } - - if (changed) { - // update configWithOrigin to contain up-to-date full list of config values for app-extended-heartbeat + if (latestConfiguration.length) { const { reqType, payload } = createPayload('app-client-configuration-change', { - configuration: [...configWithOrigin.values()], + configuration, }) sendData(config, application, 
host, reqType, payload, updateRetryData) } + latestConfiguration = configuration } /** - * @param {TelemetryConfig['profiling']['enabled']} profilingEnabled + * @param {import('../config/config-base')['profiling']['enabled']} profilingEnabled */ function profilingEnabledToBoolean (profilingEnabled) { - if (typeof profilingEnabled === 'boolean') { - return profilingEnabled - } return profilingEnabled === 'true' || profilingEnabled === 'auto' } module.exports = { start, - stop, updateIntegrations, updateConfig, appClosing, diff --git a/packages/dd-trace/src/util.js b/packages/dd-trace/src/util.js index a902f3b99de..f4cce7196fd 100644 --- a/packages/dd-trace/src/util.js +++ b/packages/dd-trace/src/util.js @@ -67,14 +67,6 @@ function calculateDDBasePath (dirname) { return dirSteps.slice(0, packagesIndex).join(path.sep) + path.sep } -function normalizeProfilingEnabledValue (configValue) { - return isTrue(configValue) - ? 'true' - : isFalse(configValue) - ? 'false' - : configValue === 'auto' ? 
'auto' : undefined -} - function normalizePluginEnvName (envPluginName, makeLowercase = false) { if (envPluginName.startsWith('@')) { envPluginName = envPluginName.slice(1) @@ -89,6 +81,5 @@ module.exports = { isError, globMatch, ddBasePath: globalThis.__DD_ESBUILD_BASEPATH || calculateDDBasePath(__dirname), - normalizeProfilingEnabledValue, normalizePluginEnvName, } diff --git a/packages/dd-trace/test/agent/info.spec.js b/packages/dd-trace/test/agent/info.spec.js index de1576f46f7..421b546933c 100644 --- a/packages/dd-trace/test/agent/info.spec.js +++ b/packages/dd-trace/test/agent/info.spec.js @@ -8,10 +8,10 @@ const sinon = require('sinon') require('../setup/core') const { fetchAgentInfo, clearCache } = require('../../src/agent/info') +const { defaults: { hostname, port } } = require('../../src/config/defaults') describe('agent/info', () => { - const port = 8126 - const url = `http://127.0.0.1:${port}` + const url = `http://${hostname}:${port}` describe('fetchAgentInfo', () => { afterEach(() => { @@ -130,7 +130,7 @@ describe('agent/info', () => { }) it('should clear cache when URL changes', (done) => { - const url2 = `http://127.0.0.1:${port + 1}` + const url2 = `http://${hostname}:${port + 1}` const agentInfo1 = { endpoints: ['/evp_proxy/v2'] } const agentInfo2 = { endpoints: ['/evp_proxy/v3'] } diff --git a/packages/dd-trace/test/agent/url.spec.js b/packages/dd-trace/test/agent/url.spec.js index 1fd8bcd92b1..08c518a58c6 100644 --- a/packages/dd-trace/test/agent/url.spec.js +++ b/packages/dd-trace/test/agent/url.spec.js @@ -7,7 +7,7 @@ const { describe, it } = require('mocha') require('../setup/core') const { getAgentUrl } = require('../../src/agent/url') -const defaults = require('../../src/config/defaults') +const { defaults: { hostname, port } } = require('../../src/config/defaults') describe('agent/url', () => { describe('getAgentUrl', () => { @@ -41,7 +41,7 @@ describe('agent/url', () => { const result = getAgentUrl(config) - 
assert.strictEqual(result.hostname, defaults.hostname) + assert.strictEqual(result.hostname, hostname) assert.strictEqual(result.port, '9999') }) @@ -53,7 +53,7 @@ describe('agent/url', () => { const result = getAgentUrl(config) assert.strictEqual(result.hostname, 'custom-host') - assert.strictEqual(result.port, defaults.port) + assert.strictEqual(result.port, String(port)) assert.strictEqual(result.protocol, 'http:') }) @@ -62,8 +62,8 @@ describe('agent/url', () => { const result = getAgentUrl(config) - assert.strictEqual(result.hostname, defaults.hostname) - assert.strictEqual(result.port, defaults.port) + assert.strictEqual(result.hostname, hostname) + assert.strictEqual(result.port, String(port)) assert.strictEqual(result.protocol, 'http:') }) diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js index a350ac6e801..6996716396f 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js +++ b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js @@ -7,7 +7,7 @@ const sinon = require('sinon') const sensitiveHandler = require('../../../../../src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler') -const defaults = require('../../../../../src/config/defaults') +const { defaults } = require('../../../../../src/config/defaults') const { suite } = require('../resources/evidence-redaction-suite.json') const DEFAULT_IAST_REDACTION_NAME_PATTERN = /** @type {string} */ (defaults['iast.redactionNamePattern']) diff --git a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js index 6961a1b7453..1ac470eb007 100644 --- 
a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js @@ -15,6 +15,7 @@ const DynamicInstrumentationLogsWriter = require('../../../../src/ci-visibility/ const CoverageWriter = require('../../../../src/ci-visibility/exporters/agentless/coverage-writer') const AgentWriter = require('../../../../src/exporters/agent/writer') const { clearCache } = require('../../../../src/agent/info') +const { defaults: { hostname, port } } = require('../../../../src/config/defaults') describe('AgentProxyCiVisibilityExporter', () => { beforeEach(() => { @@ -24,8 +25,7 @@ describe('AgentProxyCiVisibilityExporter', () => { }) const flushInterval = 50 - const port = 8126 - const url = `http://127.0.0.1:${port}` + const url = `http://${hostname}:${port}` const queryDelay = 50 const tags = {} diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index b65d3dc5534..c4864efc1a3 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -13,10 +13,10 @@ const nock = require('nock') const { assertObjectContains } = require('../../../../../integration-tests/helpers') require('../../../../dd-trace/test/setup/core') const CiVisibilityExporter = require('../../../src/ci-visibility/exporters/ci-visibility-exporter') +const { defaults: { hostname, port } } = require('../../../src/config/defaults') describe('CI Visibility Exporter', () => { - const port = 8126 - const url = `http://127.0.0.1:${port}` + const url = `http://${hostname}:${port}` beforeEach(() => { // to make sure `isShallowRepository` in `git.js` returns false diff --git a/packages/dd-trace/test/config/generated-config-types.spec.js b/packages/dd-trace/test/config/generated-config-types.spec.js 
index 452595157f5..20ce9b08b36 100644 --- a/packages/dd-trace/test/config/generated-config-types.spec.js +++ b/packages/dd-trace/test/config/generated-config-types.spec.js @@ -10,8 +10,7 @@ const { OUTPUT_PATH, } = require('../../../../scripts/generate-config-types') -// TODO: Re-enable when landing the actual change. -describe.skip('generated config types', () => { +describe('generated config types', () => { it('should stay in sync with supported-configurations.json', () => { assert.strictEqual( readFileSync(OUTPUT_PATH, 'utf8').replaceAll('\r\n', '\n'), diff --git a/packages/dd-trace/test/config/helper.spec.js b/packages/dd-trace/test/config/helper.spec.js index e7af52ff1fc..e1fd0fe7b15 100644 --- a/packages/dd-trace/test/config/helper.spec.js +++ b/packages/dd-trace/test/config/helper.spec.js @@ -81,6 +81,7 @@ describe('config-helper stable config sources', () => { describe('config-helper env resolution', () => { let getValueFromEnvSources + let getConfiguredEnvName let getEnvironmentVariable let resetModule let originalEnv @@ -89,6 +90,7 @@ describe('config-helper env resolution', () => { // Ensure we always get a fresh copy of the module when needed const mod = proxyquire('../../src/config/helper', overrides) getValueFromEnvSources = mod.getValueFromEnvSources + getConfiguredEnvName = mod.getConfiguredEnvName getEnvironmentVariable = mod.getEnvironmentVariable resetModule = () => {} } @@ -144,6 +146,23 @@ describe('config-helper env resolution', () => { assert.strictEqual(value, 'canonical-hostname') }) + it('returns the env name used for canonical values', () => { + process.env.DD_TRACE_AGENT_HOSTNAME = 'alias-hostname' + process.env.DD_AGENT_HOST = 'canonical-hostname' + + const envName = getConfiguredEnvName('DD_AGENT_HOST') + + assert.strictEqual(envName, 'DD_AGENT_HOST') + }) + + it('returns the env alias name when alias is used', () => { + process.env.DD_TRACE_AGENT_HOSTNAME = 'alias-hostname' + + const envName = getConfiguredEnvName('DD_AGENT_HOST') + + 
assert.strictEqual(envName, 'DD_TRACE_AGENT_HOSTNAME') + }) + it('throws for unsupported DD_ configuration', () => { assert.throws( () => getEnvironmentVariable('DD_UNSUPPORTED_CONFIG'), diff --git a/packages/dd-trace/test/config/index.spec.js b/packages/dd-trace/test/config/index.spec.js index 16e8d53275e..223f7191b6c 100644 --- a/packages/dd-trace/test/config/index.spec.js +++ b/packages/dd-trace/test/config/index.spec.js @@ -2,6 +2,7 @@ const { readFileSync, mkdtempSync, rmSync, writeFileSync } = require('node:fs') const assert = require('node:assert/strict') +const dns = require('node:dns') const { once } = require('node:events') const path = require('node:path') const os = require('node:os') @@ -12,7 +13,7 @@ const context = describe const proxyquire = require('proxyquire') require('../setup/core') -const defaults = require('../../src/config/defaults') +const { defaults } = require('../../src/config/defaults') const { getEnvironmentVariable, getEnvironmentVariables } = require('../../src/config/helper') const { assertObjectContains } = require('../../../../integration-tests/helpers') const { DD_MAJOR } = require('../../../../version') @@ -22,7 +23,6 @@ const GRPC_CLIENT_ERROR_STATUSES = defaults['grpc.client.error.statuses'] const GRPC_SERVER_ERROR_STATUSES = defaults['grpc.server.error.statuses'] describe('Config', () => { - let getConfig let log let pkg let env @@ -30,6 +30,7 @@ describe('Config', () => { let existsSyncParam let existsSyncReturn let updateConfig + const isWindows = process.platform === 'win32' const RECOMMENDED_JSON_PATH = require.resolve('../../src/appsec/recommended.json') const RULES_JSON_PATH = require.resolve('../fixtures/config/appsec-rules.json') @@ -42,35 +43,42 @@ describe('Config', () => { const comparator = (a, b) => a.name.localeCompare(b.name) || a.origin.localeCompare(b.origin) - function reloadLoggerAndConfig () { - log = proxyquire('../../src/log', {}) - log.use = sinon.spy() - log.toggle = sinon.spy() - log.warn = 
sinon.spy() - log.error = sinon.spy() + function assertConfigUpdateContains (actual, expected) { + for (const entry of expected) { + const match = actual.find(actualEntry => actualEntry.name === entry.name && actualEntry.origin === entry.origin) - const configDefaults = proxyquire('../../src/config/defaults', { - '../pkg': pkg, - }) - - // Reload the config module with each call to getConfig to ensure we get a new instance of the config. - getConfig = (options) => { - const supportedConfigurations = proxyquire.noPreserveCache()('../../src/config/supported-configurations.json', {}) - const configHelper = proxyquire.noPreserveCache()('../../src/config/helper', { - './supported-configurations.json': supportedConfigurations, - }) - const serverless = proxyquire.noPreserveCache()('../../src/serverless', {}) - return proxyquire.noPreserveCache()('../../src/config', { - './defaults': configDefaults, - '../log': log, - '../telemetry': { updateConfig }, - '../serverless': serverless, - 'node:fs': fs, - './helper': configHelper, - })(options) + assert.ok(match, `Expected update for ${entry.name} (${entry.origin})`) + assertObjectContains(match, entry) } } + // Reload the config module with each call to getConfig to ensure we get a new instance of the config. 
+ const getConfig = (options) => { + log = proxyquire('../../src/log', {}) + sinon.spy(log, 'warn') + sinon.spy(log, 'error') + const parsers = proxyquire.noPreserveCache()('../../src/config/parsers', {}) + const supportedConfigurations = proxyquire.noPreserveCache()('../../src/config/supported-configurations.json', {}) + const configDefaults = proxyquire.noPreserveCache()('../../src/config/defaults', { + './supported-configurations.json': supportedConfigurations, + '../log': log, + './parsers': parsers, + }) + const configHelper = proxyquire.noPreserveCache()('../../src/config/helper', { + './supported-configurations.json': supportedConfigurations, + }) + const serverless = proxyquire.noPreserveCache()('../../src/serverless', {}) + return proxyquire.noPreserveCache()('../../src/config', { + './defaults': configDefaults, + '../log': log, + '../telemetry': { updateConfig }, + '../serverless': serverless, + 'node:fs': fs, + './helper': configHelper, + '../pkg': pkg, + })(options) + } + beforeEach(() => { pkg = { name: '', @@ -90,8 +98,6 @@ describe('Config', () => { mkdtempSync, writeFileSync, } - - reloadLoggerAndConfig() }) afterEach(() => { @@ -110,9 +116,7 @@ describe('Config', () => { it('should return aliased value', () => { process.env.DATADOG_API_KEY = '12345' - assert.throws(() => getEnvironmentVariable('DATADOG_API_KEY'), { - message: /Missing DATADOG_API_KEY env\/configuration in "supported-configurations.json" file./, - }) + assert.strictEqual(getEnvironmentVariable('DATADOG_API_KEY'), '12345') assert.strictEqual(getEnvironmentVariable('DD_API_KEY'), '12345') const { DD_API_KEY, DATADOG_API_KEY } = getEnvironmentVariables() assert.strictEqual(DATADOG_API_KEY, undefined) @@ -161,8 +165,6 @@ describe('Config', () => { process.env.DD_TRACE_DEBUG = 'true' process.env.DD_TRACE_LOG_LEVEL = 'error' - reloadLoggerAndConfig() - const config = getConfig() assertObjectContains(config, { @@ -191,9 +193,6 @@ describe('Config', () => { 
process.env.DD_TRACE_PROPAGATION_STYLE_EXTRACT = 'b3,tracecontext' process.env.OTEL_PROPAGATORS = 'datadog,tracecontext' - // required if we want to check updates to config.debug and config.logLevel which is fetched from logger - reloadLoggerAndConfig() - const config = getConfig() assertObjectContains(config, { @@ -211,7 +210,6 @@ describe('Config', () => { tracePropagationStyle: { inject: ['b3', 'tracecontext'], extract: ['b3', 'tracecontext'], - otelPropagators: false, }, }) @@ -230,9 +228,6 @@ describe('Config', () => { process.env.OTEL_RESOURCE_ATTRIBUTES = 'foo=bar1,baz=qux1' process.env.OTEL_PROPAGATORS = 'b3,datadog' - // required if we want to check updates to config.debug and config.logLevel which is fetched from logger - reloadLoggerAndConfig() - const config = getConfig() assertObjectContains(config, { @@ -250,7 +245,6 @@ describe('Config', () => { tracePropagationStyle: { inject: ['b3', 'datadog'], extract: ['b3', 'datadog'], - otelPropagators: true, }, }) @@ -260,6 +254,85 @@ describe('Config', () => { assert.strictEqual(indexFile, noop) }) + it('should use proxy when dynamic instrumentation is enabled with DD_TRACE_ENABLED=false', () => { + process.env.DD_TRACE_ENABLED = 'false' + process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' + + const config = getConfig() + + assert.strictEqual(config.tracing, false) + assert.strictEqual(config.dynamicInstrumentation.enabled, true) + + delete require.cache[require.resolve('../../src/index')] + const indexFile = require('../../src/index') + const proxy = require('../../src/proxy') + assert.strictEqual(indexFile, proxy) + }) + + it('should use proxy when dynamic instrumentation is enabled with DD_TRACING_ENABLED=false', () => { + process.env.DD_TRACING_ENABLED = 'false' + process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' + + const config = getConfig() + + assert.strictEqual(config.tracing, false) + assert.strictEqual(config.dynamicInstrumentation.enabled, true) + + delete 
require.cache[require.resolve('../../src/index')] + const indexFile = require('../../src/index') + const proxy = require('../../src/proxy') + assert.strictEqual(indexFile, proxy) + }) + + it('should use proxy when appsec standalone is enabled with DD_TRACE_ENABLED=false', () => { + process.env.DD_TRACE_ENABLED = 'false' + process.env.DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED = 'true' + + const config = getConfig() + + assert.strictEqual(config.tracing, false) + assert.strictEqual(config.apmTracingEnabled, false) + + delete require.cache[require.resolve('../../src/index')] + const indexFile = require('../../src/index') + const proxy = require('../../src/proxy') + assert.strictEqual(indexFile, proxy) + }) + + it('should prefer DD propagation style over OTEL propagators', () => { + process.env.DD_TRACE_PROPAGATION_STYLE = 'tracecontext' + process.env.OTEL_PROPAGATORS = 'b3,datadog' + + const config = getConfig() + + assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['tracecontext']) + assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['tracecontext']) + }) + + it('should use generic OTLP exporter config for logs and metrics when specific config is not set', () => { + process.env.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://collector:4318' + process.env.OTEL_EXPORTER_OTLP_HEADERS = 'x-test=value' + process.env.OTEL_EXPORTER_OTLP_PROTOCOL = 'grpc' + process.env.OTEL_EXPORTER_OTLP_TIMEOUT = '1234' + + const config = getConfig() + + assertObjectContains(config, { + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://collector:4318', + otelLogsUrl: 'http://collector:4318', + otelMetricsUrl: 'http://collector:4318', + otelHeaders: 'x-test=value', + otelLogsHeaders: 'x-test=value', + otelMetricsHeaders: 'x-test=value', + otelProtocol: 'grpc', + otelLogsProtocol: 'grpc', + otelMetricsProtocol: 'grpc', + otelTimeout: 1234, + otelLogsTimeout: 1234, + otelMetricsTimeout: 1234, + }) + }) + it('should correctly map OTEL_RESOURCE_ATTRIBUTES', () => { 
process.env.OTEL_RESOURCE_ATTRIBUTES = 'deployment.environment=test1,service.name=test2,service.version=5,foo=bar1,baz=qux1' @@ -362,7 +435,7 @@ describe('Config', () => { debug: false, dogstatsd: { hostname: '127.0.0.1', - port: '8125', + port: 8125, }, dynamicInstrumentation: { enabled: false, @@ -436,7 +509,6 @@ describe('Config', () => { spanAttributeSchema: 'v0', spanComputePeerService: false, spanRemoveIntegrationFromService: false, - traceEnabled: true, traceId128BitGenerationEnabled: true, traceId128BitLoggingEnabled: true, tracePropagationBehaviorExtract: 'continue', @@ -445,7 +517,7 @@ describe('Config', () => { assert.deepStrictEqual(config.dynamicInstrumentation?.redactionExcludedIdentifiers, []) assert.deepStrictEqual(config.grpc.client.error.statuses, GRPC_CLIENT_ERROR_STATUSES) assert.deepStrictEqual(config.grpc.server.error.statuses, GRPC_SERVER_ERROR_STATUSES) - assert.deepStrictEqual(config.injectionEnabled, []) + assert.deepStrictEqual(config.injectionEnabled, undefined) assert.deepStrictEqual(config.serviceMapping, {}) assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['datadog', 'tracecontext', 'baggage']) assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['datadog', 'tracecontext', 'baggage']) @@ -455,163 +527,168 @@ describe('Config', () => { sinon.assert.calledOnce(updateConfig) - assertObjectContains(updateConfig.getCall(0).args[0].sort(comparator), [ - { name: 'apmTracingEnabled', value: true, origin: 'default' }, - { name: 'appsec.apiSecurity.enabled', value: true, origin: 'default' }, - { name: 'appsec.apiSecurity.sampleDelay', value: 30, origin: 'default' }, - { name: 'appsec.apiSecurity.endpointCollectionEnabled', value: true, origin: 'default' }, - { name: 'appsec.apiSecurity.endpointCollectionMessageLimit', value: 300, origin: 'default' }, - { name: 'appsec.apiSecurity.downstreamBodyAnalysisSampleRate', value: 0.5, origin: 'default' }, - { name: 'appsec.apiSecurity.maxDownstreamRequestBodyAnalysis', value: 1, 
origin: 'default' }, - { name: 'appsec.blockedTemplateHtml', value: undefined, origin: 'default' }, - { name: 'appsec.blockedTemplateJson', value: undefined, origin: 'default' }, - { name: 'appsec.enabled', value: undefined, origin: 'default' }, - { name: 'appsec.eventTracking.mode', value: 'identification', origin: 'default' }, - { name: 'appsec.extendedHeadersCollection.enabled', value: false, origin: 'default' }, - { name: 'appsec.extendedHeadersCollection.maxHeaders', value: 50, origin: 'default' }, - { name: 'appsec.extendedHeadersCollection.redaction', value: true, origin: 'default' }, + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_APM_TRACING_ENABLED', value: true, origin: 'default' }, + { name: 'DD_API_SECURITY_ENABLED', value: true, origin: 'default' }, + { name: 'DD_API_SECURITY_SAMPLE_DELAY', value: 30, origin: 'default' }, + { name: 'DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT', value: 300, origin: 'default' }, + { name: 'DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE', value: 0.5, origin: 'default' }, + { name: 'DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS', value: 1, origin: 'default' }, + { name: 'DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML', value: null, origin: 'default' }, + { name: 'DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON', value: null, origin: 'default' }, + { name: 'DD_APPSEC_ENABLED', value: null, origin: 'default' }, + { name: 'DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE', value: 'identification', origin: 'default' }, + { name: 'DD_APPSEC_COLLECT_ALL_HEADERS', value: false, origin: 'default' }, + { name: 'DD_APPSEC_MAX_COLLECTED_HEADERS', value: 50, origin: 'default' }, + { name: 'DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED', value: true, origin: 'default' }, { - name: 'appsec.obfuscatorKeyRegex', + name: 'DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP', // eslint-disable-next-line @stylistic/max-len value: 
'(?i)pass|pw(?:or)?d|secret|(?:api|private|public|access)[_-]?key|token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization|jsessionid|phpsessid|asp\\.net[_-]sessionid|sid|jwt', origin: 'default', }, { - name: 'appsec.obfuscatorValueRegex', + name: 'DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP', // eslint-disable-next-line @stylistic/max-len value: '(?i)(?:p(?:ass)?w(?:or)?d|pass(?:[_-]?phrase)?|secret(?:[_-]?key)?|(?:(?:api|private|public|access)[_-]?)key(?:[_-]?id)?|(?:(?:auth|access|id|refresh)[_-]?)?token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|jsessionid|phpsessid|asp\\.net(?:[_-]|-)sessionid|sid|jwt)(?:\\s*=([^;&]+)|"\\s*:\\s*("[^"]+"|\\d+))|bearer\\s+([a-z0-9\\._\\-]+)|token\\s*:\\s*([a-z0-9]{13})|gh[opsu]_([0-9a-zA-Z]{36})|ey[I-L][\\w=-]+\\.(ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?)|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}([^\\-]+)[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*([a-z0-9\\/\\.+]{100,})', origin: 'default', }, - { name: 'appsec.rasp.bodyCollection', value: false, origin: 'default' }, - { name: 'appsec.rasp.enabled', value: true, origin: 'default' }, - { name: 'appsec.rateLimit', value: 100, origin: 'default' }, - { name: 'appsec.rules', value: undefined, origin: 'default' }, - { name: 'appsec.sca.enabled', value: undefined, origin: 'default' }, - { name: 'appsec.stackTrace.enabled', value: true, origin: 'default' }, - { name: 'appsec.stackTrace.maxDepth', value: 32, origin: 'default' }, - { name: 'appsec.stackTrace.maxStackTraces', value: 2, origin: 'default' }, - { name: 'appsec.wafTimeout', value: 5e3, origin: 'default' }, - { name: 'ciVisAgentlessLogSubmissionEnabled', value: false, origin: 'default' }, - { name: 'ciVisibilityTestSessionName', value: undefined, origin: 'default' }, - { name: 'clientIpEnabled', value: false, origin: 'default' }, - { name: 'clientIpHeader', value: undefined, origin: 'default' }, - { name: 'codeOriginForSpans.enabled', value: true, origin: 'default' }, - { 
name: 'codeOriginForSpans.experimental.exit_spans.enabled', value: false, origin: 'default' }, - { name: 'dbmPropagationMode', value: 'disabled', origin: 'default' }, - { name: 'dogstatsd.hostname', value: '127.0.0.1', origin: 'calculated' }, - { name: 'dogstatsd.port', value: '8125', origin: 'default' }, - { name: 'dsmEnabled', value: false, origin: 'default' }, - { name: 'dynamicInstrumentation.enabled', value: false, origin: 'default' }, - { name: 'dynamicInstrumentation.probeFile', value: undefined, origin: 'default' }, - { name: 'dynamicInstrumentation.redactedIdentifiers', value: [], origin: 'default' }, - { name: 'dynamicInstrumentation.redactionExcludedIdentifiers', value: [], origin: 'default' }, - { name: 'dynamicInstrumentation.uploadIntervalSeconds', value: 1, origin: 'default' }, - { name: 'env', value: undefined, origin: 'default' }, - { name: 'experimental.aiguard.enabled', value: false, origin: 'default' }, - { name: 'experimental.aiguard.endpoint', value: undefined, origin: 'default' }, - { name: 'experimental.aiguard.maxContentSize', value: 512 * 1024, origin: 'default' }, - { name: 'experimental.aiguard.maxMessagesLength', value: 16, origin: 'default' }, - { name: 'experimental.aiguard.timeout', value: 10_000, origin: 'default' }, - { name: 'experimental.enableGetRumData', value: false, origin: 'default' }, - { name: 'experimental.exporter', value: '', origin: 'default' }, - { name: 'flakyTestRetriesCount', value: 5, origin: 'default' }, - { name: 'flushInterval', value: 2000, origin: 'default' }, - { name: 'flushMinSpans', value: 1000, origin: 'default' }, - { name: 'gitMetadataEnabled', value: true, origin: 'default' }, - { name: 'headerTags', value: [], origin: 'default' }, - { name: 'hostname', value: '127.0.0.1', origin: 'default' }, - { name: 'iast.dbRowsToTaint', value: 1, origin: 'default' }, - { name: 'iast.deduplicationEnabled', value: true, origin: 'default' }, - { name: 'iast.enabled', value: false, origin: 'default' }, - { name: 
'iast.maxConcurrentRequests', value: 2, origin: 'default' }, - { name: 'iast.maxContextOperations', value: 2, origin: 'default' }, - { name: 'iast.redactionEnabled', value: true, origin: 'default' }, - { name: 'iast.redactionNamePattern', value: defaults['iast.redactionNamePattern'], origin: 'default' }, - { name: 'iast.redactionValuePattern', value: defaults['iast.redactionValuePattern'], origin: 'default' }, - { name: 'iast.requestSampling', value: 30, origin: 'default' }, - { name: 'iast.securityControlsConfiguration', value: undefined, origin: 'default' }, - { name: 'iast.stackTrace.enabled', value: true, origin: 'default' }, - { name: 'iast.telemetryVerbosity', value: 'INFORMATION', origin: 'default' }, - { name: 'injectForce', value: false, origin: 'default' }, - { name: 'injectionEnabled', value: [], origin: 'default' }, + { name: 'DD_APPSEC_RASP_COLLECT_REQUEST_BODY', value: false, origin: 'default' }, + { name: 'DD_APPSEC_RASP_ENABLED', value: true, origin: 'default' }, + { name: 'DD_APPSEC_TRACE_RATE_LIMIT', value: 100, origin: 'default' }, + { name: 'DD_APPSEC_RULES', value: null, origin: 'default' }, + { name: 'DD_APPSEC_SCA_ENABLED', value: null, origin: 'default' }, + { name: 'DD_APPSEC_STACK_TRACE_ENABLED', value: true, origin: 'default' }, + { name: 'DD_APPSEC_MAX_STACK_TRACE_DEPTH', value: 32, origin: 'default' }, + { name: 'DD_APPSEC_MAX_STACK_TRACES', value: 2, origin: 'default' }, + { name: 'DD_APPSEC_WAF_TIMEOUT', value: 5e3, origin: 'default' }, + { name: 'DD_AGENTLESS_LOG_SUBMISSION_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TEST_SESSION_NAME', value: null, origin: 'default' }, + { name: 'DD_TRACE_CLIENT_IP_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_CLIENT_IP_HEADER', value: null, origin: 'default' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_ENABLED', value: true, origin: 'default' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED', value: false, origin: 'default' }, + { name: 
'DD_DBM_PROPAGATION_MODE', value: 'disabled', origin: 'default' }, + { name: 'DD_DOGSTATSD_HOST', value: 'localhost', origin: 'default' }, + { name: 'DD_DOGSTATSD_PORT', value: 8125, origin: 'default' }, + { name: 'DD_DATA_STREAMS_ENABLED', value: false, origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_ENABLED', value: false, origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE', value: null, origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS', value: '', origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS', value: '', origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS', value: 1, origin: 'default' }, + { name: 'DD_ENV', value: null, origin: 'default' }, + { name: 'DD_AI_GUARD_ENABLED', value: false, origin: 'default' }, + { name: 'DD_AI_GUARD_ENDPOINT', value: null, origin: 'default' }, + { name: 'DD_AI_GUARD_MAX_CONTENT_SIZE', value: 512 * 1024, origin: 'default' }, + { name: 'DD_AI_GUARD_MAX_MESSAGES_LENGTH', value: 16, origin: 'default' }, + { name: 'DD_AI_GUARD_TIMEOUT', value: 10_000, origin: 'default' }, + { name: 'DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_EXPERIMENTAL_EXPORTER', value: '', origin: 'default' }, + { name: 'DD_CIVISIBILITY_FLAKY_RETRY_COUNT', value: 5, origin: 'default' }, + { name: 'DD_TRACE_FLUSH_INTERVAL', value: 2000, origin: 'default' }, + { name: 'DD_TRACE_PARTIAL_FLUSH_MIN_SPANS', value: 1000, origin: 'default' }, + { name: 'DD_TRACE_GIT_METADATA_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_HEADER_TAGS', value: '', origin: 'default' }, + { name: 'DD_AGENT_HOST', value: '127.0.0.1', origin: 'default' }, + { name: 'DD_IAST_DB_ROWS_TO_TAINT', value: 1, origin: 'default' }, + { name: 'DD_IAST_DEDUPLICATION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_IAST_ENABLED', value: false, origin: 'default' }, + { name: 
'DD_IAST_MAX_CONCURRENT_REQUESTS', value: 2, origin: 'default' }, + { name: 'DD_IAST_MAX_CONTEXT_OPERATIONS', value: 2, origin: 'default' }, + { name: 'DD_IAST_REDACTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_IAST_REDACTION_NAME_PATTERN', value: defaults['iast.redactionNamePattern'], origin: 'default' }, + { name: 'DD_IAST_REDACTION_VALUE_PATTERN', value: defaults['iast.redactionValuePattern'], origin: 'default' }, + { name: 'DD_IAST_REQUEST_SAMPLING', value: 30, origin: 'default' }, + { name: 'DD_IAST_SECURITY_CONTROLS_CONFIGURATION', value: null, origin: 'default' }, + { name: 'DD_IAST_STACK_TRACE_ENABLED', value: true, origin: 'default' }, + { name: 'DD_IAST_TELEMETRY_VERBOSITY', value: 'INFORMATION', origin: 'default' }, + { name: 'DD_INJECT_FORCE', value: false, origin: 'default' }, + { name: 'DD_INJECTION_ENABLED', value: null, origin: 'default' }, { name: 'instrumentationSource', value: 'manual', origin: 'default' }, { name: 'isCiVisibility', value: false, origin: 'default' }, - { name: 'isEarlyFlakeDetectionEnabled', value: true, origin: 'default' }, - { name: 'isFlakyTestRetriesEnabled', value: true, origin: 'default' }, - { name: 'isGCPFunction', value: false, origin: 'env_var' }, - { name: 'isGitUploadEnabled', value: false, origin: 'default' }, - { name: 'isIntelligentTestRunnerEnabled', value: false, origin: 'default' }, - { name: 'isManualApiEnabled', value: false, origin: 'default' }, - { name: 'langchain.spanCharLimit', value: 128, origin: 'default' }, - { name: 'langchain.spanPromptCompletionSampleRate', value: 1.0, origin: 'default' }, - { name: 'llmobs.agentlessEnabled', value: undefined, origin: 'default' }, - { name: 'llmobs.mlApp', value: undefined, origin: 'default' }, - { name: 'isTestDynamicInstrumentationEnabled', value: true, origin: 'default' }, - { name: 'logInjection', value: true, origin: 'default' }, - { name: 'lookup', value: undefined, origin: 'default' }, - { name: 'middlewareTracingEnabled', value: true, 
origin: 'default' }, - { name: 'openai.spanCharLimit', value: 128, origin: 'default' }, - { name: 'openAiLogsEnabled', value: false, origin: 'default' }, - { name: 'peerServiceMapping', value: {}, origin: 'default' }, + { name: 'DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_CIVISIBILITY_FLAKY_RETRY_ENABLED', value: true, origin: 'default' }, + { name: 'DD_CIVISIBILITY_GIT_UPLOAD_ENABLED', value: true, origin: 'default' }, + { name: 'DD_CIVISIBILITY_ITR_ENABLED', value: true, origin: 'default' }, + { name: 'DD_CIVISIBILITY_MANUAL_API_ENABLED', value: true, origin: 'default' }, + { name: 'DD_LANGCHAIN_SPAN_CHAR_LIMIT', value: 128, origin: 'default' }, + { name: 'DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE', value: 1.0, origin: 'default' }, + { name: 'DD_LLMOBS_AGENTLESS_ENABLED', value: null, origin: 'default' }, + { name: 'DD_LLMOBS_ML_APP', value: null, origin: 'default' }, + { name: 'DD_TEST_FAILED_TEST_REPLAY_ENABLED', value: true, origin: 'default' }, + { name: 'DD_LOGS_INJECTION', value: true, origin: 'default' }, + { name: 'lookup', value: dns.lookup, origin: 'default' }, + { name: 'DD_TRACE_MIDDLEWARE_TRACING_ENABLED', value: true, origin: 'default' }, + { name: 'DD_OPENAI_SPAN_CHAR_LIMIT', value: 128, origin: 'default' }, + { name: 'DD_OPENAI_LOGS_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_PEER_SERVICE_MAPPING', value: '', origin: 'default' }, { name: 'plugins', value: true, origin: 'default' }, - { name: 'port', value: '8126', origin: 'default' }, - { name: 'profiling.enabled', value: false, origin: 'default' }, - { name: 'profiling.exporters', value: 'agent', origin: 'default' }, - { name: 'profiling.sourceMap', value: true, origin: 'default' }, - { name: 'protocolVersion', value: '0.4', origin: 'default' }, + { name: 'DD_TRACE_AGENT_PORT', value: 8126, origin: 'default' }, + { name: 'DD_PROFILING_ENABLED', value: 'false', origin: 'default' }, + { name: 'DD_PROFILING_EXPORTERS', 
value: 'agent', origin: 'default' }, + { name: 'DD_PROFILING_SOURCE_MAP', value: true, origin: 'default' }, + { name: 'DD_TRACE_AGENT_PROTOCOL_VERSION', value: '0.4', origin: 'default' }, { - name: 'queryStringObfuscation', + name: 'DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP', value: config.queryStringObfuscation, origin: 'default', }, - { name: 'remoteConfig.enabled', value: true, origin: 'default' }, - { name: 'remoteConfig.pollInterval', value: 5, origin: 'default' }, - { name: 'reportHostname', value: false, origin: 'default' }, - { name: 'runtimeMetrics.enabled', value: false, origin: 'default' }, - { name: 'runtimeMetricsRuntimeId', value: false, origin: 'default' }, - { name: 'sampleRate', value: undefined, origin: 'default' }, - { name: 'sampler.rateLimit', value: 100, origin: 'default' }, - { name: 'sampler.rules', value: [], origin: 'default' }, - { name: 'scope', value: undefined, origin: 'default' }, - { name: 'service', value: 'node', origin: 'default' }, - { name: 'site', value: 'datadoghq.com', origin: 'default' }, - { name: 'spanAttributeSchema', value: 'v0', origin: 'default' }, - { name: 'spanComputePeerService', value: false, origin: 'calculated' }, - { name: 'spanRemoveIntegrationFromService', value: false, origin: 'default' }, - { name: 'startupLogs', value: DD_MAJOR >= 6, origin: 'default' }, - { name: 'stats.enabled', value: false, origin: 'calculated' }, - { name: 'tagsHeaderMaxLength', value: 512, origin: 'default' }, - { name: 'telemetry.debug', value: false, origin: 'default' }, - { name: 'telemetry.dependencyCollection', value: true, origin: 'default' }, - { name: 'telemetry.enabled', value: true, origin: 'default' }, - { name: 'telemetry.heartbeatInterval', value: 60, origin: 'default' }, - { name: 'telemetry.logCollection', value: true, origin: 'default' }, - { name: 'telemetry.metrics', value: true, origin: 'default' }, - { name: 'traceEnabled', value: true, origin: 'default' }, - { name: 'traceId128BitGenerationEnabled', value: true, 
origin: 'default' }, - { name: 'traceId128BitLoggingEnabled', value: true, origin: 'default' }, - { name: 'tracing', value: true, origin: 'default' }, - { name: 'url', value: '', origin: 'default' }, - { name: 'version', value: '', origin: 'default' }, - { name: 'vertexai.spanCharLimit', value: 128, origin: 'default' }, - { name: 'vertexai.spanPromptCompletionSampleRate', value: 1.0, origin: 'default' }, + { name: 'DD_REMOTE_CONFIGURATION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS', value: 5, origin: 'default' }, + { name: 'DD_TRACE_REPORT_HOSTNAME', value: false, origin: 'default' }, + { name: 'DD_RUNTIME_METRICS_ENABLED', value: false, origin: 'default' }, + { name: 'DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: null, origin: 'default' }, + { name: 'DD_TRACE_RATE_LIMIT', value: 100, origin: 'default' }, + { name: 'DD_TRACE_SAMPLING_RULES', value: '[]', origin: 'default' }, + { name: 'DD_TRACE_SCOPE', value: null, origin: 'default' }, + { name: 'DD_SERVICE', value: null, origin: 'default' }, + { name: 'DD_SITE', value: 'datadoghq.com', origin: 'default' }, + { name: 'DD_TRACE_SPAN_ATTRIBUTE_SCHEMA', value: 'v0', origin: 'default' }, + { name: 'DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_STARTUP_LOGS', value: DD_MAJOR >= 6, origin: 'default' }, + { name: 'DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH', value: 512, origin: 'default' }, + { name: 'DD_TELEMETRY_DEBUG', value: false, origin: 'default' }, + { name: 'DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_INSTRUMENTATION_TELEMETRY_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TELEMETRY_HEARTBEAT_INTERVAL', value: 60, origin: 'default' }, + { name: 'DD_TELEMETRY_LOG_COLLECTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TELEMETRY_METRICS_ENABLED', value: true, origin: 
'default' }, + { name: 'DD_TRACE_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_AGENT_URL', value: '', origin: 'default' }, + { name: 'DD_VERSION', value: null, origin: 'default' }, + { name: 'DD_VERTEXAI_SPAN_CHAR_LIMIT', value: 128, origin: 'default' }, + { name: 'DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE', value: 1.0, origin: 'default' }, + { name: 'DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: false, origin: 'default' }, ].sort(comparator)) }) it('should support logging', () => { - const config = getConfig({ - logger: {}, - debug: true, - }) + process.env.DD_TRACE_DEBUG = 'true' + const logger = { + debug: sinon.spy(), + error: sinon.spy(), + } + getConfig({ logger }) - sinon.assert.calledWith(log.use, config.logger) - sinon.assert.calledWith(log.toggle, config.debug) + log.debug('debug') + log.error('error') + + sinon.assert.calledOnceWithExactly(logger.debug, 'debug') + sinon.assert.calledOnce(logger.error) + assert.ok(logger.error.firstCall.args[0] instanceof Error) + assert.strictEqual(logger.error.firstCall.args[0].message, 'error') }) it('should not warn on undefined DD_TRACE_SPAN_ATTRIBUTE_SCHEMA', () => { const config = getConfig({ logger: {}, - debug: true, }) sinon.assert.notCalled(log.warn) assert.strictEqual(config.spanAttributeSchema, 'v0') @@ -619,7 +696,6 @@ describe('Config', () => { it('should initialize from the default service', () => { pkg.name = 'test' - reloadLoggerAndConfig() const config = getConfig() @@ -629,7 +705,6 @@ describe('Config', () => { it('should initialize from the default version', () => { pkg.version = '1.2.3' - reloadLoggerAndConfig() const config = getConfig() @@ -759,9 +834,6
@@ describe('Config', () => { process.env.DD_VERTEXAI_SPAN_CHAR_LIMIT = '50' process.env.DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE = '0.5' - // required if we want to check updates to config.debug and config.logLevel which is fetched from logger - reloadLoggerAndConfig() - const config = getConfig() assertObjectContains(config, { @@ -822,7 +894,7 @@ describe('Config', () => { debug: true, dogstatsd: { hostname: 'dsd-agent', - port: '5218', + port: 5218, }, dynamicInstrumentation: { enabled: true, @@ -896,11 +968,10 @@ describe('Config', () => { version: '1.0.0', env: 'test', }, - traceEnabled: true, traceId128BitGenerationEnabled: true, traceId128BitLoggingEnabled: true, tracePropagationBehaviorExtract: 'restart', - tracing: false, + tracing: true, version: '1.0.0', }) assert.deepStrictEqual(config.grpc.client.error.statuses, [3, 13, 400, 401, 402, 403]) @@ -912,7 +983,7 @@ describe('Config', () => { assert.deepStrictEqual(config.peerServiceMapping, { c: 'cc', d: 'dd' }) assert.deepStrictEqual(config.sampler, { sampleRate: 0.5, - rateLimit: '-1', + rateLimit: -1, rules: [ { service: 'usersvc', name: 'healthcheck', sampleRate: 0.0 }, { service: 'usersvc', sampleRate: 0.5 }, @@ -932,101 +1003,99 @@ describe('Config', () => { sinon.assert.calledOnce(updateConfig) - assertObjectContains(updateConfig.getCall(0).args[0].sort(comparator), [ - { name: 'apmTracingEnabled', value: false, origin: 'env_var' }, - { name: 'appsec.apiSecurity.enabled', value: true, origin: 'env_var' }, - { name: 'appsec.apiSecurity.sampleDelay', value: 25, origin: 'env_var' }, - { name: 'appsec.apiSecurity.endpointCollectionEnabled', value: false, origin: 'env_var' }, - { name: 'appsec.apiSecurity.endpointCollectionMessageLimit', value: 500, origin: 'env_var' }, - { name: 'appsec.apiSecurity.downstreamBodyAnalysisSampleRate', value: 0.75, origin: 'env_var' }, - { name: 'appsec.apiSecurity.maxDownstreamRequestBodyAnalysis', value: 2, origin: 'env_var' }, - { name: 
'appsec.blockedTemplateHtml', value: BLOCKED_TEMPLATE_HTML_PATH, origin: 'env_var' }, - { name: 'appsec.blockedTemplateJson', value: BLOCKED_TEMPLATE_JSON_PATH, origin: 'env_var' }, - { name: 'appsec.enabled', value: true, origin: 'env_var' }, - { name: 'appsec.eventTracking.mode', value: 'extended', origin: 'env_var' }, - { name: 'appsec.extendedHeadersCollection.enabled', value: true, origin: 'env_var' }, - { name: 'appsec.extendedHeadersCollection.maxHeaders', value: '42', origin: 'env_var' }, - { name: 'appsec.extendedHeadersCollection.redaction', value: false, origin: 'env_var' }, - { name: 'appsec.obfuscatorKeyRegex', value: '.*', origin: 'env_var' }, - { name: 'appsec.obfuscatorValueRegex', value: '.*', origin: 'env_var' }, - { name: 'appsec.rasp.bodyCollection', value: true, origin: 'env_var' }, - { name: 'appsec.rasp.enabled', value: false, origin: 'env_var' }, - { name: 'appsec.rateLimit', value: '42', origin: 'env_var' }, - { name: 'appsec.rules', value: RULES_JSON_PATH, origin: 'env_var' }, - { name: 'appsec.sca.enabled', value: true, origin: 'env_var' }, - { name: 'appsec.stackTrace.enabled', value: false, origin: 'env_var' }, - { name: 'appsec.stackTrace.maxDepth', value: '42', origin: 'env_var' }, - { name: 'appsec.stackTrace.maxStackTraces', value: '5', origin: 'env_var' }, - { name: 'appsec.wafTimeout', value: '42', origin: 'env_var' }, - { name: 'clientIpEnabled', value: true, origin: 'env_var' }, - { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'env_var' }, - { name: 'codeOriginForSpans.enabled', value: false, origin: 'env_var' }, - { name: 'codeOriginForSpans.experimental.exit_spans.enabled', value: true, origin: 'env_var' }, - { name: 'crashtracking.enabled', value: false, origin: 'env_var' }, - { name: 'dogstatsd.hostname', value: 'dsd-agent', origin: 'env_var' }, - { name: 'dogstatsd.port', value: '5218', origin: 'env_var' }, - { name: 'dynamicInstrumentation.enabled', value: true, origin: 'env_var' }, - { name: 
'dynamicInstrumentation.probeFile', value: 'probes.json', origin: 'env_var' }, - { name: 'dynamicInstrumentation.redactedIdentifiers', value: ['foo', 'bar'], origin: 'env_var' }, - { name: 'dynamicInstrumentation.redactionExcludedIdentifiers', value: ['a', 'b', 'c'], origin: 'env_var' }, - { name: 'dynamicInstrumentation.uploadIntervalSeconds', value: 0.1, origin: 'env_var' }, - { name: 'env', value: 'test', origin: 'env_var' }, - { name: 'experimental.aiguard.enabled', value: false, origin: 'default' }, - { name: 'experimental.aiguard.endpoint', value: undefined, origin: 'default' }, - { name: 'experimental.aiguard.maxContentSize', value: 512 * 1024, origin: 'default' }, - { name: 'experimental.aiguard.maxMessagesLength', value: 16, origin: 'default' }, - { name: 'experimental.aiguard.timeout', value: 10_000, origin: 'default' }, - { name: 'experimental.enableGetRumData', value: true, origin: 'env_var' }, - { name: 'experimental.exporter', value: 'log', origin: 'env_var' }, - { name: 'hostname', value: 'agent', origin: 'env_var' }, - { name: 'iast.dbRowsToTaint', value: 2, origin: 'env_var' }, - { name: 'iast.deduplicationEnabled', value: false, origin: 'env_var' }, - { name: 'iast.enabled', value: true, origin: 'env_var' }, - { name: 'iast.maxConcurrentRequests', value: '3', origin: 'env_var' }, - { name: 'iast.maxContextOperations', value: '4', origin: 'env_var' }, - { name: 'iast.redactionEnabled', value: false, origin: 'env_var' }, - { name: 'iast.redactionNamePattern', value: 'REDACTION_NAME_PATTERN', origin: 'env_var' }, - { name: 'iast.redactionValuePattern', value: 'REDACTION_VALUE_PATTERN', origin: 'env_var' }, - { name: 'iast.requestSampling', value: '40', origin: 'env_var' }, + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_APM_TRACING_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_API_SECURITY_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_API_SECURITY_SAMPLE_DELAY', value: 25, origin: 'env_var' }, + 
{ name: 'DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT', value: 500, origin: 'env_var' }, + { name: 'DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE', value: 0.75, origin: 'env_var' }, + { name: 'DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS', value: 2, origin: 'env_var' }, + { name: 'DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML', value: BLOCKED_TEMPLATE_HTML_PATH, origin: 'env_var' }, + { name: 'DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON', value: BLOCKED_TEMPLATE_JSON_PATH, origin: 'env_var' }, + { name: 'DD_APPSEC_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE', value: 'extended', origin: 'env_var' }, + { name: 'DD_APPSEC_COLLECT_ALL_HEADERS', value: true, origin: 'env_var' }, + { name: 'DD_APPSEC_MAX_COLLECTED_HEADERS', value: 42, origin: 'env_var' }, + { name: 'DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP', value: '.*', origin: 'env_var' }, + { name: 'DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP', value: '.*', origin: 'env_var' }, + { name: 'DD_APPSEC_RASP_COLLECT_REQUEST_BODY', value: true, origin: 'env_var' }, + { name: 'DD_APPSEC_RASP_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_APPSEC_TRACE_RATE_LIMIT', value: 42, origin: 'env_var' }, + { name: 'DD_APPSEC_RULES', value: RULES_JSON_PATH, origin: 'env_var' }, + { name: 'DD_APPSEC_SCA_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_APPSEC_STACK_TRACE_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_APPSEC_MAX_STACK_TRACE_DEPTH', value: 42, origin: 'env_var' }, + { name: 'DD_APPSEC_MAX_STACK_TRACES', value: 5, origin: 'env_var' }, + { name: 'DD_APPSEC_WAF_TIMEOUT', value: 42, origin: 'env_var' }, + { name: 'DD_TRACE_CLIENT_IP_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_CLIENT_IP_HEADER', value: 'x-true-client-ip', origin: 
'env_var' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_CRASHTRACKING_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_DOGSTATSD_HOST', value: 'dsd-agent', origin: 'env_var' }, + { name: 'DD_DOGSTATSD_PORT', value: 5218, origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE', value: 'probes.json', origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS', value: 'foo,bar', origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS', value: 'a,b,c', origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS', value: 0.1, origin: 'env_var' }, + { name: 'DD_ENV', value: 'test', origin: 'env_var' }, + { name: 'DD_AI_GUARD_ENABLED', value: false, origin: 'default' }, + { name: 'DD_AI_GUARD_MAX_CONTENT_SIZE', value: 512 * 1024, origin: 'default' }, + { name: 'DD_AI_GUARD_MAX_MESSAGES_LENGTH', value: 16, origin: 'default' }, + { name: 'DD_AI_GUARD_TIMEOUT', value: 10_000, origin: 'default' }, + { name: 'DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_EXPERIMENTAL_EXPORTER', value: 'log', origin: 'env_var' }, + { name: 'DD_AGENT_HOST', value: 'agent', origin: 'env_var' }, + { name: 'DD_IAST_DB_ROWS_TO_TAINT', value: 2, origin: 'env_var' }, + { name: 'DD_IAST_DEDUPLICATION_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_IAST_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_IAST_MAX_CONCURRENT_REQUESTS', value: 3, origin: 'env_var' }, + { name: 'DD_IAST_MAX_CONTEXT_OPERATIONS', value: 4, origin: 'env_var' }, + { name: 'DD_IAST_REDACTION_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_IAST_REDACTION_NAME_PATTERN', value: 'REDACTION_NAME_PATTERN', origin: 
'env_var' }, + { name: 'DD_IAST_REDACTION_VALUE_PATTERN', value: 'REDACTION_VALUE_PATTERN', origin: 'env_var' }, + { name: 'DD_IAST_REQUEST_SAMPLING', value: 40, origin: 'env_var' }, { - name: 'iast.securityControlsConfiguration', + name: 'DD_IAST_SECURITY_CONTROLS_CONFIGURATION', value: 'SANITIZER:CODE_INJECTION:sanitizer.js:method', origin: 'env_var', }, - { name: 'iast.stackTrace.enabled', value: false, origin: 'env_var' }, - { name: 'iast.telemetryVerbosity', value: 'DEBUG', origin: 'env_var' }, - { name: 'injectForce', value: false, origin: 'env_var' }, - { name: 'injectionEnabled', value: ['tracer'], origin: 'env_var' }, - { name: 'instrumentation_config_id', value: 'abcdef123', origin: 'env_var' }, - { name: 'instrumentationSource', value: 'ssi', origin: 'env_var' }, - { name: 'isGCPFunction', value: false, origin: 'env_var' }, - { name: 'langchain.spanCharLimit', value: 50, origin: 'env_var' }, - { name: 'langchain.spanPromptCompletionSampleRate', value: 0.5, origin: 'env_var' }, - { name: 'llmobs.agentlessEnabled', value: true, origin: 'env_var' }, - { name: 'llmobs.mlApp', value: 'myMlApp', origin: 'env_var' }, - { name: 'middlewareTracingEnabled', value: false, origin: 'env_var' }, - { name: 'peerServiceMapping', value: process.env.DD_TRACE_PEER_SERVICE_MAPPING, origin: 'env_var' }, - { name: 'port', value: '6218', origin: 'env_var' }, - { name: 'profiling.enabled', value: 'true', origin: 'env_var' }, - { name: 'protocolVersion', value: '0.5', origin: 'env_var' }, - { name: 'queryStringObfuscation', value: '.*', origin: 'env_var' }, - { name: 'remoteConfig.enabled', value: false, origin: 'env_var' }, - { name: 'remoteConfig.pollInterval', value: '42', origin: 'env_var' }, - { name: 'reportHostname', value: true, origin: 'env_var' }, - { name: 'runtimeMetrics.enabled', value: true, origin: 'env_var' }, - { name: 'runtimeMetricsRuntimeId', value: true, origin: 'env_var' }, - { name: 'sampler.rateLimit', value: '-1', origin: 'env_var' }, - { name: 
'sampler.rules', value: process.env.DD_TRACE_SAMPLING_RULES, origin: 'env_var' }, - { name: 'sampleRate', value: 0.5, origin: 'env_var' }, - { name: 'service', value: 'service', origin: 'env_var' }, - { name: 'spanAttributeSchema', value: 'v1', origin: 'env_var' }, - { name: 'spanRemoveIntegrationFromService', value: true, origin: 'env_var' }, - { name: 'traceId128BitGenerationEnabled', value: true, origin: 'env_var' }, - { name: 'traceId128BitLoggingEnabled', value: true, origin: 'env_var' }, - { name: 'tracing', value: false, origin: 'env_var' }, - { name: 'version', value: '1.0.0', origin: 'env_var' }, - { name: 'vertexai.spanCharLimit', value: 50, origin: 'env_var' }, - { name: 'vertexai.spanPromptCompletionSampleRate', value: 0.5, origin: 'env_var' }, + { name: 'DD_IAST_STACK_TRACE_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_IAST_TELEMETRY_VERBOSITY', value: 'DEBUG', origin: 'env_var' }, + { name: 'DD_INJECT_FORCE', value: false, origin: 'env_var' }, + { name: 'DD_INJECTION_ENABLED', value: 'tracer', origin: 'env_var' }, + { name: 'DD_INSTRUMENTATION_CONFIG_ID', value: 'abcdef123', origin: 'env_var' }, + { name: 'DD_LANGCHAIN_SPAN_CHAR_LIMIT', value: 50, origin: 'env_var' }, + { name: 'DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE', value: 0.5, origin: 'env_var' }, + { name: 'DD_LLMOBS_AGENTLESS_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_LLMOBS_ML_APP', value: 'myMlApp', origin: 'env_var' }, + { name: 'DD_TRACE_MIDDLEWARE_TRACING_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_TRACE_PEER_SERVICE_MAPPING', value: 'c:cc, d:dd', origin: 'env_var' }, + { name: 'DD_TRACE_AGENT_PORT', value: 6218, origin: 'env_var' }, + { name: 'DD_PROFILING_ENABLED', value: 'true', origin: 'env_var' }, + { name: 'DD_TRACE_AGENT_PROTOCOL_VERSION', value: '0.5', origin: 'env_var' }, + { name: 'DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP', value: '.*', origin: 'env_var' }, + { name: 'DD_REMOTE_CONFIGURATION_ENABLED', value: false, origin: 
'env_var' }, + { name: 'DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS', value: 42, origin: 'env_var' }, + { name: 'DD_TRACE_REPORT_HOSTNAME', value: true, origin: 'env_var' }, + { name: 'DD_RUNTIME_METRICS_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_RATE_LIMIT', value: -1, origin: 'env_var' }, + { name: 'DD_TRACE_SAMPLING_RULES', value: process.env.DD_TRACE_SAMPLING_RULES, origin: 'env_var' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.5, origin: 'env_var' }, + { name: 'DD_SERVICE', value: 'service', origin: 'env_var' }, + { name: 'DD_TRACE_SPAN_ATTRIBUTE_SCHEMA', value: 'v1', origin: 'env_var' }, + { name: 'DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_VERSION', value: '1.0.0', origin: 'env_var' }, + { name: 'DD_VERTEXAI_SPAN_CHAR_LIMIT', value: 50, origin: 'env_var' }, + { name: 'DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE', value: 0.5, origin: 'env_var' }, + { name: 'instrumentationSource', value: 'ssi', origin: 'calculated' }, ].sort(comparator)) }) @@ -1037,7 +1106,7 @@ describe('Config', () => { assertObjectContains(config, { service: 'node', - env: undefined, + env: '', version: '', }) @@ -1047,7 +1116,7 @@ describe('Config', () => { assertObjectContains(config, { service: 'node', - env: undefined, + env: '', version: '', }) }) @@ -1116,7 +1185,7 @@ describe('Config', () => { process.env.DD_SITE = 'datadoghq.eu' process.env.DD_TRACE_AGENT_HOSTNAME = 'agent' process.env.DD_TRACE_AGENT_PORT = '6218' - process.env.DD_TRACING_ENABLED = 'false' + process.env.DD_TRACE_ENABLED = 'false' process.env.DD_SERVICE = 'service' process.env.DD_ENV = 'test' @@ -1173,7 +1242,9 @@ 
describe('Config', () => { }) it('should initialize from the options', () => { - const logger = {} + const logger = { + warn: sinon.spy(), + } const tags = { foo: 'bar', } @@ -1184,6 +1255,11 @@ describe('Config', () => { { service: 'authsvc', sampleRate: 1.0 }, { sampleRate: 0.1 }, ] + const samplingRulesString = '[{"service":"usersvc","name":"healthcheck","sampleRate":0},' + + '{"service":"usersvc","sampleRate":0.5},' + + '{"service":"authsvc","sampleRate":1},' + + '{"sampleRate":0.1}]' + const config = getConfig({ appsec: false, clientIpEnabled: true, @@ -1196,10 +1272,9 @@ describe('Config', () => { }, }, }, - debug: true, dogstatsd: { hostname: 'agent-dsd', - port: 5218, + port: '5218', }, dynamicInstrumentation: { enabled: true, @@ -1208,6 +1283,8 @@ describe('Config', () => { redactionExcludedIdentifiers: ['a', 'b', 'c'], uploadIntervalSeconds: 0.1, }, + // 'enabled' does not exist as property. This is added to test for the + // warning that is logged when a non-existent property is set. 
enabled: false, env: 'test', experimental: { @@ -1237,7 +1314,6 @@ describe('Config', () => { }, telemetryVerbosity: 'DEBUG', }, - traceparent: true, }, flushInterval: 5000, flushMinSpans: 500, @@ -1245,7 +1321,6 @@ describe('Config', () => { llmobs: { mlApp: 'myMlApp', agentlessEnabled: true, - apiKey: 'myApiKey', }, logger, logLevel, @@ -1315,12 +1390,6 @@ describe('Config', () => { dynamicInstrumentation: { enabled: true, probeFile: 'probes.json', - }, - }) - assert.deepStrictEqual(config.dynamicInstrumentation?.redactedIdentifiers, ['foo', 'bar']) - assert.deepStrictEqual(config.dynamicInstrumentation?.redactionExcludedIdentifiers, ['a', 'b', 'c']) - assertObjectContains(config, { - dynamicInstrumentation: { uploadIntervalSeconds: 0.1, }, env: 'test', @@ -1328,13 +1397,7 @@ describe('Config', () => { aiguard: { enabled: true, endpoint: 'https://dd.datad0g.com/api/unstable/ai-guard', - }, - }, - }) - assert.strictEqual(config.experimental?.aiguard?.maxContentSize, 1024 * 1024) - assertObjectContains(config, { - experimental: { - aiguard: { + maxContentSize: 1024 * 1024, maxMessagesLength: 32, timeout: 2000, }, @@ -1354,15 +1417,6 @@ describe('Config', () => { redactionNamePattern: 'REDACTION_NAME_PATTERN', redactionValuePattern: 'REDACTION_VALUE_PATTERN', requestSampling: 50, - }, - }) - if (DD_MAJOR < 6) { - assert.strictEqual(config.iast?.securityControlsConfiguration, 'SANITIZER:CODE_INJECTION:sanitizer.js:method') - } else { - assert.ok(!('iast.securityControlsConfiguration' in config)) - } - assertObjectContains(config, { - iast: { stackTrace: { enabled: false, }, @@ -1372,14 +1426,12 @@ describe('Config', () => { agentlessEnabled: true, mlApp: 'myMlApp', }, - }) - assertObjectContains(config, { logLevel, logger, middlewareTracingEnabled: false, peerServiceMapping: { d: 'dd' }, plugins: false, - port: '6218', + port: 6218, protocolVersion: '0.5', remoteConfig: { pollInterval: 42, @@ -1392,7 +1444,27 @@ describe('Config', () => { }, runtimeMetricsRuntimeId: 
true, sampleRate: 0.5, + service: 'service', + site: 'datadoghq.eu', + spanComputePeerService: true, + spanRemoveIntegrationFromService: true, + tags: { + env: 'test', + foo: 'bar', + service: 'service', + version: '0.1.0', + }, + traceId128BitGenerationEnabled: true, + traceId128BitLoggingEnabled: true, + version: '0.1.0', }) + assert.deepStrictEqual(config.dynamicInstrumentation?.redactedIdentifiers, ['foo', 'bar']) + assert.deepStrictEqual(config.dynamicInstrumentation?.redactionExcludedIdentifiers, ['a', 'b', 'c']) + if (DD_MAJOR < 6) { + assert.strictEqual(config.iast?.securityControlsConfiguration, 'SANITIZER:CODE_INJECTION:sanitizer.js:method') + } else { + assert.ok(!('iast.securityControlsConfiguration' in config)) + } assert.deepStrictEqual(config.sampler, { rateLimit: 1000, rules: [ @@ -1409,102 +1481,105 @@ describe('Config', () => { { sampleRate: 0.1 }, ], }) - assert.strictEqual(config.service, 'service') assert.deepStrictEqual(config.serviceMapping, { a: 'aa', b: 'bb' }) - assertObjectContains(config, { - site: 'datadoghq.eu', - spanComputePeerService: true, - spanRemoveIntegrationFromService: true, - }) - assert.ok(Object.hasOwn(config, 'tags')) - assertObjectContains(config.tags, { - env: 'test', - foo: 'bar', - }) assert.ok(Object.hasOwn(config.tags, 'runtime-id')) assert.match(config.tags['runtime-id'], /^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$/) - assertObjectContains(config.tags, { - service: 'service', - version: '0.1.0', - }) - assertObjectContains(config, { - traceId128BitGenerationEnabled: true, - traceId128BitLoggingEnabled: true, - }) - assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['datadog']) - assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['datadog']) - assert.strictEqual(config.version, '0.1.0') + assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['datadog', 'b3', 'b3 single header']) + assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['datadog', 'b3', 'b3 single header']) + + 
if (DD_MAJOR < 6) { + sinon.assert.calledOnce(log.warn) + } else { + sinon.assert.calledTwice(log.warn) + sinon.assert.calledWithExactly( + log.warn, + 'Unknown option %s with value %o', + 'experimental.iast.securityControlsConfiguration', + 'SANITIZER:CODE_INJECTION:sanitizer.js:method', + ) + } + sinon.assert.calledWithExactly(log.warn, 'Unknown option %s with value %o', 'enabled', false) sinon.assert.calledOnce(updateConfig) - assertObjectContains(updateConfig.getCall(0).args[0].sort(comparator), [ - { name: 'appsec.enabled', value: false, origin: 'code' }, - { name: 'clientIpEnabled', value: true, origin: 'code' }, - { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'code' }, - { name: 'codeOriginForSpans.enabled', value: false, origin: 'code' }, - { name: 'codeOriginForSpans.experimental.exit_spans.enabled', value: true, origin: 'code' }, - { name: 'dogstatsd.hostname', value: 'agent-dsd', origin: 'code' }, - { name: 'dogstatsd.port', value: '5218', origin: 'code' }, - { name: 'dynamicInstrumentation.enabled', value: true, origin: 'code' }, - { name: 'dynamicInstrumentation.probeFile', value: 'probes.json', origin: 'code' }, - { name: 'dynamicInstrumentation.redactedIdentifiers', value: ['foo', 'bar'], origin: 'code' }, - { name: 'dynamicInstrumentation.redactionExcludedIdentifiers', value: ['a', 'b', 'c'], origin: 'code' }, - { name: 'dynamicInstrumentation.uploadIntervalSeconds', value: 0.1, origin: 'code' }, - { name: 'env', value: 'test', origin: 'code' }, - { name: 'experimental.aiguard.enabled', value: true, origin: 'code' }, - { name: 'experimental.aiguard.endpoint', value: 'https://dd.datad0g.com/api/unstable/ai-guard', origin: 'code' }, - { name: 'experimental.aiguard.maxContentSize', value: 1024 * 1024, origin: 'code' }, - { name: 'experimental.aiguard.maxMessagesLength', value: 32, origin: 'code' }, - { name: 'experimental.aiguard.timeout', value: 2_000, origin: 'code' }, - { name: 'experimental.enableGetRumData', value: true, origin: 
'code' }, - { name: 'experimental.exporter', value: 'log', origin: 'code' }, - { name: 'flushInterval', value: 5000, origin: 'code' }, - { name: 'flushMinSpans', value: 500, origin: 'code' }, - { name: 'hostname', value: 'agent', origin: 'code' }, - { name: 'iast.dbRowsToTaint', value: 2, origin: 'code' }, - { name: 'iast.deduplicationEnabled', value: false, origin: 'code' }, - { name: 'iast.enabled', value: true, origin: 'code' }, - { name: 'iast.maxConcurrentRequests', value: 4, origin: 'code' }, - { name: 'iast.maxContextOperations', value: 5, origin: 'code' }, - { name: 'iast.redactionEnabled', value: false, origin: 'code' }, - { name: 'iast.redactionNamePattern', value: 'REDACTION_NAME_PATTERN', origin: 'code' }, - { name: 'iast.redactionValuePattern', value: 'REDACTION_VALUE_PATTERN', origin: 'code' }, - { name: 'iast.requestSampling', value: 50, origin: 'code' }, + assert.ok( + updateConfig.getCall(0).args[0].every( + entry => entry.name !== 'DD_TRACE_STATS_COMPUTATION_ENABLED' || entry.origin !== 'calculated' + ), + ) + + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_APPSEC_ENABLED', value: false, origin: 'code' }, + { name: 'DD_TRACE_CLIENT_IP_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_CLIENT_IP_HEADER', value: 'x-true-client-ip', origin: 'code' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_ENABLED', value: false, origin: 'code' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED', value: true, origin: 'code' }, + { name: 'DD_DOGSTATSD_HOST', value: 'agent-dsd', origin: 'code' }, + { name: 'DD_DOGSTATSD_PORT', value: '5218', origin: 'code' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_ENABLED', value: true, origin: 'code' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE', value: 'probes.json', origin: 'code' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS', value: 'foo,bar', origin: 'code' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS', value: 'a,b,c', origin: 
'code' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS', value: 0.1, origin: 'code' }, + { name: 'DD_ENV', value: 'test', origin: 'code' }, + { name: 'DD_AI_GUARD_ENABLED', value: true, origin: 'code' }, + { name: 'DD_AI_GUARD_ENDPOINT', value: 'https://dd.datad0g.com/api/unstable/ai-guard', origin: 'code' }, + { name: 'DD_AI_GUARD_MAX_CONTENT_SIZE', value: 1024 * 1024, origin: 'code' }, + { name: 'DD_AI_GUARD_MAX_MESSAGES_LENGTH', value: 32, origin: 'code' }, + { name: 'DD_AI_GUARD_TIMEOUT', value: 2_000, origin: 'code' }, + { name: 'DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_EXPERIMENTAL_EXPORTER', value: 'log', origin: 'code' }, + { name: 'DD_TRACE_FLUSH_INTERVAL', value: 5000, origin: 'code' }, + { name: 'DD_TRACE_PARTIAL_FLUSH_MIN_SPANS', value: 500, origin: 'code' }, + { name: 'DD_AGENT_HOST', value: 'agent', origin: 'code' }, + { name: 'DD_IAST_DB_ROWS_TO_TAINT', value: 2, origin: 'code' }, + { name: 'DD_IAST_DEDUPLICATION_ENABLED', value: false, origin: 'code' }, + { name: 'DD_IAST_ENABLED', value: true, origin: 'code' }, + { name: 'DD_IAST_MAX_CONCURRENT_REQUESTS', value: 4, origin: 'code' }, + { name: 'DD_IAST_MAX_CONTEXT_OPERATIONS', value: 5, origin: 'code' }, + { name: 'DD_IAST_REDACTION_ENABLED', value: false, origin: 'code' }, + { name: 'DD_IAST_REDACTION_NAME_PATTERN', value: 'REDACTION_NAME_PATTERN', origin: 'code' }, + { name: 'DD_IAST_REDACTION_VALUE_PATTERN', value: 'REDACTION_VALUE_PATTERN', origin: 'code' }, + { name: 'DD_IAST_REQUEST_SAMPLING', value: 50, origin: 'code' }, DD_MAJOR < 6 && { - name: 'iast.securityControlsConfiguration', + name: 'DD_IAST_SECURITY_CONTROLS_CONFIGURATION', value: 'SANITIZER:CODE_INJECTION:sanitizer.js:method', origin: 'code', }, - { name: 'iast.stackTrace.enabled', value: false, origin: 'code' }, - { name: 'iast.telemetryVerbosity', value: 'DEBUG', origin: 'code' }, - { name: 'llmobs.agentlessEnabled', value: true, origin: 'code' }, - { name: 
'llmobs.mlApp', value: 'myMlApp', origin: 'code' }, - { name: 'middlewareTracingEnabled', value: false, origin: 'code' }, - { name: 'peerServiceMapping', value: { d: 'dd' }, origin: 'code' }, + { name: 'DD_IAST_STACK_TRACE_ENABLED', value: false, origin: 'code' }, + { name: 'DD_IAST_TELEMETRY_VERBOSITY', value: 'DEBUG', origin: 'code' }, + { name: 'DD_LLMOBS_AGENTLESS_ENABLED', value: true, origin: 'code' }, + { name: 'DD_LLMOBS_ML_APP', value: 'myMlApp', origin: 'code' }, + { name: 'DD_TRACE_MIDDLEWARE_TRACING_ENABLED', value: false, origin: 'code' }, + { name: 'DD_TRACE_PEER_SERVICE_MAPPING', value: 'd:dd', origin: 'code' }, { name: 'plugins', value: false, origin: 'code' }, - { name: 'port', value: '6218', origin: 'code' }, - { name: 'protocolVersion', value: '0.5', origin: 'code' }, - { name: 'remoteConfig.pollInterval', value: 42, origin: 'code' }, - { name: 'reportHostname', value: true, origin: 'code' }, - { name: 'runtimeMetrics.enabled', value: true, origin: 'code' }, - { name: 'runtimeMetricsRuntimeId', value: true, origin: 'code' }, - { name: 'sampler.rateLimit', value: 1000, origin: 'code' }, - { name: 'sampler.rules', value: samplingRules, origin: 'code' }, - { name: 'sampleRate', value: 0.5, origin: 'code' }, - { name: 'service', value: 'service', origin: 'code' }, - { name: 'site', value: 'datadoghq.eu', origin: 'code' }, - { name: 'spanAttributeSchema', value: 'v1', origin: 'code' }, - { name: 'spanComputePeerService', value: true, origin: 'calculated' }, - { name: 'spanRemoveIntegrationFromService', value: true, origin: 'code' }, - { name: 'stats.enabled', value: false, origin: 'calculated' }, - { name: 'traceId128BitGenerationEnabled', value: true, origin: 'code' }, - { name: 'traceId128BitLoggingEnabled', value: true, origin: 'code' }, - { name: 'version', value: '0.1.0', origin: 'code' }, + { name: 'DD_TRACE_AGENT_PORT', value: 6218, origin: 'code' }, + { name: 'DD_TRACE_AGENT_PROTOCOL_VERSION', value: '0.5', origin: 'code' }, + { name: 
'DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS', value: 42, origin: 'code' }, + { name: 'DD_TRACE_REPORT_HOSTNAME', value: true, origin: 'code' }, + { name: 'DD_RUNTIME_METRICS_ENABLED', value: true, origin: 'code' }, + { name: 'DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_RATE_LIMIT', value: 1000, origin: 'code' }, + { name: 'DD_TRACE_SAMPLING_RULES', value: samplingRulesString, origin: 'code' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.5, origin: 'code' }, + { name: 'DD_SERVICE', value: 'service', origin: 'code' }, + { name: 'DD_SITE', value: 'datadoghq.eu', origin: 'code' }, + { name: 'DD_TRACE_SPAN_ATTRIBUTE_SCHEMA', value: 'v1', origin: 'code' }, + { name: 'DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED', value: true, origin: 'code' }, + { name: 'DD_VERSION', value: '0.1.0', origin: 'code' }, + { name: 'DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED', value: true, origin: 'code' }, ].filter(v => v).sort(comparator)) }) it('should initialize from the options with url taking precedence', () => { - const logger = {} + const logger = { + warn: sinon.spy(), + error: sinon.spy(), + info: sinon.spy(), + debug: sinon.spy(), + } const tags = { foo: 'bar' } const config = getConfig({ hostname: 'agent', @@ -1544,9 +1619,8 @@ describe('Config', () => { getConfig() - sinon.assert.calledWith(log.warn, 'Use either the DD_TRACE_PROPAGATION_STYLE ' + - 'environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and ' + - 'DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') + sinon.assert.calledWith(log.warn, 'Use either DD_TRACE_PROPAGATION_STYLE or separate ' + + 'DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') }) it('should warn if mixing shared and inject propagation style env vars', () => { @@ -1555,9 
+1629,8 @@ describe('Config', () => { getConfig() - sinon.assert.calledWith(log.warn, 'Use either the DD_TRACE_PROPAGATION_STYLE ' + - 'environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and ' + - 'DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') + sinon.assert.calledWith(log.warn, 'Use either DD_TRACE_PROPAGATION_STYLE or separate ' + + 'DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') }) it('should warn if defaulting to v0 span attribute schema', () => { @@ -1565,7 +1638,10 @@ describe('Config', () => { const config = getConfig() - sinon.assert.calledWith(log.warn, 'Unexpected input for config.spanAttributeSchema, picked default', 'v0') + sinon.assert.calledWithExactly( + log.warn, + "Invalid value: 'foo' for DD_TRACE_SPAN_ATTRIBUTE_SCHEMA (source: env_var), picked default", + ) assert.strictEqual(config.spanAttributeSchema, 'v0') }) @@ -1790,7 +1866,6 @@ describe('Config', () => { timeout: 2000, }, b3: false, - traceparent: false, exporter: 'agent', enableGetRumData: false, }, @@ -1802,7 +1877,7 @@ describe('Config', () => { enabled: true, redactionNamePattern: 'REDACTION_NAME_PATTERN', redactionValuePattern: 'REDACTION_VALUE_PATTERN', - securityControlsConfiguration: 'SANITIZER:CODE_INJECTION:sanitizer.js:method2', + securityControlsConfiguration: 'SANITIZER:CODE_INJECTION:sanitizer.js:method' + (DD_MAJOR < 6 ? 
'2' : '1'), stackTrace: { enabled: false, }, @@ -1816,7 +1891,6 @@ describe('Config', () => { d: 'dd', }, port: 7777, - protocol: 'https', protocolVersion: '0.5', remoteConfig: { pollInterval: 42, @@ -1837,10 +1911,7 @@ describe('Config', () => { }, traceId128BitGenerationEnabled: false, traceId128BitLoggingEnabled: false, - tracePropagationStyle: { - inject: [], - extract: [], - }, + tracePropagationStyle: ['abc'], version: '1.0.0', }) @@ -1891,7 +1962,7 @@ describe('Config', () => { }, dogstatsd: { hostname: 'server', - port: '8888', + port: 8888, }, dynamicInstrumentation: { enabled: false, @@ -1960,8 +2031,8 @@ describe('Config', () => { env: 'development', }, tracePropagationStyle: { - extract: [], - inject: [], + extract: ['abc'], + inject: ['abc'], }, }) assert.strictEqual(config.url.toString(), 'https://agent2:6218/') @@ -2068,9 +2139,9 @@ describe('Config', () => { downstreamBodyAnalysisSampleRate: 0.5, maxDownstreamRequestBodyAnalysis: 1, }, - blockedTemplateGraphql: undefined, - blockedTemplateHtml: undefined, - blockedTemplateJson: undefined, + blockedTemplateGraphql: BLOCKED_TEMPLATE_GRAPHQL, + blockedTemplateHtml: BLOCKED_TEMPLATE_HTML, + blockedTemplateJson: BLOCKED_TEMPLATE_JSON, enabled: true, eventTracking: { mode: 'disabled', @@ -2087,7 +2158,7 @@ describe('Config', () => { bodyCollection: true, }, rateLimit: 42, - rules: undefined, + rules: RULES_JSON_PATH, sca: { enabled: undefined, }, @@ -2126,7 +2197,6 @@ describe('Config', () => { const config = getConfig({ url: 'https://agent3:7778', - protocol: 'http', hostname: 'server', port: 7777, service: 'test', @@ -2145,7 +2215,8 @@ describe('Config', () => { process.env.DD_SERVICE = 'test' process.env.DD_ENV = 'dev' process.env.DD_VERSION = '1.0.0' - process.env.DD_TAGS = 'service=foo,env=bar,version=0.0.0' + // TODO: Is that correct? Did we support equal signs in DD_TAGS before? 
+ process.env.DD_TAGS = 'service:foo,env:bar,version:0.0.0' const config = getConfig() @@ -2350,21 +2421,34 @@ describe('Config', () => { it('should send empty array when remote config is called on empty options', () => { const config = getConfig() + sinon.assert.calledOnce(updateConfig) + + const length = updateConfig.getCall(0).args[0].length + + updateConfig.resetHistory() + config.setRemoteConfig({}) - sinon.assert.calledTwice(updateConfig) - assert.deepStrictEqual(updateConfig.getCall(1).args[0], []) + for (const entry of updateConfig.getCall(0).args[0].slice(length)) { + assert.notStrictEqual(entry.origin, 'remote_config') + } + + sinon.assert.calledOnce(updateConfig) }) it('should send remote config changes to telemetry', () => { const config = getConfig() + // Reset the changes array. This would normally be done by updateConfig. + updateConfig.getCall(0).args[0].length = 0 + updateConfig.resetHistory() + config.setRemoteConfig({ - tracing_sampling_rate: 0, + sampleRate: 0, }) - assert.deepStrictEqual(updateConfig.getCall(1).args[0], [ - { name: 'sampleRate', value: 0, origin: 'remote_config' }, + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_TRACE_SAMPLE_RATE', value: 0, origin: 'remote_config' }, ]) }) @@ -2372,13 +2456,10 @@ describe('Config', () => { const config = getConfig() config.setRemoteConfig({ - tracing_sampling_rules: [ + samplingRules: [ { resource: '*', - tags: [ - { key: 'tag-a', value_glob: 'tag-a-val*' }, - { key: 'tag-b', value_glob: 'tag-b-val*' }, - ], + tags: { 'tag-a': 'tag-a-val*', 'tag-b': 'tag-b-val*' }, provenance: 'customer', }, ], @@ -2401,7 +2482,7 @@ describe('Config', () => { const config = getConfig() const runtimeId = config.tags['runtime-id'] config.setRemoteConfig({ - tracing_tags: { foo: 'bar' }, + tags: { foo: 'bar' }, }) assert.strictEqual(config.tags?.foo, 'bar') @@ -2433,10 +2514,33 @@ describe('Config', () => { ]) }) - it('should skip appsec config files if they do not exist', () => { - 
const error = new Error('file not found') - fs.readFileSync = () => { throw error } + it('should warn when span sampling rules file contains invalid JSON', function () { + if (isWindows) { + this.skip() + return + } + const tempDir = mkdtempSync(path.join(process.cwd(), 'dd-trace-span-sampling-rules-')) + const rulesPath = path.join(tempDir, 'span-sampling-rules.json') + writeFileSync(rulesPath, '{"sample_rate":') + process.env.DD_SPAN_SAMPLING_RULES_FILE = rulesPath + + try { + const config = getConfig() + + assert.strictEqual(config.sampler?.spanSamplingRules, undefined) + sinon.assert.calledWithMatch( + log.warn, + 'Error reading span sampling rules file %s; %o', + '{"sample_rate":', + sinon.match.instanceOf(SyntaxError) + ) + } finally { + rmSync(tempDir, { recursive: true, force: true }) + } + }) + + it('should skip appsec config files if they do not exist', () => { const config = getConfig({ appsec: { enabled: true, @@ -2448,9 +2552,41 @@ describe('Config', () => { }) sinon.assert.callCount(log.error, 3) - sinon.assert.calledWithExactly(log.error.firstCall, 'Error reading file %s', 'DOES_NOT_EXIST.json', error) - sinon.assert.calledWithExactly(log.error.secondCall, 'Error reading file %s', 'DOES_NOT_EXIST.html', error) - sinon.assert.calledWithExactly(log.error.thirdCall, 'Error reading file %s', 'DOES_NOT_EXIST.json', error) + const assertMissingAppsecTemplateError = (message, optionName, fileName) => { + const escapedFileName = fileName.replaceAll('.', '\\.') + const escapedOptionName = optionName.replaceAll('.', '\\.') + const escapedPathSuffix = `[\\\\/]${escapedFileName}` + + assert.match( + message, + new RegExp( + '^Error reading path: \'' + escapedFileName + '\' for ' + escapedOptionName + + ' \\(source: code\\), picked default\\n\\n' + + '\\[Error: ENOENT: no such file or directory, open \'(?:.*' + + escapedPathSuffix + '|' + escapedFileName + ')\'\\]' + ) + ) + assert.match(message, /errno: -(2|4058)/) + assert.match(message, /code: 'ENOENT'/) + 
assert.match(message, /syscall: 'open'/) + assert.match(message, new RegExp(`path: '(?:.*${escapedPathSuffix}|${escapedFileName})'`)) + } + + assertMissingAppsecTemplateError( + log.error.firstCall.args[0], + 'appsec.blockedTemplateHtml', + 'DOES_NOT_EXIST.html' + ) + assertMissingAppsecTemplateError( + log.error.secondCall.args[0], + 'appsec.blockedTemplateJson', + 'DOES_NOT_EXIST.json' + ) + assertMissingAppsecTemplateError( + log.error.thirdCall.args[0], + 'appsec.blockedTemplateGraphql', + 'DOES_NOT_EXIST.json' + ) assertObjectContains(config, { appsec: { @@ -2718,8 +2854,9 @@ describe('Config', () => { process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED = 'true' const config = getConfig(options) assertObjectContains(config, { - isIntelligentTestRunnerEnabled: false, - isGitUploadEnabled: false, + isCiVisibility: false, + isIntelligentTestRunnerEnabled: true, + isGitUploadEnabled: true, }) }) }) @@ -2848,8 +2985,8 @@ describe('Config', () => { assert.strictEqual(config.llmobs.enabled, false) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: false, origin: 'default', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 'DD_LLMOBS_ENABLED', value: false, origin: 'default', }]) }) @@ -2859,8 +2996,8 @@ describe('Config', () => { assert.strictEqual(config.llmobs.enabled, true) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: true, origin: 'env_var', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 'DD_LLMOBS_ENABLED', value: true, origin: 'env_var', }]) }) @@ -2870,29 +3007,29 @@ describe('Config', () => { assert.strictEqual(config.llmobs.enabled, false) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: false, origin: 'env_var', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 
'DD_LLMOBS_ENABLED', value: false, origin: 'env_var', }]) }) it('should enable llmobs with options and DD_LLMOBS_ENABLED is not set', () => { - const config = getConfig({ llmobs: {} }) + const config = getConfig({ llmobs: { agentlessEnabled: true } }) assert.strictEqual(config.llmobs.enabled, true) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: true, origin: 'code', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 'DD_LLMOBS_ENABLED', value: true, origin: 'calculated', }]) }) it('should have DD_LLMOBS_ENABLED take priority over options', () => { process.env.DD_LLMOBS_ENABLED = 'false' - const config = getConfig({ llmobs: {} }) + const config = getConfig({ llmobs: { agentlessEnabled: true } }) assert.strictEqual(config.llmobs.enabled, false) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: false, origin: 'env_var', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 'DD_LLMOBS_ENABLED', value: false, origin: 'env_var', }]) }) }) @@ -2913,8 +3050,8 @@ describe('Config', () => { it('defaults', () => { const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: false, - responsesEnabled: false, + request: undefined, + response: undefined, maxDepth: 10, }) }) @@ -2923,9 +3060,10 @@ describe('Config', () => { process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = 'all' const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: true, - responsesEnabled: false, + request: [], + response: undefined, maxDepth: 10, + rules: { aws: { dynamodb: { request: [], response: [], expand: [] } } }, }) const awsRules = taggingConfig.rules.aws for (const [serviceName, service] of Object.entries(awsRules)) { @@ -2937,8 +3075,8 @@ describe('Config', () => { 
process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = '$.foo.bar' const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: true, - responsesEnabled: false, + request: ['$.foo.bar'], + response: undefined, maxDepth: 10, }) const awsRules = taggingConfig.rules.aws @@ -2953,9 +3091,10 @@ describe('Config', () => { process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = 'all' const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: false, - responsesEnabled: true, + request: undefined, + response: [], maxDepth: 10, + rules: { aws: { dynamodb: { request: [], response: [], expand: [] } } }, }) const awsRules = taggingConfig.rules.aws for (const [serviceName, service] of Object.entries(awsRules)) { @@ -2967,9 +3106,10 @@ describe('Config', () => { process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = '$.foo.bar' const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: false, - responsesEnabled: true, + request: undefined, + response: ['$.foo.bar'], maxDepth: 10, + rules: { aws: { dynamodb: { request: [], response: [], expand: [] } } }, }) const awsRules = taggingConfig.rules.aws for (const [, service] of Object.entries(awsRules)) { @@ -2987,8 +3127,9 @@ describe('Config', () => { let { cloudPayloadTagging } = getConfig() assertObjectContains(cloudPayloadTagging, { maxDepth: 7, - requestsEnabled: true, - responsesEnabled: true, + request: [], + response: [], + rules: { aws: { dynamodb: { request: [], response: [], expand: [] } } }, }) delete process.env.DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH @@ -2996,8 +3137,9 @@ describe('Config', () => { ; ({ cloudPayloadTagging } = getConfig({ cloudPayloadTagging: { maxDepth: 7 } })) assertObjectContains(cloudPayloadTagging, { maxDepth: 7, - requestsEnabled: true, - responsesEnabled: true, + request: [], + response: [], + rules: { aws: { dynamodb: { request: [], response: [], 
expand: [] } } }, }) }) @@ -3014,7 +3156,10 @@ describe('Config', () => { ; ({ cloudPayloadTagging } = getConfig({ cloudPayloadTagging: { maxDepth: NaN } })) assertObjectContains(cloudPayloadTagging, { maxDepth: 10, + request: undefined, + response: undefined, }) + assert.ok(!(Object.hasOwn(cloudPayloadTagging, 'rules'))) }) }) @@ -3062,8 +3207,9 @@ describe('Config', () => { }, }) - assertObjectContains(updateConfig.getCall(0).args[0], [ - { name: 'stats.enabled', value: true, origin: 'calculated' }, + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: true, origin: 'env_var' }, ]) }) @@ -3079,8 +3225,10 @@ describe('Config', () => { }, }) - assertObjectContains(updateConfig.getCall(0).args[0], [ - { name: 'stats.enabled', value: false, origin: 'calculated' }, + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: false, origin: 'calculated' }, ]) }) @@ -3112,7 +3260,7 @@ describe('Config', () => { fleetConfigPath = path.join(tempDir, 'fleet.yaml') process.env.DD_TEST_LOCAL_CONFIG_PATH = localConfigPath process.env.DD_TEST_FLEET_CONFIG_PATH = fleetConfigPath - reloadLoggerAndConfig() + getConfig() }) afterEach(() => { @@ -3245,7 +3393,11 @@ apm_configuration_default: process.env.AWS_LAMBDA_FUNCTION_NAME = 'my-great-lambda-function' const stableConfig = getConfig() - assert.ok(!(Object.hasOwn(stableConfig, 'stableConfig'))) + assert.deepStrictEqual(stableConfig.stableConfig, { + fleetEntries: {}, + localEntries: {}, + warnings: undefined, + }) }) it('should support all extended configs across product areas', () => { @@ -3299,9 +3451,8 @@ apm_configuration_default: llmobs: { mlApp: 'my-llm-app', 
}, - profiling: { - exporters: 'agent', - }, + DD_PROFILING_EXPORTERS: ['agent'], + profiling: {}, dynamicInstrumentation: { probeFile: '/tmp/probes', }, @@ -3334,7 +3485,7 @@ apm_configuration_default: type: 'local_install', }, cloudPayloadTagging: { - requestsEnabled: true, + request: [], maxDepth: 5, }, }) @@ -3386,8 +3537,8 @@ rules: type: 'fleet_install', }, cloudPayloadTagging: { - requestsEnabled: false, - responsesEnabled: true, + request: undefined, + response: [], maxDepth: 15, }, }) @@ -3512,7 +3663,6 @@ rules: process.env.NX_TASK_TARGET_PROJECT = 'my-nx-project' pkg.name = 'default-service' - reloadLoggerAndConfig() const config = getConfig() @@ -3534,7 +3684,6 @@ rules: } pkg.name = 'default-service' - reloadLoggerAndConfig() const config = getConfig() @@ -3547,7 +3696,6 @@ rules: delete process.env.DD_ENABLE_NX_SERVICE_NAME delete process.env.DD_SERVICE pkg.name = 'default-service' - reloadLoggerAndConfig() getConfig() @@ -3568,7 +3716,6 @@ rules: process.env.DD_ENABLE_NX_SERVICE_NAME = 'true' delete process.env.DD_SERVICE pkg.name = 'default-service' - reloadLoggerAndConfig() getConfig() @@ -3579,7 +3726,6 @@ rules: process.env.NX_TASK_TARGET_PROJECT = 'my-nx-project' process.env.DD_SERVICE = 'explicit-service' delete process.env.DD_ENABLE_NX_SERVICE_NAME - reloadLoggerAndConfig() getConfig() @@ -3708,49 +3854,51 @@ rules: it('should map dynamic_instrumentation_enabled to dynamicInstrumentation.enabled', () => { const config = getConfig() assert.strictEqual(config.dynamicInstrumentation.enabled, false) - config.setRemoteConfig({ dynamic_instrumentation_enabled: true }) + config.setRemoteConfig({ 'dynamicInstrumentation.enabled': true }) assert.strictEqual(config.dynamicInstrumentation.enabled, true) }) it('should map code_origin_enabled to codeOriginForSpans.enabled', () => { const config = getConfig() assert.strictEqual(config.codeOriginForSpans.enabled, true) - config.setRemoteConfig({ code_origin_enabled: false }) + config.setRemoteConfig({ 
'codeOriginForSpans.enabled': false }) assert.strictEqual(config.codeOriginForSpans.enabled, false) }) it('should map tracing_sampling_rate to sampleRate', () => { const config = getConfig() assert.strictEqual(config.sampleRate, undefined) - config.setRemoteConfig({ tracing_sampling_rate: 0.5 }) + config.setRemoteConfig({ sampleRate: 0.5 }) assert.strictEqual(config.sampleRate, 0.5) }) it('should map log_injection_enabled to logInjection', () => { const config = getConfig() assert.strictEqual(config.logInjection, true) - config.setRemoteConfig({ log_injection_enabled: false }) + config.setRemoteConfig({ logInjection: false }) assert.strictEqual(config.logInjection, false) }) it('should map tracing_enabled to tracing', () => { - const config = getConfig() + // Tracing is not exposed as programmatic option and will be ignored. + const config = getConfig({ tracing: false }) assert.strictEqual(config.tracing, true) - config.setRemoteConfig({ tracing_enabled: false }) + config.setRemoteConfig({ tracing: false }) assert.strictEqual(config.tracing, false) }) - it('should map tracing_sampling_rules to sampler.rules', () => { + it('should map tracing_sampling_rules to samplingRules', () => { const config = getConfig() assert.deepStrictEqual(config.sampler.rules, []) - config.setRemoteConfig({ tracing_sampling_rules: [{ sample_rate: 0.5 }] }) + config.setRemoteConfig({ samplingRules: [{ sample_rate: 0.5 }] }) + assert.deepStrictEqual(config.samplingRules, [{ sampleRate: 0.5 }]) assert.deepStrictEqual(config.sampler.rules, [{ sampleRate: 0.5 }]) }) it('should map tracing_header_tags to headerTags', () => { - const config = getConfig({ headerTags: ['foo:bar'] }) + const config = getConfig({ headerTags: ['foo :bar'] }) assert.deepStrictEqual(config.headerTags, ['foo:bar']) - config.setRemoteConfig({ tracing_header_tags: [{ header: 'x-custom-header', tag_name: 'custom.tag' }] }) + config.setRemoteConfig({ headerTags: ['x-custom-header:custom.tag'] }) 
assert.deepStrictEqual(config.headerTags, [ // TODO: There's an unrelated bug in the tracer resulting in headerTags not being merged. // 'foo:bar', @@ -3762,7 +3910,7 @@ rules: const config = getConfig({ tags: { foo: 'bar' } }) assertObjectContains(config.tags, { foo: 'bar' }) assert.strictEqual(config.tags.team, undefined) - config.setRemoteConfig({ tracing_tags: ['team:backend'] }) + config.setRemoteConfig({ tags: { team: 'backend' } }) assertObjectContains(config.tags, { // TODO: There's an unrelated bug in the tracer resulting in tags not being merged. // foo: 'bar', @@ -3775,7 +3923,13 @@ rules: it('should clear RC fields when setRemoteConfig is called with null', () => { const config = getConfig({ logInjection: true, sampleRate: 0.5 }) - config.setRemoteConfig({ tracing_enabled: false }) + assertObjectContains(config, { + tracing: true, + logInjection: true, + sampleRate: 0.5, + }) + + config.setRemoteConfig({ tracing: false }) assertObjectContains(config, { tracing: false, @@ -3794,25 +3948,133 @@ rules: it('should ignore null values', () => { const config = getConfig({ sampleRate: 0.5 }) - config.setRemoteConfig({ tracing_sampling_rate: null }) + config.setRemoteConfig({ sampleRate: null }) assert.strictEqual(config.sampleRate, 0.5) }) it('should treat null values as unset', () => { - const config = getConfig({ sampleRate: 0.5 }) - config.setRemoteConfig({ tracing_sampling_rate: 0.8 }) + const config = getConfig({ sampleRate: 0.5, tracing: true }) + assert.strictEqual(config.sampleRate, 0.5) + assert.strictEqual(config.tracing, true) + config.setRemoteConfig({ sampleRate: 0.8, tracing: false }) assert.strictEqual(config.sampleRate, 0.8) - config.setRemoteConfig({ tracing_sampling_rate: null }) + assert.strictEqual(config.tracing, false) + assert.strictEqual(config.logInjection, true) + config.setRemoteConfig({ logInjection: false }) assert.strictEqual(config.sampleRate, 0.5) + assert.strictEqual(config.tracing, true) + 
assert.strictEqual(config.logInjection, false) + }) + + it('should restore tracked origins when an individual RC option falls back to code', () => { + const config = getConfig({ sampleRate: 0.5, logInjection: true }) + + updateConfig.resetHistory() + + config.setRemoteConfig({ + sampleRate: 0.8, + logInjection: false, + }) + + assert.strictEqual(config.getOrigin('sampleRate'), 'remote_config') + assert.strictEqual(config.getOrigin('logInjection'), 'remote_config') + + config.setRemoteConfig({ + logInjection: false, + }) + + assert.strictEqual(config.sampleRate, 0.5) + assert.strictEqual(config.getOrigin('sampleRate'), 'code') + assert.strictEqual(config.getOrigin('logInjection'), 'remote_config') + }) + + it('should update telemetry when an individual RC option falls back to a previous source', () => { + const config = getConfig({ sampleRate: 0.5, logInjection: true }) + + updateConfig.resetHistory() + + config.setRemoteConfig({ + sampleRate: 0.8, + logInjection: false, + }) + config.setRemoteConfig({ + logInjection: false, + }) + + sinon.assert.calledTwice(updateConfig) + + const telemetry = updateConfig.getCall(1).args[0] + + assertObjectContains(telemetry.sort((a, b) => a.seq_id - b.seq_id), [ + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.8, origin: 'remote_config' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.5, origin: 'code' }, + { name: 'DD_LOGS_INJECTION', value: true, origin: 'code' }, + { name: 'DD_LOGS_INJECTION', value: false, origin: 'remote_config' }, + ]) + }) + + it('should restore default origins when an individual RC option falls back to defaults', () => { + const config = getConfig() + + updateConfig.resetHistory() + + config.setRemoteConfig({ + tracing: false, + sampleRate: 0.8, + }) + + assert.strictEqual(config.getOrigin('tracing'), 'remote_config') + assert.strictEqual(config.getOrigin('sampleRate'), 'remote_config') + + config.setRemoteConfig({ + sampleRate: 0.8, + }) + + assert.strictEqual(config.tracing, true) + 
assert.strictEqual(config.sampleRate, 0.8) + assert.strictEqual(config.getOrigin('tracing'), 'default') + assert.strictEqual(config.getOrigin('sampleRate'), 'remote_config') + }) + + it('should update telemetry when an individual RC option falls back to defaults', () => { + const config = getConfig() + + updateConfig.resetHistory() + + config.setRemoteConfig({ + tracing: false, + sampleRate: 0.1, + }) + config.setRemoteConfig({ + sampleRate: 0.8, + }) + + sinon.assert.calledTwice(updateConfig) + + const telemetry = updateConfig.getCall(1).args[0] + + assertObjectContains(telemetry.sort((a, b) => a.seq_id - b.seq_id), [ + { name: 'DD_TRACE_ENABLED', value: false, origin: 'remote_config' }, + { name: 'DD_TRACE_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: null, origin: 'default' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.8, origin: 'remote_config' }, + ]) }) it('should replace all RC fields with each update', () => { const config = getConfig() + assertObjectContains(config, { + tracing: true, + logInjection: true, + }) + + assert.strictEqual(config.sampleRate, undefined) + config.setRemoteConfig({ - tracing_enabled: true, - log_injection_enabled: false, - tracing_sampling_rate: 0.8, + tracing: true, + logInjection: false, + sampleRate: 0.8, }) assertObjectContains(config, { @@ -3822,7 +4084,7 @@ rules: }) config.setRemoteConfig({ - tracing_enabled: false, + tracing: false, }) assertObjectContains(config, { diff --git a/packages/dd-trace/test/config/remote_config.spec.js b/packages/dd-trace/test/config/remote_config.spec.js index 3ef775c80b7..1e047f1f95d 100644 --- a/packages/dd-trace/test/config/remote_config.spec.js +++ b/packages/dd-trace/test/config/remote_config.spec.js @@ -150,7 +150,7 @@ describe('Tracing Remote Config', () => { // Service config should win const lastCall = config.setRemoteConfig.lastCall - sinon.assert.match(lastCall.args[0], { tracing_sampling_rate: 0.8 }) + sinon.assert.match(lastCall.args[0], { 
sampleRate: 0.8 }) }) it('should handle config removal', () => { @@ -181,7 +181,7 @@ describe('Tracing Remote Config', () => { // Lower priority should now apply const lastCall = config.setRemoteConfig.lastCall - sinon.assert.match(lastCall.args[0], { tracing_sampling_rate: 0.5 }) + sinon.assert.match(lastCall.args[0], { sampleRate: 0.5 }) }) it('should filter configs by service/env', () => { @@ -232,8 +232,8 @@ describe('Tracing Remote Config', () => { // Service config sampling rate should win, but log_injection should come from org const lastCall = config.setRemoteConfig.lastCall sinon.assert.match(lastCall.args[0], { - tracing_sampling_rate: 0.8, - log_injection_enabled: true, + sampleRate: 0.8, + logInjection: true, }) }) diff --git a/packages/dd-trace/test/dogstatsd.spec.js b/packages/dd-trace/test/dogstatsd.spec.js index 7b90f2586c8..184590e25e0 100644 --- a/packages/dd-trace/test/dogstatsd.spec.js +++ b/packages/dd-trace/test/dogstatsd.spec.js @@ -132,7 +132,7 @@ describe('dogstatsd', () => { assert.strictEqual(udp4.send.firstCall.args[0].toString(), 'test.avg:10|g\n') assert.strictEqual(udp4.send.firstCall.args[1], 0) assert.strictEqual(udp4.send.firstCall.args[2], 14) - assert.strictEqual(udp4.send.firstCall.args[3], '8125') + assert.strictEqual(udp4.send.firstCall.args[3], 8125) assert.strictEqual(udp4.send.firstCall.args[4], '127.0.0.1') }) @@ -146,7 +146,7 @@ describe('dogstatsd', () => { assert.strictEqual(udp4.send.firstCall.args[0].toString(), 'test.histogram:10|h\n') assert.strictEqual(udp4.send.firstCall.args[1], 0) assert.strictEqual(udp4.send.firstCall.args[2], 20) - assert.strictEqual(udp4.send.firstCall.args[3], '8125') + assert.strictEqual(udp4.send.firstCall.args[3], 8125) assert.strictEqual(udp4.send.firstCall.args[4], '127.0.0.1') }) diff --git a/packages/dd-trace/test/helpers/config.js b/packages/dd-trace/test/helpers/config.js index 0e83e73f4be..aa325ea58c9 100644 --- a/packages/dd-trace/test/helpers/config.js +++ 
b/packages/dd-trace/test/helpers/config.js @@ -2,11 +2,13 @@ const proxyquire = require('proxyquire') -// Resolve the config module from within the test package -const CONFIG_PATH = require.resolve('../../src/config') - function getConfigFresh (options) { - return proxyquire.noPreserveCache()(CONFIG_PATH, {})(options) + const helper = proxyquire.noPreserveCache()('../../src/config/helper.js', {}) + const defaults = proxyquire.noPreserveCache()('../../src/config/defaults.js', {}) + return proxyquire.noPreserveCache()('../../src/config', { + './defaults': defaults, + './helper': helper, + })(options) } module.exports = { diff --git a/packages/dd-trace/test/llmobs/sdk/index.spec.js b/packages/dd-trace/test/llmobs/sdk/index.spec.js index d2c8ce7a586..ceeec2dd691 100644 --- a/packages/dd-trace/test/llmobs/sdk/index.spec.js +++ b/packages/dd-trace/test/llmobs/sdk/index.spec.js @@ -150,7 +150,9 @@ describe('sdk', () => { } const config = getConfigFresh({ - llmobs: {}, + llmobs: { + agentlessEnabled: false, + }, }) const enabledLLMObs = new LLMObsSDK(tracer._tracer, llmobsModule, config) diff --git a/packages/dd-trace/test/log.spec.js b/packages/dd-trace/test/log.spec.js index e7ce165e6dd..93bba4b1875 100644 --- a/packages/dd-trace/test/log.spec.js +++ b/packages/dd-trace/test/log.spec.js @@ -15,6 +15,41 @@ describe('log', () => { describe('config', () => { let env + /** + * @param {{ + * fleetEntries?: Record, + * localEntries?: Record, + * isServerless?: boolean + * }} [options] + */ + const reloadLog = (options = {}) => { + const { fleetEntries, localEntries, isServerless = true } = options + const logWriter = { + configure: sinon.spy(), + } + const configHelper = isServerless + ? 
proxyquire.noPreserveCache()('../src/config/helper', { + '../serverless': { IS_SERVERLESS: true }, + }) + : proxyquire.noPreserveCache()('../src/config/helper', { + '../serverless': { IS_SERVERLESS: false }, + './stable': function StableConfigStub () { + this.localEntries = localEntries + this.fleetEntries = fleetEntries + this.warnings = [] + }, + }) + + const log = proxyquire.noPreserveCache()('../src/log', { + '../config/helper': configHelper, + './writer': logWriter, + }) + + logWriter.configure.resetHistory() + + return { log, logWriter } + } + beforeEach(() => { env = process.env process.env = {} @@ -24,110 +59,176 @@ describe('log', () => { process.env = env }) - it('should have getConfig function', () => { - const log = require('../src/log') - assert.strictEqual(typeof log.getConfig, 'function') + it('should have configure function', () => { + const { log } = reloadLog() + assert.strictEqual(typeof log.configure, 'function') }) - it('should be configured with default config if no environment variables are set', () => { - const log = require('../src/log') - assert.deepStrictEqual(log.getConfig(), { - enabled: false, - logger: undefined, - logLevel: 'debug', - }) + it('should configure with default config if no environment variables are set', () => { + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), false) + sinon.assert.calledOnceWithExactly(logWriter.configure, false, 'debug', undefined) }) - it('should not be possbile to mutate config object returned by getConfig', () => { - const log = require('../src/log') - const config = log.getConfig() - config.enabled = 1 - config.logger = 1 - config.logLevel = 1 - assert.deepStrictEqual(log.getConfig(), { - enabled: false, - logger: undefined, - logLevel: 'debug', - }) + it('should pass the logger option to the writer', () => { + const { log, logWriter } = reloadLog() + const logger = { + debug: () => {}, + error: () => {}, + } + + log.configure({ logger }) + + 
sinon.assert.calledOnceWithExactly(logWriter.configure, false, 'debug', logger) }) it('should initialize from environment variables with DD env vars taking precedence OTEL env vars', () => { process.env.DD_TRACE_LOG_LEVEL = 'error' process.env.DD_TRACE_DEBUG = 'false' process.env.OTEL_LOG_LEVEL = 'debug' - const config = proxyquire('../src/log', {}).getConfig() - assert.strictEqual(config.enabled, false) - assert.strictEqual(config.logLevel, 'error') + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), false) + sinon.assert.calledOnceWithExactly(logWriter.configure, false, 'error', undefined) }) it('should initialize with OTEL environment variables when DD env vars are not set', () => { process.env.OTEL_LOG_LEVEL = 'debug' - const config = proxyquire('../src/log', {}).getConfig() - assert.strictEqual(config.enabled, true) - assert.strictEqual(config.logLevel, 'debug') + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), true) + sinon.assert.calledOnceWithExactly(logWriter.configure, true, 'debug', undefined) }) it('should initialize from environment variables', () => { process.env.DD_TRACE_DEBUG = 'true' - const config = proxyquire('../src/log', {}).getConfig() - assert.strictEqual(config.enabled, true) + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), true) + sinon.assert.calledOnceWithExactly(logWriter.configure, true, 'debug', undefined) }) it('should read case-insensitive booleans from environment variables', () => { process.env.DD_TRACE_DEBUG = 'TRUE' - const config = proxyquire('../src/log', {}).getConfig() - assert.strictEqual(config.enabled, true) + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), true) + sinon.assert.calledOnceWithExactly(logWriter.configure, true, 'debug', undefined) }) - describe('isEnabled', () => { + describe('configure', () => { it('prefers fleetStableConfigValue over env and local', () => { - const log = 
proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled('true', 'false'), true) - assert.strictEqual(log.isEnabled('false', 'true'), false) + process.env.DD_TRACE_DEBUG = 'false' + + let loaded = reloadLog({ + fleetEntries: { DD_TRACE_DEBUG: 'true' }, + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'false' }, + }) + assert.strictEqual(loaded.log.configure({}), true) + + process.env.DD_TRACE_DEBUG = 'true' + + loaded = reloadLog({ + fleetEntries: { DD_TRACE_DEBUG: 'false' }, + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'true' }, + }) + assert.strictEqual(loaded.log.configure({}), false) }) it('uses DD_TRACE_DEBUG when fleetStableConfigValue is not set', () => { process.env.DD_TRACE_DEBUG = 'true' - let log = proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, 'false'), true) + let loaded = reloadLog({ + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'false' }, + }) + assert.strictEqual(loaded.log.configure({}), true) process.env.DD_TRACE_DEBUG = 'false' - log = proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, 'true'), false) + loaded = reloadLog({ + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'true' }, + }) + assert.strictEqual(loaded.log.configure({}), false) }) it('uses OTEL_LOG_LEVEL=debug when DD vars are not set', () => { process.env.OTEL_LOG_LEVEL = 'debug' - let log = proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, undefined), true) + let loaded = reloadLog({ + isServerless: false, + localEntries: { OTEL_LOG_LEVEL: 'info' }, + }) + assert.strictEqual(loaded.log.configure({}), true) process.env.OTEL_LOG_LEVEL = 'info' - log = proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, undefined), false) + loaded = reloadLog({ + isServerless: false, + localEntries: { OTEL_LOG_LEVEL: 'debug' }, + }) + assert.strictEqual(loaded.log.configure({}), false) }) it('falls back to localStableConfigValue', () => { - const log = 
proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, 'false'), false) - assert.strictEqual(log.isEnabled(undefined, 'true'), true) + let loaded = reloadLog({ + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'false' }, + }) + assert.strictEqual(loaded.log.configure({}), false) + + loaded = reloadLog({ + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'true' }, + }) + assert.strictEqual(loaded.log.configure({}), true) }) it('falls back to internal config.enabled when nothing else provided', () => { - const log = proxyquire('../src/log', {}) - log.toggle(true) - assert.strictEqual(log.isEnabled(), true) - log.toggle(false) - assert.strictEqual(log.isEnabled(), false) + const { log, logWriter } = reloadLog({ + fleetEntries: {}, + isServerless: false, + localEntries: {}, + }) + + process.env.OTEL_LOG_LEVEL = 'debug' + assert.strictEqual(log.configure({}), true) + + process.env = {} + assert.strictEqual(log.configure({}), true) + sinon.assert.calledWithExactly(logWriter.configure.secondCall, true, 'debug', undefined) + }) + + it('falls back to the previous log level when no override is provided', () => { + const { log, logWriter } = reloadLog() + + log.configure({ logLevel: 'error' }) + log.configure({}) + + sinon.assert.calledWithExactly(logWriter.configure.secondCall, false, 'error', undefined) }) }) }) describe('general usage', () => { + let env let log let logger let error + function loadConfiguredLog (options = {}, envEntries = {}) { + process.env = { + DD_TRACE_DEBUG: 'true', + ...envEntries, + } + log = proxyquire.noPreserveCache()('../src/log', {}) + log.configure(options) + return log + } + beforeEach(() => { + env = process.env + process.env = {} sinon.stub(console, 'info') sinon.stub(console, 'error') sinon.stub(console, 'warn') @@ -140,12 +241,11 @@ describe('log', () => { error: sinon.spy(), } - log = proxyquire('../src/log', {}) - log.toggle(true) + loadConfiguredLog() }) afterEach(() => { - log.reset() + process.env = 
env console.info.restore() console.error.restore() console.warn.restore() @@ -153,12 +253,11 @@ describe('log', () => { }) it('should support chaining', () => { + loadConfiguredLog({ logger }) + log - .use(logger) - .toggle(true) .error('error') .debug('debug') - .reset() }) it('should call the logger in a noop context', () => { @@ -167,7 +266,8 @@ describe('log', () => { assert.strictEqual(storage('legacy').getStore().noop, true) } - log.use(logger).debug('debug') + loadConfiguredLog({ logger }) + log.debug('debug') }) describe('debug', () => { @@ -198,7 +298,7 @@ describe('log', () => { } } - log.toggle(true, 'trace') + loadConfiguredLog({ logLevel: 'trace' }) log.trace('argument', { hello: 'world' }, new Foo()) sinon.assert.calledOnce(console.debug) @@ -310,9 +410,9 @@ describe('log', () => { }) }) - describe('toggle', () => { - it('should disable the logger', () => { - log.toggle(false) + describe('configure', () => { + it('should disable the logger when DD_TRACE_DEBUG is false', () => { + loadConfiguredLog({}, { DD_TRACE_DEBUG: 'false' }) log.debug('debug') log.error(error) @@ -320,9 +420,8 @@ describe('log', () => { sinon.assert.notCalled(console.error) }) - it('should enable the logger', () => { - log.toggle(false) - log.toggle(true) + it('should enable the logger when OTEL_LOG_LEVEL is debug', () => { + loadConfiguredLog({}, { OTEL_LOG_LEVEL: 'debug' }) log.debug('debug') log.error(error) @@ -330,8 +429,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should set minimum log level when enabled with logLevel argument set to a valid string', () => { - log.toggle(true, 'error') + it('should set minimum log level when configured with a valid string', () => { + loadConfiguredLog({ logLevel: 'error' }) log.debug('debug') log.error(error) @@ -339,8 +438,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should set default log level when enabled with logLevel argument set to an invalid string', () 
=> { - log.toggle(true, 'not a real log level') + it('should set default log level when configured with an invalid string', () => { + loadConfiguredLog({ logLevel: 'not a real log level' }) log.debug('debug') log.error(error) @@ -348,8 +447,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should set min log level when enabled w/logLevel arg set to valid string w/wrong case or whitespace', () => { - log.toggle(true, ' ErRoR ') + it('should set min log level when configured with valid string with wrong case or whitespace', () => { + loadConfiguredLog({ logLevel: ' ErRoR ' }) log.debug('debug') log.error(error) @@ -358,7 +457,7 @@ describe('log', () => { }) it('should log all log levels greater than or equal to minimum log level', () => { - log.toggle(true, 'debug') + loadConfiguredLog({ logLevel: 'debug' }) log.debug('debug') log.error(error) @@ -366,8 +465,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should enable default log level when enabled with logLevel argument set to invalid input', () => { - log.toggle(true, ['trace', 'info', 'eror']) + it('should enable default log level when configured with invalid input', () => { + loadConfiguredLog({ logLevel: ['trace', 'info', 'eror'] }) log.debug('debug') log.error(error) @@ -375,8 +474,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should enable default log level when enabled without logLevel argument', () => { - log.toggle(true) + it('should enable default log level when configured without logLevel argument', () => { + loadConfiguredLog() log.debug('debug') log.error(error) @@ -385,9 +484,9 @@ describe('log', () => { }) }) - describe('use', () => { + describe('logger option', () => { it('should set the underlying logger when valid', () => { - log.use(logger) + loadConfiguredLog({ logger }) log.debug('debug') log.error(error) @@ -396,7 +495,7 @@ describe('log', () => { }) it('be a no op with an empty logger', 
() => { - log.use(null) + loadConfiguredLog({ logger: null }) log.debug('debug') log.error(error) @@ -405,42 +504,7 @@ describe('log', () => { }) it('be a no op with an invalid logger', () => { - log.use('invalid') - log.debug('debug') - log.error(error) - - sinon.assert.calledWith(console.debug, 'debug') - sinon.assert.calledWith(console.error, error) - }) - }) - - describe('reset', () => { - it('should reset the logger', () => { - log.use(logger) - log.reset() - log.toggle(true) - log.debug('debug') - log.error(error) - - sinon.assert.calledWith(console.debug, 'debug') - sinon.assert.calledWith(console.error, error) - }) - - it('should reset the toggle', () => { - log.use(logger) - log.reset() - log.debug('debug') - log.error(error) - - sinon.assert.notCalled(console.debug) - sinon.assert.notCalled(console.error) - }) - - it('should reset the minimum log level to defaults', () => { - log.use(logger) - log.toggle(true, 'error') - log.reset() - log.toggle(true) + loadConfiguredLog({ logger: 'invalid' }) log.debug('debug') log.error(error) @@ -471,11 +535,7 @@ describe('log', () => { let logWriter beforeEach(() => { - logWriter = require('../src/log/writer') - }) - - afterEach(() => { - logWriter.reset() + logWriter = proxyquire.noPreserveCache()('../src/log/writer', {}) }) describe('error', () => { @@ -486,7 +546,7 @@ describe('log', () => { }) it('should call console.error no matter enable flag value', () => { - logWriter.toggle(false) + logWriter.configure(false) logWriter.error(error) sinon.assert.calledOnceWithExactly(console.error, error) @@ -501,14 +561,14 @@ describe('log', () => { }) it('should call logger debug if warn is not provided', () => { - logWriter.use(logger) + logWriter.configure(false, undefined, logger) logWriter.warn('warn') sinon.assert.calledOnceWithExactly(logger.debug, 'warn') }) it('should call console.warn no matter enable flag value', () => { - logWriter.toggle(false) + logWriter.configure(false) logWriter.warn('warn') 
 sinon.assert.calledOnceWithExactly(console.warn, 'warn') @@ -523,14 +583,14 @@ }) it('should call logger debug if info is not provided', () => { - logWriter.use(logger) + logWriter.configure(false, undefined, logger) logWriter.info('info') sinon.assert.calledOnceWithExactly(logger.debug, 'info') }) it('should call console.info no matter enable flag value', () => { - logWriter.toggle(false) + logWriter.configure(false) logWriter.info('info') sinon.assert.calledOnceWithExactly(console.info, 'info') @@ -545,7 +605,7 @@ }) it('should call console.debug no matter enable flag value', () => { - logWriter.toggle(false) + logWriter.configure(false) logWriter.debug('debug') sinon.assert.calledOnceWithExactly(console.debug, 'debug') diff --git a/packages/dd-trace/test/opentelemetry/metrics.spec.js b/packages/dd-trace/test/opentelemetry/metrics.spec.js index 5e3b82ad449..ab09405033a 100644 --- a/packages/dd-trace/test/opentelemetry/metrics.spec.js +++ b/packages/dd-trace/test/opentelemetry/metrics.spec.js @@ -29,7 +29,15 @@ describe('OpenTelemetry Meter Provider', () => { process.env.OTEL_METRIC_EXPORT_INTERVAL = '100' process.env.OTEL_EXPORTER_OTLP_METRICS_TIMEOUT = '5000' } - Object.assign(process.env, envOverrides) + if (envOverrides) { + for (const [key, value] of Object.entries(envOverrides)) { + if (value === undefined) { + delete process.env[key] + } else { + process.env[key] = value + } + } + } const dogstatsd = proxyquire.noPreserveCache()('../../src/dogstatsd', {}) @@ -673,9 +681,9 @@ describe('OpenTelemetry Meter Provider', () => { meter.removeBatchObservableCallback(() => {}, []) assert.strictEqual(warnSpy.callCount, 2) - assert.strictEqual(warnSpy.firstCall.args[0], 'addBatchObservableCallback is not implemented') - assert.strictEqual(warnSpy.secondCall.args[0], 'removeBatchObservableCallback is not implemented') + assert(warnSpy.getCalls().some(call => format(...call.args) === 'addBatchObservableCallback is not 
implemented')) + assert(warnSpy.getCalls().some(call => format(...call.args) === 'removeBatchObservableCallback is not implemented')) warnSpy.restore() }) }) @@ -786,7 +794,7 @@ describe('OpenTelemetry Meter Provider', () => { }) }) - describe('NonNegInt Configuration Validation', () => { + describe('Allowed Integer Configuration Validation', () => { let log, warnSpy beforeEach(() => { @@ -798,6 +806,10 @@ describe('OpenTelemetry Meter Provider', () => { warnSpy.restore() }) + function hasWarning (message) { + return warnSpy.getCalls().some(call => format(...call.args).includes(message)) + } + it('rejects zero for metrics configs with allowZero=false', () => { setupTracer({ OTEL_BSP_SCHEDULE_DELAY: '0', @@ -808,16 +820,16 @@ OTEL_METRIC_EXPORT_TIMEOUT: '0', OTEL_BSP_MAX_EXPORT_BATCH_SIZE: '0', }, false) - assert(warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_BSP_SCHEDULE_DELAY/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_METRIC_EXPORT_INTERVAL/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_BSP_MAX_EXPORT_BATCH_SIZE/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_BSP_MAX_QUEUE_SIZE/.test(format(...call.args)))) - assert(!warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_EXPORTER_OTLP_TIMEOUT/.test(format(...call.args)))) - assert(!warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_METRIC_EXPORT_TIMEOUT/.test(format(...call.args)))) - assert(!warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT/.test(format(...call.args)))) + assert(hasWarning('Invalid value: 0 for OTEL_BSP_SCHEDULE_DELAY')) + assert(hasWarning('Invalid value: 0 for OTEL_METRIC_EXPORT_INTERVAL')) + assert(hasWarning('Invalid value: 0 for OTEL_BSP_MAX_QUEUE_SIZE')) + assert(hasWarning('Invalid value: 0 for OTEL_EXPORTER_OTLP_TIMEOUT')) + 
assert(hasWarning('Invalid value: 0 for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT')) + assert(hasWarning('Invalid value: 0 for OTEL_METRIC_EXPORT_TIMEOUT')) + assert(hasWarning('Invalid value: 0 for OTEL_BSP_MAX_EXPORT_BATCH_SIZE')) }) - it('rejects negative values for all configs', () => { + it('rejects negative values for non-negative integer configs', () => { setupTracer({ OTEL_EXPORTER_OTLP_TIMEOUT: '-1', OTEL_EXPORTER_OTLP_LOGS_TIMEOUT: '-1', @@ -828,17 +840,17 @@ describe('OpenTelemetry Meter Provider', () => { OTEL_BSP_MAX_EXPORT_BATCH_SIZE: '-1', OTEL_BSP_MAX_QUEUE_SIZE: '-1', }, false) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_EXPORTER_OTLP_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_EXPORTER_OTLP_LOGS_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_METRIC_EXPORT_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_METRIC_EXPORT_INTERVAL/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_BSP_SCHEDULE_DELAY/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_BSP_MAX_EXPORT_BATCH_SIZE/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_BSP_MAX_QUEUE_SIZE/.test(format(...call.args)))) + assert(hasWarning('Invalid value: -1 for OTEL_EXPORTER_OTLP_TIMEOUT')) + assert(hasWarning('Invalid value: -1 for OTEL_EXPORTER_OTLP_LOGS_TIMEOUT')) + assert(hasWarning('Invalid value: -1 for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT')) + assert(hasWarning('Invalid value: -1 for OTEL_METRIC_EXPORT_TIMEOUT')) + assert(hasWarning('Invalid value: -1 for OTEL_METRIC_EXPORT_INTERVAL')) + assert(hasWarning('Invalid value: -1 for 
OTEL_BSP_SCHEDULE_DELAY')) + assert(hasWarning('Invalid value: -1 for OTEL_BSP_MAX_EXPORT_BATCH_SIZE')) + assert(hasWarning('Invalid value: -1 for OTEL_BSP_MAX_QUEUE_SIZE')) }) - it('rejects values that are not numbers for all configs', () => { + it('rejects values that are not numbers for integer-based configs', () => { setupTracer({ OTEL_EXPORTER_OTLP_TIMEOUT: 'not a number', OTEL_EXPORTER_OTLP_LOGS_TIMEOUT: 'invalid', @@ -849,14 +861,14 @@ describe('OpenTelemetry Meter Provider', () => { OTEL_BSP_MAX_EXPORT_BATCH_SIZE: 'abc', OTEL_BSP_MAX_QUEUE_SIZE: 'xyz', }, false) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_EXPORTER_OTLP_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_EXPORTER_OTLP_LOGS_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_METRIC_EXPORT_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_METRIC_EXPORT_INTERVAL/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_BSP_SCHEDULE_DELAY/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_BSP_MAX_EXPORT_BATCH_SIZE/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_BSP_MAX_QUEUE_SIZE/.test(format(...call.args)))) + assert(hasWarning("Invalid INT input: 'not a number' for OTEL_EXPORTER_OTLP_TIMEOUT")) + assert(hasWarning("Invalid INT input: 'invalid' for OTEL_EXPORTER_OTLP_LOGS_TIMEOUT")) + assert(hasWarning("Invalid INT input: 'hi sir' for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT")) + assert(hasWarning("Invalid INT input: '@weeeeee' for OTEL_METRIC_EXPORT_TIMEOUT")) + assert(hasWarning("Invalid INT input: 'python!' 
for OTEL_METRIC_EXPORT_INTERVAL")) + assert(hasWarning("Invalid INT input: 'NaN' for OTEL_BSP_SCHEDULE_DELAY")) + assert(hasWarning("Invalid INT input: 'abc' for OTEL_BSP_MAX_EXPORT_BATCH_SIZE")) + assert(hasWarning("Invalid INT input: 'xyz' for OTEL_BSP_MAX_QUEUE_SIZE")) }) }) diff --git a/packages/dd-trace/test/opentracing/propagation/text_map.spec.js b/packages/dd-trace/test/opentracing/propagation/text_map.spec.js index 5cf016ffb55..45683a7c6a4 100644 --- a/packages/dd-trace/test/opentracing/propagation/text_map.spec.js +++ b/packages/dd-trace/test/opentracing/propagation/text_map.spec.js @@ -660,15 +660,15 @@ describe('TextMapPropagator', () => { // should not add baggage when key list is empty config = getConfigFresh({ - baggageTagKeys: '', + baggageTagKeys: [], }) propagator = new TextMapPropagator(config) const spanContextC = propagator.extract(carrier) assert.deepStrictEqual(spanContextC._trace.tags, {}) - // should not add baggage when key list is empty + // should not add baggage when key list does not contain the key config = getConfigFresh({ - baggageTagKeys: 'customKey', + baggageTagKeys: ['customKey'], }) propagator = new TextMapPropagator(config) carrier = { @@ -683,7 +683,7 @@ describe('TextMapPropagator', () => { // should add all baggage to span tags config = getConfigFresh({ - baggageTagKeys: '*', + baggageTagKeys: ['*'], }) propagator = new TextMapPropagator(config) carrier = { @@ -1215,6 +1215,76 @@ describe('TextMapPropagator', () => { }) }) + describe('with B3 propagation from DD_TRACE_PROPAGATION_STYLE', () => { + beforeEach(() => { + config.tracePropagationStyle.extract = ['b3'] + config.getOrigin = sinon.stub().withArgs('tracePropagationStyle.extract').returns('env_var') + + delete textMap['x-datadog-trace-id'] + delete textMap['x-datadog-parent-id'] + + TextMapPropagator = proxyquire('../../../src/opentracing/propagation/text_map', { + '../../config/helper': { + getConfiguredEnvName: sinon.stub().withArgs('DD_TRACE_PROPAGATION_STYLE') + 
.returns('DD_TRACE_PROPAGATION_STYLE'), + }, + '../../log': log, + '../../telemetry/metrics': telemetryMetrics, + }) + propagator = new TextMapPropagator(config) + }) + + it('should extract B3 as multiple headers', () => { + textMap['x-b3-traceid'] = '0000000000000123' + textMap['x-b3-spanid'] = '0000000000000456' + textMap['x-b3-sampled'] = '1' + + const spanContext = propagator.extract(textMap) + + assert.deepStrictEqual(spanContext, createContext({ + traceId: id('123', 16), + spanId: id('456', 16), + sampling: { + priority: AUTO_KEEP, + }, + })) + }) + }) + + describe('with B3 propagation from OTEL_PROPAGATORS', () => { + beforeEach(() => { + config.tracePropagationStyle.extract = ['b3'] + config.getOrigin = sinon.stub().withArgs('tracePropagationStyle.extract').returns('env_var') + + delete textMap['x-datadog-trace-id'] + delete textMap['x-datadog-parent-id'] + + TextMapPropagator = proxyquire('../../../src/opentracing/propagation/text_map', { + '../../config/helper': { + getConfiguredEnvName: sinon.stub().withArgs('DD_TRACE_PROPAGATION_STYLE') + .returns('OTEL_PROPAGATORS'), + }, + '../../log': log, + '../../telemetry/metrics': telemetryMetrics, + }) + propagator = new TextMapPropagator(config) + }) + + it('should extract B3 as a single header', () => { + textMap.b3 = '0000000000000123-0000000000000456-1' + + const spanContext = propagator.extract(textMap) + + assert.deepStrictEqual(spanContext, createContext({ + traceId: id('123', 16), + spanId: id('456', 16), + sampling: { + priority: AUTO_KEEP, + }, + })) + }) + }) + describe('with B3 propagation as a single header', () => { beforeEach(() => { config.tracePropagationStyle.extract = ['b3 single header'] diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index ead276ba385..60361002fb6 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -9,6 +9,7 @@ const satisfies = require('semifies') 
const { assertObjectContains } = require('../../../../integration-tests/helpers') require('../setup/core') +const { getConfigFresh } = require('../helpers/config') const { AgentExporter } = require('../../src/profiling/exporters/agent') const { FileExporter } = require('../../src/profiling/exporters/file') const WallProfiler = require('../../src/profiling/profilers/wall') @@ -22,37 +23,12 @@ const oomMonitoringSupported = process.platform !== 'win32' const isAtLeast24 = satisfies(process.versions.node, '>=24.0.0') const zstdOrGzip = isAtLeast24 ? 'zstd' : 'gzip' +/** @typedef {InstanceType<(typeof import('../../src/profiling/config'))['Config']>} ProfilerConfig */ + describe('config', () => { - let Config let env - const nullLogger = { - debug () { }, - info () { }, - warn () { }, - error () { }, - } beforeEach(() => { - const ProfilingConfig = require('../../src/profiling/config').Config - // Wrap the real profiling Config so tests see a valid default URL when none - // is provided, matching what the tracer Config singleton would provide at runtime. 
- Config = class TestConfig extends ProfilingConfig { - constructor (options = {}) { - const hasAddress = - options.url !== undefined || - options.hostname !== undefined || - options.port !== undefined - - if (hasAddress) { - super(options) - } else { - super({ - url: 'http://127.0.0.1:8126', - ...options, - }) - } - } - } env = process.env process.env = {} }) @@ -61,82 +37,115 @@ describe('config', () => { process.env = env }) + /** + * @param {Record} [tracerOptions] + * @returns {{config: ProfilerConfig, warnings: string[], errors: string[]}} + */ + function getProfilerConfig (tracerOptions) { + process.env.DD_PROFILING_ENABLED = '1' + + const tracerConfig = getConfigFresh(tracerOptions) + + const ProfilingConfig = require('../../src/profiling/config').Config + const config = /** @type {ProfilerConfig} */ (new ProfilingConfig(tracerConfig)) + + return { + config, + warnings: [], + errors: [], + } + } + it('should have the correct defaults', () => { - const config = new Config() + const { config } = getProfilerConfig() assertObjectContains(config, { - service: 'node', flushInterval: 65 * 1000, + activation: 'manual', + v8ProfilerBugWorkaroundEnabled: true, + cpuProfilingEnabled: samplingContextsAvailable, + uploadCompression: { + method: zstdOrGzip, + level: undefined, + }, }) - - assert.deepStrictEqual(config.tags, { - service: 'node', + assert.strictEqual(typeof config.service, 'string') + assert.ok(config.service.length > 0) + assert.strictEqual(typeof config.version, 'string') + assertObjectContains(config.tags, { + service: config.service, + version: config.version, }) - + assert.strictEqual(config.tags.host, undefined) assert.ok(config.logger instanceof ConsoleLogger) - assert.ok(config.exporters[0] instanceof AgentExporter) - assert.ok(config.profilers[0] instanceof SpaceProfiler) - assert.ok(config.profilers[1] instanceof WallProfiler) - assert.strictEqual(config.profilers[1].codeHotspotsEnabled(), samplingContextsAvailable) - 
assert.strictEqual(config.v8ProfilerBugWorkaroundEnabled, true) - assert.strictEqual(config.cpuProfilingEnabled, samplingContextsAvailable) - assert.strictEqual(config.uploadCompression.method, zstdOrGzip) - assert.strictEqual(config.uploadCompression.level, undefined) + assert.deepStrictEqual( + config.profilers.slice(0, 2).map(profiler => profiler.constructor), + [SpaceProfiler, WallProfiler] + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[1]).codeHotspotsEnabled(), + samplingContextsAvailable + ) + assert.deepStrictEqual(config.exporters.map(exporter => exporter.constructor), [AgentExporter]) }) it('should support configuration options', () => { - const options = { + process.env = { + DD_PROFILING_EXPORTERS: 'agent,file', + DD_PROFILING_PROFILERS: 'space,wall', + DD_PROFILING_CODEHOTSPOTS_ENABLED: '0', + } + + const { config } = getProfilerConfig({ service: 'test', version: '1.2.3-test.0', - logger: nullLogger, - exporters: 'agent,file', - profilers: 'space,wall', url: 'http://localhost:1234/', - codeHotspotsEnabled: false, reportHostname: true, - } - - const config = new Config(options) + }) - assert.strictEqual(config.service, options.service) - assert.strictEqual(typeof config.tags.host, 'string') - assert.strictEqual(config.version, options.version) - assert.ok(typeof config.tags === 'object' && config.tags !== null) + assertObjectContains(config, { + service: 'test', + version: '1.2.3-test.0', + flushInterval: 65 * 1000, + tags: { + service: 'test', + version: '1.2.3-test.0', + }, + }) assert.strictEqual(typeof config.tags.host, 'string') - assert.strictEqual(config.tags.service, options.service) - assert.strictEqual(config.tags.version, options.version) - assert.strictEqual(config.flushInterval, 65 * 1000) - assert.ok(Array.isArray(config.exporters)) - assert.strictEqual(config.exporters.length, 2) - assert.ok(config.exporters[0] instanceof AgentExporter) - assert.strictEqual(config.exporters[0]._url.toString(), options.url) - 
assert.ok(config.exporters[1] instanceof FileExporter) - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2 + (samplingContextsAvailable ? 1 : 0)) - assert.ok(config.profilers[0] instanceof SpaceProfiler) - assert.ok(config.profilers[1] instanceof WallProfiler) - assert.strictEqual(config.profilers[1].codeHotspotsEnabled(), false) - if (samplingContextsAvailable) { - assert.ok(config.profilers[2] instanceof EventsProfiler) - } + assert.strictEqual(config.exporters[0]._url.toString(), 'http://localhost:1234/') + assert.deepStrictEqual( + config.exporters.map(exporter => exporter.constructor), + [AgentExporter, FileExporter] + ) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? [SpaceProfiler, WallProfiler, EventsProfiler] + : [SpaceProfiler, WallProfiler] + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[1]).codeHotspotsEnabled(), + false + ) }) it('should not include host tag when reportHostname is false', () => { - const config = new Config({ reportHostname: false }) + const { config } = getProfilerConfig({ reportHostname: false }) assert.strictEqual(config.tags.host, undefined) assert.ok(!('host' in config.tags)) }) it('should not include host tag when reportHostname is not set', () => { - const config = new Config({}) + const { config } = getProfilerConfig() assert.strictEqual(config.tags.host, undefined) assert.ok(!('host' in config.tags)) }) it('should include host tag when reportHostname is true', () => { - const config = new Config({ reportHostname: true }) + const { config } = getProfilerConfig({ reportHostname: true }) assert.strictEqual(typeof config.tags.host, 'string') assert.ok(config.tags.host.length > 0) @@ -144,41 +153,38 @@ describe('config', () => { }) it('should filter out invalid profilers', () => { + process.env = { + DD_PROFILING_PROFILERS: 'nope,also_nope', + } + + /** @type {string[]} */ const errors = [] - const 
options = { - logger: { - debug () {}, - info () {}, - warn () {}, - error (error) { - errors.push(error) - }, + const logger = { + debug () {}, + info () {}, + warn () {}, + error (message) { + errors.push(String(message)) }, - profilers: 'nope,also_nope', } - const config = new Config(options) - - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 0) + const { config } = getProfilerConfig({ logger }) - assert.strictEqual(errors.length, 2) - assert.strictEqual(errors[0], 'Unknown profiler "nope"') - assert.strictEqual(errors[1], 'Unknown profiler "also_nope"') + assert.deepStrictEqual(config.profilers.map(profiler => profiler.constructor), []) + assert.deepStrictEqual(errors, [ + 'Unknown profiler "nope"', + 'Unknown profiler "also_nope"', + ]) }) it('should support profiler config with empty DD_PROFILING_PROFILERS', () => { process.env = { DD_PROFILING_PROFILERS: '', } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 0) + assert.deepStrictEqual(config.profilers.map(profiler => profiler.constructor), []) }) it('should support profiler config with DD_PROFILING_PROFILERS', () => { @@ -186,24 +192,23 @@ describe('config', () => { DD_PROFILING_PROFILERS: 'wall', DD_PROFILING_V8_PROFILER_BUG_WORKAROUND: '0', } - if (samplingContextsAvailable) { - process.env.DD_PROFILING_EXPERIMENTAL_CPU_ENABLED = '1' - } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 1 + (samplingContextsAvailable ? 
1 : 0)) - assert.ok(config.profilers[0] instanceof WallProfiler) - assert.strictEqual(config.profilers[0].codeHotspotsEnabled(), samplingContextsAvailable) - if (samplingContextsAvailable) { - assert.ok(config.profilers[1] instanceof EventsProfiler) - } - assert.strictEqual(config.v8ProfilerBugWorkaroundEnabled, false) - assert.strictEqual(config.cpuProfilingEnabled, samplingContextsAvailable) + assertObjectContains(config, { + v8ProfilerBugWorkaroundEnabled: false, + cpuProfilingEnabled: samplingContextsAvailable, + }) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? [WallProfiler, EventsProfiler] + : [WallProfiler] + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[0]).codeHotspotsEnabled(), + samplingContextsAvailable + ) }) it('should support profiler config with DD_PROFILING_XXX_ENABLED', () => { @@ -212,15 +217,10 @@ describe('config', () => { DD_PROFILING_WALLTIME_ENABLED: '0', DD_PROFILING_HEAP_ENABLED: '1', } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 1) - assert.ok(config.profilers[0] instanceof SpaceProfiler) + assert.deepStrictEqual(config.profilers.map(profiler => profiler.constructor), [SpaceProfiler]) }) it('should ensure space profiler is ordered first with DD_PROFILING_HEAP_ENABLED', () => { @@ -228,16 +228,15 @@ describe('config', () => { DD_PROFILING_PROFILERS: 'wall', DD_PROFILING_HEAP_ENABLED: '1', } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2 + (samplingContextsAvailable ? 
1 : 0)) - assert.ok(config.profilers[0] instanceof SpaceProfiler) - assert.ok(config.profilers[1] instanceof WallProfiler) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? [SpaceProfiler, WallProfiler, EventsProfiler] + : [SpaceProfiler, WallProfiler] + ) }) it('should ensure space profiler order is preserved when explicitly set with DD_PROFILING_PROFILERS', () => { @@ -245,20 +244,18 @@ describe('config', () => { DD_PROFILING_PROFILERS: 'wall,space', DD_PROFILING_HEAP_ENABLED: '1', } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2 + (samplingContextsAvailable ? 1 : 0)) - assert.ok(config.profilers[0] instanceof WallProfiler) - assert.ok(config.profilers[1] instanceof SpaceProfiler) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? 
[WallProfiler, SpaceProfiler, EventsProfiler] + : [WallProfiler, SpaceProfiler] + ) }) it('should be able to read some env vars', () => { - const oldenv = process.env process.env = { DD_PROFILING_DEBUG_SOURCE_MAPS: '1', DD_PROFILING_HEAP_SAMPLING_INTERVAL: '1000', @@ -267,18 +264,15 @@ describe('config', () => { DD_PROFILING_TIMELINE_ENABLED: '0', } - const options = { - logger: nullLogger, - } - - const config = new Config(options) - assert.strictEqual(config.debugSourceMaps, true) - assert.strictEqual(config.heapSamplingInterval, 1000) - assert.strictEqual(config.pprofPrefix, 'test-prefix') - assert.strictEqual(config.uploadTimeout, 10000) - assert.strictEqual(config.timelineEnabled, false) + const { config } = getProfilerConfig() - process.env = oldenv + assertObjectContains(config, { + debugSourceMaps: true, + heapSamplingInterval: 1000, + pprofPrefix: 'test-prefix', + uploadTimeout: 10000, + timelineEnabled: false, + }) }) it('should deduplicate profilers', () => { @@ -286,48 +280,20 @@ describe('config', () => { DD_PROFILING_PROFILERS: 'wall,wall', DD_PROFILING_WALLTIME_ENABLED: '1', } - const options = { - logger: nullLogger, - } - - const config = new Config(options) - - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 1 + (samplingContextsAvailable ? 
1 : 0)) - assert.ok(config.profilers[0] instanceof WallProfiler) - if (samplingContextsAvailable) { - assert.ok(config.profilers[1] instanceof EventsProfiler) - } - }) - - it('should prioritize options over env variables', () => { - if (!samplingContextsAvailable) { - return - } - - process.env = { - DD_PROFILING_PROFILERS: 'space', - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '1', - } - const options = { - logger: nullLogger, - profilers: ['wall'], - codeHotspotsEnabled: false, - endpointCollection: false, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2) - assert.ok(config.profilers[0] instanceof WallProfiler) - assert.strictEqual(config.profilers[0].codeHotspotsEnabled(), false) - assert.strictEqual(config.profilers[0].endpointCollectionEnabled(), false) - assert.ok(config.profilers[1] instanceof EventsProfiler) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? 
[WallProfiler, EventsProfiler] + : [WallProfiler] + ) }) - it('should prioritize non-experimental env variables and warn about experimental ones', () => { + it('should prioritize non-experimental env variables and warn about experimental ones', function () { if (!samplingContextsAvailable) { + this.skip() return } @@ -338,66 +304,71 @@ describe('config', () => { DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '0', DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED: '1', } - const warnings = [] - const options = { - logger: { - debug () {}, - info () {}, - warn (warning) { - warnings.push(warning) - }, - error () {}, - }, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2) - assert.ok(config.profilers[0] instanceof WallProfiler) - assert.strictEqual(config.profilers[0].codeHotspotsEnabled(), false) - assert.strictEqual(config.profilers[0].endpointCollectionEnabled(), false) - assert.ok(config.profilers[1] instanceof EventsProfiler) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + [WallProfiler, EventsProfiler] + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[0]).codeHotspotsEnabled(), + false + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[0]).endpointCollectionEnabled(), + false + ) }) - function optionOnlyWorksWithGivenCondition (property, name, condition) { - const options = { - [property]: true, + it('should disable code hotspots on unsupported platforms', function () { + process.env = { + DD_PROFILING_CODEHOTSPOTS_ENABLED: '1', } - if (condition) { - // should silently succeed - // eslint-disable-next-line no-new - new Config(options) - } else { - // should throw - // eslint-disable-next-line no-new - assert.throws(() => { new Config(options) }, `${name} not supported on `) + const { config } = getProfilerConfig() + + 
assert.strictEqual(config.codeHotspotsEnabled, samplingContextsAvailable) + }) + + it('should disable endpoint collection on unsupported platforms', function () { + process.env = { + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '1', } - } - function optionOnlyWorksWithSamplingContexts (property, name) { - optionOnlyWorksWithGivenCondition(property, name, samplingContextsAvailable) - } + const { config } = getProfilerConfig() - it('should only allow code hotspots on supported platforms', () => { - optionOnlyWorksWithSamplingContexts('codeHotspotsEnabled', 'Code hotspots') + assert.strictEqual(config.endpointCollectionEnabled, samplingContextsAvailable) }) - it('should only allow endpoint collection on supported platforms', () => { - optionOnlyWorksWithSamplingContexts('endpointCollection', 'Endpoint collection') - }) + it('should disable CPU profiling on unsupported platforms', function () { + process.env = { + DD_PROFILING_CPU_ENABLED: '1', + } + + const { config } = getProfilerConfig() - it('should only allow CPU profiling on supported platforms', () => { - optionOnlyWorksWithSamplingContexts('cpuProfilingEnabled', 'CPU profiling') + assert.strictEqual(config.cpuProfilingEnabled, samplingContextsAvailable) }) - it('should only allow timeline view on supported platforms', () => { - optionOnlyWorksWithSamplingContexts('timelineEnabled', 'Timeline view') + it('should disable timeline view on unsupported platforms', function () { + process.env = { + DD_PROFILING_TIMELINE_ENABLED: '1', + } + + const { config } = getProfilerConfig() + + assert.strictEqual(config.timelineEnabled, samplingContextsAvailable) }) - it('should only allow OOM monitoring on supported platforms', () => { - optionOnlyWorksWithGivenCondition('oomMonitoring', 'OOM monitoring', oomMonitoringSupported) + it('should disable OOM monitoring on unsupported platforms', function () { + process.env = { + DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED: '1', + } + + const { config } = getProfilerConfig() + + 
assert.strictEqual(config.oomMonitoring.enabled, oomMonitoringSupported) }) it('should support tags', () => { @@ -405,7 +376,7 @@ describe('config', () => { env: 'dev', } - const config = new Config({ tags }) + const { config } = getProfilerConfig({ tags }) assertObjectContains(config.tags, tags) }) @@ -420,7 +391,7 @@ describe('config', () => { version: '3.2.1', } - const config = new Config({ env, service, version, tags }) + const { config } = getProfilerConfig({ env, service, version, tags }) assertObjectContains(config.tags, { env, service, version }) }) @@ -429,21 +400,22 @@ describe('config', () => { const DUMMY_GIT_SHA = '13851f2b092e97acebab1b73f6c0e7818e795b50' const DUMMY_REPOSITORY_URL = 'git@github.com:DataDog/sci_git_example.git' - const config = new Config({ - repositoryUrl: DUMMY_REPOSITORY_URL, - commitSHA: DUMMY_GIT_SHA, - }) + process.env = { + DD_GIT_COMMIT_SHA: DUMMY_GIT_SHA, + DD_GIT_REPOSITORY_URL: DUMMY_REPOSITORY_URL, + } + + const { config } = getProfilerConfig() assertObjectContains(config.tags, { 'git.repository_url': DUMMY_REPOSITORY_URL, 'git.commit.sha': DUMMY_GIT_SHA }) }) it('should support IPv6 hostname', () => { - const options = { + const { config } = getProfilerConfig({ hostname: '::1', port: '8126', - } + }) - const config = new Config(options) const exporterUrl = config.exporters[0]._url.toString() const expectedUrl = new URL('http://[::1]:8126').toString() @@ -454,7 +426,8 @@ describe('config', () => { process.env = { DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED: 'false', } - const config = new Config({}) + + const { config } = getProfilerConfig() assert.deepStrictEqual(config.oomMonitoring, { enabled: false, @@ -465,12 +438,17 @@ describe('config', () => { }) }) + function assertOomExportCommand (config) { + assert.ok(config.oomMonitoring.exportCommand[3].includes(`service:${config.service}`)) + assert.ok(config.oomMonitoring.exportCommand[3].includes('snapshot:on_oom')) + } + it('should enable OOM heap profiler by 
default and use process as default strategy', () => { - const config = new Config({ reportHostname: true }) + const { config } = getProfilerConfig({ reportHostname: true }) if (oomMonitoringSupported) { - assert.deepStrictEqual(config.oomMonitoring, { - enabled: oomMonitoringSupported, + assertObjectContains(config.oomMonitoring, { + enabled: true, heapLimitExtensionSize: 0, maxHeapExtensionCount: 0, exportStrategies: ['process'], @@ -478,51 +456,54 @@ describe('config', () => { process.execPath, path.normalize(path.join(__dirname, '../../src/profiling', 'exporter_cli.js')), 'http://127.0.0.1:8126/', - `host:${config.tags.host},service:node,snapshot:on_oom`, 'space', ], }) + assertOomExportCommand(config) } else { assert.strictEqual(config.oomMonitoring.enabled, false) } }) - it('should allow configuring exporters by string or string array', async () => { + it('should allow configuring exporters through DD_PROFILING_EXPORTERS', () => { + /** @type {Array<[string, (typeof AgentExporter | typeof FileExporter)[]]>} */ const checks = [ - 'agent', - ['agent'], + ['agent', [AgentExporter]], + ['agent,file', [AgentExporter, FileExporter]], ] - for (const exporters of checks) { - const config = new Config({ - sourceMap: false, - exporters, - }) + for (const [exporters, expected] of checks) { + process.env = { + DD_PROFILING_EXPORTERS: exporters, + } + + const { config } = getProfilerConfig() - assert.strictEqual(typeof config.exporters[0].export, 'function') + assert.deepStrictEqual(config.exporters.map(exporter => exporter.constructor), expected) } }) - it('should allow configuring profilers by string or string arrays', async () => { + it('should allow configuring profilers through DD_PROFILING_PROFILERS', () => { + /** @type {Array>} */ const checks = [ ['space', SpaceProfiler], ['wall', WallProfiler, EventsProfiler], ['space,wall', SpaceProfiler, WallProfiler, EventsProfiler], ['wall,space', WallProfiler, SpaceProfiler, EventsProfiler], - [['space', 'wall'], 
SpaceProfiler, WallProfiler, EventsProfiler], - [['wall', 'space'], WallProfiler, SpaceProfiler, EventsProfiler], ].map(profilers => profilers.filter(profiler => samplingContextsAvailable || profiler !== EventsProfiler)) - for (const [profilers, ...expected] of checks) { - const config = new Config({ - sourceMap: false, - profilers, - }) - - assert.strictEqual(config.profilers.length, expected.length) - for (let i = 0; i < expected.length; i++) { - assert.ok(config.profilers[i] instanceof expected[i]) + for (const check of checks) { + const profilers = /** @type {string} */ (check[0]) + const expected = /** @type {Array} */ ( + check.slice(1) + ) + process.env = { + DD_PROFILING_PROFILERS: profilers, } + + const { config } = getProfilerConfig() + + assert.deepStrictEqual(config.profilers.map(profiler => profiler.constructor), expected) } }) @@ -535,9 +516,9 @@ describe('config', () => { DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'process,async,process', } - const config = new Config({ reportHostname: true }) + const { config } = getProfilerConfig({ reportHostname: true, tags: {} }) - assert.deepStrictEqual(config.oomMonitoring, { + assertObjectContains(config.oomMonitoring, { enabled: true, heapLimitExtensionSize: 1000000, maxHeapExtensionCount: 2, @@ -546,10 +527,10 @@ describe('config', () => { process.execPath, path.normalize(path.join(__dirname, '../../src/profiling', 'exporter_cli.js')), 'http://127.0.0.1:8126/', - `host:${config.tags.host},service:node,snapshot:on_oom`, 'space', ], }) + assertOomExportCommand(config) }) } @@ -560,7 +541,7 @@ describe('config', () => { if (!isSupported) { this.skip() } else { - const config = new Config({}) + const { config } = getProfilerConfig() assert.strictEqual(config.asyncContextFrameEnabled, true) } }) @@ -569,16 +550,12 @@ describe('config', () => { if (!isSupported) { this.skip() } else { - process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED = '0' - try { - const config = new Config({ - // In production this 
comes from the tracer Config singleton; we mimic it here. - url: 'http://127.0.0.1:8126', - }) - assert.strictEqual(config.asyncContextFrameEnabled, false) - } finally { - delete process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED + process.env = { + DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED: '0', } + + const { config } = getProfilerConfig() + assert.strictEqual(config.asyncContextFrameEnabled, false) } }) }) @@ -588,7 +565,7 @@ describe('config', () => { if (isSupported) { this.skip() } else { - const config = new Config({}) + const { config } = getProfilerConfig() assert.strictEqual(config.asyncContextFrameEnabled, false) } }) @@ -597,13 +574,12 @@ describe('config', () => { if (isSupported) { this.skip() } else { - process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED = '1' - try { - const config = new Config() - assert.strictEqual(config.asyncContextFrameEnabled, false) - } finally { - delete process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED + process.env = { + DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED: '1', } + + const { config } = getProfilerConfig() + assert.strictEqual(config.asyncContextFrameEnabled, false) } }) }) @@ -611,30 +587,33 @@ describe('config', () => { describe('upload compression settings', () => { const expectConfig = (env, method, level, warning) => { - process.env = { - DD_PROFILING_DEBUG_UPLOAD_COMPRESSION: env, - } + process.env = env === undefined + ? {} + : { DD_PROFILING_DEBUG_UPLOAD_COMPRESSION: env } + + process.env.DD_TRACE_DEBUG = '1' + /** @type {string[]} */ + const warnings = [] const logger = { - warnings: [], debug () {}, info () {}, warn (message) { - this.warnings.push(message) + warnings.push(message) }, error () {}, } - const config = new Config({ - logger, - // In production this comes from the tracer Config singleton; we mimic it here. 
- url: 'http://127.0.0.1:8126', + + const { config } = getProfilerConfig({ logger }) + const compressionWarnings = warnings.filter(message => { + return message.includes('DD_PROFILING_DEBUG_UPLOAD_COMPRESSION') || + message.includes('Invalid compression level ') }) if (warning) { - assert.strictEqual(logger.warnings.length, 1) - assert.strictEqual(logger.warnings[0], warning) + assert.match(compressionWarnings.join('\n'), new RegExp(RegExp.escape(warning))) } else { - assert.strictEqual(logger.warnings.length, 0) + assert.deepStrictEqual(compressionWarnings, []) } assert.deepStrictEqual(config.uploadCompression, { method, level }) @@ -649,11 +628,13 @@ describe('config', () => { }) it('should reject unknown methods', () => { - expectConfig('foo', zstdOrGzip, undefined, 'Invalid profile upload compression method "foo". Will use "on".') + expectConfig('foo', zstdOrGzip, undefined, "Invalid value: 'foo' for ") }) it('should accept supported compression levels in methods that support levels', () => { - [['gzip', 9], ['zstd', 22]].forEach(([method, maxLevel]) => { + /** @type {Array<[string, number]>} */ + const methods = [['gzip', 9], ['zstd', 22]] + methods.forEach(([method, maxLevel]) => { for (let i = 1; i <= maxLevel; i++) { expectConfig(`${method}-${i}`, method, i) } @@ -662,28 +643,27 @@ describe('config', () => { it('should reject invalid compression levels in methods that support levels', () => { ['gzip', 'zstd'].forEach((method) => { - expectConfig(`${method}-foo`, method, undefined, - 'Invalid compression level "foo". Will use default level.') + expectConfig(`${method}-foo`, zstdOrGzip, undefined, + `Invalid value: '${method}-foo' for DD_PROFILING_DEBUG_UPLOAD_COMPRESSION (source: env_var), picked default`) }) }) it('should reject compression levels in methods that do not support levels', () => { ['on', 'off'].forEach((method) => { - const effectiveMethod = method === 'on' ? 
zstdOrGzip : method - expectConfig(`${method}-3`, effectiveMethod, undefined, - `Compression levels are not supported for "${method}".`) - expectConfig(`${method}-foo`, effectiveMethod, undefined, - `Compression levels are not supported for "${method}".`) + expectConfig(`${method}-3`, zstdOrGzip, undefined, + `Invalid value: '${method}-3' for DD_PROFILING_DEBUG_UPLOAD_COMPRESSION (source: env_var), picked default`) + expectConfig(`${method}-foo`, zstdOrGzip, undefined, + `Invalid value: '${method}-foo' for DD_PROFILING_DEBUG_UPLOAD_COMPRESSION (source: env_var), picked default`) }) }) it('should normalize compression levels', () => { - expectConfig('gzip-0', 'gzip', 1, 'Invalid compression level 0. Will use 1.') + expectConfig('gzip-0', zstdOrGzip, undefined, "Invalid value: 'gzip-0'") expectConfig('gzip-10', 'gzip', 9, 'Invalid compression level 10. Will use 9.') - expectConfig('gzip-3.14', 'gzip', 3) - expectConfig('zstd-0', 'zstd', 1, 'Invalid compression level 0. Will use 1.') + expectConfig('gzip-3.14', zstdOrGzip, undefined, "Invalid value: 'gzip-3.14'") + expectConfig('zstd-0', zstdOrGzip, undefined, "Invalid value: 'zstd-0'") expectConfig('zstd-23', 'zstd', 22, 'Invalid compression level 23. Will use 22.') - expectConfig('zstd-3.14', 'zstd', 3) + expectConfig('zstd-3.14', zstdOrGzip, undefined, "Invalid value: 'zstd-3.14'") }) }) }) diff --git a/packages/dd-trace/test/profiling/profiler.spec.js b/packages/dd-trace/test/profiling/profiler.spec.js index 0b0b405e8a8..a1c3ba59b82 100644 --- a/packages/dd-trace/test/profiling/profiler.spec.js +++ b/packages/dd-trace/test/profiling/profiler.spec.js @@ -27,6 +27,29 @@ describe('profiler', function () { let SourceMapperStub let mapperInstance let interval + let flushInterval + + class ConfigStub { + constructor (options) { + const compression = process.env.DD_PROFILING_DEBUG_UPLOAD_COMPRESSION ?? 'off' + const [method, level0] = compression.split('-') + const level = level0 ? 
Number.parseInt(level0, 10) : undefined + + this.endpointCollectionEnabled = false + this.debugSourceMaps = false + this.exporters = options.exporters ?? exporters + this.flushInterval = options.flushInterval ?? flushInterval + this.logger = options.logger ?? logger + this.profilers = options.profilers ?? profilers + this.sourceMap = options.sourceMap ?? false + this.systemInfoReport = {} + this.tags = { ...options.tags } + this.uploadCompression = { + method, + level: Number.isNaN(level) ? undefined : level, + } + } + } function waitForExport () { return Promise.all([ @@ -39,7 +62,8 @@ describe('profiler', function () { } function setUpProfiler () { - interval = 65 * 1000 + flushInterval = 65 * 1000 + interval = flushInterval clock = sinon.useFakeTimers({ toFake: ['Date', 'setTimeout', 'clearTimeout', 'setInterval', 'clearInterval'], }) @@ -87,7 +111,7 @@ describe('profiler', function () { SourceMapperStub = sinon.stub().returns(mapperInstance) } - function makeStartOptions (overrides = {}) { + function makeStartOptions (overrides) { return { profilers, exporters, @@ -99,6 +123,9 @@ describe('profiler', function () { describe('not serverless', function () { function initProfiler () { Profiler = proxyquire('../../src/profiling/profiler', { + './config': { + Config: ConfigStub, + }, '@datadog/pprof': { SourceMapper: SourceMapperStub, }, @@ -434,6 +461,9 @@ describe('profiler', function () { function initServerlessProfiler () { Profiler = proxyquire('../../src/profiling/profiler', { + './config': { + Config: ConfigStub, + }, '@datadog/pprof': { SourceMapper: SourceMapperStub, }, diff --git a/packages/dd-trace/test/profiling/profilers/events.spec.js b/packages/dd-trace/test/profiling/profilers/events.spec.js index b2bbf3c5784..14532657d0f 100644 --- a/packages/dd-trace/test/profiling/profilers/events.spec.js +++ b/packages/dd-trace/test/profiling/profilers/events.spec.js @@ -7,15 +7,25 @@ const dc = require('dc-polyfill') require('../../setup/core') const { storage } 
= require('../../../../datadog-core') +const { getConfigFresh } = require('../../helpers/config') const { availableParallelism, effectiveLibuvThreadCount } = require('../../../src/profiling/libuv-size') const EventsProfiler = require('../../../src/profiling/profilers/events') const startCh = dc.channel('apm:dns:lookup:start') const finishCh = dc.channel('apm:dns:lookup:finish') +function getProfilerConfig (tracerOptions) { + const tracerConfig = getConfigFresh(tracerOptions) + const ProfilingConfig = require('../../../src/profiling/config').Config + return new ProfilingConfig({ + url: 'http://127.0.0.1:8126', + ...tracerConfig, + }) +} + describe('profilers/events', () => { it('should provide info', () => { - const info = new EventsProfiler({ samplingInterval: 1 }).getInfo() + const info = new EventsProfiler(getProfilerConfig()).getInfo() assert(info.maxSamples > 0) }) diff --git a/packages/dd-trace/test/proxy.spec.js b/packages/dd-trace/test/proxy.spec.js index fe80590fd37..4c42a2f2a4f 100644 --- a/packages/dd-trace/test/proxy.spec.js +++ b/packages/dd-trace/test/proxy.spec.js @@ -151,9 +151,8 @@ describe('TracerProxy', () => { enabled: true, }, }, - injectionEnabled: [], + injectionEnabled: undefined, logger: 'logger', - debug: true, profiling: {}, apmTracingEnabled: false, appsec: {}, @@ -412,12 +411,12 @@ describe('TracerProxy', () => { sinon.assert.notCalled(appsec.enable) sinon.assert.notCalled(iast.enable) - let conf = { tracing_enabled: false } + let conf = { tracing: false } handlers.get('APM_TRACING')(createApmTracingTransaction('test-config-1', conf)) sinon.assert.notCalled(appsec.disable) sinon.assert.notCalled(iast.disable) - conf = { tracing_enabled: true } + conf = { tracing: true } handlers.get('APM_TRACING')(createApmTracingTransaction('test-config-1', conf, 'modify')) sinon.assert.calledOnce(DatadogTracer) sinon.assert.calledOnce(AppsecSdk) @@ -439,7 +438,7 @@ describe('TracerProxy', () => { config.appsec.enabled = true config.iast.enabled = true 
config.setRemoteConfig = conf => { - config.tracing = conf.tracing_enabled + config.tracing = conf.tracing } const remoteConfigProxy = new RemoteConfigProxy() @@ -448,12 +447,12 @@ describe('TracerProxy', () => { sinon.assert.calledOnceWithExactly(appsec.enable, config) sinon.assert.calledOnceWithExactly(iast.enable, config, tracer) - let conf = { tracing_enabled: false } + let conf = { tracing: false } handlers.get('APM_TRACING')(createApmTracingTransaction('test-config-2', conf)) sinon.assert.called(appsec.disable) sinon.assert.called(iast.disable) - conf = { tracing_enabled: true } + conf = { tracing: true } handlers.get('APM_TRACING')(createApmTracingTransaction('test-config-2', conf, 'modify')) sinon.assert.calledTwice(appsec.enable) sinon.assert.calledWithExactly(appsec.enable.secondCall, config) diff --git a/packages/dd-trace/test/telemetry/index.spec.js b/packages/dd-trace/test/telemetry/index.spec.js index 697a6b9583e..9aed39a901f 100644 --- a/packages/dd-trace/test/telemetry/index.spec.js +++ b/packages/dd-trace/test/telemetry/index.spec.js @@ -18,6 +18,7 @@ const processTags = require('../../src/process-tags') const DEFAULT_HEARTBEAT_INTERVAL = 60000 let traceAgent +let traceAgentSeqBase describe('telemetry (proxy)', () => { let telemetry @@ -30,7 +31,6 @@ describe('telemetry (proxy)', () => { beforeEach(() => { telemetry = sinon.spy({ start () {}, - stop () {}, updateIntegrations () {}, updateConfig () {}, appClosing () {}, @@ -46,15 +46,13 @@ describe('telemetry (proxy)', () => { proxy.start(config) proxy.updateIntegrations() - proxy.updateConfig() + proxy.updateConfig([], config) proxy.appClosing() - proxy.stop() sinon.assert.calledWith(telemetry.start, config) sinon.assert.called(telemetry.updateIntegrations) sinon.assert.called(telemetry.updateConfig) sinon.assert.called(telemetry.appClosing) - sinon.assert.called(telemetry.stop) }) it('should proxy when enabled from updateConfig', () => { @@ -63,12 +61,10 @@ describe('telemetry (proxy)', () => { 
proxy.updateConfig([], config) proxy.updateIntegrations() proxy.appClosing() - proxy.stop() sinon.assert.called(telemetry.updateIntegrations) sinon.assert.calledWith(telemetry.updateConfig, [], config) sinon.assert.called(telemetry.appClosing) - sinon.assert.called(telemetry.stop) }) }) @@ -96,6 +92,7 @@ describe('telemetry', () => { }) traceAgent.reqs = [] + traceAgentSeqBase = undefined telemetry = proxyquire('../../src/telemetry/telemetry', { '../exporters/common/docker': { @@ -150,7 +147,6 @@ describe('telemetry', () => { }) after(() => { - telemetry.stop() traceAgent.close() }) @@ -250,8 +246,6 @@ describe('telemetry', () => { }) it('should do nothing when not enabled', (done) => { - telemetry.stop() - const server = http.createServer(() => { assert.fail('server should not be called') }).listen(0, () => { @@ -259,6 +253,7 @@ describe('telemetry', () => { telemetry: { enabled: false, heartbeatInterval: 60000 }, hostname: 'localhost', port: (/** @type {import('net').AddressInfo} */ (server.address())).port, + appsec: { sca: { enabled: false } }, }) setTimeout(() => { @@ -277,7 +272,7 @@ describe('telemetry', () => { }) notEnabledTelemetry.start({ telemetry: { enabled: false, heartbeatInterval: DEFAULT_HEARTBEAT_INTERVAL }, - appsec: { enabled: false }, + appsec: { enabled: false, sca: { enabled: undefined } }, profiling: { enabled: false }, }, { _pluginsByName: pluginsByName, @@ -301,7 +296,6 @@ describe('telemetry app-heartbeat', () => { after(() => { clock.restore() - telemetry.stop() traceAgent.close() }) @@ -360,8 +354,7 @@ describe('Telemetry extended heartbeat', () => { afterEach(() => { clock.restore() - telemetry.stop() - traceAgent.close() + traceAgent?.close() }) it('should be sent every 24 hours', (done) => { @@ -459,9 +452,8 @@ describe('Telemetry extended heartbeat', () => { clock.tick(86400000) assert.deepStrictEqual(configuration, changes) - const updatedChanges = [ - { name: 'test', value: false, origin: 'code', seq_id: 1 }, - ] + const change = 
{ name: 'test', value: false, origin: 'code', seq_id: 1 } + const updatedChanges = [change] telemetry.updateConfig(updatedChanges, config) clock.tick(86400000) assert.deepStrictEqual(configuration, updatedChanges) @@ -469,9 +461,9 @@ describe('Telemetry extended heartbeat', () => { const changeNeedingNameRemapping = [ { name: 'sampleRate', value: 0, origin: 'code', seq_id: 2 }, ] + /** @type {{ name: string, value: unknown, origin: string, seq_id: number }[]} */ const expectedConfigList = [ - updatedChanges[0], - { ...changeNeedingNameRemapping[0], name: 'DD_TRACE_SAMPLE_RATE' }, + ...changeNeedingNameRemapping, ] telemetry.updateConfig(changeNeedingNameRemapping, config) clock.tick(86400000) @@ -480,7 +472,7 @@ describe('Telemetry extended heartbeat', () => { const samplingRule = [ { name: 'sampler.rules', - value: [ + value: JSON.stringify([ { service: '*', sampling_rate: 1 }, { service: 'svc*', @@ -489,32 +481,25 @@ describe('Telemetry extended heartbeat', () => { tags: { 'tag-a': 'ta-v*', 'tag-b': 'tb-v?', 'tag-c': 'tc-v' }, sample_rate: 0.5, }, - ], + ]), origin: 'code', seq_id: 3, }, ] - const expectedConfigListWithSamplingRules = expectedConfigList.concat([ - { - name: 'DD_TRACE_SAMPLING_RULES', - value: '[{"service":"*","sampling_rate":1},' + - '{"service":"svc*","resource":"*abc","name":"op-??",' + - '"tags":{"tag-a":"ta-v*","tag-b":"tb-v?","tag-c":"tc-v"},"sample_rate":0.5}]', - origin: 'code', - seq_id: 3, - }, - ]) + /** @type {{ name: string, value: unknown, origin: string, seq_id: number }[]} */ + const expectedConfigListWithSamplingRules = samplingRule telemetry.updateConfig(samplingRule, config) clock.tick(86400000) assert.deepStrictEqual(configuration, expectedConfigListWithSamplingRules) - const chainedChanges = expectedConfigListWithSamplingRules.concat([ + /** @type {{ name: string, value: unknown, origin: string, seq_id: number }[]} */ + const chainedChanges = [ { name: 'test', value: true, origin: 'env', seq_id: 4 }, { name: 'test', value: 
false, origin: 'remote_config', seq_id: 5 }, - ]) + ] const samplingRule2 = [ - { name: 'test', value: true, origin: 'env' }, - { name: 'test', value: false, origin: 'remote_config' }, + { name: 'test', value: true, origin: 'env', seq_id: 4 }, + { name: 'test', value: false, origin: 'remote_config', seq_id: 5 }, ] telemetry.updateConfig(samplingRule2, config) @@ -523,6 +508,64 @@ describe('Telemetry extended heartbeat', () => { done() }) + + it('should serialize URL, object, and function config values for extended heartbeat', (done) => { + let configuration + + const sendDataRequest = { + sendData: (config, application, host, reqType, payload, cb = () => {}) => { + if (reqType === 'app-extended-heartbeat') { + configuration = payload.configuration + } + }, + } + + telemetry = proxyquire('../../src/telemetry/telemetry', { + '../exporters/common/docker': { + id () { + return 'test docker id' + }, + }, + './send-data': sendDataRequest, + }) + + const config = { + telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + hostname: 'localhost', + port: 0, + service: 'test service', + version: '1.2.3-beta4', + appsec: { enabled: true }, + profiling: { enabled: true }, + env: 'preprod', + tags: { + 'runtime-id': '1a2b3c', + }, + } + + telemetry.start(config, { _pluginsByName: pluginsByName }) + + clock.tick(86400000) + assert.deepStrictEqual(configuration, []) + + const objectValue = { + foo: 'bar', + nested: { answer: 42 }, + } + + const changes = [ + { name: 'url', value: 'http://example.test:4318/v1/traces', origin: 'code', seq_id: 0 }, + { name: 'payload', value: JSON.stringify(objectValue), origin: 'code', seq_id: 1 }, + { name: 'callback', value: 'telemetryCallback', origin: 'code', seq_id: 2 }, + ] + + telemetry.updateConfig(changes, config) + + clock.tick(86400000) + assert.deepStrictEqual(configuration, changes) + + done() + }) }) // deleted this test for now since the global interval is now used for app-extended heartbeat @@ -973,6 +1016,7 @@ 
describe('AVM OSS', () => { }) traceAgent.reqs = [] + traceAgentSeqBase = undefined delete require.cache[require.resolve('../../src/telemetry/send-data')] delete require.cache[require.resolve('../../src/telemetry/telemetry')] @@ -1003,16 +1047,15 @@ describe('AVM OSS', () => { after((done) => { clock.restore() - telemetry.stop() traceAgent.close(done) }) it('in app-started message', () => { return testSeq(1, 'app-started', payload => { assert.deepStrictEqual(payload.configuration, [ - { name: 'appsec.sca.enabled', value: scaValue, origin: scaValueOrigin, seq_id: 0 }, + { name: 'appsec.sca.enabled', value: scaValue, origin: scaValueOrigin }, ]) - }, true) + }) }) it('in app-extended-heartbeat message', () => { @@ -1020,9 +1063,9 @@ describe('AVM OSS', () => { clock.tick(86400000) return testSeq(2, 'app-extended-heartbeat', payload => { assert.deepStrictEqual(payload.configuration, [ - { name: 'appsec.sca.enabled', value: scaValue, origin: scaValueOrigin, seq_id: 0 }, + { name: 'appsec.sca.enabled', value: scaValue, origin: scaValueOrigin }, ]) - }, true) + }) }) }) }) @@ -1042,7 +1085,6 @@ describe('AVM OSS', () => { }) after(() => { - telemetry.stop() sinon.restore() }) @@ -1050,7 +1092,7 @@ describe('AVM OSS', () => { telemetry.start( { telemetry: { enabled: false }, - sca: { enabled: true }, + appsec: { sca: { enabled: true } }, } ) @@ -1064,6 +1106,8 @@ async function testSeq (seqId, reqType, validatePayload) { await once(traceAgent, 'handled-req') } const req = traceAgent.reqs[seqId - 1] + traceAgentSeqBase ??= req.body.seq_id - (seqId - 1) + assert.strictEqual(req.method, 'POST') assert.strictEqual(req.url, '/telemetry/proxy/api/v2/apmtelemetry') assertObjectContains(req.headers, { @@ -1098,7 +1142,7 @@ async function testSeq (seqId, reqType, validatePayload) { naming_schema_version: '', request_type: reqType, runtime_id: '1a2b3c', - seq_id: seqId, + seq_id: traceAgentSeqBase + seqId - 1, application: { service_name: 'test service', env: 'preprod', diff --git 
a/packages/dd-trace/test/telemetry/send-data.spec.js b/packages/dd-trace/test/telemetry/send-data.spec.js index c3e7e78f460..a9cc9a2c0c4 100644 --- a/packages/dd-trace/test/telemetry/send-data.spec.js +++ b/packages/dd-trace/test/telemetry/send-data.spec.js @@ -14,6 +14,7 @@ describe('sendData', () => { language_name: 'nodejs', tracer_version: 'version', } + const host = { hostname: 'test-host' } let sendDataModule let request @@ -25,17 +26,17 @@ describe('sendData', () => { }) }) - it('should call to request (TCP)', () => { + it('sends telemetry to the agent using hostname and port', () => { sendDataModule.sendData({ hostname: '', port: '12345', tags: { 'runtime-id': '123' }, - }, application, 'test', 'req-type') + }, application, host, 'req-type') sinon.assert.calledOnce(request) const options = request.getCall(0).args[1] - assert.deepStrictEqual(options, { + assertObjectContains(options, { method: 'POST', path: '/telemetry/proxy/api/v2/apmtelemetry', headers: { @@ -52,16 +53,16 @@ describe('sendData', () => { }) }) - it('should call to request (UDP)', () => { + it('sends telemetry to the configured socket url', () => { sendDataModule.sendData({ url: 'unix:/foo/bar/baz', tags: { 'runtime-id': '123' }, - }, application, 'test', 'req-type') + }, application, host, 'req-type') sinon.assert.calledOnce(request) const options = request.getCall(0).args[1] - assert.deepStrictEqual(options, { + assertObjectContains(options, { method: 'POST', path: '/telemetry/proxy/api/v2/apmtelemetry', headers: { @@ -78,40 +79,25 @@ describe('sendData', () => { }) }) - it('should add debug header if DD_TELEMETRY_DEBUG is present', () => { + it('adds the debug header when telemetry debug mode is enabled', () => { sendDataModule.sendData({ url: '/test', tags: { 'runtime-id': '123' }, telemetry: { debug: true }, - }, application, 'test', 'req-type') + }, application, host, 'req-type') sinon.assert.calledOnce(request) const options = request.getCall(0).args[1] - 
assert.deepStrictEqual(options, { - method: 'POST', - path: '/telemetry/proxy/api/v2/apmtelemetry', - headers: { - 'content-type': 'application/json', - 'dd-telemetry-api-version': 'v2', - 'dd-telemetry-request-type': 'req-type', - 'dd-telemetry-debug-enabled': 'true', - 'dd-client-library-language': application.language_name, - 'dd-client-library-version': application.tracer_version, - 'dd-session-id': '123', - }, - url: '/test', - hostname: undefined, - port: undefined, - }) + assert.strictEqual(options.headers['dd-telemetry-debug-enabled'], 'true') }) - it('should include dd-root-session-id header when rootSessionId differs from runtime-id', () => { + it('includes both child and root session ids when provided', () => { sendDataModule.sendData({ url: '/test', tags: { 'runtime-id': 'child-runtime-id' }, - rootSessionId: 'root-runtime-id', - }, application, 'test', 'req-type') + DD_ROOT_JS_SESSION_ID: 'root-runtime-id', + }, application, host, 'req-type') sinon.assert.calledOnce(request) const options = request.getCall(0).args[1] @@ -120,28 +106,14 @@ describe('sendData', () => { assert.strictEqual(options.headers['dd-root-session-id'], 'root-runtime-id') }) - it('should not include dd-root-session-id header when rootSessionId equals runtime-id', () => { - sendDataModule.sendData({ - url: '/test', - tags: { 'runtime-id': 'same-id' }, - rootSessionId: 'same-id', - }, application, 'test', 'req-type') - - sinon.assert.calledOnce(request) - const options = request.getCall(0).args[1] - - assert.strictEqual(options.headers['dd-session-id'], 'same-id') - assert.strictEqual(options.headers['dd-root-session-id'], undefined) - }) - - it('should remove not wanted properties from a payload with object type', () => { + it('removes internal-only fields from object payloads before sending them', () => { const payload = { message: 'test', logger: {}, tags: {}, serviceMapping: {}, } - sendDataModule.sendData({ tags: { 'runtime-id': '123' } }, 'test', 'test', 'req-type', payload) + 
sendDataModule.sendData({ tags: { 'runtime-id': '123' } }, application, host, 'req-type', payload) sinon.assert.calledOnce(request) const data = JSON.parse(request.getCall(0).args[0]) @@ -150,7 +122,7 @@ describe('sendData', () => { assert.deepStrictEqual(data.payload, trimmedPayload) }) - it('should send batch request with retryPayload', () => { + it('preserves batch payload items when sending message batches', () => { const retryObjData = { payload: { foo: 'bar' }, request_type: 'req-type-1' } const payload = [{ request_type: 'req-type-2', @@ -164,7 +136,7 @@ describe('sendData', () => { }, retryObjData] sendDataModule.sendData({ tags: { 'runtime-id': '123' } }, - { language: 'js' }, 'test', 'message-batch', payload) + application, host, 'message-batch', payload) sinon.assert.calledOnce(request) @@ -185,7 +157,7 @@ describe('sendData', () => { assert.deepStrictEqual(data.payload, expectedPayload) }) - it('should also work in CI Visibility agentless mode', () => { + it('uses the CI Visibility agentless intake when agentless mode is enabled', () => { process.env.DD_CIVISIBILITY_AGENTLESS_ENABLED = '1' sendDataModule.sendData( @@ -195,7 +167,8 @@ describe('sendData', () => { site: 'datadoghq.eu', }, application, - 'test', 'req-type' + host, + 'req-type' ) sinon.assert.calledOnce(request) diff --git a/packages/dd-trace/test/telemetry/session-propagation.spec.js b/packages/dd-trace/test/telemetry/session-propagation.spec.js index 7a40ca4cf68..00363dacbe3 100644 --- a/packages/dd-trace/test/telemetry/session-propagation.spec.js +++ b/packages/dd-trace/test/telemetry/session-propagation.spec.js @@ -2,214 +2,242 @@ const assert = require('node:assert/strict') -const { describe, it, beforeEach, afterEach } = require('mocha') -const sinon = require('sinon') -const dc = require('dc-polyfill') +const { describe, it, beforeEach } = require('mocha') +const proxyquire = require('proxyquire').noPreserveCache() require('../setup/core') +/** + * @typedef {{ + * callArgs?: 
unknown[], + * shell: boolean, + * command?: string, + * file?: string + * }} ChildProcessContext + */ +/** + * @typedef {{ + * telemetry?: { enabled?: boolean }, + * DD_ROOT_JS_SESSION_ID?: string, + * tags?: { 'runtime-id'?: string } + * }} SessionPropagationConfigOverrides + */ +/** + * @typedef {{ + * subscribe(subscribers: { start?: (context: ChildProcessContext) => void }): void, + * start: { publish(context: ChildProcessContext): void } + * }} FakeTracingChannel + */ + describe('session-propagation', () => { - const childProcessChannel = dc.tracingChannel('datadog:child_process:execution') + /** @type {FakeTracingChannel} */ + let childProcessChannel let sessionPropagation - beforeEach(() => { - // Fresh require to reset the subscribed flag - delete require.cache[require.resolve('../../src/telemetry/session-propagation')] - sessionPropagation = require('../../src/telemetry/session-propagation') - }) - - afterEach(() => { - sinon.restore() - }) - - it('should subscribe to child_process channel', () => { - sessionPropagation.start({ - telemetry: { enabled: true }, - rootSessionId: 'root-id', - tags: { 'runtime-id': 'current-id' }, - }) - - assert.ok(childProcessChannel.start.hasSubscribers) - }) - - it('should not subscribe when telemetry is disabled', () => { - const subscribeSpy = sinon.spy(childProcessChannel, 'subscribe') - - sessionPropagation.start({ - telemetry: { enabled: false }, - rootSessionId: 'root-id', - tags: { 'runtime-id': 'current-id' }, - }) - - assert.strictEqual(subscribeSpy.callCount, 0) - }) - - it('should only subscribe once', () => { - const config = { telemetry: { enabled: true }, rootSessionId: 'root-id', tags: { 'runtime-id': 'current-id' } } - sessionPropagation.start(config) - - const subscribeSpy = sinon.spy(childProcessChannel, 'subscribe') - sessionPropagation.start(config) - - assert.strictEqual(subscribeSpy.callCount, 0) - }) - - it('should unsubscribe and allow re-subscribe after stop()', () => { - sessionPropagation.start({ 
- telemetry: { enabled: true }, - rootSessionId: 'root-id', - tags: { 'runtime-id': 'current-id' }, - }) - - sessionPropagation.stop() + /** + * @param {SessionPropagationConfigOverrides} [overrides] + */ + function createConfig (overrides = {}) { + /** + * @type {{ + * telemetry: { enabled: boolean }, + * DD_ROOT_JS_SESSION_ID: string | undefined, + * tags: { 'runtime-id': string } + * }} + */ + const config = { + telemetry: { enabled: true, ...overrides.telemetry }, + DD_ROOT_JS_SESSION_ID: undefined, + tags: { 'runtime-id': 'current-id', ...overrides.tags }, + } + + if (overrides.DD_ROOT_JS_SESSION_ID) { + config.DD_ROOT_JS_SESSION_ID = overrides.DD_ROOT_JS_SESSION_ID + } + + return config + } + + /** + * @param {Record} additions + * @returns {NodeJS.ProcessEnv} + */ + function createExpectedEnv (additions) { + return { + ...process.env, + ...additions, + } + } + + /** + * @param {ChildProcessContext} context + * @returns {ChildProcessContext} + */ + function publishStart (context) { + childProcessChannel.start.publish(context) + return context + } + + /** + * @returns {FakeTracingChannel} + */ + function createTracingChannel () { + /** @type {((context: ChildProcessContext) => void)[]} */ + const startSubscribers = [] + + return { + subscribe (subscribers) { + if (typeof subscribers.start === 'function') { + startSubscribers.push(subscribers.start) + } + }, + start: { + publish (context) { + for (const subscriber of startSubscribers) { + subscriber(context) + } + }, + }, + } + } - // After stop(), start() should accept new config - sessionPropagation.start({ - telemetry: { enabled: true }, - rootSessionId: 'new-root', - tags: { 'runtime-id': 'new-id' }, + beforeEach(() => { + childProcessChannel = createTracingChannel() + sessionPropagation = proxyquire('../../src/telemetry/session-propagation', { + 'dc-polyfill': { + tracingChannel () { + return childProcessChannel + }, + }, }) - - const context = { callArgs: ['node', ['test.js'], {}], shell: false } - 
sessionPropagation._onChildProcessStart(context) - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'new-root') - assert.strictEqual(context.callArgs[2].env.DD_PARENT_JS_SESSION_ID, 'new-id') }) - describe('env injection via callArgs', () => { - let onChildProcessStart - - beforeEach(() => { - sessionPropagation.start({ - telemetry: { enabled: true }, - rootSessionId: 'root-id', - tags: { 'runtime-id': 'current-id' }, - }) - onChildProcessStart = sessionPropagation._onChildProcessStart - }) + describe('child process execution contexts', () => { + it('seeds child process options with the current runtime id when there is no inherited root', () => { + sessionPropagation.start(createConfig()) - it('should inject env vars when callArgs has (file, args, options)', () => { const context = { callArgs: ['node', ['test.js'], { cwd: '/tmp', env: { FOO: 'bar' } }], shell: false, } - onChildProcessStart(context) - - assert.strictEqual(context.callArgs[0], 'node') - assert.deepStrictEqual(context.callArgs[1], ['test.js']) - assert.strictEqual(context.callArgs[2].cwd, '/tmp') - assert.strictEqual(context.callArgs[2].env.FOO, 'bar') - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[2].env.DD_PARENT_JS_SESSION_ID, 'current-id') + publishStart(context) + + assert.deepStrictEqual(context.callArgs, [ + 'node', + ['test.js'], + { + cwd: '/tmp', + env: { + FOO: 'bar', + DD_ROOT_JS_SESSION_ID: 'current-id', + }, + }, + ]) }) - it('should inject env vars when callArgs has (file, options)', () => { - const context = { - callArgs: ['node', { cwd: '/tmp' }], - shell: false, - } - - onChildProcessStart(context) + it('uses process.env as the base when the execution context provides options without env', () => { + sessionPropagation.start(createConfig()) - assert.strictEqual(context.callArgs[0], 'node') - assert.strictEqual(context.callArgs[1].cwd, '/tmp') - 
assert.strictEqual(context.callArgs[1].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[1].env.DD_PARENT_JS_SESSION_ID, 'current-id') - }) - - it('should inject env vars when callArgs has (file) only for non-shell', () => { const context = { - callArgs: ['node'], + callArgs: ['npm', ['run', 'test'], { cwd: '/tmp' }], shell: false, } - onChildProcessStart(context) + publishStart(context) - assert.strictEqual(context.callArgs[0], 'node') - assert.deepStrictEqual(context.callArgs[1], []) - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[2].env.DD_PARENT_JS_SESSION_ID, 'current-id') + assert.deepStrictEqual(context.callArgs, [ + 'npm', + ['run', 'test'], + { + cwd: '/tmp', + env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'current-id' }), + }, + ]) }) - it('should inject env vars as options for shell commands with no options', () => { + it('adds shell options when the execution context does not provide any', () => { + sessionPropagation.start(createConfig()) + const context = { callArgs: ['ls -la'], shell: true, } - onChildProcessStart(context) + publishStart(context) - assert.strictEqual(context.callArgs[0], 'ls -la') - assert.strictEqual(context.callArgs[1].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[1].env.DD_PARENT_JS_SESSION_ID, 'current-id') + assert.deepStrictEqual(context.callArgs, [ + 'ls -la', + { env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'current-id' }) }, + ]) }) - it('should use process.env as base when no env is specified', () => { + it('preserves callbacks when it needs to insert child process options', () => { + sessionPropagation.start(createConfig()) + + const cb = () => {} const context = { - callArgs: ['node', ['test.js'], {}], + callArgs: ['cmd', cb], shell: false, } - onChildProcessStart(context) + publishStart(context) - const env = context.callArgs[2].env - assert.strictEqual(env.DD_ROOT_JS_SESSION_ID, 'root-id') - 
assert.ok(Object.keys(env).length > 2, 'env should contain process.env keys') + assert.deepStrictEqual(context.callArgs, [ + 'cmd', + [], + { env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'current-id' }) }, + cb, + ]) }) - it('should preserve callback when callArgs has (file, args, cb)', () => { - const cb = () => {} + it('does not change child process execution when telemetry is disabled', () => { + sessionPropagation.start(createConfig({ telemetry: { enabled: false } })) + const context = { - callArgs: ['node', ['-v'], cb], + callArgs: ['node', ['test.js'], { cwd: '/tmp', env: { FOO: 'bar' } }], shell: false, } - onChildProcessStart(context) + publishStart(context) - assert.strictEqual(context.callArgs[0], 'node') - assert.deepStrictEqual(context.callArgs[1], ['-v']) - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[3], cb) + assert.deepStrictEqual(context.callArgs, ['node', ['test.js'], { cwd: '/tmp', env: { FOO: 'bar' } }]) }) - it('should preserve callback when callArgs has (file, cb)', () => { - const cb = () => {} - const context = { - callArgs: ['cmd', cb], - shell: false, - } + it('preserves an inherited root session id instead of replacing it with the current runtime id', () => { + sessionPropagation.start(createConfig({ DD_ROOT_JS_SESSION_ID: 'root-id' })) - onChildProcessStart(context) + const context = publishStart({ callArgs: ['node', ['test.js'], {}], shell: false }) - assert.strictEqual(context.callArgs[0], 'cmd') - assert.deepStrictEqual(context.callArgs[1], []) - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[3], cb) + assert.deepStrictEqual(context.callArgs, [ + 'node', + ['test.js'], + { env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'root-id' }) }, + ]) }) - it('should merge into existing options when args is skipped with undefined', () => { - const context = { - callArgs: ['node', undefined, { cwd: '/tmp', 
env: { FOO: 'bar' } }], - shell: false, - } + it('uses process.env as the base when it adds options for non-shell commands', () => { + sessionPropagation.start(createConfig()) - onChildProcessStart(context) + const context = publishStart({ callArgs: ['node'], shell: false }) - assert.strictEqual(context.callArgs[2].cwd, '/tmp') - assert.strictEqual(context.callArgs[2].env.FOO, 'bar') - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[2].env.DD_PARENT_JS_SESSION_ID, 'current-id') + assert.deepStrictEqual(context.callArgs, [ + 'node', + [], + { env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'current-id' }) }, + ]) }) - it('should not modify context without callArgs', () => { + it('ignores execution contexts without call arguments', () => { + sessionPropagation.start(createConfig()) + const context = { command: 'node test.js', file: 'node', shell: false, } - onChildProcessStart(context) + publishStart(context) assert.strictEqual(context.callArgs, undefined) })