diff --git a/Cargo.lock b/Cargo.lock index 6250ac9d58..5e93cfd53d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -728,8 +728,10 @@ checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" dependencies = [ "android-tzdata", "iana-time-zone", + "js-sys", "num-traits", "serde", + "wasm-bindgen", "windows-link", ] @@ -950,6 +952,15 @@ dependencies = [ "itertools 0.10.5", ] +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-deque" version = "0.8.6" @@ -3288,6 +3299,15 @@ dependencies = [ "serde", ] +[[package]] +name = "rolling-file" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8395b4f860856b740f20a296ea2cd4d823e81a2658cf05ef61be22916026a906" +dependencies = [ + "chrono", +] + [[package]] name = "rust_decimal" version = "1.37.2" @@ -3446,6 +3466,7 @@ dependencies = [ "memory-accounting", "metrics", "rcgen", + "rolling-file", "rustls", "rustls-pemfile", "saluki-api", @@ -3461,6 +3482,7 @@ dependencies = [ "tonic", "tower", "tracing", + "tracing-appender", "tracing-subscriber", ] @@ -4631,6 +4653,18 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-appender" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +dependencies = [ + "crossbeam-channel", + "thiserror 1.0.69", + "time", + "tracing-subscriber", +] + [[package]] name = "tracing-attributes" version = "0.1.30" diff --git a/Cargo.toml b/Cargo.toml index a512c64864..27dd29770e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -179,6 +179,8 @@ fnv = { version = "1", default-features = false } twox-hash = { version = "2", features = ["xxhash64"] } sha3 = { version = "0.10", default-features = false } 
pyo3 = { version = "0.25", default-features = false } +tracing-appender = { version = "0.2.3", default-features = false } +rolling-file = { version = "0.2.0", default-features = false } [profile.release] lto = "thin" diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index af825c004d..3c3b77ebdb 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -62,6 +62,7 @@ core-foundation,https://github.com/servo/core-foundation-rs,MIT OR Apache-2.0,Th cpufeatures,https://github.com/RustCrypto/utils,MIT OR Apache-2.0,RustCrypto Developers crc32fast,https://github.com/srijs/rust-crc32fast,MIT OR Apache-2.0,"Sam Rijs , Alex Crichton " criterion-plot,https://github.com/bheisler/criterion.rs,MIT OR Apache-2.0,"Jorge Aparicio , Brook Heisler " +crossbeam-channel,https://github.com/crossbeam-rs/crossbeam,MIT OR Apache-2.0,The crossbeam-channel Authors crossbeam-deque,https://github.com/crossbeam-rs/crossbeam,MIT OR Apache-2.0,The crossbeam-deque Authors crossbeam-epoch,https://github.com/crossbeam-rs/crossbeam,MIT OR Apache-2.0,The crossbeam-epoch Authors crossbeam-queue,https://github.com/crossbeam-rs/crossbeam,MIT OR Apache-2.0,The crossbeam-queue Authors @@ -239,6 +240,7 @@ ring,https://github.com/briansmith/ring,Apache-2.0 AND ISC,The ring Authors rkyv,https://github.com/rkyv/rkyv,MIT,David Koloski rmp,https://github.com/3Hren/msgpack-rust,MIT,Evgeny Safronov rmp-serde,https://github.com/3Hren/msgpack-rust,MIT,Evgeny Safronov +rolling-file,https://github.com/Axcient/rolling-file-rs,MIT OR Apache-2.0,Kevin Hoffman rust_decimal,https://github.com/paupino/rust-decimal,MIT,Paul Mason rustc-demangle,https://github.com/rust-lang/rustc-demangle,MIT OR Apache-2.0,Alex Crichton rustc-hash,https://github.com/rust-lang-nursery/rustc-hash,Apache-2.0 OR MIT,The Rust Project Developers @@ -313,6 +315,7 @@ tonic,https://github.com/hyperium/tonic,MIT,Lucio Franco tower-http,https://github.com/tower-rs/tower-http,MIT,Tower Maintainers 
tracing,https://github.com/tokio-rs/tracing,MIT,"Eliza Weisman , Tokio Contributors " +tracing-appender,https://github.com/tokio-rs/tracing,MIT,"Zeki Sherif , Tokio Contributors " tracing-attributes,https://github.com/tokio-rs/tracing,MIT,"Tokio Contributors , Eliza Weisman , David Barsky " tracing-core,https://github.com/tokio-rs/tracing,MIT,Tokio Contributors tracing-log,https://github.com/tokio-rs/tracing,MIT,Tokio Contributors diff --git a/bin/agent-data-plane/src/cli/run.rs b/bin/agent-data-plane/src/cli/run.rs index 086bd04d4f..8da1a44014 100644 --- a/bin/agent-data-plane/src/cli/run.rs +++ b/bin/agent-data-plane/src/cli/run.rs @@ -1,4 +1,7 @@ -use std::time::{Duration, Instant}; +use std::{ + path::PathBuf, + time::{Duration, Instant}, +}; use memory_accounting::{ComponentBounds, ComponentRegistry}; use saluki_app::prelude::*; @@ -34,12 +37,7 @@ pub async fn run(started: Instant, run_config: RunConfig) -> Result<(), GenericE ); // Load our configuration and create all high-level primitives (health registry, component registry, environment // provider, etc) that are needed to build the topology. - let configuration = ConfigurationLoader::default() - .try_from_yaml(&run_config.config) - .from_environment("DD")? - .with_default_secrets_resolution() - .await? - .into_generic()?; + let configuration = load_configuration(run_config.config).await?; // Set up all of the building blocks for building our topologies and launching internal processes. let component_registry = ComponentRegistry::default(); @@ -239,3 +237,14 @@ fn write_sizing_guide(bounds: ComponentBounds) -> Result<(), GenericError> { Ok(()) } + +pub async fn load_configuration(config_path: PathBuf) -> Result { + let configuration = ConfigurationLoader::default() + .try_from_yaml(config_path) + .from_environment("DD")? + .with_default_secrets_resolution() + .await? 
+ .into_generic()?; + + Ok(configuration) +} diff --git a/bin/agent-data-plane/src/internal/remote_agent.rs b/bin/agent-data-plane/src/internal/remote_agent.rs index 312fe15fe0..497d5d0ade 100644 --- a/bin/agent-data-plane/src/internal/remote_agent.rs +++ b/bin/agent-data-plane/src/internal/remote_agent.rs @@ -11,6 +11,7 @@ use http::{Request, Uri}; use http_body_util::BodyExt; use rand::{rng, Rng}; use rand_distr::Alphanumeric; +use saluki_app::logging::LoggingConfiguration; use saluki_common::task::spawn_traced_named; use saluki_config::GenericConfiguration; use saluki_core::state::reflector::Reflector; @@ -45,6 +46,7 @@ pub struct RemoteAgentHelperConfiguration { client: RemoteAgentClient, internal_metrics: Reflector, prometheus_listen_addr: Option, + logging_config: LoggingConfiguration, } impl RemoteAgentHelperConfiguration { @@ -59,6 +61,7 @@ impl RemoteAgentHelperConfiguration { .replace("_", "-") .to_lowercase(); let client = RemoteAgentClient::from_configuration(config).await?; + let logging_config = LoggingConfiguration::try_from_config(config)?; Ok(Self { id: format!("{}-{}", formatted_full_name, Uuid::now_v7()), @@ -67,6 +70,7 @@ impl RemoteAgentHelperConfiguration { client, internal_metrics: get_shared_metrics_state().await, prometheus_listen_addr, + logging_config, }) } @@ -80,6 +84,7 @@ impl RemoteAgentHelperConfiguration { started: Utc::now(), internal_metrics: self.internal_metrics.clone(), prometheus_listen_addr: self.prometheus_listen_addr, + logging_config: self.logging_config, }; let service = RemoteAgentServer::new(service_impl); @@ -125,6 +130,7 @@ pub struct RemoteAgentImpl { started: DateTime, internal_metrics: Reflector, prometheus_listen_addr: Option, + logging_config: LoggingConfiguration, } impl RemoteAgentImpl { @@ -201,9 +207,25 @@ impl RemoteAgent for RemoteAgentImpl { async fn get_flare_files( &self, _request: tonic::Request, ) -> Result, tonic::Status> { - let response = GetFlareFilesResponse { - files: HashMap::default(), - }; + 
let mut files = HashMap::new(); + + if let Some(log_file_name) = self.logging_config.log_file.file_name() { + match tokio::fs::read(&self.logging_config.log_file).await { + Ok(content) => { + files.insert(log_file_name.to_string_lossy().to_string(), content); + } + Err(e) => { + debug!( + "Failed to read {} log file for flare: {}", + self.logging_config.log_file.display(), + e + ); + } + } + } + + let response = GetFlareFilesResponse { files }; + Ok(tonic::Response::new(response)) } diff --git a/bin/agent-data-plane/src/main.rs b/bin/agent-data-plane/src/main.rs index 2cda9221d8..91908bb556 100644 --- a/bin/agent-data-plane/src/main.rs +++ b/bin/agent-data-plane/src/main.rs @@ -5,10 +5,11 @@ #![deny(warnings)] #![deny(missing_docs)] +use std::path::PathBuf; use std::time::Instant; use clap::Parser as _; -use saluki_app::prelude::*; +use saluki_app::{logging::LoggingConfiguration, prelude::*}; use tracing::{error, info}; mod components; @@ -40,9 +41,23 @@ async fn main() { let started = Instant::now(); let cli = Cli::parse(); - if let Err(e) = initialize_dynamic_logging(None).await { + let configuration = cli::run::load_configuration(PathBuf::from("/etc/datadog-agent/datadog.yaml")) + .await + .unwrap_or_else(|e| { + fatal_and_exit(format!("failed to load configuration: {}", e)); + unreachable!() + }); + let logging_config = LoggingConfiguration::try_from_config(&configuration) + .unwrap_or_else(|e| { + fatal_and_exit(format!("failed to load logging configuration: {}", e)); + unreachable!() + }) + .with_reload(true); + + let _guard = initialize_dynamic_logging(&logging_config).await.unwrap_or_else(|e| { fatal_and_exit(format!("failed to initialize logging: {}", e)); - } + unreachable!() + }); if let Err(e) = initialize_metrics("adp").await { fatal_and_exit(format!("failed to initialize metrics: {}", e)); diff --git a/bin/checks-agent/src/main.rs b/bin/checks-agent/src/main.rs index c99e9d5b14..071b8c1867 100644 --- a/bin/checks-agent/src/main.rs +++ 
b/bin/checks-agent/src/main.rs @@ -1,7 +1,7 @@ use std::time::{Duration, Instant}; use memory_accounting::ComponentRegistry; -use saluki_app::{api::APIBuilder, metrics::emit_startup_metrics, prelude::*}; +use saluki_app::{api::APIBuilder, logging::LoggingConfiguration, metrics::emit_startup_metrics, prelude::*}; use saluki_components::{ destinations::{DatadogMetricsConfiguration, PrometheusConfiguration}, sources::{ChecksConfiguration, InternalMetricsConfiguration}, @@ -38,12 +38,12 @@ static ALLOC: memory_accounting::allocator::TrackingAllocator handler, - Err(e) => { + let _guard = initialize_dynamic_logging(&LoggingConfiguration::default().with_reload(true)) + .await + .unwrap_or_else(|e| { fatal_and_exit(format!("failed to initialize logging: {}", e)); - } - }; + unreachable!() // This will never be reached since fatal_and_exit exits + }); if let Err(e) = initialize_metrics("checks-agent").await { fatal_and_exit(format!("failed to initialize metrics: {}", e)); diff --git a/bin/correctness/airlock/src/main.rs b/bin/correctness/airlock/src/main.rs index fcb0ff235f..a442e2a500 100644 --- a/bin/correctness/airlock/src/main.rs +++ b/bin/correctness/airlock/src/main.rs @@ -1,6 +1,7 @@ #![allow(dead_code)] use clap::Parser as _; +use saluki_app::logging::LoggingConfiguration; use saluki_app::prelude::*; use saluki_error::{generic_error, GenericError}; use tracing::{error, info}; @@ -15,7 +16,7 @@ use self::driver::{Driver, DriverConfig}; async fn main() { let cli = Cli::parse(); - if let Err(e) = initialize_logging(Some(cli.log_level())) { + if let Err(e) = initialize_logging(&LoggingConfiguration::default().with_default_level(cli.log_level())) { fatal_and_exit(format!("failed to initialize logging: {}", e)); } diff --git a/bin/correctness/ground-truth/src/config.rs b/bin/correctness/ground-truth/src/config.rs index 70cba4cd24..919fde71a9 100644 --- a/bin/correctness/ground-truth/src/config.rs +++ b/bin/correctness/ground-truth/src/config.rs @@ -119,6 +119,7 @@ pub 
struct Cli { impl Cli { /// Gets the configured log level based on the user-supplied verbosity level. + #[allow(unused)] pub fn log_level(&self) -> LevelFilter { match self.verbose { 0 => LevelFilter::INFO, diff --git a/bin/correctness/ground-truth/src/main.rs b/bin/correctness/ground-truth/src/main.rs index 6f132b6ef7..70aa60b0c7 100644 --- a/bin/correctness/ground-truth/src/main.rs +++ b/bin/correctness/ground-truth/src/main.rs @@ -4,6 +4,7 @@ #![deny(missing_docs)] use clap::Parser as _; +use saluki_app::logging::LoggingConfiguration; use saluki_app::prelude::*; use saluki_error::{ErrorContext as _, GenericError}; use tracing::{error, info}; @@ -22,7 +23,7 @@ mod sync; async fn main() { let cli = Cli::parse(); - if let Err(e) = initialize_logging(Some(cli.log_level())) { + if let Err(e) = initialize_logging(&LoggingConfiguration::default()) { fatal_and_exit(format!("failed to initialize logging: {}", e)); } diff --git a/bin/correctness/metrics-intake/src/main.rs b/bin/correctness/metrics-intake/src/main.rs index dc17eb5bb7..d52c8a1e4f 100644 --- a/bin/correctness/metrics-intake/src/main.rs +++ b/bin/correctness/metrics-intake/src/main.rs @@ -8,7 +8,7 @@ use axum::{ routing::{get, post}, Router, }; -use saluki_app::prelude::*; +use saluki_app::{logging::LoggingConfiguration, prelude::*}; use saluki_error::GenericError; use tokio::{ net::TcpListener, @@ -27,7 +27,7 @@ use self::state::*; #[tokio::main] async fn main() { - if let Err(e) = initialize_logging(None) { + if let Err(e) = initialize_logging(&LoggingConfiguration::default()) { fatal_and_exit(format!("failed to initialize logging: {}", e)); } diff --git a/bin/correctness/millstone/src/main.rs b/bin/correctness/millstone/src/main.rs index b1f137bf06..9c3bfa63b3 100644 --- a/bin/correctness/millstone/src/main.rs +++ b/bin/correctness/millstone/src/main.rs @@ -4,6 +4,7 @@ #![deny(warnings)] #![deny(missing_docs)] +use saluki_app::logging::LoggingConfiguration; use saluki_app::prelude::*; use 
saluki_error::GenericError; use tracing::{error, info}; @@ -19,7 +20,7 @@ use self::driver::Driver; mod target; fn main() { - if let Err(e) = initialize_logging(None) { + if let Err(e) = initialize_logging(&LoggingConfiguration::default()) { fatal_and_exit(format!("failed to initialize logging: {}", e)); } diff --git a/lib/saluki-app/Cargo.toml b/lib/saluki-app/Cargo.toml index a9256a5996..ca45dbe0ba 100644 --- a/lib/saluki-app/Cargo.toml +++ b/lib/saluki-app/Cargo.toml @@ -25,6 +25,7 @@ iana-time-zone = { workspace = true, optional = true } memory-accounting = { workspace = true, optional = true } metrics = { workspace = true, optional = true } rcgen = { workspace = true, features = ["crypto", "aws_lc_rs", "pem"] } +rolling-file = { workspace = true } rustls = { workspace = true, features = ["tls12"] } rustls-pemfile = { workspace = true, features = ["std"] } saluki-api = { workspace = true, optional = true } @@ -40,4 +41,5 @@ tokio = { workspace = true, features = ["macros", "sync"], optional = true } tonic = { workspace = true, features = ["router", "transport"] } tower = { workspace = true, features = ["util"], optional = true } tracing = { workspace = true, optional = true } +tracing-appender = { workspace = true } tracing-subscriber = { workspace = true, features = ["ansi", "env-filter", "fmt", "json", "local-time", "registry", "std", "tracing-log"], optional = true } diff --git a/lib/saluki-app/src/logging.rs b/lib/saluki-app/src/logging.rs index ab9be555df..74d910cc13 100644 --- a/lib/saluki-app/src/logging.rs +++ b/lib/saluki-app/src/logging.rs @@ -10,6 +10,7 @@ use std::{ fmt, + path::PathBuf, str::FromStr as _, sync::{Arc, Mutex, OnceLock}, time::Duration, @@ -20,6 +21,7 @@ use chrono::{ Utc, }; use chrono_tz::Tz; +use rolling_file::{BasicRollingFileAppender, RollingConditionBasic}; use saluki_api::{ extract::{Query, State}, response::IntoResponse, @@ -27,9 +29,12 @@ use saluki_api::{ APIHandler, StatusCode, }; use 
saluki_common::task::spawn_traced_named; +use saluki_config::GenericConfiguration; +use saluki_error::{ErrorContext as _, GenericError}; use serde::Deserialize; use tokio::{select, sync::mpsc, time::sleep}; use tracing::{error, field, info, level_filters::LevelFilter, Event, Subscriber}; +use tracing_appender::non_blocking::{NonBlocking, WorkerGuard}; use tracing_subscriber::{ field::VisitOutput, fmt::{format::Writer, FmtContext, FormatEvent, FormatFields}, @@ -50,30 +55,131 @@ pub fn fatal_and_exit(message: String) { std::process::exit(1); } +#[cfg(target_os = "linux")] +/// The default log file path for ADP on Linux. +pub const DEFAULT_ADP_LOG_FILE: &str = "/var/log/datadog/adp.log"; + +#[cfg(target_os = "macos")] +/// The default log file path for ADP on macOS. +pub const DEFAULT_ADP_LOG_FILE: &str = "/opt/datadog-agent/logs/adp.log"; + +#[cfg(target_os = "windows")] +/// The default log file path for ADP on Windows. +pub const DEFAULT_ADP_LOG_FILE: &str = "C:\\ProgramData\\Datadog\\logs\\adp.log"; + +const DEFAULT_LOG_FILE_MAX_SIZE: u64 = 10485760; +const DEFAULT_LOG_FILE_MAX_ROLLS: usize = 1; +const DEFAULT_LOG_LEVEL: &str = "info"; + +/// Configuration for logging. +#[derive(Deserialize, Default, Debug)] +pub struct LoggingConfiguration { + /// Whether to format logs as JSON. + /// + /// Defaults to `false`. + #[serde(default)] + pub log_format_json: bool, + + /// The log level. + /// + /// Defaults to `info`. + #[serde(default = "default_log_level")] + pub log_level: String, + + /// Whether to enable logging to a file. + /// + /// Defaults to `false`. + #[serde(default, rename = "adp_log_file_enabled")] + pub log_file_enabled: bool, + + /// The maximum size of the log file before it is rolled. + /// + /// Defaults to 10MB. + #[serde(default = "default_log_file_max_size")] + pub log_file_max_size: u64, + + /// The maximum number of log files to keep. + /// + /// Defaults to 1.
+ #[serde(default = "default_log_file_max_rolls")] + pub log_file_max_rolls: usize, + + /// The path to the log file. + /// + /// Defaults to + /// `/var/log/datadog/adp.log` on Linux, + /// `/opt/datadog-agent/logs/adp.log` on macOS, and + /// `C:\\ProgramData\\Datadog\\logs\\adp.log` on Windows. + #[serde(default = "default_log_file", rename = "adp_log_file")] + pub log_file: PathBuf, + + /// Whether to enable dynamic reloading of the log level filtering directives. + /// + /// Defaults to `false`. + #[serde(default, rename = "adp_log_reload_enabled")] + pub reload_enabled: bool, + + #[serde(skip)] + default_level: Option, +} + +const fn default_log_file_max_size() -> u64 { + DEFAULT_LOG_FILE_MAX_SIZE +} + +const fn default_log_file_max_rolls() -> usize { + DEFAULT_LOG_FILE_MAX_ROLLS +} + +fn default_log_level() -> String { + DEFAULT_LOG_LEVEL.to_owned() +} + +/// The default log file path for ADP. +pub fn default_log_file() -> PathBuf { + PathBuf::from(DEFAULT_ADP_LOG_FILE) +} + +impl LoggingConfiguration { + /// Attempts to read logging configuration from the provided configuration. + /// + /// ## Errors + /// + /// If an error occurs during deserialization, an error will be returned. + pub fn try_from_config(config: &GenericConfiguration) -> Result { + let logging_config = config + .as_typed::() + .error_context("Failed to parse logging configuration.")?; + + Ok(logging_config) + } + + /// Sets the log level used for the default directive. + pub fn with_default_level(mut self, level: LevelFilter) -> Self { + self.default_level = Some(level); + self + } + + /// Sets whether to enable dynamic reloading of the log level filtering directives. + pub fn with_reload(mut self, reload: bool) -> Self { + self.reload_enabled = reload; + self + } +} /// Initializes the logging subsystem for `tracing`. /// -/// This function reads the `DD_LOG_LEVEL` environment variable to determine the log level to use. 
If the environment -/// variable is not set, the default log level is `INFO`. Additionally, it reads the `DD_LOG_FORMAT_JSON` environment -/// variable to determine which output format to use. If it is set to `json` (case insensitive), the logs will be -/// formatted as JSON. If it is set to any other value, or not set at all, the logs will default to a rich, colored, -/// human-readable format. -/// /// # Errors /// /// If the logging subsystem was already initialized, an error will be returned. -pub fn initialize_logging(default_level: Option) -> Result<(), Box> { - initialize_logging_inner(default_level, false) +pub fn initialize_logging( + config: &LoggingConfiguration, +) -> Result, Box> { + initialize_logging_inner(config) } /// Initializes the logging subsystem for `tracing` with the ability to dynamically update the log filtering directives /// at runtime. /// -/// This function reads the `DD_LOG_LEVEL` environment variable to determine the log level to use. If the environment -/// variable is not set, the default log level is `INFO`. Additionally, it reads the `DD_LOG_FORMAT_JSON` environment -/// variable to determine which output format to use. If it is set to `json` (case insensitive), the logs will be -/// formatted as JSON. If it is set to any other value, or not set at all, the logs will default to a rich, colored, -/// human-readable format. -/// /// An API handler can be acquired (via [`acquires_logging_api_handler`]) to install the API routes which allow for /// dynamically controlling the logging level filtering. See [`LoggingAPIHandler`] for more information. /// @@ -81,53 +187,88 @@ pub fn initialize_logging(default_level: Option) -> Result<(), Box< /// /// If the logging subsystem was already initialized, an error will be returned. 
pub async fn initialize_dynamic_logging( - default_level: Option, -) -> Result<(), Box> { + config: &LoggingConfiguration, +) -> Result, Box> { // We go through this wrapped initialize approach so that we can mark `initialize_dynamic_logging` as `async`, which // ensures we call it in an asynchronous context, thereby all but ensuring we're in a Tokio context when we try to // spawn the background task that handles reloading the filtering layer. - initialize_logging_inner(default_level, true) + initialize_logging_inner(config) } fn initialize_logging_inner( - default_level: Option, with_reload: bool, -) -> Result<(), Box> { - let is_json = std::env::var("DD_LOG_FORMAT_JSON") - .map(|s| s.trim().to_lowercase()) - .map(|s| s == "true" || s == "1") - .unwrap_or(false); - + config: &LoggingConfiguration, +) -> Result, Box> { // Load our level filtering directives from the environment, or fallback to INFO if the environment variable is not // specified. // // We also do a little bit of a dance to get the filter into the right shape for use in the dynamic filter layer. 
let level_filter = EnvFilter::builder() - .with_default_directive(default_level.unwrap_or(LevelFilter::INFO).into()) - .with_env_var("DD_LOG_LEVEL") - .from_env_lossy(); + .with_default_directive(config.default_level.unwrap_or(LevelFilter::INFO).into()) + .parse_lossy(&config.log_level); let shared_level_filter = Arc::new(level_filter); let (filter_layer, reload_handle) = ReloadLayer::new(into_shared_dyn_filter(Arc::clone(&shared_level_filter))); - if with_reload { + if config.reload_enabled { API_HANDLER .lock() .unwrap() .replace(LoggingAPIHandler::new(shared_level_filter.clone(), reload_handle)); } + let is_json = config.log_format_json; + + let mut worker_guard = None; + if is_json { let json_layer = initialize_tracing_json(); + let maybe_file_layer = if config.log_file_enabled { + let (file_nb, guard, file_level_filter) = setup_file_logging(config)?; + worker_guard = Some(guard); + Some(initialize_tracing_json_with_file(file_nb).with_filter(file_level_filter)) + } else { + None + }; + tracing_subscriber::registry() .with(json_layer.with_filter(filter_layer)) + .with(maybe_file_layer) .try_init()?; } else { let pretty_layer = initialize_tracing_pretty(); + let maybe_file_layer = if config.log_file_enabled { + let (file_nb, guard, file_level_filter) = setup_file_logging(config)?; + worker_guard = Some(guard); + Some(initialize_tracing_pretty_with_file(file_nb).with_filter(file_level_filter)) + } else { + None + }; + tracing_subscriber::registry() .with(pretty_layer.with_filter(filter_layer)) + .with(maybe_file_layer) .try_init()?; } - Ok(()) + Ok(worker_guard) +} + +fn setup_file_logging( + config: &LoggingConfiguration, +) -> Result<(NonBlocking, WorkerGuard, EnvFilter), Box> { + let adp_log_file = &config.log_file; + let log_file_max_size = config.log_file_max_size; + let log_file_max_rolls = config.log_file_max_rolls; + let file_appender = BasicRollingFileAppender::new( + adp_log_file, + RollingConditionBasic::new().max_size(log_file_max_size), + 
log_file_max_rolls, + )?; + let (file_nb, guard) = tracing_appender::non_blocking(file_appender); + let file_level_filter = EnvFilter::builder() + .with_default_directive(config.default_level.unwrap_or(LevelFilter::INFO).into()) + .parse_lossy(&config.log_level); + + Ok((file_nb, guard, file_level_filter)) } fn initialize_tracing_json() -> impl Layer @@ -149,6 +290,28 @@ where tracing_subscriber::fmt::Layer::new().event_format(AgentLikeFormatter::new()) } +fn initialize_tracing_json_with_file(file_nb: NonBlocking) -> impl Layer +where + S: Subscriber + for<'a> LookupSpan<'a>, +{ + tracing_subscriber::fmt::Layer::new() + .json() + .flatten_event(true) + .with_target(true) + .with_file(true) + .with_line_number(true) + .with_writer(file_nb) +} + +fn initialize_tracing_pretty_with_file(file_nb: NonBlocking) -> impl Layer +where + S: Subscriber + for<'a> LookupSpan<'a>, +{ + tracing_subscriber::fmt::Layer::new() + .event_format(AgentLikeFormatter::new()) + .with_writer(file_nb) +} + /// Acquires the logging API handler. /// /// This function is mutable, and consumes the handler if it's present. This means it should only be called once, and