From 6fb12510082c4ec3c3e8ec0f56f60fa05f091e16 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Mon, 28 Nov 2022 14:50:50 +0100 Subject: [PATCH 01/22] Add structs and functions necessary for log aggregation --- src/logging/framework.rs | 279 +++++++++++++++++++++++++++++++++++++++ src/logging/mod.rs | 2 + src/logging/spec.rs | 193 +++++++++++++++++++++++++++ 3 files changed, 474 insertions(+) create mode 100644 src/logging/framework.rs create mode 100644 src/logging/spec.rs diff --git a/src/logging/framework.rs b/src/logging/framework.rs new file mode 100644 index 000000000..bb7ce9b97 --- /dev/null +++ b/src/logging/framework.rs @@ -0,0 +1,279 @@ +use std::cmp; + +use super::spec::{ContainerLogConfig, LogLevel}; + +pub fn capture_shell_output( + log_dir: &str, + container: &str, + log_config: Option<&ContainerLogConfig>, +) -> String { + let root_log_level = log_config + .and_then(|config| config.root_log_level()) + .unwrap_or_default(); + let console_log_level = cmp::max( + root_log_level, + log_config + .map(|config| config.console.level_threshold.to_owned()) + .unwrap_or_default(), + ); + let file_log_level = cmp::max( + root_log_level, + log_config + .map(|config| config.file.level_threshold.to_owned()) + .unwrap_or_default(), + ); + + let log_file_dir = format!("{log_dir}/{container}"); + + let stdout_redirect = match ( + console_log_level <= LogLevel::INFO, + file_log_level <= LogLevel::INFO, + ) { + (true, true) => format!(" > >(tee {log_file_dir}/container.stdout.log)"), + (true, false) => "".into(), + (false, true) => format!(" > {log_file_dir}/container.stdout.log"), + (false, false) => " > /dev/null".into(), + }; + + let stderr_redirect = match ( + console_log_level <= LogLevel::ERROR, + file_log_level <= LogLevel::ERROR, + ) { + (true, true) => format!(" 2> >(tee {log_file_dir}/container.stderr.log >&2)"), + (true, false) => "".into(), + (false, true) => format!(" 2> {log_file_dir}/container.stderr.log"), + (false, false) => " 2> /dev/null".into(), + }; + + let mut args = Vec::new(); + if file_log_level <= LogLevel::ERROR { + args.push(format!("mkdir --parents {log_file_dir}")); + } + if stdout_redirect.is_empty() && stderr_redirect.is_empty() { + args.push(":".into()); + } else { + args.push(format!("exec{stdout_redirect}{stderr_redirect}")); + } + + args.join(" && ") +} + +pub fn create_log4j_config( + log_dir: &str, + log_file: &str, + max_size_in_mb: i32, + config: &ContainerLogConfig, +) -> String { + let number_of_archived_log_files = 1; + + let loggers = config + .loggers + .iter() + .filter(|(name, _)| name.as_str() != ContainerLogConfig::ROOT_LOGGER) + .map(|(name, logger_config)| { + format!( + "log4j.logger.{name}={level}\n", + name = name.escape_default(), + level = logger_config.level.to_logback_literal(), + ) + }) + .collect::(); + + format!( + r#"log4j.rootLogger={root_log_level}, CONSOLE, FILE + +log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender +log4j.appender.CONSOLE.Threshold={console_log_level_threshold} +log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout +log4j.appender.CONSOLE.layout.ConversionPattern=%d{{ISO8601}} [myid:%X{{myid}}] - %-5p [%t:%C{{1}}@%L] - %m%n + +log4j.appender.FILE=org.apache.log4j.RollingFileAppender +log4j.appender.FILE.Threshold={file_log_level_threshold} +log4j.appender.FILE.File={log_dir}/{log_file} +log4j.appender.FILE.MaxFileSize={max_log_file_size_in_mb}MB +log4j.appender.FILE.MaxBackupIndex={number_of_archived_log_files} +log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout + +{loggers}"#, + 
max_log_file_size_in_mb = max_size_in_mb / (1 + number_of_archived_log_files), + root_log_level = config + .root_log_level() + .unwrap_or_default() + .to_logback_literal(), + console_log_level_threshold = config.console.level_threshold.to_logback_literal(), + file_log_level_threshold = config.file.level_threshold.to_logback_literal(), + ) +} + +pub fn create_logback_config( + log_dir: &str, + log_file: &str, + max_size_in_mb: i32, + config: &ContainerLogConfig, +) -> String { + let number_of_archived_log_files = 1; + + let loggers = config + .loggers + .iter() + .filter(|(name, _)| name.as_str() != ContainerLogConfig::ROOT_LOGGER) + .map(|(name, logger_config)| { + format!( + " \n", + name = name.escape_default(), + level = logger_config.level.to_logback_literal(), + ) + }) + .collect::(); + + format!( + r#" + + + %d{{ISO8601}} [myid:%X{{myid}}] - %-5p [%t:%C{{1}}@%L] - %m%n + + + {console_log_level_threshold} + + + + + {log_dir}/{log_file} + + + + + {file_log_level_threshold} + + + 1 + {number_of_archived_log_files} + {log_dir}/{log_file}.%i + + + {max_log_file_size_in_mb}MB + + + +{loggers} + + + + + +"#, + max_log_file_size_in_mb = max_size_in_mb / (1 + number_of_archived_log_files), + root_log_level = config + .root_log_level() + .unwrap_or_default() + .to_logback_literal(), + console_log_level_threshold = config.console.level_threshold.to_logback_literal(), + file_log_level_threshold = config.file.level_threshold.to_logback_literal(), + ) +} + +pub fn create_vector_config( + log_dir: &str, + vector_aggregator_address: &str, + config: &ContainerLogConfig, +) -> String { + let vector_log_level = config.file.level_threshold.to_owned(); + + let vector_log_level_filter_expression = match vector_log_level { + LogLevel::TRACE => "true", + LogLevel::DEBUG => r#".level != "TRACE""#, + LogLevel::INFO => r#"!includes(["TRACE", "DEBUG"], .metadata.level)"#, + LogLevel::WARN => r#"!includes(["TRACE", "DEBUG", "INFO"], .metadata.level)"#, + LogLevel::ERROR => r#"!includes(["TRACE", "DEBUG", "INFO", "WARN"], .metadata.level)"#, + LogLevel::FATAL => "false", + LogLevel::NONE => "false", + }; + + format!( + r#"data_dir = "/stackable/vector/var" + +[log_schema] +host_key = "pod" + +[sources.vector] +type = "internal_logs" + +[sources.files_stdout] +type = "file" +include = ["{log_dir}/*/*.stdout.log"] + +[sources.files_stderr] +type = "file" +include = ["{log_dir}/*/*.stderr.log"] + +[sources.files_log4j] +type = "file" +include = ["{log_dir}/*/*.log4j.xml"] + +[sources.files_log4j.multiline] +mode = "halt_with" +start_pattern = "^" + string!(.message) + "" +parsed_event = parse_xml!(wrapped_xml_event).root.event +.timestamp = to_timestamp!(to_float!(parsed_event.@timestamp) / 1000) +.logger = parsed_event.@logger +.level = parsed_event.@level +.message = parsed_event.message +''' + +[transforms.filtered_logs_vector] +inputs = ["vector"] +type = "filter" +condition = '{vector_log_level_filter_expression}' + +[transforms.extended_logs_vector] +inputs = ["filtered_logs_vector"] +type = "remap" +source = ''' +.container = "vector" +.level = .metadata.level +.logger = .metadata.module_path +if exists(.file) {{ .processed_file = del(.file) }} +del(.metadata) +del(.pid) +del(.source_type) +''' + +[transforms.extended_logs_files] +inputs = ["processed_files_*"] +type = "remap" +source = ''' +. 
|= parse_regex!(.file, r'^{log_dir}/(?P<container>.*?)/(?P<file>.*?)$')
+del(.source_type)
+'''
+
+[sinks.aggregator]
+inputs = ["extended_logs_*"]
+type = "vector"
+address = "{vector_aggregator_address}"
+"#
+    )
+}
diff --git a/src/logging/mod.rs b/src/logging/mod.rs
index 99f197f7b..3db01edc9 100644
--- a/src/logging/mod.rs
+++ b/src/logging/mod.rs
@@ -2,7 +2,9 @@ use tracing;
 use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Registry};
 
 pub mod controller;
+pub mod framework;
 mod k8s_events;
+pub mod spec;
 
 #[derive(Debug, Clone, clap::ValueEnum, PartialEq, Eq)]
 pub enum TracingTarget {
diff --git a/src/logging/spec.rs b/src/logging/spec.rs
new file mode 100644
index 000000000..8ad7410af
--- /dev/null
+++ b/src/logging/spec.rs
@@ -0,0 +1,193 @@
+use std::collections::BTreeMap;
+use std::fmt::Display;
+
+use crate::config::merge::Atomic;
+use crate::config::{fragment::Fragment, merge::Merge};
+
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Debug, Eq, Fragment, JsonSchema, PartialEq)]
+#[fragment(path_overrides(fragment = "crate::config::fragment"))]
+#[fragment_attrs(
+    derive(Clone, Debug, Deserialize, Merge, JsonSchema, PartialEq, Serialize),
+    merge(path_overrides(merge = "crate::config::merge")),
+    serde(
+        bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'de>",),
+        rename_all = "camelCase",
+    )
+)]
+pub struct Logging<T>
+where
+    T: Clone + Display + Ord,
+{
+    pub enable_vector_agent: bool,
+    #[fragment_attrs(serde(default))]
+    pub containers: BTreeMap<T, ContainerLogConfig>,
+}
+
+impl<T> Default for Logging<T>
+where
+    T: Clone + Display + Ord,
+{
+    fn default() -> Self {
+        Self {
+            enable_vector_agent: Default::default(),
+            containers: Default::default(),
+        }
+    }
+}
+
+impl<T> Default for LoggingFragment<T>
+where
+    T: Clone + Display + Ord,
+{
+    fn default() -> Self {
+        Self {
+            enable_vector_agent: Default::default(),
+            containers: Default::default(),
+        }
+    }
+}
+
+#[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)]
+#[fragment(path_overrides(fragment = "crate::config::fragment"))]
+#[fragment_attrs(
+    derive(
+        Clone,
+        Debug,
+        Default,
+        Deserialize,
+        Merge,
+        JsonSchema,
+        PartialEq,
+        Serialize
+    ),
+    merge(path_overrides(merge = "crate::config::merge")),
+    serde(rename_all = "camelCase")
+)]
+pub struct ContainerLogConfig {
+    #[fragment_attrs(serde(default))]
+    pub loggers: BTreeMap<String, LoggerConfig>,
+    #[fragment_attrs(serde(default))]
+    pub console: AppenderConfig,
+    #[fragment_attrs(serde(default))]
+    pub file: AppenderConfig,
+}
+
+impl ContainerLogConfig {
+    pub const ROOT_LOGGER: &'static str = "ROOT";
+
+    pub fn root_log_level(&self) -> Option<LogLevel> {
+        self.loggers
+            .get(Self::ROOT_LOGGER)
+            .map(|root| root.level.to_owned())
+    }
+}
+
+#[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)]
+#[fragment(path_overrides(fragment = "crate::config::fragment"))]
+#[fragment_attrs(
+    derive(
+        Clone,
+        Debug,
+        Default,
+        Deserialize,
+        Merge,
+        JsonSchema,
+        PartialEq,
+        Serialize
+    ),
+    merge(path_overrides(merge = "crate::config::merge")),
+    serde(rename_all = "camelCase")
+)]
+pub struct LoggerConfig {
+    pub level: LogLevel,
+}
+
+#[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)]
+#[fragment(path_overrides(fragment = "crate::config::fragment"))]
+#[fragment_attrs(
+    derive(
+        Clone,
+        Debug,
+        Default,
+        Deserialize,
+        Merge,
+        JsonSchema,
+        PartialEq,
+        Serialize
+    ),
+    merge(path_overrides(merge = "crate::config::merge")),
+    serde(rename_all = "camelCase")
+)]
+pub struct AppenderConfig {
+    
#[fragment_attrs(serde(default))] + pub level_threshold: LogLevel, +} + +#[derive( + Clone, Copy, Debug, Deserialize, Eq, JsonSchema, Ord, PartialEq, PartialOrd, Serialize, +)] +pub enum LogLevel { + TRACE, + DEBUG, + INFO, + WARN, + ERROR, + FATAL, + NONE, +} + +impl Default for LogLevel { + fn default() -> Self { + LogLevel::INFO + } +} + +impl Atomic for LogLevel {} + +impl LogLevel { + pub fn to_logback_literal(&self) -> String { + match self { + LogLevel::TRACE => "TRACE", + LogLevel::DEBUG => "DEBUG", + LogLevel::INFO => "INFO", + LogLevel::WARN => "WARN", + LogLevel::ERROR => "ERROR", + LogLevel::FATAL => "FATAL", + LogLevel::NONE => "OFF", + } + .into() + } +} + +pub fn default_logging() -> LoggingFragment +where + T: Clone + Display + Ord + strum::IntoEnumIterator, +{ + LoggingFragment { + enable_vector_agent: Some(true), + containers: T::iter() + .map(|container| (container, default_container_log_config())) + .collect(), + } +} + +pub fn default_container_log_config() -> ContainerLogConfigFragment { + ContainerLogConfigFragment { + loggers: [( + ContainerLogConfig::ROOT_LOGGER.into(), + LoggerConfigFragment { + level: Some(LogLevel::INFO), + }, + )] + .into(), + console: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + file: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + } +} From 72a1ac9ac1be7d1db67d1f4273ea5e482d619090 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Mon, 28 Nov 2022 16:41:23 +0100 Subject: [PATCH 02/22] Add Vector container builder --- src/logging/framework.rs | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index bb7ce9b97..14e76d143 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -1,7 +1,14 @@ use std::cmp; +use k8s_openapi::api::core::v1::Container; + +use crate::builder::ContainerBuilder; + use super::spec::{ContainerLogConfig, LogLevel}; +const STACKABLE_CONFIG_DIR: &str = "/stackable/config"; +const STACKABLE_LOG_DIR: &str = "/stackable/log"; + pub fn capture_shell_output( log_dir: &str, container: &str, @@ -173,7 +180,6 @@ pub fn create_logback_config( } pub fn create_vector_config( - log_dir: &str, vector_aggregator_address: &str, config: &ContainerLogConfig, ) -> String { @@ -200,15 +206,15 @@ type = "internal_logs" [sources.files_stdout] type = "file" -include = ["{log_dir}/*/*.stdout.log"] +include = ["{STACKABLE_LOG_DIR}/*/*.stdout.log"] [sources.files_stderr] type = "file" -include = ["{log_dir}/*/*.stderr.log"] +include = ["{STACKABLE_LOG_DIR}/*/*.stderr.log"] [sources.files_log4j] type = "file" -include = ["{log_dir}/*/*.log4j.xml"] +include = ["{STACKABLE_LOG_DIR}/*/*.log4j.xml"] [sources.files_log4j.multiline] mode = "halt_with" @@ -266,7 +272,7 @@ del(.source_type) inputs = ["processed_files_*"] type = "remap" source = ''' -. |= parse_regex!(.file, r'^{log_dir}/(?P.*?)/(?P.*?)$') +. 
|= parse_regex!(.file, r'^{STACKABLE_LOG_DIR}/(?P.*?)/(?P.*?)$') del(.source_type) ''' @@ -277,3 +283,17 @@ address = "{vector_aggregator_address}" "# ) } + +pub fn vector_container(image: &str, config_volume_name: &str, log_volume_name: &str) -> Container { + ContainerBuilder::new("vector") + .unwrap() + .image(image) + .command(vec!["/stackable/vector/bin/vector".into()]) + .args(vec![ + "--config".into(), + format!("{STACKABLE_CONFIG_DIR}/vector.toml"), + ]) + .add_volume_mount(config_volume_name, STACKABLE_CONFIG_DIR) + .add_volume_mount(log_volume_name, STACKABLE_LOG_DIR) + .build() +} From 2e30ccdc4e04d367cdc0db69611d910ee0ceaae6 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Mon, 28 Nov 2022 17:23:27 +0100 Subject: [PATCH 03/22] Add Vector config file name as constant --- src/logging/framework.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index 14e76d143..08946f33d 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -9,6 +9,8 @@ use super::spec::{ContainerLogConfig, LogLevel}; const STACKABLE_CONFIG_DIR: &str = "/stackable/config"; const STACKABLE_LOG_DIR: &str = "/stackable/log"; +pub const VECTOR_CONFIG_FILE: &str = "vector.toml"; + pub fn capture_shell_output( log_dir: &str, container: &str, @@ -291,7 +293,7 @@ pub fn vector_container(image: &str, config_volume_name: &str, log_volume_name: .command(vec!["/stackable/vector/bin/vector".into()]) .args(vec![ "--config".into(), - format!("{STACKABLE_CONFIG_DIR}/vector.toml"), + format!("{STACKABLE_CONFIG_DIR}/{VECTOR_CONFIG_FILE}"), ]) .add_volume_mount(config_volume_name, STACKABLE_CONFIG_DIR) .add_volume_mount(log_volume_name, STACKABLE_LOG_DIR) From c9f85149d6f620d424d458cbe992ab415f84e1c8 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Wed, 30 Nov 2022 16:24:17 +0100 Subject: [PATCH 04/22] Add custom container log config --- src/logging/framework.rs | 37 ++-- src/logging/spec.rs | 451 +++++++++++++++++++++++++++++++++++---- 2 files changed, 421 insertions(+), 67 deletions(-) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index 08946f33d..d090fba67 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -4,7 +4,7 @@ use k8s_openapi::api::core::v1::Container; use crate::builder::ContainerBuilder; -use super::spec::{ContainerLogConfig, LogLevel}; +use super::spec::{AutomaticContainerLogConfig, LogLevel}; const STACKABLE_CONFIG_DIR: &str = "/stackable/config"; const STACKABLE_LOG_DIR: &str = "/stackable/log"; @@ -14,23 +14,11 @@ pub const VECTOR_CONFIG_FILE: &str = "vector.toml"; pub fn capture_shell_output( log_dir: &str, container: &str, - log_config: Option<&ContainerLogConfig>, + log_config: &AutomaticContainerLogConfig, ) -> String { - let root_log_level = log_config - .and_then(|config| config.root_log_level()) - .unwrap_or_default(); - let console_log_level = cmp::max( - root_log_level, - log_config - .map(|config| config.console.level_threshold.to_owned()) - .unwrap_or_default(), - ); - let file_log_level = cmp::max( - root_log_level, - log_config - .map(|config| config.file.level_threshold.to_owned()) - .unwrap_or_default(), - ); + let root_log_level = log_config.root_log_level().unwrap_or_default(); + let console_log_level = cmp::max(root_log_level, log_config.console.level_threshold); + let file_log_level = cmp::max(root_log_level, log_config.file.level_threshold); let log_file_dir = format!("{log_dir}/{container}"); @@ -71,14 +59,14 @@ pub fn create_log4j_config( log_dir: &str, 
log_file: &str, max_size_in_mb: i32, - config: &ContainerLogConfig, + config: &AutomaticContainerLogConfig, ) -> String { let number_of_archived_log_files = 1; let loggers = config .loggers .iter() - .filter(|(name, _)| name.as_str() != ContainerLogConfig::ROOT_LOGGER) + .filter(|(name, _)| name.as_str() != AutomaticContainerLogConfig::ROOT_LOGGER) .map(|(name, logger_config)| { format!( "log4j.logger.{name}={level}\n", @@ -118,14 +106,14 @@ pub fn create_logback_config( log_dir: &str, log_file: &str, max_size_in_mb: i32, - config: &ContainerLogConfig, + config: &AutomaticContainerLogConfig, ) -> String { let number_of_archived_log_files = 1; let loggers = config .loggers .iter() - .filter(|(name, _)| name.as_str() != ContainerLogConfig::ROOT_LOGGER) + .filter(|(name, _)| name.as_str() != AutomaticContainerLogConfig::ROOT_LOGGER) .map(|(name, logger_config)| { format!( " \n", @@ -183,9 +171,11 @@ pub fn create_logback_config( pub fn create_vector_config( vector_aggregator_address: &str, - config: &ContainerLogConfig, + config: Option<&AutomaticContainerLogConfig>, ) -> String { - let vector_log_level = config.file.level_threshold.to_owned(); + let vector_log_level = config + .map(|config| config.file.level_threshold) + .unwrap_or_default(); let vector_log_level_filter_expression = match vector_log_level { LogLevel::TRACE => "true", @@ -287,6 +277,7 @@ address = "{vector_aggregator_address}" } pub fn vector_container(image: &str, config_volume_name: &str, log_volume_name: &str) -> Container { + // TODO Increase verbosity if root log level is lower than INFO. ContainerBuilder::new("vector") .unwrap() .image(image) diff --git a/src/logging/spec.rs b/src/logging/spec.rs index 8ad7410af..caf5b6707 100644 --- a/src/logging/spec.rs +++ b/src/logging/spec.rs @@ -1,16 +1,29 @@ use std::collections::BTreeMap; use std::fmt::Display; -use crate::config::merge::Atomic; +use crate::config::fragment::{self, FromFragment}; +use crate::config::merge::{self, Atomic}; use crate::config::{fragment::Fragment, merge::Merge}; +use derivative::Derivative; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -#[derive(Clone, Debug, Eq, Fragment, JsonSchema, PartialEq)] +#[derive(Clone, Debug, Derivative, Eq, Fragment, JsonSchema, PartialEq)] +#[derivative(Default(bound = ""))] #[fragment(path_overrides(fragment = "crate::config::fragment"))] #[fragment_attrs( - derive(Clone, Debug, Deserialize, Merge, JsonSchema, PartialEq, Serialize), + derive( + Clone, + Debug, + Derivative, + Deserialize, + JsonSchema, + Merge, + PartialEq, + Serialize + ), + derivative(Default(bound = "")), merge(path_overrides(merge = "crate::config::merge")), serde( bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'de>",), @@ -26,26 +39,60 @@ where pub containers: BTreeMap, } -impl Default for Logging -where - T: Clone + Display + Ord, -{ - fn default() -> Self { - Self { - enable_vector_agent: Default::default(), - containers: Default::default(), - } - } +#[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] +#[fragment(path_overrides(fragment = "crate::config::fragment"))] +#[fragment_attrs( + derive( + Clone, + Debug, + Default, + Deserialize, + JsonSchema, + Merge, + PartialEq, + Serialize + ), + merge(path_overrides(merge = "crate::config::merge")), + serde(rename_all = "camelCase") +)] +pub struct ContainerLogConfig { + #[fragment_attrs(serde(flatten))] + pub choice: Option, } -impl Default for LoggingFragment -where - T: Clone + Display + Ord, -{ - fn default() -> Self { - Self { - 
enable_vector_agent: Default::default(), - containers: Default::default(), +#[derive(Clone, Debug, Derivative, Eq, JsonSchema, PartialEq)] +#[derivative(Default)] +pub enum ContainerLogConfigChoice { + Custom(CustomContainerLogConfig), + #[derivative(Default)] + Automatic(AutomaticContainerLogConfig), +} + +#[derive(Clone, Debug, Derivative, Deserialize, JsonSchema, Merge, PartialEq, Serialize)] +#[derivative(Default)] +#[merge(path_overrides(merge = "crate::config::merge"))] +#[serde(untagged)] +pub enum ContainerLogConfigChoiceFragment { + Custom(CustomContainerLogConfigFragment), + #[derivative(Default)] + Automatic(AutomaticContainerLogConfigFragment), +} + +impl FromFragment for ContainerLogConfigChoice { + type Fragment = ContainerLogConfigChoiceFragment; + type RequiredFragment = ContainerLogConfigChoiceFragment; + + fn from_fragment( + fragment: Self::Fragment, + validator: fragment::Validator, + ) -> Result { + match fragment { + Self::Fragment::Custom(fragment) => Ok(Self::Custom(FromFragment::from_fragment( + fragment, validator, + )?)), + Self::Fragment::Automatic(fragment) => Ok(Self::Automatic( + FromFragment::from_fragment(fragment, validator)?, + )), } } } @@ -58,15 +105,56 @@ where Debug, Default, Deserialize, + JsonSchema, Merge, + PartialEq, + Serialize + ), + merge(path_overrides(merge = "crate::config::merge")), + serde(rename_all = "camelCase") +)] +pub struct CustomContainerLogConfig { + pub custom: ConfigMapLogConfig, +} + +#[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] +#[fragment(path_overrides(fragment = "crate::config::fragment"))] +#[fragment_attrs( + derive( + Clone, + Debug, + Default, + Deserialize, JsonSchema, + Merge, PartialEq, Serialize ), merge(path_overrides(merge = "crate::config::merge")), serde(rename_all = "camelCase") )] -pub struct ContainerLogConfig { +pub struct ConfigMapLogConfig { + #[fragment_attrs(serde(default))] + pub config_map: String, +} + +#[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] +#[fragment(path_overrides(fragment = "crate::config::fragment"))] +#[fragment_attrs( + derive( + Clone, + Debug, + Default, + Deserialize, + JsonSchema, + Merge, + PartialEq, + Serialize + ), + merge(path_overrides(merge = "crate::config::merge")), + serde(rename_all = "camelCase") +)] +pub struct AutomaticContainerLogConfig { #[fragment_attrs(serde(default))] pub loggers: BTreeMap, #[fragment_attrs(serde(default))] @@ -75,7 +163,7 @@ pub struct ContainerLogConfig { pub file: AppenderConfig, } -impl ContainerLogConfig { +impl AutomaticContainerLogConfig { pub const ROOT_LOGGER: &'static str = "ROOT"; pub fn root_log_level(&self) -> Option { @@ -93,9 +181,9 @@ impl ContainerLogConfig { Debug, Default, Deserialize, - Merge, JsonSchema, PartialEq, + Merge, Serialize ), merge(path_overrides(merge = "crate::config::merge")), @@ -113,8 +201,8 @@ pub struct LoggerConfig { Debug, Default, Deserialize, - Merge, JsonSchema, + Merge, PartialEq, Serialize ), @@ -127,11 +215,23 @@ pub struct AppenderConfig { } #[derive( - Clone, Copy, Debug, Deserialize, Eq, JsonSchema, Ord, PartialEq, PartialOrd, Serialize, + Clone, + Copy, + Debug, + Derivative, + Deserialize, + Eq, + JsonSchema, + Ord, + PartialEq, + PartialOrd, + Serialize, )] +#[derivative(Default)] pub enum LogLevel { TRACE, DEBUG, + #[derivative(Default)] INFO, WARN, ERROR, @@ -139,12 +239,6 @@ pub enum LogLevel { NONE, } -impl Default for LogLevel { - fn default() -> Self { - LogLevel::INFO - } -} - impl Atomic for LogLevel {} impl LogLevel { @@ -176,18 +270,287 
@@ where pub fn default_container_log_config() -> ContainerLogConfigFragment { ContainerLogConfigFragment { - loggers: [( - ContainerLogConfig::ROOT_LOGGER.into(), - LoggerConfigFragment { - level: Some(LogLevel::INFO), + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: [( + AutomaticContainerLogConfig::ROOT_LOGGER.into(), + LoggerConfigFragment { + level: Some(LogLevel::INFO), + }, + )] + .into(), + console: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + file: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + }, + )), + } +} + +#[cfg(test)] +mod tests { + use std::collections::BTreeMap; + + use crate::config::{fragment, merge}; + + use super::{ + AppenderConfig, AppenderConfigFragment, AutomaticContainerLogConfig, + AutomaticContainerLogConfigFragment, ConfigMapLogConfig, ConfigMapLogConfigFragment, + ContainerLogConfig, ContainerLogConfigChoice, ContainerLogConfigChoiceFragment, + ContainerLogConfigFragment, CustomContainerLogConfig, CustomContainerLogConfigFragment, + LogLevel, + }; + + #[test] + fn serialize_container_log_config() { + assert_eq!( + "{\"loggers\":{},\"console\":{\"levelThreshold\":\"INFO\"},\"file\":{\"levelThreshold\":\"WARN\"}}".to_string(), + serde_json::to_string(&ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + file: AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }, + }, + )), + }) + .unwrap() + ); + + assert_eq!( + "{\"custom\":{\"configMap\":\"configMap\"}}".to_string(), + serde_json::to_string(&ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Custom( + CustomContainerLogConfigFragment { + custom: ConfigMapLogConfigFragment { + config_map: Some("configMap".into()) + } + }, + )), + }) + .unwrap() + ); + } + + #[test] + fn deserialize_container_log_config() { + assert_eq!( + ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + file: AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }, + }, + )), + }, + serde_json::from_str::( + "{\"loggers\":{},\"console\":{\"levelThreshold\":\"INFO\"},\"file\":{\"levelThreshold\":\"WARN\"}}" + ) + .unwrap() + ); + + assert_eq!( + ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Custom( + CustomContainerLogConfigFragment { + custom: ConfigMapLogConfigFragment { + config_map: Some("configMap".into()) + } + } + )), + }, + serde_json::from_str::( + "{\"custom\":{\"configMap\":\"configMap\"}}" + ) + .unwrap() + ); + + assert_eq!( + ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: AppenderConfigFragment { + level_threshold: None, + }, + file: AppenderConfigFragment { + level_threshold: None, + }, + }, + )), + }, + serde_json::from_str::("{}").unwrap() + ); + + assert_eq!( + ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Custom( + CustomContainerLogConfigFragment { + custom: ConfigMapLogConfigFragment { + config_map: Some("configMap".into()) + } + } + )), + }, + serde_json::from_str::( + 
"{\"custom\":{\"configMap\":\"configMap\"},\"loggers\":{},\"console\":{\"levelThreshold\":\"INFO\"},\"file\":{\"levelThreshold\":\"WARN\"}}" + ) + .unwrap() + ); + } + + #[test] + fn merge_container_log_config() { + assert_eq!( + ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + file: AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }, + }, + )), + }, + merge::merge( + ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + file: AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }, + }, + )), + }, + &ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Custom( + CustomContainerLogConfigFragment { + custom: ConfigMapLogConfigFragment { + config_map: Some("configMap".into()) + } + }, + )), + } + ) + ); + + assert_eq!( + ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + file: AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }, + }, + )), + }, + merge::merge( + ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: AppenderConfigFragment { + level_threshold: None, + }, + file: AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }, + }, + )), + }, + &ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + file: AppenderConfigFragment { + level_threshold: None, + }, + }, + )), + } + ) + ); + } + + #[test] + fn validate_automatic_container_log_config() { + assert_eq!( + ContainerLogConfig { + choice: Some(ContainerLogConfigChoice::Automatic( + AutomaticContainerLogConfig { + loggers: BTreeMap::new(), + console: AppenderConfig { + level_threshold: LogLevel::INFO + }, + file: AppenderConfig { + level_threshold: LogLevel::WARN + }, + } + )) + }, + fragment::validate::(ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Automatic( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }, + file: AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }, + }, + )), + }) + .unwrap() + ); + } + + #[test] + fn validate_custom_container_log_config() { + assert_eq!( + ContainerLogConfig { + choice: Some(ContainerLogConfigChoice::Custom(CustomContainerLogConfig { + custom: ConfigMapLogConfig { + config_map: "configMap".into() + } + })) }, - )] - .into(), - console: AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), - }, - file: AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), - }, + fragment::validate::(ContainerLogConfigFragment { + choice: Some(ContainerLogConfigChoiceFragment::Custom( + CustomContainerLogConfigFragment { + custom: ConfigMapLogConfigFragment { + config_map: Some("configMap".into()) + } 
+ }, + )), + }) + .unwrap() + ); } } From 817fd5e1b0436a47c338fa1534e8593031da8729 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Thu, 1 Dec 2022 10:07:09 +0100 Subject: [PATCH 05/22] Use resolved product image for the Vector container --- src/logging/framework.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index d090fba67..110bb1147 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -2,7 +2,7 @@ use std::cmp; use k8s_openapi::api::core::v1::Container; -use crate::builder::ContainerBuilder; +use crate::{builder::ContainerBuilder, commons::product_image_selection::ResolvedProductImage}; use super::spec::{AutomaticContainerLogConfig, LogLevel}; @@ -276,11 +276,15 @@ address = "{vector_aggregator_address}" ) } -pub fn vector_container(image: &str, config_volume_name: &str, log_volume_name: &str) -> Container { +pub fn vector_container( + image: &ResolvedProductImage, + config_volume_name: &str, + log_volume_name: &str, +) -> Container { // TODO Increase verbosity if root log level is lower than INFO. ContainerBuilder::new("vector") .unwrap() - .image(image) + .image_from_product_image(image) .command(vec!["/stackable/vector/bin/vector".into()]) .args(vec![ "--config".into(), From f163e71fb3df0c3b5968a3f9f021219948147523 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Thu, 1 Dec 2022 16:04:40 +0100 Subject: [PATCH 06/22] Make some fields in the logging structure optional --- src/logging/framework.rs | 53 +++++++-- src/logging/spec.rs | 246 ++++++++++++++++++++++++++++----------- 2 files changed, 220 insertions(+), 79 deletions(-) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index 110bb1147..df8e6b162 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -16,9 +16,23 @@ pub fn capture_shell_output( container: &str, log_config: &AutomaticContainerLogConfig, ) -> String { - let root_log_level = log_config.root_log_level().unwrap_or_default(); - let console_log_level = cmp::max(root_log_level, log_config.console.level_threshold); - let file_log_level = cmp::max(root_log_level, log_config.file.level_threshold); + let root_log_level = log_config.root_log_level(); + let console_log_level = cmp::max( + root_log_level, + log_config + .console + .as_ref() + .and_then(|console| console.level_threshold) + .unwrap_or_default(), + ); + let file_log_level = cmp::max( + root_log_level, + log_config + .file + .as_ref() + .and_then(|file| file.level_threshold) + .unwrap_or_default(), + ); let log_file_dir = format!("{log_dir}/{container}"); @@ -93,12 +107,19 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout {loggers}"#, max_log_file_size_in_mb = max_size_in_mb / (1 + number_of_archived_log_files), - root_log_level = config - .root_log_level() + root_log_level = config.root_log_level().to_logback_literal(), + console_log_level_threshold = config + .console + .as_ref() + .and_then(|console| console.level_threshold) + .unwrap_or_default() + .to_logback_literal(), + file_log_level_threshold = config + .file + .as_ref() + .and_then(|file| file.level_threshold) .unwrap_or_default() .to_logback_literal(), - console_log_level_threshold = config.console.level_threshold.to_logback_literal(), - file_log_level_threshold = config.file.level_threshold.to_logback_literal(), ) } @@ -160,12 +181,19 @@ pub fn create_logback_config( "#, max_log_file_size_in_mb = max_size_in_mb / (1 + number_of_archived_log_files), - root_log_level = config - .root_log_level() + 
root_log_level = config.root_log_level().to_logback_literal(), + console_log_level_threshold = config + .console + .as_ref() + .and_then(|console| console.level_threshold) + .unwrap_or_default() + .to_logback_literal(), + file_log_level_threshold = config + .file + .as_ref() + .and_then(|file| file.level_threshold) .unwrap_or_default() .to_logback_literal(), - console_log_level_threshold = config.console.level_threshold.to_logback_literal(), - file_log_level_threshold = config.file.level_threshold.to_logback_literal(), ) } @@ -174,7 +202,8 @@ pub fn create_vector_config( config: Option<&AutomaticContainerLogConfig>, ) -> String { let vector_log_level = config - .map(|config| config.file.level_threshold) + .and_then(|config| config.file.as_ref()) + .and_then(|file| file.level_threshold) .unwrap_or_default(); let vector_log_level_filter_expression = match vector_log_level { diff --git a/src/logging/spec.rs b/src/logging/spec.rs index caf5b6707..3389c2138 100644 --- a/src/logging/spec.rs +++ b/src/logging/spec.rs @@ -2,7 +2,7 @@ use std::collections::BTreeMap; use std::fmt::Display; use crate::config::fragment::{self, FromFragment}; -use crate::config::merge::{self, Atomic}; +use crate::config::merge::Atomic; use crate::config::{fragment::Fragment, merge::Merge}; use derivative::Derivative; @@ -141,35 +141,44 @@ pub struct ConfigMapLogConfig { #[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] #[fragment(path_overrides(fragment = "crate::config::fragment"))] #[fragment_attrs( - derive( - Clone, - Debug, - Default, - Deserialize, - JsonSchema, - Merge, - PartialEq, - Serialize - ), - merge(path_overrides(merge = "crate::config::merge")), + derive(Clone, Debug, Default, Deserialize, JsonSchema, PartialEq, Serialize), serde(rename_all = "camelCase") )] pub struct AutomaticContainerLogConfig { #[fragment_attrs(serde(default))] pub loggers: BTreeMap, - #[fragment_attrs(serde(default))] - pub console: AppenderConfig, - #[fragment_attrs(serde(default))] - pub file: AppenderConfig, + pub console: Option, + pub file: Option, +} + +impl Merge for AutomaticContainerLogConfigFragment { + fn merge(&mut self, defaults: &Self) { + self.loggers.merge(&defaults.loggers); + if let Some(console) = &mut self.console { + if let Some(defaults_console) = &defaults.console { + console.merge(defaults_console); + } + } else { + self.console = defaults.console.clone(); + } + if let Some(file) = &mut self.file { + if let Some(defaults_file) = &defaults.file { + file.merge(defaults_file); + } + } else { + self.file = defaults.file.clone(); + } + } } impl AutomaticContainerLogConfig { pub const ROOT_LOGGER: &'static str = "ROOT"; - pub fn root_log_level(&self) -> Option { + pub fn root_log_level(&self) -> LogLevel { self.loggers .get(Self::ROOT_LOGGER) .map(|root| root.level.to_owned()) + .unwrap_or_default() } } @@ -210,8 +219,7 @@ pub struct LoggerConfig { serde(rename_all = "camelCase") )] pub struct AppenderConfig { - #[fragment_attrs(serde(default))] - pub level_threshold: LogLevel, + pub level_threshold: Option, } #[derive( @@ -279,12 +287,12 @@ pub fn default_container_log_config() -> ContainerLogConfigFragment { }, )] .into(), - console: AppenderConfigFragment { + console: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::INFO), - }, - file: AppenderConfigFragment { + }), + file: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::INFO), - }, + }), }, )), } @@ -312,12 +320,12 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( 
AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: AppenderConfigFragment { + console: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::INFO), - }, - file: AppenderConfigFragment { + }), + file: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::WARN), - }, + }), }, )), }) @@ -346,12 +354,12 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: AppenderConfigFragment { + console: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::INFO), - }, - file: AppenderConfigFragment { + }), + file: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::WARN), - }, + }), }, )), }, @@ -382,12 +390,8 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: AppenderConfigFragment { - level_threshold: None, - }, - file: AppenderConfigFragment { - level_threshold: None, - }, + console: None, + file: None, }, )), }, @@ -411,6 +415,114 @@ mod tests { ); } + #[test] + fn merge_automatic_container_log_config_fragment() { + assert_eq!( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: None, + file: None, + }, + merge::merge( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: None, + file: None, + }, + &AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: None, + file: None, + } + ) + ); + assert_eq!( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }), + file: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }), + }, + merge::merge( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }), + file: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }), + }, + &AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: None, + file: None, + } + ) + ); + assert_eq!( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }), + file: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }), + }, + merge::merge( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: None, + file: None, + }, + &AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }), + file: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }), + } + ) + ); + assert_eq!( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }), + file: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::ERROR), + }), + }, + merge::merge( + AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: Some(AppenderConfigFragment { + level_threshold: None, + }), + file: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::ERROR), + }), + }, + &AutomaticContainerLogConfigFragment { + loggers: BTreeMap::new(), + console: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::INFO), + }), + file: Some(AppenderConfigFragment { + level_threshold: Some(LogLevel::WARN), + }), + } + ) + ); + } + 
#[test] fn merge_container_log_config() { assert_eq!( @@ -418,12 +530,12 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: AppenderConfigFragment { + console: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::INFO), - }, - file: AppenderConfigFragment { + }), + file: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::WARN), - }, + }), }, )), }, @@ -432,12 +544,12 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: AppenderConfigFragment { + console: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::INFO), - }, - file: AppenderConfigFragment { + }), + file: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::WARN), - }, + }), }, )), }, @@ -458,12 +570,12 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: AppenderConfigFragment { + console: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::INFO), - }, - file: AppenderConfigFragment { + }), + file: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::WARN), - }, + }), }, )), }, @@ -472,12 +584,12 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: AppenderConfigFragment { + console: Some(AppenderConfigFragment { level_threshold: None, - }, - file: AppenderConfigFragment { + }), + file: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::WARN), - }, + }), }, )), }, @@ -485,12 +597,12 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: AppenderConfigFragment { + console: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::INFO), - }, - file: AppenderConfigFragment { + }), + file: Some(AppenderConfigFragment { level_threshold: None, - }, + }), }, )), } @@ -505,12 +617,12 @@ mod tests { choice: Some(ContainerLogConfigChoice::Automatic( AutomaticContainerLogConfig { loggers: BTreeMap::new(), - console: AppenderConfig { - level_threshold: LogLevel::INFO - }, - file: AppenderConfig { - level_threshold: LogLevel::WARN - }, + console: Some(AppenderConfig { + level_threshold: Some(LogLevel::INFO) + }), + file: Some(AppenderConfig { + level_threshold: Some(LogLevel::WARN) + }), } )) }, @@ -518,12 +630,12 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: AppenderConfigFragment { + console: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::INFO), - }, - file: AppenderConfigFragment { + }), + file: Some(AppenderConfigFragment { level_threshold: Some(LogLevel::WARN), - }, + }), }, )), }) From ac141d04bbcf931bf1853a921dd43dc882462f77 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Mon, 5 Dec 2022 11:43:29 +0100 Subject: [PATCH 07/22] Set Vector log level according to the configured root log level --- src/logging/framework.rs | 16 ++++++++++++++-- src/logging/spec.rs | 13 +++++++++++++ 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index df8e6b162..4048beba6 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -4,7 +4,9 @@ use k8s_openapi::api::core::v1::Container; use crate::{builder::ContainerBuilder, 
commons::product_image_selection::ResolvedProductImage}; -use super::spec::{AutomaticContainerLogConfig, LogLevel}; +use super::spec::{ + AutomaticContainerLogConfig, ContainerLogConfig, ContainerLogConfigChoice, LogLevel, +}; const STACKABLE_CONFIG_DIR: &str = "/stackable/config"; const STACKABLE_LOG_DIR: &str = "/stackable/log"; @@ -309,8 +311,17 @@ pub fn vector_container( image: &ResolvedProductImage, config_volume_name: &str, log_volume_name: &str, + log_config: &ContainerLogConfig, ) -> Container { - // TODO Increase verbosity if root log level is lower than INFO. + let log_level = if let ContainerLogConfig { + choice: Some(ContainerLogConfigChoice::Automatic(automatic_log_config)), + } = log_config + { + automatic_log_config.root_log_level() + } else { + LogLevel::INFO + }; + ContainerBuilder::new("vector") .unwrap() .image_from_product_image(image) @@ -319,6 +330,7 @@ pub fn vector_container( "--config".into(), format!("{STACKABLE_CONFIG_DIR}/{VECTOR_CONFIG_FILE}"), ]) + .add_env_var("VECTOR_LOG", log_level.to_vector_literal()) .add_volume_mount(config_volume_name, STACKABLE_CONFIG_DIR) .add_volume_mount(log_volume_name, STACKABLE_LOG_DIR) .build() diff --git a/src/logging/spec.rs b/src/logging/spec.rs index 3389c2138..f022b5e7b 100644 --- a/src/logging/spec.rs +++ b/src/logging/spec.rs @@ -250,6 +250,19 @@ pub enum LogLevel { impl Atomic for LogLevel {} impl LogLevel { + pub fn to_vector_literal(&self) -> String { + match self { + LogLevel::TRACE => "TRACE", + LogLevel::DEBUG => "DEBUG", + LogLevel::INFO => "INFO", + LogLevel::WARN => "WARN", + LogLevel::ERROR => "ERROR", + LogLevel::FATAL => "ERROR", + LogLevel::NONE => "ERROR", + } + .into() + } + pub fn to_logback_literal(&self) -> String { match self { LogLevel::TRACE => "TRACE", From bc77bc11fb2a2efdb53e05543cb187afd4c2c947 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Mon, 5 Dec 2022 12:09:11 +0100 Subject: [PATCH 08/22] Make log config optional for the vector_container function --- src/logging/framework.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index 4048beba6..a9cb8f934 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -311,11 +311,11 @@ pub fn vector_container( image: &ResolvedProductImage, config_volume_name: &str, log_volume_name: &str, - log_config: &ContainerLogConfig, + log_config: Option<&ContainerLogConfig>, ) -> Container { - let log_level = if let ContainerLogConfig { + let log_level = if let Some(ContainerLogConfig { choice: Some(ContainerLogConfigChoice::Automatic(automatic_log_config)), - } = log_config + }) = log_config { automatic_log_config.root_log_level() } else { From 0920ca20ce02e9bfdc02fab031877d1a2d814cdf Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Thu, 8 Dec 2022 14:05:36 +0100 Subject: [PATCH 09/22] Rename "levelThreshold" to "level" --- src/logging/framework.rs | 30 +++++++-------- src/logging/spec.rs | 83 +++++++++++++++++++--------------------- 2 files changed, 54 insertions(+), 59 deletions(-) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index a9cb8f934..d76c76b1b 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -24,7 +24,7 @@ pub fn capture_shell_output( log_config .console .as_ref() - .and_then(|console| console.level_threshold) + .and_then(|console| console.level) .unwrap_or_default(), ); let file_log_level = cmp::max( @@ -32,7 +32,7 @@ pub fn capture_shell_output( log_config .file .as_ref() - .and_then(|file| 
file.level_threshold) + .and_then(|file| file.level) .unwrap_or_default(), ); @@ -96,12 +96,12 @@ pub fn create_log4j_config( r#"log4j.rootLogger={root_log_level}, CONSOLE, FILE log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender -log4j.appender.CONSOLE.Threshold={console_log_level_threshold} +log4j.appender.CONSOLE.Threshold={console_log_level} log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout log4j.appender.CONSOLE.layout.ConversionPattern=%d{{ISO8601}} [myid:%X{{myid}}] - %-5p [%t:%C{{1}}@%L] - %m%n log4j.appender.FILE=org.apache.log4j.RollingFileAppender -log4j.appender.FILE.Threshold={file_log_level_threshold} +log4j.appender.FILE.Threshold={file_log_level} log4j.appender.FILE.File={log_dir}/{log_file} log4j.appender.FILE.MaxFileSize={max_log_file_size_in_mb}MB log4j.appender.FILE.MaxBackupIndex={number_of_archived_log_files} @@ -110,16 +110,16 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout {loggers}"#, max_log_file_size_in_mb = max_size_in_mb / (1 + number_of_archived_log_files), root_log_level = config.root_log_level().to_logback_literal(), - console_log_level_threshold = config + console_log_level = config .console .as_ref() - .and_then(|console| console.level_threshold) + .and_then(|console| console.level) .unwrap_or_default() .to_logback_literal(), - file_log_level_threshold = config + file_log_level = config .file .as_ref() - .and_then(|file| file.level_threshold) + .and_then(|file| file.level) .unwrap_or_default() .to_logback_literal(), ) @@ -153,7 +153,7 @@ pub fn create_logback_config( %d{{ISO8601}} [myid:%X{{myid}}] - %-5p [%t:%C{{1}}@%L] - %m%n - {console_log_level_threshold} + {console_log_level} @@ -163,7 +163,7 @@ pub fn create_logback_config( - {file_log_level_threshold} + {file_log_level} 1 @@ -184,16 +184,16 @@ pub fn create_logback_config( "#, max_log_file_size_in_mb = max_size_in_mb / (1 + number_of_archived_log_files), root_log_level = config.root_log_level().to_logback_literal(), - console_log_level_threshold = config + console_log_level = config .console .as_ref() - .and_then(|console| console.level_threshold) + .and_then(|console| console.level) .unwrap_or_default() .to_logback_literal(), - file_log_level_threshold = config + file_log_level = config .file .as_ref() - .and_then(|file| file.level_threshold) + .and_then(|file| file.level) .unwrap_or_default() .to_logback_literal(), ) @@ -205,7 +205,7 @@ pub fn create_vector_config( ) -> String { let vector_log_level = config .and_then(|config| config.file.as_ref()) - .and_then(|file| file.level_threshold) + .and_then(|file| file.level) .unwrap_or_default(); let vector_log_level_filter_expression = match vector_log_level { diff --git a/src/logging/spec.rs b/src/logging/spec.rs index f022b5e7b..b1c7e5742 100644 --- a/src/logging/spec.rs +++ b/src/logging/spec.rs @@ -219,7 +219,7 @@ pub struct LoggerConfig { serde(rename_all = "camelCase") )] pub struct AppenderConfig { - pub level_threshold: Option, + pub level: Option, } #[derive( @@ -301,10 +301,10 @@ pub fn default_container_log_config() -> ContainerLogConfigFragment { )] .into(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), }, )), @@ -328,16 +328,17 @@ mod tests { #[test] fn serialize_container_log_config() { assert_eq!( - "{\"loggers\":{},\"console\":{\"levelThreshold\":\"INFO\"},\"file\":{\"levelThreshold\":\"WARN\"}}".to_string(), + 
"{\"loggers\":{},\"console\":{\"level\":\"INFO\"},\"file\":{\"level\":\"WARN\"}}" + .to_string(), serde_json::to_string(&ContainerLogConfigFragment { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, )), @@ -368,16 +369,16 @@ mod tests { AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, )), }, serde_json::from_str::( - "{\"loggers\":{},\"console\":{\"levelThreshold\":\"INFO\"},\"file\":{\"levelThreshold\":\"WARN\"}}" + "{\"loggers\":{},\"console\":{\"level\":\"INFO\"},\"file\":{\"level\":\"WARN\"}}" ) .unwrap() ); @@ -422,7 +423,7 @@ mod tests { )), }, serde_json::from_str::( - "{\"custom\":{\"configMap\":\"configMap\"},\"loggers\":{},\"console\":{\"levelThreshold\":\"INFO\"},\"file\":{\"levelThreshold\":\"WARN\"}}" + "{\"custom\":{\"configMap\":\"configMap\"},\"loggers\":{},\"console\":{\"level\":\"INFO\"},\"file\":{\"level\":\"WARN\"}}" ) .unwrap() ); @@ -453,20 +454,20 @@ mod tests { AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, merge::merge( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, &AutomaticContainerLogConfigFragment { @@ -480,10 +481,10 @@ mod tests { AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, merge::merge( @@ -495,10 +496,10 @@ mod tests { &AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), } ) @@ -507,29 +508,27 @@ mod tests { AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::ERROR), + level: Some(LogLevel::ERROR), }), }, merge::merge( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: Some(AppenderConfigFragment { - level_threshold: None, - }), + console: Some(AppenderConfigFragment { level: None }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::ERROR), + level: Some(LogLevel::ERROR), }), }, &AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: 
Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), } ) @@ -544,10 +543,10 @@ mod tests { AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, )), @@ -558,10 +557,10 @@ mod tests { AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, )), @@ -584,10 +583,10 @@ mod tests { AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, )), @@ -597,11 +596,9 @@ mod tests { choice: Some(ContainerLogConfigChoiceFragment::Automatic( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), - console: Some(AppenderConfigFragment { - level_threshold: None, - }), + console: Some(AppenderConfigFragment { level: None }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, )), @@ -611,11 +608,9 @@ mod tests { AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), - }), - file: Some(AppenderConfigFragment { - level_threshold: None, + level: Some(LogLevel::INFO), }), + file: Some(AppenderConfigFragment { level: None }), }, )), } @@ -631,10 +626,10 @@ mod tests { AutomaticContainerLogConfig { loggers: BTreeMap::new(), console: Some(AppenderConfig { - level_threshold: Some(LogLevel::INFO) + level: Some(LogLevel::INFO) }), file: Some(AppenderConfig { - level_threshold: Some(LogLevel::WARN) + level: Some(LogLevel::WARN) }), } )) @@ -644,10 +639,10 @@ mod tests { AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), console: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::INFO), + level: Some(LogLevel::INFO), }), file: Some(AppenderConfigFragment { - level_threshold: Some(LogLevel::WARN), + level: Some(LogLevel::WARN), }), }, )), From 7a4e2e47ffba0888c126643b52dedb23f17fa606 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Thu, 8 Dec 2022 17:04:24 +0100 Subject: [PATCH 10/22] Document logging modules --- src/logging/framework.rs | 20 +++++++++--- src/logging/spec.rs | 67 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 83 insertions(+), 4 deletions(-) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index d76c76b1b..1fa0c064d 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -1,3 +1,5 @@ +//! 
Log aggregation framework + use std::cmp; use k8s_openapi::api::core::v1::Container; @@ -8,11 +10,17 @@ use super::spec::{ AutomaticContainerLogConfig, ContainerLogConfig, ContainerLogConfigChoice, LogLevel, }; +/// Config directory used in the Vector log agent container const STACKABLE_CONFIG_DIR: &str = "/stackable/config"; +/// Directory which contains a subdirectory for every container which themselves contain the +/// corresponding log files const STACKABLE_LOG_DIR: &str = "/stackable/log"; +/// File name of the Vector config file pub const VECTOR_CONFIG_FILE: &str = "vector.toml"; +/// Create a Bash command which filters stdout and stderr according to the given log configuration +/// and additionally stores the output in log files pub fn capture_shell_output( log_dir: &str, container: &str, @@ -71,6 +79,7 @@ pub fn capture_shell_output( args.join(" && ") } +/// Create the content of a log4j properties file according to the given log configuration pub fn create_log4j_config( log_dir: &str, log_file: &str, @@ -87,7 +96,7 @@ pub fn create_log4j_config( format!( "log4j.logger.{name}={level}\n", name = name.escape_default(), - level = logger_config.level.to_logback_literal(), + level = logger_config.level.to_log4j_literal(), ) }) .collect::(); @@ -109,22 +118,23 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout {loggers}"#, max_log_file_size_in_mb = max_size_in_mb / (1 + number_of_archived_log_files), - root_log_level = config.root_log_level().to_logback_literal(), + root_log_level = config.root_log_level().to_log4j_literal(), console_log_level = config .console .as_ref() .and_then(|console| console.level) .unwrap_or_default() - .to_logback_literal(), + .to_log4j_literal(), file_log_level = config .file .as_ref() .and_then(|file| file.level) .unwrap_or_default() - .to_logback_literal(), + .to_log4j_literal(), ) } +/// Create the content of a logback XML configuration file according to the given log configuration pub fn create_logback_config( log_dir: &str, log_file: &str, @@ -199,6 +209,7 @@ pub fn create_logback_config( ) } +/// Create the content of a Vector configuration file according to the given log configuration pub fn create_vector_config( vector_aggregator_address: &str, config: Option<&AutomaticContainerLogConfig>, @@ -307,6 +318,7 @@ address = "{vector_aggregator_address}" ) } +/// Create the specification of the Vector log agent container pub fn vector_container( image: &ResolvedProductImage, config_volume_name: &str, diff --git a/src/logging/spec.rs b/src/logging/spec.rs index b1c7e5742..44de41e12 100644 --- a/src/logging/spec.rs +++ b/src/logging/spec.rs @@ -1,3 +1,5 @@ +//! 
Logging structure used within Custom Resource Definitions + use std::collections::BTreeMap; use std::fmt::Display; @@ -9,6 +11,7 @@ use derivative::Derivative; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; +/// Logging configuration #[derive(Clone, Debug, Derivative, Eq, Fragment, JsonSchema, PartialEq)] #[derivative(Default(bound = ""))] #[fragment(path_overrides(fragment = "crate::config::fragment"))] @@ -34,11 +37,14 @@ pub struct Logging<T> where T: Clone + Display + Ord, { + /// Whether or not to deploy a container with the Vector log agent pub enable_vector_agent: bool, + /// Log configuration per container #[fragment_attrs(serde(default))] pub containers: BTreeMap<T, ContainerLogConfig>, } +/// Log configuration of the container #[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] #[fragment(path_overrides(fragment = "crate::config::fragment"))] #[fragment_attrs( @@ -56,25 +62,34 @@ where serde(rename_all = "camelCase") )] pub struct ContainerLogConfig { + /// Custom or automatic log configuration #[fragment_attrs(serde(flatten))] pub choice: Option<ContainerLogConfigChoice>, } +/// Custom or automatic log configuration +/// +/// The custom log configuration takes precedence over the automatic one. #[derive(Clone, Debug, Derivative, Eq, JsonSchema, PartialEq)] #[derivative(Default)] pub enum ContainerLogConfigChoice { + /// Custom log configuration provided in a ConfigMap Custom(CustomContainerLogConfig), + /// Automatic log configuration according to the given values #[derivative(Default)] Automatic(AutomaticContainerLogConfig), } +/// Fragment derived from `ContainerLogConfigChoice` #[derive(Clone, Debug, Derivative, Deserialize, JsonSchema, Merge, PartialEq, Serialize)] #[derivative(Default)] #[merge(path_overrides(merge = "crate::config::merge"))] #[serde(untagged)] pub enum ContainerLogConfigChoiceFragment { + /// Custom log configuration provided in a ConfigMap Custom(CustomContainerLogConfigFragment), #[derivative(Default)] + /// Automatic log configuration according to the given values Automatic(AutomaticContainerLogConfigFragment), } @@ -97,6 +112,7 @@ impl FromFragment for ContainerLogConfigChoice { } } +/// Log configuration for a container provided in a ConfigMap #[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] #[fragment(path_overrides(fragment = "crate::config::fragment"))] #[fragment_attrs( @@ -117,6 +133,7 @@ pub struct CustomContainerLogConfig { pub custom: ConfigMapLogConfig, } +/// Log configuration provided in a ConfigMap #[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] #[fragment(path_overrides(fragment = "crate::config::fragment"))] #[fragment_attrs( @@ -134,10 +151,12 @@ pub struct CustomContainerLogConfig { serde(rename_all = "camelCase") )] pub struct ConfigMapLogConfig { + /// ConfigMap containing the log configuration files #[fragment_attrs(serde(default))] pub config_map: String, } +/// Generic log configuration #[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] #[fragment(path_overrides(fragment = "crate::config::fragment"))] #[fragment_attrs( @@ -145,9 +164,12 @@ pub struct ConfigMapLogConfig { serde(rename_all = "camelCase") )] pub struct AutomaticContainerLogConfig { + /// Configuration per logger #[fragment_attrs(serde(default))] pub loggers: BTreeMap<String, LoggerConfig>, + /// Configuration for the console appender pub console: Option<AppenderConfig>, + /// Configuration for the file appender pub file: Option<AppenderConfig>, } @@ -172,8 +194,10 @@ impl Merge for AutomaticContainerLogConfigFragment { } impl AutomaticContainerLogConfig { + /// Name of the root
logger pub const ROOT_LOGGER: &'static str = "ROOT"; + /// Return the log level of the root logger pub fn root_log_level(&self) -> LogLevel { self.loggers .get(Self::ROOT_LOGGER) @@ -182,6 +206,7 @@ impl AutomaticContainerLogConfig { } } +/// Configuration of a logger #[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] #[fragment(path_overrides(fragment = "crate::config::fragment"))] #[fragment_attrs( @@ -199,9 +224,13 @@ impl AutomaticContainerLogConfig { serde(rename_all = "camelCase") )] pub struct LoggerConfig { + /// The log level threshold + /// + /// Log events with a lower log level are discarded. pub level: LogLevel, } +/// Configuration of a log appender #[derive(Clone, Debug, Default, Eq, Fragment, JsonSchema, PartialEq)] #[fragment(path_overrides(fragment = "crate::config::fragment"))] #[fragment_attrs( @@ -219,9 +248,13 @@ pub struct LoggerConfig { serde(rename_all = "camelCase") )] pub struct AppenderConfig { + /// The log level threshold + /// + /// Log events with a lower log level are discarded. pub level: Option, } +/// Log levels #[derive( Clone, Copy, @@ -244,12 +277,14 @@ pub enum LogLevel { WARN, ERROR, FATAL, + /// Turn logging off NONE, } impl Atomic for LogLevel {} impl LogLevel { + /// Convert the log level to a string understood by Vector pub fn to_vector_literal(&self) -> String { match self { LogLevel::TRACE => "TRACE", @@ -263,6 +298,7 @@ impl LogLevel { .into() } + /// Convert the log level to a string understood by logback pub fn to_logback_literal(&self) -> String { match self { LogLevel::TRACE => "TRACE", @@ -275,8 +311,23 @@ impl LogLevel { } .into() } + + /// Convert the log level to a string understood by log4j + pub fn to_log4j_literal(&self) -> String { + match self { + LogLevel::TRACE => "TRACE", + LogLevel::DEBUG => "DEBUG", + LogLevel::INFO => "INFO", + LogLevel::WARN => "WARN", + LogLevel::ERROR => "ERROR", + LogLevel::FATAL => "FATAL", + LogLevel::NONE => "OFF", + } + .into() + } } +/// Create the default logging configuration pub fn default_logging() -> LoggingFragment where T: Clone + Display + Ord + strum::IntoEnumIterator, @@ -289,6 +340,7 @@ where } } +/// Create the default logging configuration for a container pub fn default_container_log_config() -> ContainerLogConfigFragment { ContainerLogConfigFragment { choice: Some(ContainerLogConfigChoiceFragment::Automatic( @@ -327,6 +379,7 @@ mod tests { #[test] fn serialize_container_log_config() { + // automatic configuration assert_eq!( "{\"loggers\":{},\"console\":{\"level\":\"INFO\"},\"file\":{\"level\":\"WARN\"}}" .to_string(), @@ -346,6 +399,7 @@ mod tests { .unwrap() ); + // custom configuration assert_eq!( "{\"custom\":{\"configMap\":\"configMap\"}}".to_string(), serde_json::to_string(&ContainerLogConfigFragment { @@ -363,6 +417,7 @@ mod tests { #[test] fn deserialize_container_log_config() { + // automatic configuration if only automatic configuration is given assert_eq!( ContainerLogConfigFragment { choice: Some(ContainerLogConfigChoiceFragment::Automatic( @@ -383,6 +438,7 @@ mod tests { .unwrap() ); + // custom configuration if only custom configuration is given assert_eq!( ContainerLogConfigFragment { choice: Some(ContainerLogConfigChoiceFragment::Custom( @@ -399,6 +455,7 @@ mod tests { .unwrap() ); + // automatic configuration if no configuration is given assert_eq!( ContainerLogConfigFragment { choice: Some(ContainerLogConfigChoiceFragment::Automatic( @@ -412,6 +469,7 @@ mod tests { serde_json::from_str::("{}").unwrap() ); + // custom configuration if custom and 
automatic configurations are given assert_eq!( ContainerLogConfigFragment { choice: Some(ContainerLogConfigChoiceFragment::Custom( @@ -431,6 +489,7 @@ mod tests { #[test] fn merge_automatic_container_log_config_fragment() { + // no overriding log level + no default log level -> no log level assert_eq!( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), @@ -450,6 +509,8 @@ mod tests { } ) ); + + // overriding log level + no default log level -> overriding log level assert_eq!( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), @@ -477,6 +538,8 @@ mod tests { } ) ); + + // no overriding log level + default log level -> default log level assert_eq!( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), @@ -504,6 +567,8 @@ mod tests { } ) ); + + // overriding log level + default log level -> overriding log level assert_eq!( AutomaticContainerLogConfigFragment { loggers: BTreeMap::new(), @@ -537,6 +602,7 @@ mod tests { #[test] fn merge_container_log_config() { + // overriding automatic config + default custom config -> overriding automatic config assert_eq!( ContainerLogConfigFragment { choice: Some(ContainerLogConfigChoiceFragment::Automatic( @@ -577,6 +643,7 @@ mod tests { ) ); + // overriding automatic config + default automatic config -> merged automatic config assert_eq!( ContainerLogConfigFragment { choice: Some(ContainerLogConfigChoiceFragment::Automatic( From 29f5113328e2a7a034932afb365b295168c81ea2 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Thu, 8 Dec 2022 17:08:13 +0100 Subject: [PATCH 11/22] Add modules for log aggregation to the changelog --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 31d3b0544..77c4ce4c3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,12 @@ All notable changes to this project will be documented in this file. ## [Unreleased] +### Added + +- Modules for log aggregation added ([#517]). 
+ +[#517]: https://github.com/stackabletech/operator-rs/pull/517 + ## [0.28.8] - 2022-12-08 ### Added From 713e97a8bc334c0c32198fb276cc5045600b394a Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Fri, 9 Dec 2022 11:37:41 +0100 Subject: [PATCH 12/22] Add examples to the documentation of the logging module --- src/logging/framework.rs | 243 +++++++++++++++++++++++++++++++++++++++ src/logging/spec.rs | 33 ++++++ 2 files changed, 276 insertions(+) diff --git a/src/logging/framework.rs b/src/logging/framework.rs index 1fa0c064d..9e475c3f0 100644 --- a/src/logging/framework.rs +++ b/src/logging/framework.rs @@ -21,6 +21,50 @@ pub const VECTOR_CONFIG_FILE: &str = "vector.toml"; /// Create a Bash command which filters stdout and stderr according to the given log configuration /// and additionally stores the output in log files +/// +/// # Example +/// +/// ``` +/// use stackable_operator::{ +/// builder::ContainerBuilder, +/// config::fragment, +/// logging, +/// logging::spec::{ +/// ContainerLogConfig, ContainerLogConfigChoice, Logging, +/// }, +/// }; +/// # use stackable_operator::logging::spec::default_logging; +/// # use strum::{Display, EnumIter}; +/// # +/// # #[derive(Clone, Display, Eq, EnumIter, Ord, PartialEq, PartialOrd)] +/// # pub enum Container { +/// # Init, +/// # } +/// # +/// # let logging = fragment::validate::>(default_logging()).unwrap(); +/// +/// const STACKABLE_LOG_DIR: &str = "/stackable/log"; +/// +/// let mut args = Vec::new(); +/// +/// if let Some(ContainerLogConfig { +/// choice: Some(ContainerLogConfigChoice::Automatic(log_config)), +/// }) = logging.containers.get(&Container::Init) +/// { +/// args.push(logging::framework::capture_shell_output( +/// STACKABLE_LOG_DIR, +/// "init", +/// &log_config, +/// )); +/// } +/// args.push("echo Test".into()); +/// +/// let init_container = ContainerBuilder::new("init") +/// .unwrap() +/// .command(vec!["bash".to_string(), "-c".to_string()]) +/// .args(vec![args.join(" && ")]) +/// .build(); +/// ``` pub fn capture_shell_output( log_dir: &str, container: &str, @@ -80,6 +124,56 @@ pub fn capture_shell_output( } /// Create the content of a log4j properties file according to the given log configuration +/// +/// # Example +/// +/// ``` +/// use stackable_operator::{ +/// builder::{ +/// ConfigMapBuilder, +/// meta::ObjectMetaBuilder, +/// }, +/// config::fragment, +/// logging, +/// logging::spec::{ +/// ContainerLogConfig, ContainerLogConfigChoice, Logging, +/// }, +/// }; +/// # use stackable_operator::logging::spec::default_logging; +/// # use strum::{Display, EnumIter}; +/// # +/// # #[derive(Clone, Display, Eq, EnumIter, Ord, PartialEq, PartialOrd)] +/// # pub enum Container { +/// # MyProduct, +/// # } +/// # +/// # let logging = fragment::validate::>(default_logging()).unwrap(); +/// +/// const STACKABLE_LOG_DIR: &str = "/stackable/log"; +/// const LOG4J_CONFIG_FILE: &str = "log4j.properties"; +/// const MY_PRODUCT_LOG_FILE: &str = "my-product.log4j.xml"; +/// const MAX_LOG_FILE_SIZE_IN_MB: i32 = 1000; +/// +/// let mut cm_builder = ConfigMapBuilder::new(); +/// cm_builder.metadata(ObjectMetaBuilder::default().build()); +/// +/// if let Some(ContainerLogConfig { +/// choice: Some(ContainerLogConfigChoice::Automatic(log_config)), +/// }) = logging.containers.get(&Container::MyProduct) +/// { +/// cm_builder.add_data( +/// LOG4J_CONFIG_FILE, +/// logging::framework::create_log4j_config( +/// &format!("{STACKABLE_LOG_DIR}/my-product"), +/// MY_PRODUCT_LOG_FILE, +/// MAX_LOG_FILE_SIZE_IN_MB, +/// log_config, +/// ), 
+/// ); +/// } +/// +/// cm_builder.build().unwrap(); +/// ``` pub fn create_log4j_config( log_dir: &str, log_file: &str, @@ -135,6 +229,58 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout } /// Create the content of a logback XML configuration file according to the given log configuration +/// +/// # Example +/// +/// ``` +/// use stackable_operator::{ +/// builder::{ +/// ConfigMapBuilder, +/// meta::ObjectMetaBuilder, +/// }, +/// logging, +/// logging::spec::{ +/// ContainerLogConfig, ContainerLogConfigChoice, Logging, +/// }, +/// }; +/// # use stackable_operator::{ +/// # config::fragment, +/// # logging::spec::default_logging, +/// # }; +/// # use strum::{Display, EnumIter}; +/// # +/// # #[derive(Clone, Display, Eq, EnumIter, Ord, PartialEq, PartialOrd)] +/// # pub enum Container { +/// # MyProduct, +/// # } +/// # +/// # let logging = fragment::validate::>(default_logging()).unwrap(); +/// +/// const STACKABLE_LOG_DIR: &str = "/stackable/log"; +/// const LOGBACK_CONFIG_FILE: &str = "logback.xml"; +/// const MY_PRODUCT_LOG_FILE: &str = "my-product.log4j.xml"; +/// const MAX_LOG_FILE_SIZE_IN_MB: i32 = 1000; +/// +/// let mut cm_builder = ConfigMapBuilder::new(); +/// cm_builder.metadata(ObjectMetaBuilder::default().build()); +/// +/// if let Some(ContainerLogConfig { +/// choice: Some(ContainerLogConfigChoice::Automatic(log_config)), +/// }) = logging.containers.get(&Container::MyProduct) +/// { +/// cm_builder.add_data( +/// LOGBACK_CONFIG_FILE, +/// logging::framework::create_logback_config( +/// &format!("{STACKABLE_LOG_DIR}/my-product"), +/// MY_PRODUCT_LOG_FILE, +/// MAX_LOG_FILE_SIZE_IN_MB, +/// log_config, +/// ), +/// ); +/// } +/// +/// cm_builder.build().unwrap(); +/// ``` pub fn create_logback_config( log_dir: &str, log_file: &str, @@ -210,6 +356,58 @@ pub fn create_logback_config( } /// Create the content of a Vector configuration file according to the given log configuration +/// +/// # Example +/// +/// ``` +/// use stackable_operator::{ +/// builder::{ +/// ConfigMapBuilder, +/// meta::ObjectMetaBuilder, +/// }, +/// logging, +/// logging::spec::{ +/// ContainerLogConfig, ContainerLogConfigChoice, Logging, +/// }, +/// }; +/// # use stackable_operator::{ +/// # config::fragment, +/// # logging::spec::default_logging, +/// # }; +/// # use strum::{Display, EnumIter}; +/// # +/// # #[derive(Clone, Display, Eq, EnumIter, Ord, PartialEq, PartialOrd)] +/// # pub enum Container { +/// # Vector, +/// # } +/// # +/// # let logging = fragment::validate::>(default_logging()).unwrap(); +/// # let vector_aggregator_address = "vector-aggregator:6000"; +/// +/// let mut cm_builder = ConfigMapBuilder::new(); +/// cm_builder.metadata(ObjectMetaBuilder::default().build()); +/// +/// let vector_log_config = if let Some(ContainerLogConfig { +/// choice: Some(ContainerLogConfigChoice::Automatic(log_config)), +/// }) = logging.containers.get(&Container::Vector) +/// { +/// Some(log_config) +/// } else { +/// None +/// }; +/// +/// if logging.enable_vector_agent { +/// cm_builder.add_data( +/// logging::framework::VECTOR_CONFIG_FILE, +/// logging::framework::create_vector_config( +/// vector_aggregator_address, +/// vector_log_config, +/// ), +/// ); +/// } +/// +/// cm_builder.build().unwrap(); +/// ``` pub fn create_vector_config( vector_aggregator_address: &str, config: Option<&AutomaticContainerLogConfig>, @@ -319,6 +517,51 @@ address = "{vector_aggregator_address}" } /// Create the specification of the Vector log agent container +/// +/// ``` +/// use stackable_operator::{ 
+/// builder::{ +/// meta::ObjectMetaBuilder, +/// PodBuilder, +/// }, +/// logging, +/// }; +/// # use stackable_operator::{ +/// # commons::product_image_selection::ResolvedProductImage, +/// # config::fragment, +/// # logging::spec::{default_logging, Logging}, +/// # }; +/// # use strum::{Display, EnumIter}; +/// # +/// # #[derive(Clone, Display, Eq, EnumIter, Ord, PartialEq, PartialOrd)] +/// # pub enum Container { +/// # Vector, +/// # } +/// # +/// # let logging = fragment::validate::>(default_logging()).unwrap(); +/// +/// # let resolved_product_image = ResolvedProductImage { +/// # product_version: "1.0.0".into(), +/// # app_version_label: "1.0.0".into(), +/// # image: "docker.stackable.tech/stackable/my-product:1.0.0-stackable1.0.0".into(), +/// # image_pull_policy: "Always".into(), +/// # pull_secrets: None, +/// # }; +/// +/// let mut pod_builder = PodBuilder::new(); +/// pod_builder.metadata(ObjectMetaBuilder::default().build()); +/// +/// if logging.enable_vector_agent { +/// pod_builder.add_container(logging::framework::vector_container( +/// &resolved_product_image, +/// "config", +/// "log", +/// logging.containers.get(&Container::Vector), +/// )); +/// } +/// +/// pod_builder.build().unwrap(); +/// ``` pub fn vector_container( image: &ResolvedProductImage, config_volume_name: &str, diff --git a/src/logging/spec.rs b/src/logging/spec.rs index 44de41e12..23ce033b2 100644 --- a/src/logging/spec.rs +++ b/src/logging/spec.rs @@ -12,6 +12,39 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; /// Logging configuration +/// +/// The type parameter `T` should be an enum listing all containers: +/// +/// ``` +/// use serde::{Deserialize, Serialize}; +/// use stackable_operator::{ +/// logging, +/// schemars::JsonSchema, +/// }; +/// use strum::{Display, EnumIter}; +/// +/// #[derive( +/// Clone, +/// Debug, +/// Deserialize, +/// Display, +/// Eq, +/// EnumIter, +/// JsonSchema, +/// Ord, +/// PartialEq, +/// PartialOrd, +/// Serialize, +/// )] +/// #[serde(rename_all = "camelCase")] +/// pub enum Container { +/// Init, +/// Product, +/// Vector, +/// } +/// +/// let logging = logging::spec::default_logging::(); +/// ``` #[derive(Clone, Debug, Derivative, Eq, Fragment, JsonSchema, PartialEq)] #[derivative(Default(bound = ""))] #[fragment(path_overrides(fragment = "crate::config::fragment"))] From 88bbc78a66970299fb254f2bd4bc3999f9607fca Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Mon, 12 Dec 2022 13:05:11 +0100 Subject: [PATCH 13/22] Move modules framework and spec into the module product_logging; Expose console conversion pattern as parameter --- src/lib.rs | 1 + src/logging/mod.rs | 2 - src/{logging => product_logging}/framework.rs | 50 +++++++++++-------- src/product_logging/mod.rs | 4 ++ src/{logging => product_logging}/spec.rs | 4 +- 5 files changed, 35 insertions(+), 26 deletions(-) rename src/{logging => product_logging}/framework.rs (92%) create mode 100644 src/product_logging/mod.rs rename src/{logging => product_logging}/spec.rs (99%) diff --git a/src/lib.rs b/src/lib.rs index 2ade4e604..03d2a53a5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -14,6 +14,7 @@ pub mod memory; pub mod namespace; pub mod pod_utils; pub mod product_config_utils; +pub mod product_logging; pub mod role_utils; pub mod utils; pub mod validation; diff --git a/src/logging/mod.rs b/src/logging/mod.rs index 3db01edc9..99f197f7b 100644 --- a/src/logging/mod.rs +++ b/src/logging/mod.rs @@ -2,9 +2,7 @@ use tracing; use tracing_subscriber::{layer::SubscriberExt, 
util::SubscriberInitExt, EnvFilter, Registry}; pub mod controller; -pub mod framework; mod k8s_events; -pub mod spec; #[derive(Debug, Clone, clap::ValueEnum, PartialEq, Eq)] pub enum TracingTarget { diff --git a/src/logging/framework.rs b/src/product_logging/framework.rs similarity index 92% rename from src/logging/framework.rs rename to src/product_logging/framework.rs index 9e475c3f0..1bd754bf1 100644 --- a/src/logging/framework.rs +++ b/src/product_logging/framework.rs @@ -28,12 +28,12 @@ pub const VECTOR_CONFIG_FILE: &str = "vector.toml"; /// use stackable_operator::{ /// builder::ContainerBuilder, /// config::fragment, -/// logging, -/// logging::spec::{ +/// product_logging, +/// product_logging::spec::{ /// ContainerLogConfig, ContainerLogConfigChoice, Logging, /// }, /// }; -/// # use stackable_operator::logging::spec::default_logging; +/// # use stackable_operator::product_logging::spec::default_logging; /// # use strum::{Display, EnumIter}; /// # /// # #[derive(Clone, Display, Eq, EnumIter, Ord, PartialEq, PartialOrd)] @@ -51,7 +51,7 @@ pub const VECTOR_CONFIG_FILE: &str = "vector.toml"; /// choice: Some(ContainerLogConfigChoice::Automatic(log_config)), /// }) = logging.containers.get(&Container::Init) /// { -/// args.push(logging::framework::capture_shell_output( +/// args.push(product_logging::framework::capture_shell_output( /// STACKABLE_LOG_DIR, /// "init", /// &log_config, @@ -134,12 +134,12 @@ pub fn capture_shell_output( /// meta::ObjectMetaBuilder, /// }, /// config::fragment, -/// logging, -/// logging::spec::{ +/// product_logging, +/// product_logging::spec::{ /// ContainerLogConfig, ContainerLogConfigChoice, Logging, /// }, /// }; -/// # use stackable_operator::logging::spec::default_logging; +/// # use stackable_operator::product_logging::spec::default_logging; /// # use strum::{Display, EnumIter}; /// # /// # #[derive(Clone, Display, Eq, EnumIter, Ord, PartialEq, PartialOrd)] @@ -153,6 +153,7 @@ pub fn capture_shell_output( /// const LOG4J_CONFIG_FILE: &str = "log4j.properties"; /// const MY_PRODUCT_LOG_FILE: &str = "my-product.log4j.xml"; /// const MAX_LOG_FILE_SIZE_IN_MB: i32 = 1000; +/// const CONSOLE_CONVERSION_PATTERN: &str = "%d{ISO8601} %-5p %m%n"; /// /// let mut cm_builder = ConfigMapBuilder::new(); /// cm_builder.metadata(ObjectMetaBuilder::default().build()); @@ -163,10 +164,11 @@ pub fn capture_shell_output( /// { /// cm_builder.add_data( /// LOG4J_CONFIG_FILE, -/// logging::framework::create_log4j_config( +/// product_logging::framework::create_log4j_config( /// &format!("{STACKABLE_LOG_DIR}/my-product"), /// MY_PRODUCT_LOG_FILE, /// MAX_LOG_FILE_SIZE_IN_MB, +/// CONSOLE_CONVERSION_PATTERN, /// log_config, /// ), /// ); @@ -178,6 +180,7 @@ pub fn create_log4j_config( log_dir: &str, log_file: &str, max_size_in_mb: i32, + console_conversion_pattern: &str, config: &AutomaticContainerLogConfig, ) -> String { let number_of_archived_log_files = 1; @@ -201,7 +204,7 @@ pub fn create_log4j_config( log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender log4j.appender.CONSOLE.Threshold={console_log_level} log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout -log4j.appender.CONSOLE.layout.ConversionPattern=%d{{ISO8601}} [myid:%X{{myid}}] - %-5p [%t:%C{{1}}@%L] - %m%n +log4j.appender.CONSOLE.layout.ConversionPattern={console_conversion_pattern} log4j.appender.FILE=org.apache.log4j.RollingFileAppender log4j.appender.FILE.Threshold={file_log_level} @@ -238,14 +241,14 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout /// ConfigMapBuilder, /// 
meta::ObjectMetaBuilder, /// }, -/// logging, -/// logging::spec::{ +/// product_logging, +/// product_logging::spec::{ /// ContainerLogConfig, ContainerLogConfigChoice, Logging, /// }, /// }; /// # use stackable_operator::{ /// # config::fragment, -/// # logging::spec::default_logging, +/// # product_logging::spec::default_logging, /// # }; /// # use strum::{Display, EnumIter}; /// # @@ -260,6 +263,7 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout /// const LOGBACK_CONFIG_FILE: &str = "logback.xml"; /// const MY_PRODUCT_LOG_FILE: &str = "my-product.log4j.xml"; /// const MAX_LOG_FILE_SIZE_IN_MB: i32 = 1000; +/// const CONSOLE_CONVERSION_PATTERN: &str = "%d{ISO8601} %-5p %m%n"; /// /// let mut cm_builder = ConfigMapBuilder::new(); /// cm_builder.metadata(ObjectMetaBuilder::default().build()); @@ -270,10 +274,11 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout /// { /// cm_builder.add_data( /// LOGBACK_CONFIG_FILE, -/// logging::framework::create_logback_config( +/// product_logging::framework::create_logback_config( /// &format!("{STACKABLE_LOG_DIR}/my-product"), /// MY_PRODUCT_LOG_FILE, /// MAX_LOG_FILE_SIZE_IN_MB, +/// CONSOLE_CONVERSION_PATTERN, /// log_config, /// ), /// ); @@ -285,6 +290,7 @@ pub fn create_logback_config( log_dir: &str, log_file: &str, max_size_in_mb: i32, + console_conversion_pattern: &str, config: &AutomaticContainerLogConfig, ) -> String { let number_of_archived_log_files = 1; @@ -306,7 +312,7 @@ pub fn create_logback_config( r#" - %d{{ISO8601}} [myid:%X{{myid}}] - %-5p [%t:%C{{1}}@%L] - %m%n + {console_conversion_pattern} {console_log_level} @@ -365,14 +371,14 @@ pub fn create_logback_config( /// ConfigMapBuilder, /// meta::ObjectMetaBuilder, /// }, -/// logging, -/// logging::spec::{ +/// product_logging, +/// product_logging::spec::{ /// ContainerLogConfig, ContainerLogConfigChoice, Logging, /// }, /// }; /// # use stackable_operator::{ /// # config::fragment, -/// # logging::spec::default_logging, +/// # product_logging::spec::default_logging, /// # }; /// # use strum::{Display, EnumIter}; /// # @@ -398,8 +404,8 @@ pub fn create_logback_config( /// /// if logging.enable_vector_agent { /// cm_builder.add_data( -/// logging::framework::VECTOR_CONFIG_FILE, -/// logging::framework::create_vector_config( +/// product_logging::framework::VECTOR_CONFIG_FILE, +/// product_logging::framework::create_vector_config( /// vector_aggregator_address, /// vector_log_config, /// ), @@ -524,12 +530,12 @@ address = "{vector_aggregator_address}" /// meta::ObjectMetaBuilder, /// PodBuilder, /// }, -/// logging, +/// product_logging, /// }; /// # use stackable_operator::{ /// # commons::product_image_selection::ResolvedProductImage, /// # config::fragment, -/// # logging::spec::{default_logging, Logging}, +/// # product_logging::spec::{default_logging, Logging}, /// # }; /// # use strum::{Display, EnumIter}; /// # @@ -552,7 +558,7 @@ address = "{vector_aggregator_address}" /// pod_builder.metadata(ObjectMetaBuilder::default().build()); /// /// if logging.enable_vector_agent { -/// pod_builder.add_container(logging::framework::vector_container( +/// pod_builder.add_container(product_logging::framework::vector_container( /// &resolved_product_image, /// "config", /// "log", diff --git a/src/product_logging/mod.rs b/src/product_logging/mod.rs new file mode 100644 index 000000000..12c9991f1 --- /dev/null +++ b/src/product_logging/mod.rs @@ -0,0 +1,4 @@ +//! 
Modules for product logging + +pub mod framework; +pub mod spec; diff --git a/src/logging/spec.rs b/src/product_logging/spec.rs similarity index 99% rename from src/logging/spec.rs rename to src/product_logging/spec.rs index 23ce033b2..4e76ad1fa 100644 --- a/src/logging/spec.rs +++ b/src/product_logging/spec.rs @@ -18,7 +18,7 @@ use serde::{Deserialize, Serialize}; /// ``` /// use serde::{Deserialize, Serialize}; /// use stackable_operator::{ -/// logging, +/// product_logging, /// schemars::JsonSchema, /// }; /// use strum::{Display, EnumIter}; @@ -43,7 +43,7 @@ use serde::{Deserialize, Serialize}; /// Vector, /// } /// -/// let logging = logging::spec::default_logging::(); +/// let logging = product_logging::spec::default_logging::(); /// ``` #[derive(Clone, Debug, Derivative, Eq, Fragment, JsonSchema, PartialEq)] #[derivative(Default(bound = ""))] From 56bc7b23aa9162cd498b4c2301c5750e07bbca18 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Mon, 12 Dec 2022 16:48:10 +0100 Subject: [PATCH 14/22] Map log level FATAL to ERROR in logback --- src/product_logging/spec.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/product_logging/spec.rs b/src/product_logging/spec.rs index 4e76ad1fa..203f166f0 100644 --- a/src/product_logging/spec.rs +++ b/src/product_logging/spec.rs @@ -339,7 +339,7 @@ impl LogLevel { LogLevel::INFO => "INFO", LogLevel::WARN => "WARN", LogLevel::ERROR => "ERROR", - LogLevel::FATAL => "FATAL", + LogLevel::FATAL => "ERROR", LogLevel::NONE => "OFF", } .into() From 1ecb6775ff28154b330da3dbb5ab026b2561d574 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Mon, 12 Dec 2022 17:03:49 +0100 Subject: [PATCH 15/22] Fix Vector log levels --- src/product_logging/spec.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/product_logging/spec.rs b/src/product_logging/spec.rs index 203f166f0..61ad743c2 100644 --- a/src/product_logging/spec.rs +++ b/src/product_logging/spec.rs @@ -320,13 +320,13 @@ impl LogLevel { /// Convert the log level to a string understood by Vector pub fn to_vector_literal(&self) -> String { match self { - LogLevel::TRACE => "TRACE", - LogLevel::DEBUG => "DEBUG", - LogLevel::INFO => "INFO", - LogLevel::WARN => "WARN", - LogLevel::ERROR => "ERROR", - LogLevel::FATAL => "ERROR", - LogLevel::NONE => "ERROR", + LogLevel::TRACE => "trace", + LogLevel::DEBUG => "debug", + LogLevel::INFO => "info", + LogLevel::WARN => "warn", + LogLevel::ERROR => "error", + LogLevel::FATAL => "error", + LogLevel::NONE => "off", } .into() } From bdb3930ace66ffd857197e5f9f20299582066584 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Tue, 13 Dec 2022 10:55:01 +0100 Subject: [PATCH 16/22] Fix log file size if less than 2 MB are requested --- src/product_logging/framework.rs | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/src/product_logging/framework.rs b/src/product_logging/framework.rs index 1bd754bf1..f33575579 100644 --- a/src/product_logging/framework.rs +++ b/src/product_logging/framework.rs @@ -125,6 +125,16 @@ pub fn capture_shell_output( /// Create the content of a log4j properties file according to the given log configuration /// +/// # Arguments +/// +/// * `log_dir` - Directory where the log files are stored +/// * `log_file` - Name of the active log file; When the file is rolled over then a number is +/// appended. +/// * `max_size_in_mb` - Maximum size of all log files in MB; This values can be slightly exceeded. 
+/// The value is set to 2 if the given value is lower. +/// * `console_conversion_pattern` - Logback conversion pattern for the console appender +/// * `config` - The logging configuration for the container +/// /// # Example /// /// ``` @@ -152,7 +162,7 @@ pub fn capture_shell_output( /// const STACKABLE_LOG_DIR: &str = "/stackable/log"; /// const LOG4J_CONFIG_FILE: &str = "log4j.properties"; /// const MY_PRODUCT_LOG_FILE: &str = "my-product.log4j.xml"; -/// const MAX_LOG_FILE_SIZE_IN_MB: i32 = 1000; +/// const MAX_LOG_FILE_SIZE_IN_MB: u32 = 10; /// const CONSOLE_CONVERSION_PATTERN: &str = "%d{ISO8601} %-5p %m%n"; /// /// let mut cm_builder = ConfigMapBuilder::new(); @@ -179,7 +189,7 @@ pub fn capture_shell_output( pub fn create_log4j_config( log_dir: &str, log_file: &str, - max_size_in_mb: i32, + max_size_in_mb: u32, console_conversion_pattern: &str, config: &AutomaticContainerLogConfig, ) -> String { @@ -214,7 +224,7 @@ log4j.appender.FILE.MaxBackupIndex={number_of_archived_log_files} log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout {loggers}"#, - max_log_file_size_in_mb = max_size_in_mb / (1 + number_of_archived_log_files), + max_log_file_size_in_mb = cmp::max(1, max_size_in_mb / (1 + number_of_archived_log_files)), root_log_level = config.root_log_level().to_log4j_literal(), console_log_level = config .console @@ -233,6 +243,16 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout /// Create the content of a logback XML configuration file according to the given log configuration /// +/// # Arguments +/// +/// * `log_dir` - Directory where the log files are stored +/// * `log_file` - Name of the active log file; When the file is rolled over then a number is +/// appended. +/// * `max_size_in_mb` - Maximum size of all log files in MB; This values can be slightly exceeded. +/// The value is set to 2 if the given value is lower. 
+/// * `console_conversion_pattern` - Logback conversion pattern for the console appender +/// * `config` - The logging configuration for the container +/// /// # Example /// /// ``` @@ -262,7 +282,7 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout /// const STACKABLE_LOG_DIR: &str = "/stackable/log"; /// const LOGBACK_CONFIG_FILE: &str = "logback.xml"; /// const MY_PRODUCT_LOG_FILE: &str = "my-product.log4j.xml"; -/// const MAX_LOG_FILE_SIZE_IN_MB: i32 = 1000; +/// const MAX_LOG_FILE_SIZE_IN_MB: u32 = 10; /// const CONSOLE_CONVERSION_PATTERN: &str = "%d{ISO8601} %-5p %m%n"; /// /// let mut cm_builder = ConfigMapBuilder::new(); @@ -289,7 +309,7 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout pub fn create_logback_config( log_dir: &str, log_file: &str, - max_size_in_mb: i32, + max_size_in_mb: u32, console_conversion_pattern: &str, config: &AutomaticContainerLogConfig, ) -> String { @@ -344,7 +364,7 @@ pub fn create_logback_config( "#, - max_log_file_size_in_mb = max_size_in_mb / (1 + number_of_archived_log_files), + max_log_file_size_in_mb = cmp::max(1, max_size_in_mb / (1 + number_of_archived_log_files)), root_log_level = config.root_log_level().to_logback_literal(), console_log_level = config .console From b22897a0e6d72c0ea8a56ef80b35a7b094223eef Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Wed, 14 Dec 2022 17:07:29 +0100 Subject: [PATCH 17/22] Change log file size parameter from MB to MiB --- src/product_logging/framework.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/product_logging/framework.rs b/src/product_logging/framework.rs index f33575579..66bdd746f 100644 --- a/src/product_logging/framework.rs +++ b/src/product_logging/framework.rs @@ -130,7 +130,7 @@ pub fn capture_shell_output( /// * `log_dir` - Directory where the log files are stored /// * `log_file` - Name of the active log file; When the file is rolled over then a number is /// appended. -/// * `max_size_in_mb` - Maximum size of all log files in MB; This values can be slightly exceeded. +/// * `max_size_in_mib` - Maximum size of all log files in MiB; This value can be slightly exceeded. /// The value is set to 2 if the given value is lower. /// * `console_conversion_pattern` - Logback conversion pattern for the console appender /// * `config` - The logging configuration for the container @@ -162,7 +162,7 @@ pub fn capture_shell_output( /// const STACKABLE_LOG_DIR: &str = "/stackable/log"; /// const LOG4J_CONFIG_FILE: &str = "log4j.properties"; /// const MY_PRODUCT_LOG_FILE: &str = "my-product.log4j.xml"; -/// const MAX_LOG_FILE_SIZE_IN_MB: u32 = 10; +/// const MAX_LOG_FILE_SIZE_IN_MIB: u32 = 10; /// const CONSOLE_CONVERSION_PATTERN: &str = "%d{ISO8601} %-5p %m%n"; /// /// let mut cm_builder = ConfigMapBuilder::new(); @@ -177,7 +177,7 @@ pub fn capture_shell_output( /// product_logging::framework::create_log4j_config( /// &format!("{STACKABLE_LOG_DIR}/my-product"), /// MY_PRODUCT_LOG_FILE, -/// MAX_LOG_FILE_SIZE_IN_MB, +/// MAX_LOG_FILE_SIZE_IN_MIB, /// CONSOLE_CONVERSION_PATTERN, /// log_config, /// ), @@ -248,7 +248,7 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout /// * `log_dir` - Directory where the log files are stored /// * `log_file` - Name of the active log file; When the file is rolled over then a number is /// appended. -/// * `max_size_in_mb` - Maximum size of all log files in MB; This values can be slightly exceeded. +/// * `max_size_in_mib` - Maximum size of all log files in MiB; This value can be slightly exceeded. 
/// The value is set to 2 if the given value is lower. /// * `console_conversion_pattern` - Logback conversion pattern for the console appender /// * `config` - The logging configuration for the container @@ -282,7 +282,7 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout /// const STACKABLE_LOG_DIR: &str = "/stackable/log"; /// const LOGBACK_CONFIG_FILE: &str = "logback.xml"; /// const MY_PRODUCT_LOG_FILE: &str = "my-product.log4j.xml"; -/// const MAX_LOG_FILE_SIZE_IN_MB: u32 = 10; +/// const MAX_LOG_FILE_SIZE_IN_MIB: u32 = 10; /// const CONSOLE_CONVERSION_PATTERN: &str = "%d{ISO8601} %-5p %m%n"; /// /// let mut cm_builder = ConfigMapBuilder::new(); @@ -297,7 +297,7 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout /// product_logging::framework::create_logback_config( /// &format!("{STACKABLE_LOG_DIR}/my-product"), /// MY_PRODUCT_LOG_FILE, -/// MAX_LOG_FILE_SIZE_IN_MB, +/// MAX_LOG_FILE_SIZE_IN_MIB, /// CONSOLE_CONVERSION_PATTERN, /// log_config, /// ), From 76ba35cc831d6189a635892f6271e3a43edd14a4 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Thu, 15 Dec 2022 08:52:23 +0100 Subject: [PATCH 18/22] Extend logs with namespace, cluster, role, and role group --- src/product_logging/framework.rs | 50 +++++++++++++++++++++++++------- src/product_logging/spec.rs | 8 +++-- 2 files changed, 44 insertions(+), 14 deletions(-) diff --git a/src/product_logging/framework.rs b/src/product_logging/framework.rs index 66bdd746f..c4d9018d1 100644 --- a/src/product_logging/framework.rs +++ b/src/product_logging/framework.rs @@ -2,9 +2,10 @@ use std::cmp; -use k8s_openapi::api::core::v1::Container; - -use crate::{builder::ContainerBuilder, commons::product_image_selection::ResolvedProductImage}; +use crate::{ + builder::ContainerBuilder, commons::product_image_selection::ResolvedProductImage, + k8s_openapi::api::core::v1::Container, kube::Resource, role_utils::RoleGroupRef, +}; use super::spec::{ AutomaticContainerLogConfig, ContainerLogConfig, ContainerLogConfigChoice, LogLevel, @@ -398,7 +399,10 @@ pub fn create_logback_config( /// }; /// # use stackable_operator::{ /// # config::fragment, +/// # k8s_openapi::api::core::v1::Pod, +/// # kube::runtime::reflector::ObjectRef, /// # product_logging::spec::default_logging, +/// # role_utils::RoleGroupRef, /// # }; /// # use strum::{Display, EnumIter}; /// # @@ -409,6 +413,11 @@ pub fn create_logback_config( /// # /// # let logging = fragment::validate::>(default_logging()).unwrap(); /// # let vector_aggregator_address = "vector-aggregator:6000"; +/// # let role_group = RoleGroupRef { +/// # cluster: ObjectRef::::new("test-cluster"), +/// # role: "role".into(), +/// # role_group: "role-group".into(), +/// # }; /// /// let mut cm_builder = ConfigMapBuilder::new(); /// cm_builder.metadata(ObjectMetaBuilder::default().build()); @@ -426,6 +435,7 @@ pub fn create_logback_config( /// cm_builder.add_data( /// product_logging::framework::VECTOR_CONFIG_FILE, /// product_logging::framework::create_vector_config( +/// &role_group, /// vector_aggregator_address, /// vector_log_config, /// ), @@ -434,10 +444,14 @@ pub fn create_logback_config( /// /// cm_builder.build().unwrap(); /// ``` -pub fn create_vector_config( +pub fn create_vector_config( + role_group: &RoleGroupRef, vector_aggregator_address: &str, config: Option<&AutomaticContainerLogConfig>, -) -> String { +) -> String +where + T: Resource, +{ let vector_log_level = config .and_then(|config| config.file.as_ref()) .and_then(|file| file.level) @@ -508,6 +522,14 @@ parsed_event = 
parse_xml!(wrapped_xml_event).root.event .message = parsed_event.message ''' +[transforms.extended_logs_files] +inputs = ["processed_files_*"] +type = "remap" +source = ''' +. |= parse_regex!(.file, r'^{STACKABLE_LOG_DIR}/(?P.*?)/(?P.*?)$') +del(.source_type) +''' + [transforms.filtered_logs_vector] inputs = ["vector"] type = "filter" @@ -526,19 +548,25 @@ del(.pid) del(.source_type) ''' -[transforms.extended_logs_files] -inputs = ["processed_files_*"] +[transforms.extended_logs] +inputs = ["extended_logs_*"] type = "remap" source = ''' -. |= parse_regex!(.file, r'^{STACKABLE_LOG_DIR}/(?P.*?)/(?P.*?)$') -del(.source_type) +.namespace = "{namespace}" +.cluster = "{cluster_name}" +.role = "{role_name}" +.roleGroup = "{role_group_name}" ''' [sinks.aggregator] -inputs = ["extended_logs_*"] +inputs = ["extended_logs"] type = "vector" address = "{vector_aggregator_address}" -"# +"#, + namespace = role_group.cluster.namespace.clone().unwrap_or_default(), + cluster_name = role_group.cluster.name, + role_name = role_group.role, + role_group_name = role_group.role_group ) } diff --git a/src/product_logging/spec.rs b/src/product_logging/spec.rs index 61ad743c2..652479b0b 100644 --- a/src/product_logging/spec.rs +++ b/src/product_logging/spec.rs @@ -3,9 +3,11 @@ use std::collections::BTreeMap; use std::fmt::Display; -use crate::config::fragment::{self, FromFragment}; -use crate::config::merge::Atomic; -use crate::config::{fragment::Fragment, merge::Merge}; +use crate::config::{ + fragment::{self, Fragment, FromFragment}, + merge::Atomic, + merge::Merge, +}; use derivative::Derivative; use schemars::JsonSchema; From a7872527f54ec4f180d34b131e00b2085232b55e Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Thu, 15 Dec 2022 17:19:07 +0100 Subject: [PATCH 19/22] Extend the documentation for max_size_in_mib --- src/product_logging/framework.rs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/product_logging/framework.rs b/src/product_logging/framework.rs index c4d9018d1..195a76ba3 100644 --- a/src/product_logging/framework.rs +++ b/src/product_logging/framework.rs @@ -131,8 +131,9 @@ pub fn capture_shell_output( /// * `log_dir` - Directory where the log files are stored /// * `log_file` - Name of the active log file; When the file is rolled over then a number is /// appended. -/// * `max_size_in_mib` - Maximum size of all log files in MiB; This value can be slightly exceeded. -/// The value is set to 2 if the given value is lower. +/// * `max_size_in_mib` - Maximum size of all log files in MiB; This value can be slightly +/// exceeded. The value is set to 2 if the given value is lower (1 MiB for the active log +/// file and 1 MiB for the archived one). /// * `console_conversion_pattern` - Logback conversion pattern for the console appender /// * `config` - The logging configuration for the container /// @@ -249,8 +250,9 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout /// * `log_dir` - Directory where the log files are stored /// * `log_file` - Name of the active log file; When the file is rolled over then a number is /// appended. -/// * `max_size_in_mib` - Maximum size of all log files in MiB; This value can be slightly exceeded. -/// The value is set to 2 if the given value is lower. +/// * `max_size_in_mib` - Maximum size of all log files in MiB; This value can be slightly +/// exceeded. The value is set to 2 if the given value is lower (1 MiB for the active log +/// file and 1 MiB for the archived one). 
/// * `console_conversion_pattern` - Logback conversion pattern for the console appender /// * `config` - The logging configuration for the container /// From 0840dd9d38109be9ce3c2956d6472457dea56f24 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Fri, 16 Dec 2022 07:56:40 +0100 Subject: [PATCH 20/22] Fix RustDoc warning --- src/product_config_utils.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/product_config_utils.rs b/src/product_config_utils.rs index 1bdf0e39d..ffb404778 100644 --- a/src/product_config_utils.rs +++ b/src/product_config_utils.rs @@ -92,19 +92,16 @@ impl Configuration for Box { } /// Type to sort config properties via kind (files, env, cli), via groups and via roles. -/// HashMap>>> pub type RoleConfigByPropertyKind = HashMap>>>>; /// Type to sort config properties via kind (files, env, cli) and via groups. -/// HashMap>> pub type RoleGroupConfigByPropertyKind = HashMap>>>; /// Type to sort config properties via kind (files, env, cli), via groups and via roles. This /// is the validated output to be used in other operators. The difference to [`RoleConfigByPropertyKind`] /// is that the properties BTreeMap does not contain any options. -/// /// HashMap>>>> pub type ValidatedRoleConfigByPropertyKind = HashMap>>>; From 203f45d1e022fc21b121ecf25ab7289d7b77c7ca Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Fri, 16 Dec 2022 10:57:33 +0100 Subject: [PATCH 21/22] Rename parameters max_size_in_mb to max_size_in_mib --- src/product_logging/framework.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/product_logging/framework.rs b/src/product_logging/framework.rs index 195a76ba3..197092390 100644 --- a/src/product_logging/framework.rs +++ b/src/product_logging/framework.rs @@ -191,7 +191,7 @@ pub fn capture_shell_output( pub fn create_log4j_config( log_dir: &str, log_file: &str, - max_size_in_mb: u32, + max_size_in_mib: u32, console_conversion_pattern: &str, config: &AutomaticContainerLogConfig, ) -> String { @@ -221,12 +221,13 @@ log4j.appender.CONSOLE.layout.ConversionPattern={console_conversion_pattern} log4j.appender.FILE=org.apache.log4j.RollingFileAppender log4j.appender.FILE.Threshold={file_log_level} log4j.appender.FILE.File={log_dir}/{log_file} -log4j.appender.FILE.MaxFileSize={max_log_file_size_in_mb}MB +log4j.appender.FILE.MaxFileSize={max_log_file_size_in_mib}MB log4j.appender.FILE.MaxBackupIndex={number_of_archived_log_files} log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout {loggers}"#, - max_log_file_size_in_mb = cmp::max(1, max_size_in_mb / (1 + number_of_archived_log_files)), + max_log_file_size_in_mib = + cmp::max(1, max_size_in_mib / (1 + number_of_archived_log_files)), root_log_level = config.root_log_level().to_log4j_literal(), console_log_level = config .console @@ -312,7 +313,7 @@ log4j.appender.FILE.layout=org.apache.log4j.xml.XMLLayout pub fn create_logback_config( log_dir: &str, log_file: &str, - max_size_in_mb: u32, + max_size_in_mib: u32, console_conversion_pattern: &str, config: &AutomaticContainerLogConfig, ) -> String { @@ -356,7 +357,7 @@ pub fn create_logback_config( {log_dir}/{log_file}.%i - {max_log_file_size_in_mb}MB + {max_log_file_size_in_mib}MB @@ -367,7 +368,8 @@ pub fn create_logback_config( "#, - max_log_file_size_in_mb = cmp::max(1, max_size_in_mb / (1 + number_of_archived_log_files)), + max_log_file_size_in_mib = + cmp::max(1, max_size_in_mib / (1 + number_of_archived_log_files)), root_log_level = config.root_log_level().to_logback_literal(), console_log_level = config 
.console From a6e7d9780ee31ab57f8b096ba246b60a4ad76293 Mon Sep 17 00:00:00 2001 From: Siegfried Weber Date: Fri, 16 Dec 2022 11:41:05 +0100 Subject: [PATCH 22/22] Append stack traces to the logging messages --- src/product_logging/framework.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/product_logging/framework.rs b/src/product_logging/framework.rs index 197092390..ef7140756 100644 --- a/src/product_logging/framework.rs +++ b/src/product_logging/framework.rs @@ -523,7 +523,10 @@ parsed_event = parse_xml!(wrapped_xml_event).root.event .timestamp = to_timestamp!(to_float!(parsed_event.@timestamp) / 1000) .logger = parsed_event.@logger .level = parsed_event.@level -.message = parsed_event.message +.message = join!( + filter([parsed_event.message, parsed_event.throwable]) -> |_index, value| {{ + !is_nullish(value) + }}, "\n") ''' [transforms.extended_logs_files]