From d637eea7ead3abb602f07abedf8c4af624f231e6 Mon Sep 17 00:00:00 2001 From: estk Date: Tue, 15 Dec 2020 19:06:29 -0800 Subject: [PATCH] 1.0 Release (#154) * Expose errors as failure::Error (#138) * Issue #129: Drop XML Config support (#137) * Rename file feature (#147) * Rename to config_parsing * Add workflows for devel * Update changelog * Add an init function which takes a RawConfig (#150) * Clippy * Changelog, cleanup * json macro * qualify * typo * Remove unused dir from .gitignore * Init raw on all platforms * Remove log4rs::FormatError::XmlFeatureFlagRequired (#156) * Expand env vars in the path for File and RollingFile appenders (#155) * Reorganize config (#157) * Reorganize config * allow missing docs so tests run * lint abatement * some renames * Use anyhow/thiserror (#159) * Use anyhow/thiserror * Change addtivity to additive to match the actual code (#163) * Errors 1.0 (#160) Co-authored-by: shmapdy * clippy * Standard derives rebase (#175) * Bump serde-value * Alpha 1 version * Update highlight colors to be the same as env_logger (#167) * Custom err handler (#183) * WIP custom err handling * Compiling * Add an method to the public api * re-enable Debug * bump ver * fix: init_raw_config forcing max_log_level to Info (#200) Co-authored-by: braindead * pattern encoder: Set trace to default color, reset formatting after (#186) * pattern encoder: Set trace to default color, reset formatting after * Fix formatting of last commit * lol apparently rustfmt doesn't like commas? 
idk if this will work * Set trace color to cyan Co-authored-by: Richard M Co-authored-by: shmapdy Co-authored-by: Charles Giguere Co-authored-by: 1c7718e7 Co-authored-by: braindead Co-authored-by: Julia DeMille <8127111+judemille@users.noreply.github.com> --- .github/workflows/main.yml | 2 + .gitignore | 1 - CHANGELOG.md | 10 + Cargo.toml | 27 +- src/append/console.rs | 39 +- src/append/file.rs | 47 +- src/append/mod.rs | 156 ++++++- src/append/rolling_file/mod.rs | 73 ++-- .../rolling_file/policy/compound/mod.rs | 36 +- .../policy/compound/roll/delete.rs | 23 +- .../policy/compound/roll/fixed_window.rs | 42 +- .../rolling_file/policy/compound/roll/mod.rs | 10 +- .../policy/compound/trigger/mod.rs | 10 +- .../policy/compound/trigger/size.rs | 29 +- src/append/rolling_file/policy/mod.rs | 11 +- src/{priv_file.rs => config/file.rs} | 130 ++---- src/config/mod.rs | 86 ++++ src/{file.rs => config/raw.rs} | 224 ++-------- src/{config.rs => config/runtime.rs} | 413 +++++++++--------- src/encode/json.rs | 32 +- src/encode/mod.rs | 45 +- src/encode/pattern/mod.rs | 100 ++--- src/encode/pattern/parser.rs | 6 +- src/encode/writer/ansi.rs | 2 +- src/encode/writer/simple.rs | 2 +- src/filter/mod.rs | 18 +- src/filter/threshold.rs | 21 +- src/lib.rs | 150 +++++-- 28 files changed, 912 insertions(+), 833 deletions(-) rename src/{priv_file.rs => config/file.rs} (60%) create mode 100644 src/config/mod.rs rename src/{file.rs => config/raw.rs} (71%) rename src/{config.rs => config/runtime.rs} (80%) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 06d660a4..49da54c6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -4,9 +4,11 @@ on: push: branches: - master + - devel pull_request: branches: - master + - devel jobs: lint: diff --git a/.gitignore b/.gitignore index 8be72874..a0db182e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,5 @@ target/ Cargo.lock -!codegen/Cargo.lock .idea/ *.iml .vscode/ diff --git a/CHANGELOG.md 
b/CHANGELOG.md index b9a0b506..540902ca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,10 +2,20 @@ ## [Unreleased] + ### New +* Custom error handling +* Allow parsing of config from string +* Expand env vars in file path of file and RollingFile appenders PR#155 + ### Changed +* Colors changed to match `env_logger` +* Drop XML config support +* Rename feature `file` to `config_parsing` +* Use `thiserror`/`anyhow` for errors + ### Fixed ## [0.13.0] diff --git a/Cargo.toml b/Cargo.toml index 177a8de1..658d1246 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "log4rs" -version = "0.13.0" +version = "1.0.0-alpha-2" authors = ["Steven Fackler ", "Evan Simmons "] description = "A highly configurable multi-output logging implementation for the `log` facade" license = "MIT/Apache-2.0" @@ -10,22 +10,21 @@ keywords = ["log", "logger", "logging", "log4"] edition = '2018' [features] -default = ["all_components", "file", "yaml_format", "gzip"] +default = ["all_components", "config_parsing", "yaml_format"] -file = ["humantime", "serde", "serde_derive", "serde-value", "typemap", "log/serde"] +config_parsing = ["humantime", "serde", "serde-value", "typemap", "log/serde"] yaml_format = ["serde_yaml"] json_format = ["serde_json"] toml_format = ["toml"] -xml_format = ["serde-xml-rs"] console_appender = ["console_writer", "simple_writer", "pattern_encoder"] -file_appender = ["parking_lot", "simple_writer", "pattern_encoder"] -rolling_file_appender = ["parking_lot", "simple_writer", "pattern_encoder"] +file_appender = ["parking_lot", "simple_writer", "pattern_encoder", "regex"] +rolling_file_appender = ["parking_lot", "simple_writer", "pattern_encoder", "regex"] compound_policy = [] delete_roller = [] fixed_window_roller = [] size_trigger = [] -json_encoder = ["serde", "serde_json", "chrono", "log-mdc", "serde_derive", "log/serde", "thread-id"] +json_encoder = ["serde", "serde_json", "chrono", "log-mdc", "log/serde", "thread-id"] pattern_encoder = ["chrono", 
"log-mdc", "thread-id"] ansi_writer = [] console_writer = ["ansi_writer", "libc", "winapi"] @@ -57,19 +56,21 @@ arc-swap = "0.4" chrono = { version = "0.4", optional = true } flate2 = { version = "1.0", optional = true } fnv = "1.0" -humantime = { version = "1.0", optional = true } +humantime = { version = "2.0", optional = true } log = { version = "0.4.0", features = ["std"] } log-mdc = { version = "0.1", optional = true } -serde = { version = "1.0", optional = true } -serde_derive = { version = "1.0", optional = true } -serde-value = { version = "0.6", optional = true } +serde = { version = "1.0", optional = true, features = ["derive"] } +serde-value = { version = "0.7", optional = true } thread-id = { version = "3.3", optional = true } typemap = { version = "0.3", optional = true } serde_json = { version = "1.0", optional = true } serde_yaml = { version = "0.8.4", optional = true } toml = { version = "0.5", optional = true } -serde-xml-rs = { version = "0.4", optional = true } parking_lot = { version = "0.11.0", optional = true } +regex = { version = "1", optional = true } +thiserror = "1.0.15" +anyhow = "1.0.28" +derivative = "2.1.1" [target.'cfg(windows)'.dependencies] winapi = { version = "0.3", optional = true, features = ["handleapi", "minwindef", "processenv", "winbase", "wincon"] } @@ -80,7 +81,7 @@ libc = { version = "0.2", optional = true } [dev-dependencies] lazy_static = "1.4" streaming-stats = "0.2.3" -humantime = "1.0.0" +humantime = "2.0" tempfile = "3.1.0" [[example]] diff --git a/src/append/console.rs b/src/append/console.rs index 2292e9b7..3877ce33 100644 --- a/src/append/console.rs +++ b/src/append/console.rs @@ -2,19 +2,17 @@ //! //! Requires the `console_appender` feature. 
+use derivative::Derivative; use log::Record; -#[cfg(feature = "file")] -use serde_derive::Deserialize; use std::{ - error::Error, fmt, io::{self, Write}, }; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; +#[cfg(feature = "config_parsing")] use crate::encode::EncoderConfig; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; use crate::{ append::Append, encode::{ @@ -30,16 +28,16 @@ use crate::{ }; /// The console appender's configuration. -#[cfg(feature = "file")] -#[derive(Deserialize)] +#[cfg(feature = "config_parsing")] #[serde(deny_unknown_fields)] +#[derive(Debug, serde::Deserialize)] pub struct ConsoleAppenderConfig { target: Option, encoder: Option, } -#[cfg(feature = "file")] -#[derive(Deserialize)] +#[cfg(feature = "config_parsing")] +#[derive(Debug, serde::Deserialize)] enum ConfigTarget { #[serde(rename = "stdout")] Stdout, @@ -109,21 +107,16 @@ impl<'a> encode::Write for WriterLock<'a> { /// /// It supports output styling if standard out is a console buffer on Windows /// or is a TTY on Unix. +#[derive(Derivative)] +#[derivative(Debug)] pub struct ConsoleAppender { + #[derivative(Debug = "ignore")] writer: Writer, encoder: Box, } -impl fmt::Debug for ConsoleAppender { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("ConsoleAppender") - .field("encoder", &self.encoder) - .finish() - } -} - impl Append for ConsoleAppender { - fn append(&self, record: &Record) -> Result<(), Box> { + fn append(&self, record: &Record) -> anyhow::Result<()> { let mut writer = self.writer.lock(); self.encoder.encode(&mut writer, record)?; writer.flush()?; @@ -187,6 +180,7 @@ impl ConsoleAppenderBuilder { } /// The stream to log to. +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub enum Target { /// Standard output. 
Stdout, @@ -208,10 +202,11 @@ pub enum Target { /// encoder: /// kind: pattern /// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct ConsoleAppenderDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for ConsoleAppenderDeserializer { type Trait = dyn Append; @@ -221,7 +216,7 @@ impl Deserialize for ConsoleAppenderDeserializer { &self, config: ConsoleAppenderConfig, deserializers: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { let mut appender = ConsoleAppender::builder(); if let Some(target) = config.target { let target = match target { diff --git a/src/append/file.rs b/src/append/file.rs index 6ceae436..2021aa33 100644 --- a/src/append/file.rs +++ b/src/append/file.rs @@ -2,31 +2,29 @@ //! //! Requires the `file_appender` feature. +use derivative::Derivative; use log::Record; use parking_lot::Mutex; -#[cfg(feature = "file")] -use serde_derive::Deserialize; use std::{ - error::Error, - fmt, fs::{self, File, OpenOptions}, io::{self, BufWriter, Write}, path::{Path, PathBuf}, }; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; +#[cfg(feature = "config_parsing")] use crate::encode::EncoderConfig; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; + use crate::{ append::Append, encode::{pattern::PatternEncoder, writer::simple::SimpleWriter, Encode}, }; /// The file appender's configuration. -#[cfg(feature = "file")] -#[derive(Deserialize)] +#[cfg(feature = "config_parsing")] #[serde(deny_unknown_fields)] +#[derive(Clone, Eq, PartialEq, Hash, Debug, Default, serde::Deserialize)] pub struct FileAppenderConfig { path: String, encoder: Option, @@ -34,23 +32,17 @@ pub struct FileAppenderConfig { } /// An appender which logs to a file. 
+#[derive(Derivative)] +#[derivative(Debug)] pub struct FileAppender { path: PathBuf, + #[derivative(Debug = "ignore")] file: Mutex>>, encoder: Box, } -impl fmt::Debug for FileAppender { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("FileAppender") - .field("file", &self.path) - .field("encoder", &self.encoder) - .finish() - } -} - impl Append for FileAppender { - fn append(&self, record: &Record) -> Result<(), Box> { + fn append(&self, record: &Record) -> anyhow::Result<()> { let mut file = self.file.lock(); self.encoder.encode(&mut *file, record)?; file.flush()?; @@ -92,8 +84,12 @@ impl FileAppenderBuilder { } /// Consumes the `FileAppenderBuilder`, producing a `FileAppender`. + /// The path argument can contain environment variables of the form $ENV{name_here}, + /// where 'name_here' will be the name of the environment variable that + /// will be resolved. Note that if the variable fails to resolve, + /// $ENV{name_here} will NOT be replaced in the path. pub fn build>(self, path: P) -> io::Result { - let path = path.as_ref().to_owned(); + let path = super::env_util::expand_env_vars(path.as_ref().to_path_buf()); if let Some(parent) = path.parent() { fs::create_dir_all(parent)?; } @@ -122,6 +118,10 @@ impl FileAppenderBuilder { /// kind: file /// /// # The path of the log file. Required. +/// # The path can contain environment variables of the form $ENV{name_here}, +/// # where 'name_here' will be the name of the environment variable that +/// # will be resolved. Note that if the variable fails to resolve, +/// # $ENV{name_here} will NOT be replaced in the path. 
/// path: log/foo.log /// /// # Specifies if the appender should append to or truncate the log file if it @@ -132,10 +132,11 @@ impl FileAppenderBuilder { /// encoder: /// kind: pattern /// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct FileAppenderDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for FileAppenderDeserializer { type Trait = dyn Append; @@ -145,7 +146,7 @@ impl Deserialize for FileAppenderDeserializer { &self, config: FileAppenderConfig, deserializers: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { let mut appender = FileAppender::builder(); if let Some(append) = config.append { appender = appender.append(append); diff --git a/src/append/mod.rs b/src/append/mod.rs index a5207c41..eafa6cc0 100644 --- a/src/append/mod.rs +++ b/src/append/mod.rs @@ -1,17 +1,17 @@ //! Appenders use log::{Log, Record}; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use serde::{de, Deserialize, Deserializer}; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use serde_value::Value; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use std::collections::BTreeMap; -use std::{error::Error, fmt}; +use std::fmt; -#[cfg(feature = "file")] -use crate::file::Deserializable; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +use crate::config::Deserializable; +#[cfg(feature = "config_parsing")] use crate::filter::FilterConfig; #[cfg(feature = "console_appender")] @@ -21,19 +21,34 @@ pub mod file; #[cfg(feature = "rolling_file_appender")] pub mod rolling_file; +#[cfg(any(feature = "file_appender", feature = "rolling_file_appender"))] +mod env_util { + #[allow(clippy::redundant_clone)] + pub fn expand_env_vars(path: std::path::PathBuf) -> std::path::PathBuf { + let mut path: String = path.to_string_lossy().into(); + let matcher = 
regex::Regex::new(r#"\$ENV\{([\w][\w|\d|\.|_]*)\}"#).unwrap(); + matcher.captures_iter(&path.clone()).for_each(|c| { + if let Ok(s) = std::env::var(&c[1]) { + path = path.replace(&c[0], &s); + } + }); + path.into() + } +} + /// A trait implemented by log4rs appenders. /// /// Appenders take a log record and processes them, for example, by writing it /// to a file or the console. pub trait Append: fmt::Debug + Send + Sync + 'static { /// Processes the provided `Record`. - fn append(&self, record: &Record) -> Result<(), Box>; + fn append(&self, record: &Record) -> anyhow::Result<()>; /// Flushes all in-flight records. fn flush(&self); } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserializable for dyn Append { fn name() -> &'static str { "appender" @@ -41,7 +56,7 @@ impl Deserializable for dyn Append { } impl Append for T { - fn append(&self, record: &Record) -> Result<(), Box> { + fn append(&self, record: &Record) -> anyhow::Result<()> { self.log(record); Ok(()) } @@ -52,8 +67,8 @@ impl Append for T { } /// Configuration for an appender. -#[cfg(feature = "file")] -#[derive(PartialEq, Eq, Debug, Clone)] +#[cfg(feature = "config_parsing")] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct AppenderConfig { /// The appender kind. 
pub kind: String, @@ -63,7 +78,7 @@ pub struct AppenderConfig { pub config: Value, } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl<'de> Deserialize<'de> for AppenderConfig { fn deserialize(d: D) -> Result where @@ -88,3 +103,118 @@ impl<'de> Deserialize<'de> for AppenderConfig { }) } } + +#[cfg(test)] +mod test { + #[cfg(any(feature = "file_appender", feature = "rolling_file_appender"))] + use std::{ + env::{set_var, var}, + path::PathBuf, + }; + + #[test] + #[cfg(any(feature = "file_appender", feature = "rolling_file_appender"))] + fn expand_env_vars_tests() { + set_var("HELLO_WORLD", "GOOD BYE"); + #[cfg(not(target_os = "windows"))] + let test_cases = vec![ + ("$ENV{HOME}", PathBuf::from(var("HOME").unwrap())), + ( + "$ENV{HELLO_WORLD}", + PathBuf::from(var("HELLO_WORLD").unwrap()), + ), + ( + "$ENV{HOME}/test", + PathBuf::from(format!("{}/test", var("HOME").unwrap())), + ), + ( + "/test/$ENV{HOME}", + PathBuf::from(format!("/test/{}", var("HOME").unwrap())), + ), + ( + "/test/$ENV{HOME}/test", + PathBuf::from(format!("/test/{}/test", var("HOME").unwrap())), + ), + ( + "/test$ENV{HOME}/test", + PathBuf::from(format!("/test{}/test", var("HOME").unwrap())), + ), + ( + "test/$ENV{HOME}/test", + PathBuf::from(format!("test/{}/test", var("HOME").unwrap())), + ), + ( + "/$ENV{HOME}/test/$ENV{USER}", + PathBuf::from(format!( + "/{}/test/{}", + var("HOME").unwrap(), + var("USER").unwrap() + )), + ), + ( + "$ENV{SHOULD_NOT_EXIST}", + PathBuf::from("$ENV{SHOULD_NOT_EXIST}"), + ), + ( + "/$ENV{HOME}/test/$ENV{SHOULD_NOT_EXIST}", + PathBuf::from(format!( + "/{}/test/$ENV{{SHOULD_NOT_EXIST}}", + var("HOME").unwrap() + )), + ), + ]; + + #[cfg(target_os = "windows")] + let test_cases = vec![ + ("$ENV{HOMEPATH}", PathBuf::from(var("HOMEPATH").unwrap())), + ( + "$ENV{HELLO_WORLD}", + PathBuf::from(var("HELLO_WORLD").unwrap()), + ), + ( + "$ENV{HOMEPATH}/test", + PathBuf::from(format!("{}/test", var("HOMEPATH").unwrap())), + ), + ( + "/test/$ENV{USERNAME}", + 
PathBuf::from(format!("/test/{}", var("USERNAME").unwrap())), + ), + ( + "/test/$ENV{USERNAME}/test", + PathBuf::from(format!("/test/{}/test", var("USERNAME").unwrap())), + ), + ( + "/test$ENV{USERNAME}/test", + PathBuf::from(format!("/test{}/test", var("USERNAME").unwrap())), + ), + ( + "test/$ENV{USERNAME}/test", + PathBuf::from(format!("test/{}/test", var("USERNAME").unwrap())), + ), + ( + "$ENV{HOMEPATH}/test/$ENV{USERNAME}", + PathBuf::from(format!( + "{}/test/{}", + var("HOMEPATH").unwrap(), + var("USERNAME").unwrap() + )), + ), + ( + "$ENV{SHOULD_NOT_EXIST}", + PathBuf::from("$ENV{SHOULD_NOT_EXIST}"), + ), + ( + "$ENV{HOMEPATH}/test/$ENV{SHOULD_NOT_EXIST}", + PathBuf::from(format!( + "{}/test/$ENV{{SHOULD_NOT_EXIST}}", + var("HOMEPATH").unwrap() + )), + ), + ]; + + for (input, expected) in test_cases { + let res = super::env_util::expand_env_vars(input.into()); + assert_eq!(res, expected) + } + } +} diff --git a/src/append/rolling_file/mod.rs b/src/append/rolling_file/mod.rs index 4f252ed2..a7c6c74f 100644 --- a/src/append/rolling_file/mod.rs +++ b/src/append/rolling_file/mod.rs @@ -16,37 +16,36 @@ //! //! Requires the `rolling_file_appender` feature. 
+use derivative::Derivative; use log::Record; use parking_lot::Mutex; -#[cfg(feature = "file")] -use serde_derive::Deserialize; -#[cfg(feature = "file")] -use serde_value::Value; -#[cfg(feature = "file")] -use std::collections::BTreeMap; use std::{ - error::Error, - fmt, fs::{self, File, OpenOptions}, io::{self, BufWriter, Write}, path::{Path, PathBuf}, }; -#[cfg(feature = "file")] -use crate::encode::EncoderConfig; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; +#[cfg(feature = "config_parsing")] +use serde_value::Value; +#[cfg(feature = "config_parsing")] +use std::collections::BTreeMap; + use crate::{ append::Append, encode::{self, pattern::PatternEncoder, Encode}, }; +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; +#[cfg(feature = "config_parsing")] +use crate::encode::EncoderConfig; + pub mod policy; /// Configuration for the rolling file appender. -#[cfg(feature = "file")] -#[derive(Deserialize)] +#[cfg(feature = "config_parsing")] #[serde(deny_unknown_fields)] +#[derive(Clone, Eq, PartialEq, Hash, Debug, serde::Deserialize)] pub struct RollingFileAppenderConfig { path: String, append: Option, @@ -54,13 +53,14 @@ pub struct RollingFileAppenderConfig { policy: Policy, } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] struct Policy { kind: String, config: Value, } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl<'de> serde::Deserialize<'de> for Policy { fn deserialize(d: D) -> Result where @@ -80,6 +80,7 @@ impl<'de> serde::Deserialize<'de> for Policy { } } +#[derive(Debug)] struct LogWriter { file: BufWriter, len: u64, @@ -101,6 +102,7 @@ impl io::Write for LogWriter { impl encode::Write for LogWriter {} /// Information about the active log file. 
+#[derive(Debug)] pub struct LogFile<'a> { writer: &'a mut Option, path: &'a Path, @@ -149,7 +151,10 @@ impl<'a> LogFile<'a> { } /// An appender which archives log files in a configurable strategy. +#[derive(Derivative)] +#[derivative(Debug)] pub struct RollingFileAppender { + #[derivative(Debug = "ignore")] writer: Mutex>, path: PathBuf, append: bool, @@ -157,19 +162,8 @@ pub struct RollingFileAppender { policy: Box, } -impl fmt::Debug for RollingFileAppender { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("RollingFileAppender") - .field("path", &self.path) - .field("append", &self.append) - .field("encoder", &self.encoder) - .field("policy", &self.policy) - .finish() - } -} - impl Append for RollingFileAppender { - fn append(&self, record: &Record) -> Result<(), Box> { + fn append(&self, record: &Record) -> anyhow::Result<()> { // TODO(eas): Perhaps this is better as a concurrent queue? let mut writer = self.writer.lock(); @@ -251,6 +245,10 @@ impl RollingFileAppenderBuilder { } /// Constructs a `RollingFileAppender`. + /// The path argument can contain environment variables of the form $ENV{name_here}, + /// where 'name_here' will be the name of the environment variable that + /// will be resolved. Note that if the variable fails to resolve, + /// $ENV{name_here} will NOT be replaced in the path. pub fn build

( self, path: P, @@ -259,9 +257,10 @@ impl RollingFileAppenderBuilder { where P: AsRef, { + let path = super::env_util::expand_env_vars(path.as_ref().to_path_buf()); let appender = RollingFileAppender { writer: Mutex::new(None), - path: path.as_ref().to_owned(), + path, append: self.append, encoder: self .encoder @@ -288,6 +287,10 @@ impl RollingFileAppenderBuilder { /// kind: rolling_file /// /// # The path of the log file. Required. +/// # The path can contain environment variables of the form $ENV{name_here}, +/// # where 'name_here' will be the name of the environment variable that +/// # will be resolved. Note that if the variable fails to resolve, +/// # $ENV{name_here} will NOT be replaced in the path. /// path: log/foo.log /// /// # Specifies if the appender should append to or truncate the log file if it @@ -313,10 +316,11 @@ impl RollingFileAppenderBuilder { /// roller: /// kind: delete /// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct RollingFileAppenderDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for RollingFileAppenderDeserializer { type Trait = dyn Append; @@ -326,7 +330,7 @@ impl Deserialize for RollingFileAppenderDeserializer { &self, config: RollingFileAppenderConfig, deserializers: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { let mut builder = RollingFileAppender::builder(); if let Some(append) = config.append { builder = builder.append(append); @@ -345,7 +349,6 @@ impl Deserialize for RollingFileAppenderDeserializer { #[cfg(test)] mod test { use std::{ - error::Error, fs::File, io::{Read, Write}, }; @@ -356,7 +359,7 @@ mod test { #[test] #[cfg(feature = "yaml_format")] fn deserialize() { - use crate::file::{Deserializers, RawConfig}; + use crate::config::{Deserializers, RawConfig}; let dir = tempfile::tempdir().unwrap(); @@ -399,7 +402,7 @@ appenders: struct NopPolicy; impl Policy for 
NopPolicy { - fn process(&self, _: &mut LogFile) -> Result<(), Box> { + fn process(&self, _: &mut LogFile) -> anyhow::Result<()> { Ok(()) } } diff --git a/src/append/rolling_file/policy/compound/mod.rs b/src/append/rolling_file/policy/compound/mod.rs index 038db83d..4a689c51 100644 --- a/src/append/rolling_file/policy/compound/mod.rs +++ b/src/append/rolling_file/policy/compound/mod.rs @@ -1,42 +1,40 @@ //! The compound rolling policy. //! //! Requires the `compound_policy` feature. -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use serde::{self, de}; -#[cfg(feature = "file")] -use serde_derive::Deserialize; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use serde_value::Value; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use std::collections::BTreeMap; -use std::error::Error; use crate::append::rolling_file::{ policy::{compound::roll::Roll, Policy}, LogFile, }; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; pub mod roll; pub mod trigger; /// Configuration for the compound policy. 
-#[cfg(feature = "file")] -#[derive(Deserialize)] +#[cfg(feature = "config_parsing")] #[serde(deny_unknown_fields)] +#[derive(Clone, Eq, PartialEq, Hash, Debug, serde::Deserialize)] pub struct CompoundPolicyConfig { trigger: Trigger, roller: Roller, } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] struct Trigger { kind: String, config: Value, } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl<'de> serde::Deserialize<'de> for Trigger { fn deserialize(d: D) -> Result where @@ -56,13 +54,14 @@ impl<'de> serde::Deserialize<'de> for Trigger { } } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] struct Roller { kind: String, config: Value, } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl<'de> serde::Deserialize<'de> for Roller { fn deserialize(d: D) -> Result where @@ -101,7 +100,7 @@ impl CompoundPolicy { } impl Policy for CompoundPolicy { - fn process(&self, log: &mut LogFile) -> Result<(), Box> { + fn process(&self, log: &mut LogFile) -> anyhow::Result<()> { if self.trigger.trigger(log)? { log.roll(); self.roller.roll(log.path())?; @@ -136,10 +135,11 @@ impl Policy for CompoundPolicy { /// # The remainder of the configuration is passed to the roller's /// # deserializer, and will vary based on the kind of roller. 
/// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct CompoundPolicyDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for CompoundPolicyDeserializer { type Trait = dyn Policy; @@ -149,7 +149,7 @@ impl Deserialize for CompoundPolicyDeserializer { &self, config: CompoundPolicyConfig, deserializers: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { let trigger = deserializers.deserialize(&config.trigger.kind, config.trigger.config)?; let roller = deserializers.deserialize(&config.roller.kind, config.roller.config)?; Ok(Box::new(CompoundPolicy::new(trigger, roller))) diff --git a/src/append/rolling_file/policy/compound/roll/delete.rs b/src/append/rolling_file/policy/compound/roll/delete.rs index 92956a0c..34c4b5d9 100644 --- a/src/append/rolling_file/policy/compound/roll/delete.rs +++ b/src/append/rolling_file/policy/compound/roll/delete.rs @@ -2,29 +2,27 @@ //! //! Requires the `delete_roller` feature. -#[cfg(feature = "file")] -use serde_derive::Deserialize; -use std::{error::Error, fs, path::Path}; +use std::{fs, path::Path}; use crate::append::rolling_file::policy::compound::roll::Roll; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; /// Configuration for the delete roller. -#[cfg(feature = "file")] -#[derive(Deserialize, Clone)] +#[cfg(feature = "config_parsing")] #[serde(deny_unknown_fields)] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default, serde::Deserialize)] pub struct DeleteRollerConfig { #[serde(skip_deserializing)] _p: (), } /// A roller which deletes the log file. 
-#[derive(Debug, Default)] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct DeleteRoller(()); impl Roll for DeleteRoller { - fn roll(&self, file: &Path) -> Result<(), Box> { + fn roll(&self, file: &Path) -> anyhow::Result<()> { fs::remove_file(file).map_err(Into::into) } } @@ -43,10 +41,11 @@ impl DeleteRoller { /// ```yaml /// kind: delete /// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct DeleteRollerDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for DeleteRollerDeserializer { type Trait = dyn Roll; @@ -56,7 +55,7 @@ impl Deserialize for DeleteRollerDeserializer { &self, _: DeleteRollerConfig, _: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { Ok(Box::new(DeleteRoller::default())) } } diff --git a/src/append/rolling_file/policy/compound/roll/fixed_window.rs b/src/append/rolling_file/policy/compound/roll/fixed_window.rs index c1524c5b..fa2f3dee 100644 --- a/src/append/rolling_file/policy/compound/roll/fixed_window.rs +++ b/src/append/rolling_file/policy/compound/roll/fixed_window.rs @@ -2,33 +2,31 @@ //! //! Requires the `fixed_window_roller` feature. +use anyhow::bail; #[cfg(feature = "background_rotation")] use parking_lot::{Condvar, Mutex}; -#[cfg(feature = "file")] -use serde_derive::Deserialize; #[cfg(feature = "background_rotation")] use std::sync::Arc; use std::{ - error::Error, fs, io, path::{Path, PathBuf}, }; use crate::append::rolling_file::policy::compound::roll::Roll; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; /// Configuration for the fixed window roller. 
-#[cfg(feature = "file")] -#[derive(Deserialize)] +#[cfg(feature = "config_parsing")] #[serde(deny_unknown_fields)] +#[derive(Clone, Eq, PartialEq, Hash, Debug, Default, serde::Deserialize)] pub struct FixedWindowRollerConfig { pattern: String, base: Option, count: u32, } -#[derive(Clone, Debug)] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] enum Compression { None, #[cfg(feature = "gzip")] @@ -83,7 +81,7 @@ impl Compression { /// Note that this roller will have to rename every archived file every time the /// log rolls over. Performance may be negatively impacted by specifying a large /// count. -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct FixedWindowRoller { pattern: String, compression: Compression, @@ -102,14 +100,14 @@ impl FixedWindowRoller { impl Roll for FixedWindowRoller { #[cfg(not(feature = "background_rotation"))] - fn roll(&self, file: &Path) -> Result<(), Box> { + fn roll(&self, file: &Path) -> anyhow::Result<()> { if self.count == 0 { return fs::remove_file(file).map_err(Into::into); } rotate( self.pattern.clone(), - self.compression.clone(), + self.compression, self.base, self.count, file.to_path_buf(), @@ -119,7 +117,7 @@ impl Roll for FixedWindowRoller { } #[cfg(feature = "background_rotation")] - fn roll(&self, file: &Path) -> Result<(), Box> { + fn roll(&self, file: &Path) -> anyhow::Result<()> { if self.count == 0 { return fs::remove_file(file).map_err(Into::into); } @@ -138,7 +136,7 @@ impl Roll for FixedWindowRoller { drop(ready); let pattern = self.pattern.clone(); - let compression = self.compression.clone(); + let compression = self.compression; let base = self.base; let count = self.count; let cond_pair = self.cond_pair.clone(); @@ -235,6 +233,7 @@ fn rotate( } /// A builder for the `FixedWindowRoller`. 
+#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct FixedWindowRollerBuilder { base: u32, } @@ -257,13 +256,9 @@ impl FixedWindowRollerBuilder { /// If the file extension of the pattern is `.gz` and the `gzip` Cargo /// feature is enabled, the archive files will be gzip-compressed. /// If the extension is `.gz` and the `gzip` feature is *not* enabled, an error will be returned. - pub fn build( - self, - pattern: &str, - count: u32, - ) -> Result> { + pub fn build(self, pattern: &str, count: u32) -> anyhow::Result { if !pattern.contains("{}") { - return Err("pattern does not contain `{}`".into()); + bail!("pattern does not contain `{}`"); } let compression = match Path::new(pattern).extension() { @@ -271,7 +266,7 @@ impl FixedWindowRollerBuilder { Some(e) if e == "gz" => Compression::Gzip, #[cfg(not(feature = "gzip"))] Some(e) if e == "gz" => { - return Err("gzip compression requires the `gzip` feature".into()); + bail!("gzip compression requires the `gzip` feature"); } _ => Compression::None, }; @@ -308,10 +303,11 @@ impl FixedWindowRollerBuilder { /// # The base value for archived log indices. Defaults to 0. 
/// base: 1 /// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct FixedWindowRollerDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for FixedWindowRollerDeserializer { type Trait = dyn Roll; @@ -321,7 +317,7 @@ impl Deserialize for FixedWindowRollerDeserializer { &self, config: FixedWindowRollerConfig, _: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { let mut builder = FixedWindowRoller::builder(); if let Some(base) = config.base { builder = builder.base(base); diff --git a/src/append/rolling_file/policy/compound/roll/mod.rs b/src/append/rolling_file/policy/compound/roll/mod.rs index 82ddba46..3b53f578 100644 --- a/src/append/rolling_file/policy/compound/roll/mod.rs +++ b/src/append/rolling_file/policy/compound/roll/mod.rs @@ -1,9 +1,9 @@ //! Rollers -use std::{error::Error, fmt, path::Path}; +use std::{fmt, path::Path}; -#[cfg(feature = "file")] -use crate::file::Deserializable; +#[cfg(feature = "config_parsing")] +use crate::config::Deserializable; #[cfg(feature = "delete_roller")] pub mod delete; @@ -19,10 +19,10 @@ pub trait Roll: fmt::Debug + Send + Sync + 'static { /// /// If this method returns successfully, there *must* no longer be a file /// at the specified location. - fn roll(&self, file: &Path) -> Result<(), Box>; + fn roll(&self, file: &Path) -> anyhow::Result<()>; } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserializable for dyn Roll { fn name() -> &'static str { "roller" diff --git a/src/append/rolling_file/policy/compound/trigger/mod.rs b/src/append/rolling_file/policy/compound/trigger/mod.rs index 5cd8c227..76e67e74 100644 --- a/src/append/rolling_file/policy/compound/trigger/mod.rs +++ b/src/append/rolling_file/policy/compound/trigger/mod.rs @@ -1,10 +1,10 @@ //! 
Triggers -use std::{error::Error, fmt}; +use std::fmt; use crate::append::rolling_file::LogFile; -#[cfg(feature = "file")] -use crate::file::Deserializable; +#[cfg(feature = "config_parsing")] +use crate::config::Deserializable; #[cfg(feature = "size_trigger")] pub mod size; @@ -12,10 +12,10 @@ pub mod size; /// A trait which identifies if the active log file should be rolled over. pub trait Trigger: fmt::Debug + Send + Sync + 'static { /// Determines if the active log file should be rolled over. - fn trigger(&self, file: &LogFile) -> Result>; + fn trigger(&self, file: &LogFile) -> anyhow::Result; } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserializable for dyn Trigger { fn name() -> &'static str { "trigger" diff --git a/src/append/rolling_file/policy/compound/trigger/size.rs b/src/append/rolling_file/policy/compound/trigger/size.rs index 7f9ead81..c668549e 100644 --- a/src/append/rolling_file/policy/compound/trigger/size.rs +++ b/src/append/rolling_file/policy/compound/trigger/size.rs @@ -2,28 +2,26 @@ //! //! Requires the `size_trigger` feature. -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use serde::de; -#[cfg(feature = "file")] -use serde_derive::Deserialize; -use std::error::Error; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use std::fmt; use crate::append::rolling_file::{policy::compound::trigger::Trigger, LogFile}; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; + +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; /// Configuration for the size trigger. 
-#[cfg(feature = "file")] -#[derive(Deserialize)] +#[cfg(feature = "config_parsing")] #[serde(deny_unknown_fields)] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default, serde::Deserialize)] pub struct SizeTriggerConfig { #[serde(deserialize_with = "deserialize_limit")] limit: u64, } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] fn deserialize_limit<'de, D>(d: D) -> Result where D: de::Deserializer<'de>, @@ -102,7 +100,7 @@ where } /// A trigger which rolls the log once it has passed a certain size. -#[derive(Debug)] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct SizeTrigger { limit: u64, } @@ -116,7 +114,7 @@ impl SizeTrigger { } impl Trigger for SizeTrigger { - fn trigger(&self, file: &LogFile) -> Result> { + fn trigger(&self, file: &LogFile) -> anyhow::Result { Ok(file.len_estimate() > self.limit) } } @@ -133,10 +131,11 @@ impl Trigger for SizeTrigger { /// # bytes if not specified. Required. /// limit: 10 mb /// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct SizeTriggerDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for SizeTriggerDeserializer { type Trait = dyn Trigger; @@ -146,7 +145,7 @@ impl Deserialize for SizeTriggerDeserializer { &self, config: SizeTriggerConfig, _: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { Ok(Box::new(SizeTrigger::new(config.limit))) } } diff --git a/src/append/rolling_file/policy/mod.rs b/src/append/rolling_file/policy/mod.rs index 536e7ba3..8c1e6b2d 100644 --- a/src/append/rolling_file/policy/mod.rs +++ b/src/append/rolling_file/policy/mod.rs @@ -1,9 +1,10 @@ //! Policies. 
-use std::{error::Error, fmt}; +use std::fmt; use crate::append::rolling_file::LogFile; -#[cfg(feature = "file")] -use crate::file::Deserializable; + +#[cfg(feature = "config_parsing")] +use crate::config::Deserializable; #[cfg(feature = "compound_policy")] pub mod compound; @@ -14,10 +15,10 @@ pub trait Policy: Sync + Send + 'static + fmt::Debug { /// /// This method is called after each log event. It is provided a reference /// to the current log file. - fn process(&self, log: &mut LogFile) -> Result<(), Box>; + fn process(&self, log: &mut LogFile) -> anyhow::Result<()>; } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserializable for dyn Policy { fn name() -> &'static str { "policy" diff --git a/src/priv_file.rs b/src/config/file.rs similarity index 60% rename from src/priv_file.rs rename to src/config/file.rs index 68d7f885..288ab4fc 100644 --- a/src/priv_file.rs +++ b/src/config/file.rs @@ -1,20 +1,14 @@ -#![allow(deprecated)] - -use log::SetLoggerError; use std::{ - error, fmt, fs, + fs, path::{Path, PathBuf}, thread, time::{Duration, SystemTime}, }; -#[cfg(feature = "xml_format")] -use crate::file::RawConfigXml; -use crate::{ - config::Config, - file::{Deserializers, RawConfig}, - handle_error, init_config, Handle, -}; +use thiserror::Error; + +use super::{init_config, Config, Deserializers, Handle, RawConfig}; +use crate::handle_error; /// Initializes the global logger as a log4rs logger configured via a file. /// @@ -25,7 +19,7 @@ use crate::{ /// reported to stderr. /// /// Requires the `file` feature (enabled by default). -pub fn init_file

(path: P, deserializers: Deserializers) -> Result<(), Error> +pub fn init_file

(path: P, deserializers: Deserializers) -> anyhow::Result<()> where P: AsRef, { @@ -62,7 +56,7 @@ where /// /// Unlike `init_file`, this function does not initialize the logger; it only /// loads the `Config` and returns it. -pub fn load_config_file

(path: P, deserializers: Deserializers) -> Result +pub fn load_config_file

(path: P, deserializers: Deserializers) -> anyhow::Result where P: AsRef, { @@ -74,52 +68,31 @@ where Ok(deserialize(&config, &deserializers)) } -/// An error initializing the logging framework from a file. -#[derive(Debug)] -pub enum Error { - /// An error from the log crate - Log(SetLoggerError), - /// A fatal error initializing the log4rs config. - Log4rs(Box), -} - -impl fmt::Display for Error { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Log(ref e) => fmt::Display::fmt(e, fmt), - Error::Log4rs(ref e) => fmt::Display::fmt(e, fmt), - } - } -} +/// The various types of formatting errors that can be generated. +#[derive(Debug, Error)] +pub enum FormatError { + /// The YAML feature flag was missing. + #[error("the `yaml_format` feature is required for YAML support")] + YamlFeatureFlagRequired, -impl error::Error for Error { - fn description(&self) -> &str { - match *self { - Error::Log(ref e) => error::Error::description(e), - Error::Log4rs(ref e) => error::Error::description(&**e), - } - } + /// The JSON feature flag was missing. + #[error("the `json_format` feature is required for JSON support")] + JsonFeatureFlagRequired, - fn cause(&self) -> Option<&dyn error::Error> { - match *self { - Error::Log(ref e) => Some(e), - Error::Log4rs(ref e) => Some(&**e), - } - } -} + /// The TOML feature flag was missing. + #[error("the `toml_format` feature is required for TOML support")] + TomlFeatureFlagRequired, -impl From for Error { - fn from(t: SetLoggerError) -> Error { - Error::Log(t) - } -} + /// An unsupported format was specified. + #[error("unsupported file format `{0}`")] + UnsupportedFormat(String), -impl From> for Error { - fn from(t: Box) -> Error { - Error::Log4rs(t) - } + /// Log4rs could not determine the file format. 
+ #[error("unable to determine the file format")] + UnknownFormat, } +#[derive(Debug)] enum Format { #[cfg(feature = "yaml_format")] Yaml, @@ -127,42 +100,33 @@ enum Format { Json, #[cfg(feature = "toml_format")] Toml, - #[cfg(feature = "xml_format")] - #[deprecated(since = "0.11.0")] - Xml, } impl Format { - fn from_path(path: &Path) -> Result> { + fn from_path(path: &Path) -> anyhow::Result { match path.extension().and_then(|s| s.to_str()) { #[cfg(feature = "yaml_format")] Some("yaml") | Some("yml") => Ok(Format::Yaml), #[cfg(not(feature = "yaml_format"))] - Some("yaml") | Some("yml") => { - Err("the `yaml_format` feature is required for YAML support".into()) - } + Some("yaml") | Some("yml") => Err(FormatError::YamlFeatureFlagRequired.into()), + #[cfg(feature = "json_format")] Some("json") => Ok(Format::Json), #[cfg(not(feature = "json_format"))] - Some("json") => Err("the `json_format` feature is required for JSON support".into()), + Some("json") => Err(FormatError::JsonFeatureFlagRequired.into()), #[cfg(feature = "toml_format")] Some("toml") => Ok(Format::Toml), #[cfg(not(feature = "toml_format"))] - Some("toml") => Err("the `toml_format` feature is required for TOML support".into()), + Some("toml") => Err(FormatError::TomlFeatureFlagRequired.into()), - #[cfg(feature = "xml_format")] - Some("xml") => Ok(Format::Xml), - #[cfg(not(feature = "xml_format"))] - Some("xml") => Err("the `xml_format` feature is required for XML support".into()), - - Some(f) => Err(format!("unsupported file format `{}`", f).into()), - None => Err("unable to determine the file format".into()), + Some(f) => Err(FormatError::UnsupportedFormat(f.to_string()).into()), + None => Err(FormatError::UnknownFormat.into()), } } #[allow(unused_variables)] - fn parse(&self, source: &str) -> Result> { + fn parse(&self, source: &str) -> anyhow::Result { match *self { #[cfg(feature = "yaml_format")] Format::Yaml => ::serde_yaml::from_str(source).map_err(Into::into), @@ -170,32 +134,25 @@ impl Format { 
Format::Json => ::serde_json::from_str(source).map_err(Into::into), #[cfg(feature = "toml_format")] Format::Toml => ::toml::from_str(source).map_err(Into::into), - #[cfg(feature = "xml_format")] - Format::Xml => ::serde_xml_rs::from_reader::<_, RawConfigXml>(source.as_bytes()) - .map(Into::into) - .map_err(|e| e.to_string().into()), } } } -fn read_config(path: &Path) -> Result> { +fn read_config(path: &Path) -> anyhow::Result { let s = fs::read_to_string(path)?; Ok(s) } fn deserialize(config: &RawConfig, deserializers: &Deserializers) -> Config { - let (appenders, errors) = config.appenders_lossy(deserializers); - for error in &errors { - handle_error(error); - } + let (appenders, mut errors) = config.appenders_lossy(deserializers); + errors.handle(); - let (config, errors) = Config::builder() + let (config, mut errors) = Config::builder() .appenders(appenders) .loggers(config.loggers()) .build_lossy(config.root()); - for error in &errors { - handle_error(error); - } + + errors.handle(); config } @@ -241,15 +198,12 @@ impl ConfigReloader { match self.run_once(rate) { Ok(Some(r)) => rate = r, Ok(None) => break, - Err(e) => handle_error(&*e), + Err(e) => handle_error(&e), } } } - fn run_once( - &mut self, - rate: Duration, - ) -> Result, Box> { + fn run_once(&mut self, rate: Duration) -> anyhow::Result> { if let Some(last_modified) = self.modified { let modified = fs::metadata(&self.path).and_then(|m| m.modified())?; if last_modified == modified { diff --git a/src/config/mod.rs b/src/config/mod.rs new file mode 100644 index 00000000..08cf5f73 --- /dev/null +++ b/src/config/mod.rs @@ -0,0 +1,86 @@ +//! All things pertaining to log4rs config. See the docs root for examples. 
+ +use log::SetLoggerError; +use thiserror::Error; + +use crate::Handle; + +pub mod runtime; + +#[cfg(feature = "config_parsing")] +mod file; +#[cfg(feature = "config_parsing")] +mod raw; + +pub use runtime::{Appender, Config, Logger, Root}; + +#[cfg(feature = "config_parsing")] +pub use self::file::{init_file, load_config_file, FormatError}; +#[cfg(feature = "config_parsing")] +pub use self::raw::{Deserializable, Deserialize, Deserializers, RawConfig}; + +/// Initializes the global logger as a log4rs logger with the provided config. +/// +/// A `Handle` object is returned which can be used to adjust the logging +/// configuration. +pub fn init_config(config: runtime::Config) -> Result { + let logger = crate::Logger::new(config); + log::set_max_level(logger.max_log_level()); + let handle = Handle { + shared: logger.0.clone(), + }; + log::set_boxed_logger(Box::new(logger)).map(|()| handle) +} + +/// Initializes the global logger as a log4rs logger with the provided config and error handler. +/// +/// A `Handle` object is returned which can be used to adjust the logging +/// configuration. +pub fn init_config_with_err_handler( + config: runtime::Config, + err_handler: Box, +) -> Result { + let logger = crate::Logger::new_with_err_handler(config, err_handler); + log::set_max_level(logger.max_log_level()); + let handle = Handle { + shared: logger.0.clone(), + }; + log::set_boxed_logger(Box::new(logger)).map(|()| handle) +} + +/// Initializes the global logger as a log4rs logger using the provided raw config. +/// +/// This will return errors if the appenders configuration is malformed or if we fail to set the global logger. 
+#[cfg(feature = "config_parsing")] +pub fn init_raw_config(config: RawConfig) -> Result<(), InitError> { + let (appenders, errors) = config.appenders_lossy(&Deserializers::default()); + if !errors.is_empty() { + return Err(InitError::Deserializing(errors)); + } + let config = Config::builder() + .appenders(appenders) + .loggers(config.loggers()) + .build(config.root())?; + + let logger = crate::Logger::new(config); + log::set_max_level(logger.max_log_level()); + log::set_boxed_logger(Box::new(logger))?; + Ok(()) +} + +/// Errors found when initializing. +#[derive(Debug, Error)] +pub enum InitError { + /// There was an error deserializing. + #[error("Errors found when deserializing the config: {0:#?}")] + #[cfg(feature = "config_parsing")] + Deserializing(#[from] raw::AppenderErrors), + + /// There was an error building the handle. + #[error("Config building errors: {0:#?}")] + BuildConfig(#[from] runtime::ConfigErrors), + + /// There was an error setting the global logger. + #[error("Error setting the logger: {0:#?}")] + SetLogger(#[from] log::SetLoggerError), +} diff --git a/src/file.rs b/src/config/raw.rs similarity index 71% rename from src/file.rs rename to src/config/raw.rs index 1c9989cf..a1539849 100644 --- a/src/file.rs +++ b/src/config/raw.rs @@ -90,14 +90,16 @@ //! 
``` #![allow(deprecated)] +use std::{ + borrow::ToOwned, collections::HashMap, fmt, marker::PhantomData, sync::Arc, time::Duration, +}; + +use anyhow::anyhow; +use derivative::Derivative; use log::LevelFilter; use serde::de::{self, Deserialize as SerdeDeserialize, DeserializeOwned}; -use serde_derive::Deserialize; use serde_value::Value; -use std::{ - borrow::ToOwned, collections::HashMap, error, fmt, marker::PhantomData, sync::Arc, - time::Duration, -}; +use thiserror::Error; use typemap::{Key, ShareCloneMap}; use crate::{append::AppenderConfig, config}; @@ -132,7 +134,7 @@ pub trait Deserialize: Send + Sync + 'static { &self, config: Self::Config, deserializers: &Deserializers, - ) -> Result, Box>; + ) -> anyhow::Result>; } trait ErasedDeserialize: Send + Sync + 'static { @@ -142,7 +144,7 @@ trait ErasedDeserialize: Send + Sync + 'static { &self, config: Value, deserializers: &Deserializers, - ) -> Result, Box>; + ) -> anyhow::Result>; } struct DeserializeEraser(T); @@ -157,7 +159,7 @@ where &self, config: Value, deserializers: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { let config = config.deserialize_into()?; self.0.deserialize(config, deserializers) } @@ -279,109 +281,61 @@ impl Deserializers { } /// Deserializes a value of a specific type and kind. - pub fn deserialize( - &self, - kind: &str, - config: Value, - ) -> Result, Box> + pub fn deserialize(&self, kind: &str, config: Value) -> anyhow::Result> where T: Deserializable, { match self.0.get::>().and_then(|m| m.get(kind)) { Some(b) => b.deserialize(config, self), - None => Err(format!( + None => Err(anyhow!( "no {} deserializer for kind `{}` registered", T::name(), kind - ) - .into()), + )), } } } -/// An error deserializing a configuration into a log4rs `Config`. 
-#[derive(Debug)] -pub struct Error(ErrorKind, Box); - -#[derive(Debug)] -enum ErrorKind { - Appender(String), - Filter(String), -} - -impl fmt::Display for Error { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - match self.0 { - ErrorKind::Appender(ref name) => { - write!(fmt, "error deserializing appender {}: {}", name, self.1) - } - ErrorKind::Filter(ref name) => write!( - fmt, - "error deserializing filter attached to appender {}: {}", - name, self.1 - ), - } - } -} - -impl error::Error for Error { - fn description(&self) -> &str { - "error deserializing a log4rs `Config`" - } - - fn cause(&self) -> Option<&dyn error::Error> { - Some(&*self.1) - } -} - -/// A raw deserializable log4rs configuration for xml. -#[cfg(feature = "xml_format")] -#[deprecated(since = "0.11.0")] -#[derive(Deserialize, Clone, Debug)] -#[serde(deny_unknown_fields)] -pub struct RawConfigXml { - #[serde(deserialize_with = "de_duration", default)] - refresh_rate: Option, - #[serde(default)] - root: Root, - #[serde(default)] - appenders: HashMap, - #[serde(rename = "loggers", default)] - loggers: LoggersXml, -} - -/// Loggers section wrapper for xml configuration -#[cfg(feature = "xml_format")] -#[deprecated(since = "0.11.0")] -#[derive(Deserialize, Debug, Clone)] -#[serde(deny_unknown_fields)] -pub struct LoggersXml { - #[serde(rename = "logger", default)] - loggers: Vec, -} - -#[cfg(feature = "xml_format")] -#[deprecated(since = "0.11.0")] -impl Default for LoggersXml { - fn default() -> Self { - Self { loggers: vec![] } - } +#[derive(Debug, Error)] +pub enum DeserializingConfigError { + #[error("error deserializing appender {0}: {1}")] + Appender(String, anyhow::Error), + #[error("error deserializing filter attached to appender {0}: {1}")] + Filter(String, anyhow::Error), } /// A raw deserializable log4rs configuration. 
-#[derive(Deserialize, Debug, Clone)] #[serde(deny_unknown_fields)] +#[derive(Clone, Debug, Default, serde::Deserialize)] pub struct RawConfig { #[serde(deserialize_with = "de_duration", default)] refresh_rate: Option, + #[serde(default)] root: Root, + #[serde(default)] appenders: HashMap, + #[serde(default)] loggers: HashMap, } +#[derive(Debug, Error)] +#[error("errors deserializing appenders {0:#?}")] +pub struct AppenderErrors(Vec); + +impl AppenderErrors { + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + pub fn handle(&mut self) { + for error in self.0.drain(..) { + crate::handle_error(&error.into()); + } + } +} + impl RawConfig { /// Returns the root. pub fn root(&self) -> config::Root { @@ -409,7 +363,7 @@ impl RawConfig { pub fn appenders_lossy( &self, deserializers: &Deserializers, - ) -> (Vec, Vec) { + ) -> (Vec, AppenderErrors) { let mut appenders = vec![]; let mut errors = vec![]; @@ -418,16 +372,16 @@ impl RawConfig { for filter in &appender.filters { match deserializers.deserialize(&filter.kind, filter.config.clone()) { Ok(filter) => builder = builder.filter(filter), - Err(e) => errors.push(Error(ErrorKind::Filter(name.clone()), e)), + Err(e) => errors.push(DeserializingConfigError::Filter(name.clone(), e)), } } match deserializers.deserialize(&appender.kind, appender.config.clone()) { Ok(appender) => appenders.push(builder.build(name.clone(), appender)), - Err(e) => errors.push(Error(ErrorKind::Appender(name.clone()), e)), + Err(e) => errors.push(DeserializingConfigError::Appender(name.clone(), e)), } } - (appenders, errors) + (appenders, AppenderErrors(errors)) } /// Returns the requested refresh rate. 
@@ -436,23 +390,6 @@ impl RawConfig { } } -#[cfg(feature = "xml_format")] -impl ::std::convert::From for RawConfig { - fn from(cfg: RawConfigXml) -> Self { - Self { - refresh_rate: cfg.refresh_rate, - root: cfg.root, - appenders: cfg.appenders, - loggers: cfg - .loggers - .loggers - .into_iter() - .map(|l| (l.name.clone(), l.into())) - .collect(), - } - } -} - fn de_duration<'de, D>(d: D) -> Result, D::Error> where D: de::Deserializer<'de>, @@ -488,47 +425,22 @@ where Option::::deserialize(d).map(|r| r.map(|s| s.0)) } -#[derive(Deserialize, Debug, Clone)] +#[derive(Clone, Debug, Derivative, serde::Deserialize)] +#[derivative(Default)] #[serde(deny_unknown_fields)] struct Root { #[serde(default = "root_level_default")] + #[derivative(Default(value = "root_level_default()"))] level: LevelFilter, #[serde(default)] appenders: Vec, } -impl Default for Root { - fn default() -> Root { - Root { - level: root_level_default(), - appenders: vec![], - } - } -} - fn root_level_default() -> LevelFilter { LevelFilter::Debug } -/// logger struct for xml configuration -#[cfg(feature = "xml_format")] -#[deprecated(since = "0.11.0")] -#[derive(Deserialize, Debug, Clone)] -#[serde(deny_unknown_fields)] -struct LoggerXml { - /// explicit field "name" for xml config - name: String, - - level: LevelFilter, - - #[serde(default)] - appenders: Vec, - - #[serde(default = "logger_additive_default")] - additive: bool, -} - -#[derive(Deserialize, Debug, Clone)] +#[derive(serde::Deserialize, Debug, Clone)] #[serde(deny_unknown_fields)] struct Logger { level: LevelFilter, @@ -538,17 +450,6 @@ struct Logger { additive: bool, } -#[cfg(feature = "xml_format")] -impl ::std::convert::From for Logger { - fn from(logger_xml: LoggerXml) -> Self { - Logger { - level: logger_xml.level, - appenders: logger_xml.appenders, - additive: logger_xml.additive, - } - } -} - fn logger_additive_default() -> bool { true } @@ -599,37 +500,4 @@ loggers: fn empty() { ::serde_yaml::from_str::("{}").unwrap(); } - - #[test] 
- #[cfg(feature = "xml_format")] - fn full_deserialize_xml() { - let cfg = r#" - - - - - - - - - - stdout - - - - requests - - - -"#; - let config: RawConfigXml = ::serde_xml_rs::from_reader(cfg.as_bytes()).unwrap(); - let config: RawConfig = config.into(); - let errors = config.appenders_lossy(&Deserializers::new()).1; - println!("{:?}", errors); - assert!(errors.is_empty()); - assert_eq!(config.refresh_rate, Some(Duration::from_secs(30))); - - let logger = config.loggers.get("foo::bar::baz").unwrap(); - assert_eq!(logger.appenders[0], "requests"); - } } diff --git a/src/config.rs b/src/config/runtime.rs similarity index 80% rename from src/config.rs rename to src/config/runtime.rs index 023fb80c..6b80019f 100644 --- a/src/config.rs +++ b/src/config/runtime.rs @@ -1,9 +1,170 @@ //! log4rs configuration use log::LevelFilter; -use std::{collections::HashSet, error, fmt, iter::IntoIterator}; +use std::{collections::HashSet, iter::IntoIterator}; +use thiserror::Error; -use crate::{append::Append, filter::Filter, ConfigPrivateExt, PrivateConfigAppenderExt}; +use crate::{append::Append, filter::Filter}; + +/// A log4rs configuration. +#[derive(Debug)] +pub struct Config { + appenders: Vec, + root: Root, + loggers: Vec, +} + +impl Config { + /// Creates a new `ConfigBuilder`. + pub fn builder() -> ConfigBuilder { + ConfigBuilder { + appenders: vec![], + loggers: vec![], + } + } + + /// Returns the `Appender`s associated with the `Config`. + pub fn appenders(&self) -> &[Appender] { + &self.appenders + } + + /// Returns the `Root` associated with the `Config`. + pub fn root(&self) -> &Root { + &self.root + } + + /// Returns a mutable handle for the `Root` associated with the `Config`. + pub fn root_mut(&mut self) -> &mut Root { + &mut self.root + } + + /// Returns the `Logger`s associated with the `Config`. 
+ pub fn loggers(&self) -> &[Logger] { + &self.loggers + } + + pub(crate) fn unpack(self) -> (Vec, Root, Vec) { + let Config { + appenders, + root, + loggers, + } = self; + (appenders, root, loggers) + } +} + +/// A builder for `Config`s. +#[derive(Debug, Default)] +pub struct ConfigBuilder { + appenders: Vec, + loggers: Vec, +} + +impl ConfigBuilder { + /// Adds an appender. + pub fn appender(mut self, appender: Appender) -> ConfigBuilder { + self.appenders.push(appender); + self + } + + /// Adds appenders. + pub fn appenders(mut self, appenders: I) -> ConfigBuilder + where + I: IntoIterator, + { + self.appenders.extend(appenders); + self + } + + /// Adds a logger. + pub fn logger(mut self, logger: Logger) -> ConfigBuilder { + self.loggers.push(logger); + self + } + + /// Adds loggers. + pub fn loggers(mut self, loggers: I) -> ConfigBuilder + where + I: IntoIterator, + { + self.loggers.extend(loggers); + self + } + + /// Consumes the `ConfigBuilder`, returning the `Config`. + /// + /// Unlike `build`, this method will always return a `Config` by stripping + /// portions of the configuration that are incorrect. 
+ pub fn build_lossy(self, mut root: Root) -> (Config, ConfigErrors) { + let mut errors: Vec = vec![]; + + let ConfigBuilder { appenders, loggers } = self; + + let mut ok_appenders = vec![]; + let mut appender_names = HashSet::new(); + for appender in appenders { + if appender_names.insert(appender.name.clone()) { + ok_appenders.push(appender); + } else { + errors.push(ConfigError::DuplicateAppenderName(appender.name)); + } + } + + let mut ok_root_appenders = vec![]; + for appender in root.appenders { + if appender_names.contains(&appender) { + ok_root_appenders.push(appender); + } else { + errors.push(ConfigError::NonexistentAppender(appender)); + } + } + root.appenders = ok_root_appenders; + + let mut ok_loggers = vec![]; + let mut logger_names = HashSet::new(); + for mut logger in loggers { + if !logger_names.insert(logger.name.clone()) { + errors.push(ConfigError::DuplicateLoggerName(logger.name)); + continue; + } + + if let Err(err) = check_logger_name(&logger.name) { + errors.push(err); + continue; + } + + let mut ok_logger_appenders = vec![]; + for appender in logger.appenders { + if appender_names.contains(&appender) { + ok_logger_appenders.push(appender); + } else { + errors.push(ConfigError::NonexistentAppender(appender)); + } + } + logger.appenders = ok_logger_appenders; + + ok_loggers.push(logger); + } + + let config = Config { + appenders: ok_appenders, + root, + loggers: ok_loggers, + }; + + (config, ConfigErrors(errors)) + } + + /// Consumes the `ConfigBuilder`, returning the `Config`. + pub fn build(self, root: Root) -> Result { + let (config, errors) = self.build_lossy(root); + if errors.is_empty() { + Ok(config) + } else { + Err(errors) + } + } +} /// Configuration for the root logger. #[derive(Debug)] @@ -35,7 +196,7 @@ impl Root { } /// A builder for `Root`s. 
-#[derive(Debug)] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct RootBuilder { appenders: Vec, } @@ -97,10 +258,8 @@ impl Appender { pub fn filters(&self) -> &[Box] { &self.filters } -} -impl PrivateConfigAppenderExt for Appender { - fn unpack(self) -> (String, Box, Vec>) { + pub(crate) fn unpack(self) -> (String, Box, Vec>) { let Appender { name, appender, @@ -146,7 +305,7 @@ impl AppenderBuilder { } /// Configuration for a logger. -#[derive(Debug)] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct Logger { name: String, level: LevelFilter, @@ -187,7 +346,7 @@ impl Logger { } /// A builder for `Logger`s. -#[derive(Debug)] +#[derive(Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct LoggerBuilder { appenders: Vec, additive: bool, @@ -233,159 +392,9 @@ impl LoggerBuilder { } } -/// A log4rs configuration. -#[derive(Debug)] -pub struct Config { - appenders: Vec, - root: Root, - loggers: Vec, -} - -impl Config { - /// Creates a new `ConfigBuilder`. - pub fn builder() -> ConfigBuilder { - ConfigBuilder { - appenders: vec![], - loggers: vec![], - } - } - - /// Returns the `Appender`s associated with the `Config`. - pub fn appenders(&self) -> &[Appender] { - &self.appenders - } - - /// Returns the `Root` associated with the `Config`. - pub fn root(&self) -> &Root { - &self.root - } - - /// Returns a mutable handle for the `Root` associated with the `Config`. - pub fn root_mut(&mut self) -> &mut Root { - &mut self.root - } - - /// Returns the `Logger`s associated with the `Config`. - pub fn loggers(&self) -> &[Logger] { - &self.loggers - } -} - -/// A builder for `Config`s. -pub struct ConfigBuilder { - appenders: Vec, - loggers: Vec, -} - -impl ConfigBuilder { - /// Adds an appender. - pub fn appender(mut self, appender: Appender) -> ConfigBuilder { - self.appenders.push(appender); - self - } - - /// Adds appenders. 
- pub fn appenders(mut self, appenders: I) -> ConfigBuilder - where - I: IntoIterator, - { - self.appenders.extend(appenders); - self - } - - /// Adds a logger. - pub fn logger(mut self, logger: Logger) -> ConfigBuilder { - self.loggers.push(logger); - self - } - - /// Adds loggers. - pub fn loggers(mut self, loggers: I) -> ConfigBuilder - where - I: IntoIterator, - { - self.loggers.extend(loggers); - self - } - - /// Consumes the `ConfigBuilder`, returning the `Config`. - /// - /// Unlike `build`, this method will always return a `Config` by stripping - /// portions of the configuration that are incorrect. - pub fn build_lossy(self, mut root: Root) -> (Config, Vec) { - let mut errors = vec![]; - - let ConfigBuilder { appenders, loggers } = self; - - let mut ok_appenders = vec![]; - let mut appender_names = HashSet::new(); - for appender in appenders { - if appender_names.insert(appender.name.clone()) { - ok_appenders.push(appender); - } else { - errors.push(Error::DuplicateAppenderName(appender.name)); - } - } - - let mut ok_root_appenders = vec![]; - for appender in root.appenders { - if appender_names.contains(&appender) { - ok_root_appenders.push(appender); - } else { - errors.push(Error::NonexistentAppender(appender)); - } - } - root.appenders = ok_root_appenders; - - let mut ok_loggers = vec![]; - let mut logger_names = HashSet::new(); - for mut logger in loggers { - if !logger_names.insert(logger.name.clone()) { - errors.push(Error::DuplicateLoggerName(logger.name)); - continue; - } - - if let Err(err) = check_logger_name(&logger.name) { - errors.push(err); - continue; - } - - let mut ok_logger_appenders = vec![]; - for appender in logger.appenders { - if appender_names.contains(&appender) { - ok_logger_appenders.push(appender); - } else { - errors.push(Error::NonexistentAppender(appender)); - } - } - logger.appenders = ok_logger_appenders; - - ok_loggers.push(logger); - } - - let config = Config { - appenders: ok_appenders, - root, - loggers: ok_loggers, - 
 }; - - (config, errors) - } - - /// Consumes the `ConfigBuilder`, returning the `Config`. - pub fn build(self, root: Root) -> Result { - let (config, errors) = self.build_lossy(root); - if errors.is_empty() { - Ok(config) - } else { - Err(Errors { errors }) - } - } -} - -fn check_logger_name(name: &str) -> Result<(), Error> { +fn check_logger_name(name: &str) -> Result<(), ConfigError> { if name.is_empty() { - return Err(Error::InvalidLoggerName(name.to_owned())); + return Err(ConfigError::InvalidLoggerName(name.to_owned())); } let mut streak = 0; @@ -393,97 +402,69 @@ fn check_logger_name(name: &str) -> Result<(), Error> { if ch == ':' { streak += 1; if streak > 2 { - return Err(Error::InvalidLoggerName(name.to_owned())); + return Err(ConfigError::InvalidLoggerName(name.to_owned())); } } else { if streak > 0 && streak != 2 { - return Err(Error::InvalidLoggerName(name.to_owned())); + return Err(ConfigError::InvalidLoggerName(name.to_owned())); } streak = 0; } } if streak > 0 { - Err(Error::InvalidLoggerName(name.to_owned())) + Err(ConfigError::InvalidLoggerName(name.to_owned())) } else { Ok(()) } } -impl ConfigPrivateExt for Config { - fn unpack(self) -> (Vec, Root, Vec) { - let Config { - appenders, - root, - loggers, - } = self; - (appenders, root, loggers) - } -} - /// Errors encountered when validating a log4rs `Config`. -#[derive(Debug)] -pub struct Errors { - errors: Vec, -} +#[derive(Debug, Error)] +#[error("Configuration errors: {0:#?}")] +pub struct ConfigErrors(Vec); -impl Errors { +impl ConfigErrors { + /// There were no config errors. + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } /// Returns a slice of `Error`s. - pub fn errors(&self) -> &[Error] { - &self.errors + pub fn errors(&self) -> &[ConfigError] { + &self.0 } -} - -impl fmt::Display for Errors { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - for error in &self.errors { - writeln!(fmt, "{}", error)?; + /// Handle non-fatal errors (by logging them to stderr). 
+ pub fn handle(&mut self) { + for e in self.0.drain(..) { + crate::handle_error(&e.into()); } - Ok(()) - } -} - -impl error::Error for Errors { - fn description(&self) -> &str { - "Errors encountered when validating a log4rs `Config`" } } /// An error validating a log4rs `Config`. -#[derive(Debug)] -pub enum Error { +#[derive(Debug, Error)] +pub enum ConfigError { /// Multiple appenders were registered with the same name. + #[error("Duplicate appender name `{0}`")] DuplicateAppenderName(String), + /// A reference to a nonexistant appender. + #[error("Reference to nonexistent appender: `{0}`")] NonexistentAppender(String), + /// Multiple loggers were registered with the same name. + #[error("Duplicate logger name `{0}`")] DuplicateLoggerName(String), + /// A logger name was invalid. + #[error("Invalid logger name `{0}`")] InvalidLoggerName(String), + #[doc(hidden)] + #[error("Reserved for future use")] __Extensible, } -impl fmt::Display for Error { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::DuplicateAppenderName(ref n) => write!(fmt, "Duplicate appender name `{}`", n), - Error::NonexistentAppender(ref n) => { - write!(fmt, "Reference to nonexistent appender: `{}`", n) - } - Error::DuplicateLoggerName(ref n) => write!(fmt, "Duplicate logger name `{}`", n), - Error::InvalidLoggerName(ref n) => write!(fmt, "Invalid logger name `{}`", n), - Error::__Extensible => unreachable!(), - } - } -} - -impl error::Error for Error { - fn description(&self) -> &str { - "An error constructing a log4rs `Config`" - } -} - #[cfg(test)] mod test { #[test] diff --git a/src/encode/json.rs b/src/encode/json.rs index 0224dc89..0ac336df 100644 --- a/src/encode/json.rs +++ b/src/encode/json.rs @@ -31,18 +31,15 @@ use chrono::{ }; use log::{Level, Record}; use serde::ser::{self, Serialize, SerializeMap}; -#[cfg(feature = "file")] -use serde_derive::Deserialize; -use serde_derive::Serialize; -use std::{error::Error, fmt, option, thread}; +use std::{fmt, 
option, thread}; +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; use crate::encode::{Encode, Write, NEWLINE}; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; /// The JSON encoder's configuration -#[cfg(feature = "file")] -#[derive(Deserialize, Clone)] +#[cfg(feature = "config_parsing")] +#[derive(Clone, Eq, PartialEq, Hash, Debug, Default, serde::Deserialize)] #[serde(deny_unknown_fields)] pub struct JsonEncoderConfig { #[serde(skip_deserializing)] @@ -50,7 +47,7 @@ pub struct JsonEncoderConfig { } /// An `Encode`r which writes a JSON object. -#[derive(Debug, Default)] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct JsonEncoder(()); impl JsonEncoder { @@ -66,7 +63,7 @@ impl JsonEncoder { w: &mut dyn Write, time: DateTime, record: &Record, - ) -> Result<(), Box> { + ) -> anyhow::Result<()> { let thread = thread::current(); let message = Message { time: time.format_with_items(Some(Item::Fixed(Fixed::RFC3339)).into_iter()), @@ -87,16 +84,12 @@ impl JsonEncoder { } impl Encode for JsonEncoder { - fn encode( - &self, - w: &mut dyn Write, - record: &Record, - ) -> Result<(), Box> { + fn encode(&self, w: &mut dyn Write, record: &Record) -> anyhow::Result<()> { self.encode_inner(w, Local::now(), record) } } -#[derive(Serialize)] +#[derive(serde::Serialize)] struct Message<'a> { #[serde(serialize_with = "ser_display")] time: DelayedFormat>>, @@ -151,10 +144,11 @@ impl ser::Serialize for Mdc { /// ```yaml /// kind: json /// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)] pub struct JsonEncoderDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for JsonEncoderDeserializer { type Trait = dyn Encode; @@ -164,7 +158,7 @@ impl Deserialize for JsonEncoderDeserializer { &self, _: JsonEncoderConfig, _: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { 
Ok(Box::new(JsonEncoder::default())) } } diff --git a/src/encode/mod.rs b/src/encode/mod.rs index 8e4297ba..aa290b3c 100644 --- a/src/encode/mod.rs +++ b/src/encode/mod.rs @@ -1,16 +1,18 @@ //! Encoders +use derivative::Derivative; use log::Record; -#[cfg(feature = "file")] +use std::{fmt, io}; + +#[cfg(feature = "config_parsing")] use serde::de; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use serde_value::Value; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use std::collections::BTreeMap; -use std::{error::Error, fmt, io}; -#[cfg(feature = "file")] -use crate::file::Deserializable; +#[cfg(feature = "config_parsing")] +use crate::config::Deserializable; #[cfg(feature = "json_encoder")] pub mod json; @@ -21,6 +23,7 @@ pub mod writer; #[allow(dead_code)] #[cfg(windows)] const NEWLINE: &'static str = "\r\n"; + #[allow(dead_code)] #[cfg(not(windows))] const NEWLINE: &str = "\n"; @@ -32,14 +35,10 @@ const NEWLINE: &str = "\n"; /// output. pub trait Encode: fmt::Debug + Send + Sync + 'static { /// Encodes the `Record` into bytes and writes them. - fn encode( - &self, - w: &mut dyn Write, - record: &Record, - ) -> Result<(), Box>; + fn encode(&self, w: &mut dyn Write, record: &Record) -> anyhow::Result<()>; } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserializable for dyn Encode { fn name() -> &'static str { "encoder" @@ -47,7 +46,8 @@ impl Deserializable for dyn Encode { } /// Configuration for an encoder. -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct EncoderConfig { /// The encoder's kind. pub kind: String, @@ -56,7 +56,7 @@ pub struct EncoderConfig { pub config: Value, } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl<'de> de::Deserialize<'de> for EncoderConfig { fn deserialize(d: D) -> Result where @@ -77,8 +77,8 @@ impl<'de> de::Deserialize<'de> for EncoderConfig { } /// A text or background color. 
-#[derive(Copy, Clone, Debug)] #[allow(missing_docs)] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub enum Color { Black, Red, @@ -94,7 +94,9 @@ pub enum Color { /// /// Any fields set to `None` will be set to their default format, as defined /// by the `Write`r. -#[derive(Clone, Default)] +#[derive(Derivative)] +#[derivative(Debug)] +#[derive(Clone, Eq, PartialEq, Hash, Default)] pub struct Style { /// The text (or foreground) color. pub text: Option, @@ -102,19 +104,10 @@ pub struct Style { pub background: Option, /// True if the text should have increased intensity. pub intense: Option, + #[derivative(Debug = "ignore")] _p: (), } -impl fmt::Debug for Style { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("Style") - .field("text", &self.text) - .field("background", &self.background) - .field("intense", &self.intense) - .finish() - } -} - impl Style { /// Returns a `Style` with all fields set to their defaults. pub fn new() -> Style { diff --git a/src/encode/pattern/mod.rs b/src/encode/pattern/mod.rs index e1b43329..5fef409b 100644 --- a/src/encode/pattern/mod.rs +++ b/src/encode/pattern/mod.rs @@ -119,25 +119,25 @@ //! [MDC]: https://crates.io/crates/log-mdc use chrono::{Local, Utc}; +use derivative::Derivative; use log::{Level, Record}; -#[cfg(feature = "file")] -use serde_derive::Deserialize; -use std::{default::Default, error::Error, fmt, io, process, thread}; +use std::{default::Default, io, process, thread}; use crate::encode::{ self, pattern::parser::{Alignment, Parameters, Parser, Piece}, Color, Encode, Style, NEWLINE, }; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; + +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; mod parser; /// The pattern encoder's configuration. 
-#[cfg(feature = "file")] -#[derive(Deserialize)] +#[cfg(feature = "config_parsing")] #[serde(deny_unknown_fields)] +#[derive(Clone, Eq, PartialEq, Hash, Debug, Default, serde::Deserialize)] pub struct PatternEncoderConfig { pattern: Option, } @@ -293,6 +293,7 @@ impl encode::Write for RightAlignWriter { } } +#[derive(Clone, Eq, PartialEq, Hash, Debug)] enum Chunk { Text(String), Formatted { @@ -404,16 +405,17 @@ impl<'a> From> for Chunk { let timezone = match formatter.args.get(1) { Some(arg) => { - if arg.len() != 1 { - return Chunk::Error("invalid timezone".to_owned()); - } - match arg[0] { - Piece::Text(ref z) if *z == "utc" => Timezone::Utc, - Piece::Text(ref z) if *z == "local" => Timezone::Local, - Piece::Text(ref z) => { - return Chunk::Error(format!("invalid timezone `{}`", z)); + if let Some(arg) = arg.get(0) { + match arg { + Piece::Text(ref z) if *z == "utc" => Timezone::Utc, + Piece::Text(ref z) if *z == "local" => Timezone::Local, + Piece::Text(ref z) => { + return Chunk::Error(format!("invalid timezone `{}`", z)); + } + _ => return Chunk::Error("invalid timezone".to_owned()), } - _ => return Chunk::Error("invalid timezone".to_owned()), + } else { + return Chunk::Error("invalid timezone".to_owned()); } } None => Timezone::Local, @@ -458,34 +460,36 @@ impl<'a> From> for Chunk { let key = match formatter.args.get(0) { Some(arg) => { - if arg.len() != 1 { + if let Some(arg) = arg.get(0) { + match arg { + Piece::Text(key) => key.to_owned(), + Piece::Error(ref e) => return Chunk::Error(e.clone()), + _ => return Chunk::Error("invalid MDC key".to_owned()), + } + } else { return Chunk::Error("invalid MDC key".to_owned()); } - match arg[0] { - Piece::Text(key) => key.to_owned(), - Piece::Error(ref e) => return Chunk::Error(e.clone()), - _ => return Chunk::Error("invalid MDC key".to_owned()), - } } None => return Chunk::Error("missing MDC key".to_owned()), }; let default = match formatter.args.get(1) { Some(arg) => { - if arg.len() != 1 { + if let Some(arg) = 
arg.get(0) { + match arg { + Piece::Text(key) => key.to_owned(), + Piece::Error(ref e) => return Chunk::Error(e.clone()), + _ => return Chunk::Error("invalid MDC default".to_owned()), + } + } else { return Chunk::Error("invalid MDC default".to_owned()); } - match arg[0] { - Piece::Text(key) => key.to_owned(), - Piece::Error(ref e) => return Chunk::Error(e.clone()), - _ => return Chunk::Error("invalid MDC default".to_owned()), - } } - None => "".to_owned(), + None => "", }; Chunk::Formatted { - chunk: FormattedChunk::Mdc(key, default), + chunk: FormattedChunk::Mdc(key.into(), default.into()), params: parameters, } } @@ -521,11 +525,13 @@ fn no_args(arg: &[Vec], params: Parameters, chunk: FormattedChunk) -> Chu } } +#[derive(Clone, Eq, PartialEq, Hash, Debug)] enum Timezone { Utc, Local, } +#[derive(Clone, Eq, PartialEq, Hash, Debug)] enum FormattedChunk { Time(String, Timezone), Level, @@ -576,15 +582,18 @@ impl FormattedChunk { Level::Error => { w.set_style(Style::new().text(Color::Red).intense(true))?; } - Level::Warn => w.set_style(Style::new().text(Color::Red))?, - Level::Info => w.set_style(Style::new().text(Color::Blue))?, + Level::Warn => w.set_style(Style::new().text(Color::Yellow))?, + Level::Info => w.set_style(Style::new().text(Color::Green))?, + Level::Trace => w.set_style(Style::new().text(Color::Cyan))?, _ => {} } for chunk in chunks { chunk.encode(w, record)?; } match record.level() { - Level::Error | Level::Warn | Level::Info => w.set_style(&Style::new())?, + Level::Error | Level::Warn | Level::Info | Level::Trace => { + w.set_style(&Style::new())? + } _ => {} } Ok(()) @@ -597,19 +606,15 @@ impl FormattedChunk { } /// An `Encode`r configured via a format string. 
+#[derive(Derivative)] +#[derivative(Debug)] +#[derive(Clone, Eq, PartialEq, Hash)] pub struct PatternEncoder { + #[derivative(Debug = "ignore")] chunks: Vec, pattern: String, } -impl fmt::Debug for PatternEncoder { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("PatternEncoder") - .field("pattern", &self.pattern) - .finish() - } -} - /// Returns a `PatternEncoder` using the default pattern of `{d} {l} {t} - {m}{n}`. impl Default for PatternEncoder { fn default() -> PatternEncoder { @@ -618,11 +623,7 @@ impl Default for PatternEncoder { } impl Encode for PatternEncoder { - fn encode( - &self, - w: &mut dyn encode::Write, - record: &Record, - ) -> Result<(), Box> { + fn encode(&self, w: &mut dyn encode::Write, record: &Record) -> anyhow::Result<()> { for chunk in &self.chunks { chunk.encode(w, record)?; } @@ -653,10 +654,11 @@ impl PatternEncoder { /// # "{d} {l} {t} - {m}{n}". /// pattern: "{d} {l} {t} - {m}{n}" /// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub struct PatternEncoderDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for PatternEncoderDeserializer { type Trait = dyn Encode; @@ -666,7 +668,7 @@ impl Deserialize for PatternEncoderDeserializer { &self, config: PatternEncoderConfig, _: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { let encoder = match config.pattern { Some(pattern) => PatternEncoder::new(&pattern), None => PatternEncoder::default(), diff --git a/src/encode/pattern/parser.rs b/src/encode/pattern/parser.rs index 445cee76..8e91e8ec 100644 --- a/src/encode/pattern/parser.rs +++ b/src/encode/pattern/parser.rs @@ -1,6 +1,7 @@ // cribbed to a large extent from libfmt_macros use std::{iter::Peekable, str::CharIndices}; +#[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum Piece<'a> { Text(&'a str), Argument { @@ -10,11 +11,13 @@ pub enum Piece<'a> { Error(String), } +#[derive(Clone, 
Eq, PartialEq, Hash, Debug)] pub struct Formatter<'a> { pub name: &'a str, pub args: Vec>>, } +#[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct Parameters { pub fill: char, pub align: Alignment, @@ -22,12 +25,13 @@ pub struct Parameters { pub max_width: Option, } -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub enum Alignment { Left, Right, } +#[derive(Clone, Debug)] pub struct Parser<'a> { pattern: &'a str, it: Peekable>, diff --git a/src/encode/writer/ansi.rs b/src/encode/writer/ansi.rs index 18deef93..8b8b4226 100644 --- a/src/encode/writer/ansi.rs +++ b/src/encode/writer/ansi.rs @@ -7,7 +7,7 @@ use std::{fmt, io}; /// An `encode::Write`r that wraps an `io::Write`r, emitting ANSI escape codes /// for text style. -#[derive(Debug)] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct AnsiWriter(pub W); impl io::Write for AnsiWriter { diff --git a/src/encode/writer/simple.rs b/src/encode/writer/simple.rs index 08757650..db29a6d9 100644 --- a/src/encode/writer/simple.rs +++ b/src/encode/writer/simple.rs @@ -7,7 +7,7 @@ use std::{fmt, io}; /// An `encode::Write`r that simply delegates to an `io::Write`r and relies /// on the default implementations of `encode::Write`r methods. -#[derive(Debug)] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct SimpleWriter(pub W); impl io::Write for SimpleWriter { diff --git a/src/filter/mod.rs b/src/filter/mod.rs index 35286d55..093dd2d1 100644 --- a/src/filter/mod.rs +++ b/src/filter/mod.rs @@ -1,16 +1,16 @@ //! 
Filters use log::Record; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use serde::de; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use serde_value::Value; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] use std::collections::BTreeMap; use std::fmt; -#[cfg(feature = "file")] -use crate::file::Deserializable; +#[cfg(feature = "config_parsing")] +use crate::config::Deserializable; #[cfg(feature = "threshold_filter")] pub mod threshold; @@ -24,7 +24,7 @@ pub trait Filter: fmt::Debug + Send + Sync + 'static { fn filter(&self, record: &Record) -> Response; } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserializable for dyn Filter { fn name() -> &'static str { "filter" @@ -50,8 +50,8 @@ pub enum Response { } /// Configuration for a filter. -#[derive(PartialEq, Eq, Debug, Clone)] -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Clone, Eq, PartialEq, Hash, Debug)] pub struct FilterConfig { /// The filter kind. pub kind: String, @@ -59,7 +59,7 @@ pub struct FilterConfig { pub config: Value, } -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl<'de> de::Deserialize<'de> for FilterConfig { fn deserialize(d: D) -> Result where diff --git a/src/filter/threshold.rs b/src/filter/threshold.rs index 396fbcf5..86c6e289 100644 --- a/src/filter/threshold.rs +++ b/src/filter/threshold.rs @@ -3,24 +3,20 @@ //! Requires the `threshold_filter` feature. use log::{LevelFilter, Record}; -#[cfg(feature = "file")] -use serde_derive::Deserialize; -#[cfg(feature = "file")] -use std::error::Error; -#[cfg(feature = "file")] -use crate::file::{Deserialize, Deserializers}; +#[cfg(feature = "config_parsing")] +use crate::config::{Deserialize, Deserializers}; use crate::filter::{Filter, Response}; /// The threshold filter's configuration. 
-#[cfg(feature = "file")] -#[derive(Deserialize)] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, serde::Deserialize)] pub struct ThresholdFilterConfig { level: LevelFilter, } /// A filter that rejects all events at a level below a provided threshold. -#[derive(Debug)] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub struct ThresholdFilter { level: LevelFilter, } @@ -52,10 +48,11 @@ impl Filter for ThresholdFilter { /// # The threshold log level to filter at. Required /// level: warn /// ``` -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] +#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub struct ThresholdFilterDeserializer; -#[cfg(feature = "file")] +#[cfg(feature = "config_parsing")] impl Deserialize for ThresholdFilterDeserializer { type Trait = dyn Filter; @@ -65,7 +62,7 @@ impl Deserialize for ThresholdFilterDeserializer { &self, config: ThresholdFilterConfig, _: &Deserializers, - ) -> Result, Box> { + ) -> anyhow::Result> { Ok(Box::new(ThresholdFilter::new(config.level))) } } diff --git a/src/lib.rs b/src/lib.rs index 06b8cb55..589b9ae8 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -59,7 +59,7 @@ //! Loggers are also associated with a set of appenders. Appenders can be //! associated directly with a logger. In addition, the appenders of the //! logger's parent will be associated with the logger unless the logger has -//! its *additivity* set to `false`. Log events sent to the logger that are not +//! its *additive* set to `false`. Log events sent to the logger that are not //! filtered out by the logger's maximum log level will be sent to all //! associated appenders. //! @@ -134,7 +134,7 @@ //! Add the following in your application initialization. //! //! ```no_run -//! # #[cfg(feature = "file")] +//! # #[cfg(feature = "config_parsing")] //! # fn f() { //! log4rs::init_file("log4rs.yml", Default::default()).unwrap(); //! 
# } @@ -186,31 +186,31 @@ #![allow(where_clauses_object_safety, clippy::manual_non_exhaustive)] #![warn(missing_docs)] -use arc_swap::ArcSwap; -use fnv::FnvHasher; -use log::{Level, LevelFilter, Metadata, Record, SetLoggerError}; use std::{ - cmp, collections::HashMap, error, hash::BuildHasherDefault, io, io::prelude::*, sync::Arc, + cmp, collections::HashMap, fmt, hash::BuildHasherDefault, io, io::prelude::*, sync::Arc, }; -#[cfg(feature = "file")] -pub use crate::priv_file::{init_file, load_config_file, Error}; - -use crate::{append::Append, config::Config, filter::Filter}; +use arc_swap::ArcSwap; +use fnv::FnvHasher; +use log::{Level, LevelFilter, Metadata, Record}; pub mod append; pub mod config; pub mod encode; -#[cfg(feature = "file")] -pub mod file; pub mod filter; -#[cfg(feature = "file")] -mod priv_file; #[cfg(feature = "console_writer")] mod priv_io; +pub use config::{init_config, Config}; + +#[cfg(feature = "config_parsing")] +pub use config::{init_file, init_raw_config}; + +use self::{append::Append, filter::Filter}; + type FnvHashMap = HashMap>; +#[derive(Debug)] struct ConfiguredLogger { level: LevelFilter, appenders: Vec, @@ -277,24 +277,32 @@ impl ConfiguredLogger { self.level >= level } - fn log(&self, record: &log::Record, appenders: &[Appender]) { + fn log(&self, record: &log::Record, appenders: &[Appender]) -> Result<(), Vec> { + let mut errors = vec![]; if self.enabled(record.level()) { for &idx in &self.appenders { if let Err(err) = appenders[idx].append(record) { - handle_error(&*err); + errors.push(err); } } } + + if errors.is_empty() { + Ok(()) + } else { + Err(errors) + } } } +#[derive(Debug)] struct Appender { appender: Box, filters: Vec>, } impl Appender { - fn append(&self, record: &Record) -> Result<(), Box> { + fn append(&self, record: &Record) -> anyhow::Result<()> { for filter in &self.filters { match filter.filter(record) { filter::Response::Accept => break, @@ -314,10 +322,31 @@ impl Appender { struct SharedLogger { root: 
ConfiguredLogger, appenders: Vec, + err_handler: Box, +} + +impl fmt::Debug for SharedLogger { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SharedLogger") + .field("root", &self.root) + .field("appenders", &self.appenders) + .finish() + } } impl SharedLogger { fn new(config: config::Config) -> SharedLogger { + Self::new_with_err_handler( + config, + Box::new(|e: &anyhow::Error| { + let _ = writeln!(io::stderr(), "log4rs: {}", e); + }), + ) + } + fn new_with_err_handler( + config: config::Config, + err_handler: Box, + ) -> SharedLogger { let (appenders, root, mut loggers) = config.unpack(); let root = { @@ -359,12 +388,17 @@ impl SharedLogger { }) .collect(); - SharedLogger { root, appenders } + SharedLogger { + root, + appenders, + err_handler, + } } } /// The fully configured log4rs Logger which is appropriate /// to use with the `log::set_boxed_logger` function. +#[derive(Debug)] pub struct Logger(Arc>); impl Logger { @@ -372,6 +406,15 @@ impl Logger { pub fn new(config: config::Config) -> Logger { Logger(Arc::new(ArcSwap::new(Arc::new(SharedLogger::new(config))))) } + /// Create a new `Logger` given a configuration and err handler. + pub fn new_with_err_handler( + config: config::Config, + err_handler: Box, + ) -> Logger { + Logger(Arc::new(ArcSwap::new(Arc::new( + SharedLogger::new_with_err_handler(config, err_handler), + )))) + } /// Set the max log level above which everything will be filtered. 
pub fn max_log_level(&self) -> LevelFilter { @@ -390,10 +433,15 @@ impl log::Log for Logger { fn log(&self, record: &log::Record) { let shared = self.0.load(); - shared + if let Err(errs) = shared .root .find(record.target()) - .log(record, &shared.appenders); + .log(record, &shared.appenders) + { + for e in errs { + (shared.err_handler)(&e) + } + } } fn flush(&self) { @@ -403,25 +451,12 @@ impl log::Log for Logger { } } -fn handle_error(e: &E) { +pub(crate) fn handle_error(e: &anyhow::Error) { let _ = writeln!(io::stderr(), "log4rs: {}", e); } -/// Initializes the global logger as a log4rs logger with the provided config. -/// -/// A `Handle` object is returned which can be used to adjust the logging -/// configuration. -pub fn init_config(config: config::Config) -> Result { - let logger = Logger::new(config); - log::set_max_level(logger.max_log_level()); - let handle = Handle { - shared: logger.0.clone(), - }; - log::set_boxed_logger(Box::new(logger)).map(|()| handle) -} - /// A handle to the active logger. 
-#[derive(Clone)] +#[derive(Clone, Debug)] pub struct Handle { shared: Arc>, } @@ -439,20 +474,49 @@ trait ErrorInternals { fn new(message: String) -> Self; } -trait ConfigPrivateExt { - fn unpack(self) -> (Vec, config::Root, Vec); -} - -trait PrivateConfigAppenderExt { - fn unpack(self) -> (String, Box, Vec>); -} - #[cfg(test)] mod test { use log::{Level, LevelFilter, Log}; use super::*; + #[test] + #[cfg(all(feature = "config_parsing", feature = "json_format"))] + fn init_from_raw_config() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("append.log"); + + let cfg = serde_json::json!({ + "refresh_rate": "60 seconds", + "root" : { + "appenders": ["baz"], + "level": "info", + }, + "appenders": { + "baz": { + "kind": "file", + "path": path, + "encoder": { + "pattern": "{m}" + } + } + }, + }); + let config = serde_json::from_str::(&cfg.to_string()).unwrap(); + if let Err(e) = init_raw_config(config) { + panic!(e); + } + assert!(path.exists()); + log::info!("init_from_raw_config"); + + let mut contents = String::new(); + std::fs::File::open(&path) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert_eq!(contents, "init_from_raw_config"); + } + #[test] fn enabled() { let root = config::Root::builder().build(LevelFilter::Debug);