From 2786748d7f50b3f5c0cf28138807d8c7ddd4ecb2 Mon Sep 17 00:00:00 2001 From: emabee Date: Thu, 21 Sep 2023 12:27:13 +0200 Subject: [PATCH] Rework LoggerHandle::existing_log_files() - extend LogWriter - extend impact of trigger_rotation --- CHANGELOG.md | 8 + Cargo.toml | 2 +- README.md | 6 +- src/file_spec.rs | 4 + src/lib.rs | 2 +- src/logger_handle.rs | 87 +++++++++- src/primary_writer.rs | 9 +- src/primary_writer/multi_writer.rs | 34 ++-- src/writers/file_log_writer.rs | 20 ++- src/writers/file_log_writer/state.rs | 161 ++---------------- .../file_log_writer/state/list_and_cleanup.rs | 76 ++++++--- .../file_log_writer/state/timestamps.rs | 7 +- src/writers/file_log_writer/state_handle.rs | 14 +- src/writers/log_writer.rs | 13 ++ tests/test_multi_threaded_cleanup_use_utc.rs | 12 +- tests/test_multi_threaded_numbers.rs | 12 +- 16 files changed, 247 insertions(+), 220 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5d5cc6f..838833d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.27.0] - 2023-09-20 + +Revise, and modify the signature of, `LoggerHandle::existing_log_files()` (version bump). + +Extend the trait `LogWriter` with an optional method `rotate`. + +Extend impact of `LoggerHandle::trigger_rotation()` to all configured writers. + ## [0.26.1] - 2023-09-19 Introduce new naming variants that work without `_rCURRENT` files: `Naming::TimestampsDirect` diff --git a/Cargo.toml b/Cargo.toml index 452ffd9..9b9d7c7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flexi_logger" -version = "0.26.1" +version = "0.27.0" authors = ["emabee "] categories = ["development-tools::debugging"] description = """ diff --git a/README.md b/README.md index 4a3f4f5..8b67a77 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ and you use the ```log``` macros to write log lines from your code): ```toml [dependencies] -flexi_logger = "0.26" +flexi_logger = "0.27" log = "0.4" ``` @@ -69,7 +69,7 @@ Make use of the non-default features by specifying them in your `Cargo.toml`, e. ```toml [dependencies] -flexi_logger = { version = "0.26", features = ["async", "specfile", "compress"] } +flexi_logger = { version = "0.27", features = ["async", "specfile", "compress"] } log = "0.4" ``` @@ -77,7 +77,7 @@ or, to get the smallest footprint (and no colors), switch off even the default f ```toml [dependencies] -flexi_logger = { version = "0.26", default_features = false } +flexi_logger = { version = "0.27", default_features = false } log = "0.4" ``` diff --git a/src/file_spec.rs b/src/file_spec.rs index 2e7e19f..f08031a 100644 --- a/src/file_spec.rs +++ b/src/file_spec.rs @@ -200,6 +200,10 @@ impl FileSpec { self.directory.clone() } + pub(crate) fn get_suffix(&self) -> Option<String> { + self.o_suffix.clone() + } + /// Derives a `PathBuf` from the spec and the given infix.
/// /// It is composed like this: diff --git a/src/lib.rs b/src/lib.rs index c52d78e..535018e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -68,7 +68,7 @@ pub use crate::flexi_error::FlexiLoggerError; pub use crate::formats::*; pub use crate::log_specification::{LogSpecBuilder, LogSpecification, ModuleFilter}; pub use crate::logger::{Duplicate, ErrorChannel, Logger}; -pub use crate::logger_handle::LoggerHandle; +pub use crate::logger_handle::{LogfileSelector, LoggerHandle}; pub use crate::parameters::{Age, Cleanup, Criterion, Naming}; pub(crate) use crate::write_mode::EffectiveWriteMode; pub use crate::write_mode::{WriteMode, DEFAULT_BUFFER_CAPACITY, DEFAULT_FLUSH_INTERVAL}; diff --git a/src/logger_handle.rs b/src/logger_handle.rs index 1ad9edb..6de0e8f 100644 --- a/src/logger_handle.rs +++ b/src/logger_handle.rs @@ -278,14 +278,23 @@ impl LoggerHandle { /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. + /// + /// IO errors. pub fn trigger_rotation(&self) -> Result<(), FlexiLoggerError> { - if let PrimaryWriter::Multi(ref mw) = &*self.writers_handle.primary_writer { - mw.trigger_rotation()?; + let mut result = if let PrimaryWriter::Multi(ref mw) = &*self.writers_handle.primary_writer + { + mw.trigger_rotation() + } else { + Ok(()) + }; + + for blw in self.writers_handle.other_writers.values() { + let result2 = blw.rotate(); + if result.is_ok() && result2.is_err() { + result = result2; + } } - // for blw in self.writers_handle.other_writers.values() { - // let result2 = blw.trigger_rotation(); // todo is not (yet?) part of trait LogWriter - // } - Ok(()) + result } /// Shutdown all participating writers. @@ -308,14 +317,21 @@ impl LoggerHandle { /// Returns the list of existing log files according to the current `FileSpec`. /// - /// The list includes the current log file and the compressed files, if they exist. + /// Depending on the given selector, the list may include the CURRENT log file + /// and the compressed files, if they exist. /// The list is empty if the logger is not configured for writing to files. /// /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. - pub fn existing_log_files(&self) -> Result, FlexiLoggerError> { - let mut log_files = self.writers_handle.primary_writer.existing_log_files()?; + pub fn existing_log_files( + &self, + selector: &LogfileSelector, + ) -> Result, FlexiLoggerError> { + let mut log_files = self + .writers_handle + .primary_writer + .existing_log_files(selector)?; log_files.sort(); Ok(log_files) } @@ -357,6 +373,59 @@ impl LoggerHandle { } } +/// Used in [`LoggerHandle::existing_log_files`]. +/// +/// Example: +/// +/// ```rust +/// # use flexi_logger::{LogfileSelector,Logger}; +/// # let logger_handle = Logger::try_with_env().unwrap().start().unwrap(); +/// let all_log_files = logger_handle.existing_log_files( +/// &LogfileSelector::default() +/// .with_r_current() +/// .with_compressed_files() +/// ); +/// ``` +pub struct LogfileSelector { + pub(crate) with_plain_files: bool, + pub(crate) with_r_current: bool, + pub(crate) with_compressed_files: bool, +} +impl Default for LogfileSelector { + /// Selects plain log files without the `rCURRENT` file. + fn default() -> Self { + Self { + with_plain_files: true, + with_r_current: false, + with_compressed_files: false, + } + } +} +impl LogfileSelector { + /// Selects no file at all. 
+ #[must_use] + pub fn none() -> Self { + Self { + with_plain_files: false, + with_r_current: false, + with_compressed_files: false, + } + } + /// Selects additionally the `rCURRENT` file. + #[must_use] + pub fn with_r_current(mut self) -> Self { + self.with_r_current = true; + self + } + + /// Selects additionally the compressed log files. + #[must_use] + pub fn with_compressed_files(mut self) -> Self { + self.with_compressed_files = true; + self + } +} + #[derive(Clone)] pub(crate) struct WritersHandle { spec: Arc>, diff --git a/src/primary_writer.rs b/src/primary_writer.rs index a33f654..98c0750 100644 --- a/src/primary_writer.rs +++ b/src/primary_writer.rs @@ -11,7 +11,7 @@ use crate::{ filter::LogLineWriter, logger::Duplicate, writers::{FileLogWriter, LogWriter}, - DeferredNow, FlexiLoggerError, FormatFunction, WriteMode, + DeferredNow, FlexiLoggerError, FormatFunction, LogfileSelector, WriteMode, }; use log::Record; use std::path::PathBuf; @@ -115,9 +115,12 @@ impl PrimaryWriter { } } - pub fn existing_log_files(&self) -> Result, FlexiLoggerError> { + pub fn existing_log_files( + &self, + selector: &LogfileSelector, + ) -> Result, FlexiLoggerError> { match self { - Self::Multi(multi_writer) => multi_writer.existing_log_files(), + Self::Multi(multi_writer) => multi_writer.existing_log_files(selector), _ => Ok(Vec::new()), } } diff --git a/src/primary_writer/multi_writer.rs b/src/primary_writer/multi_writer.rs index 8211c72..3576a67 100644 --- a/src/primary_writer/multi_writer.rs +++ b/src/primary_writer/multi_writer.rs @@ -2,7 +2,7 @@ use crate::{ logger::Duplicate, util::{eprint_err, write_buffered, ErrorCode}, writers::{FileLogWriter, FileLogWriterBuilder, FileLogWriterConfig, LogWriter}, - {DeferredNow, FlexiLoggerError, FormatFunction}, + LogfileSelector, {DeferredNow, FlexiLoggerError, FormatFunction}, }; use log::Record; use std::{ @@ -71,9 +71,27 @@ impl MultiWriter { } } } - pub(crate) fn existing_log_files(&self) -> Result, FlexiLoggerError> { + pub(crate) fn trigger_rotation(&self) -> Result<(), FlexiLoggerError> { + match (&self.o_file_writer, &self.o_other_writer) { + (None, None) => Ok(()), + (Some(ref w), None) => w.rotate(), + (None, Some(w)) => w.rotate(), + (Some(w1), Some(w2)) => { + let r1 = w1.rotate(); + let r2 = w2.rotate(); + match (r1, r2) { + (Ok(()), Ok(())) => Ok(()), + (Err(e), _) | (Ok(()), Err(e)) => Err(e), + } + } + } + } + pub(crate) fn existing_log_files( + &self, + selector: &LogfileSelector, + ) -> Result, FlexiLoggerError> { if let Some(fw) = self.o_file_writer.as_ref() { - fw.existing_log_files() + fw.existing_log_files(selector) } else { Ok(Vec::new()) } @@ -93,16 +111,6 @@ impl MultiWriter { fn duplication_to_stdout(&self) -> Duplicate { Duplicate::from(self.duplicate_stdout.load(Ordering::Relaxed)) } - - pub(crate) fn trigger_rotation(&self) -> Result<(), FlexiLoggerError> { - if let Some(ref w) = &self.o_file_writer { - w.trigger_rotation()?; - } - // if let Some(w) = &self.o_other_writer { - // w.trigger_rotation(); // todo is not (yet?) 
part of trait LogWriter - // } - Ok(()) - } } impl LogWriter for MultiWriter { diff --git a/src/writers/file_log_writer.rs b/src/writers/file_log_writer.rs index 1792cec..eee551a 100644 --- a/src/writers/file_log_writer.rs +++ b/src/writers/file_log_writer.rs @@ -10,7 +10,8 @@ pub use self::config::FileLogWriterConfig; use self::{config::RotationConfig, state::State, state_handle::StateHandle}; use crate::{ - writers::LogWriter, DeferredNow, EffectiveWriteMode, FileSpec, FlexiLoggerError, FormatFunction, + writers::LogWriter, DeferredNow, EffectiveWriteMode, FileSpec, FlexiLoggerError, + FormatFunction, LogfileSelector, }; use log::Record; use std::path::PathBuf; @@ -132,8 +133,10 @@ impl FileLogWriter { /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. - pub fn trigger_rotation(&self) -> Result<(), FlexiLoggerError> { - self.state_handle.force_rotation() + /// + /// IO errors. + pub fn rotate(&self) -> Result<(), FlexiLoggerError> { + self.state_handle.rotate() } /// Returns the list of existing log files according to the current `FileSpec`. @@ -143,8 +146,11 @@ impl FileLogWriter { /// # Errors /// /// `FlexiLoggerError::Poison` if some mutex is poisoned. - pub fn existing_log_files(&self) -> Result, FlexiLoggerError> { - self.state_handle.existing_log_files() + pub fn existing_log_files( + &self, + selector: &LogfileSelector, + ) -> Result, FlexiLoggerError> { + self.state_handle.existing_log_files(selector) } } @@ -168,6 +174,10 @@ impl LogWriter for FileLogWriter { self.reopen_outputfile() } + fn rotate(&self) -> Result<(), FlexiLoggerError> { + self.state_handle.rotate() + } + fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { self.state_handle.validate_logs(expected); } diff --git a/src/writers/file_log_writer/state.rs b/src/writers/file_log_writer/state.rs index 1f212b1..3e64397 100644 --- a/src/writers/file_log_writer/state.rs +++ b/src/writers/file_log_writer/state.rs @@ -5,15 +5,16 @@ mod timestamps; use super::config::{FileLogWriterConfig, RotationConfig}; use crate::{ util::{eprint_err, ErrorCode}, - Age, Cleanup, Criterion, FlexiLoggerError, Naming, + Age, Cleanup, Criterion, FlexiLoggerError, LogfileSelector, Naming, }; use chrono::{DateTime, Datelike, Local, Timelike}; +#[cfg(feature = "async")] +use std::thread::JoinHandle; use std::{ fs::{remove_file, File, OpenOptions}, io::{BufRead, BufReader, BufWriter, Write}, path::{Path, PathBuf}, sync::{Arc, Mutex}, - thread::JoinHandle, }; #[cfg(feature = "async")] @@ -182,29 +183,20 @@ impl RollState { } } -#[derive(Debug)] -struct CleanupThreadHandle { - sender: std::sync::mpsc::Sender, - join_handle: JoinHandle<()>, -} - #[derive(Debug)] struct RotationState { naming_state: NamingState, roll_state: RollState, cleanup: Cleanup, - o_cleanup_thread_handle: Option, + o_cleanup_thread_handle: Option, } impl RotationState { fn shutdown(&mut self) { // this sets o_cleanup_thread_handle in self.state.o_rotation_state to None: let o_cleanup_thread_handle = self.o_cleanup_thread_handle.take(); + if let Some(cleanup_thread_handle) = o_cleanup_thread_handle { - cleanup_thread_handle - .sender - .send(list_and_cleanup::MessageToCleanupThread::Die) - .ok(); - cleanup_thread_handle.join_handle.join().ok(); + cleanup_thread_handle.shutdown(); } } } @@ -306,15 +298,10 @@ impl State { &self.config.file_spec, )?; if *cleanup_in_background_thread { - let (sender, join_handle) = list_and_cleanup::start_cleanup_thread( + Some(list_and_cleanup::start_cleanup_thread( rotate_config.cleanup, 
self.config.file_spec.clone(), - )?; - - Some(CleanupThreadHandle { - sender, - join_handle, - }) + )?) } else { None } @@ -452,12 +439,8 @@ impl State { Ok(()) } - pub fn existing_log_files(&self) -> Vec { - let mut list: Vec = - list_and_cleanup::list_of_log_and_compressed_files(&self.config.file_spec).collect(); - // todo this should only be returned if the file exists: - list.push(self.config.file_spec.as_pathbuf(Some(CURRENT_INFIX))); - list + pub fn existing_log_files(&self, selector: &LogfileSelector) -> Vec { + list_and_cleanup::existing_log_files(&self.config.file_spec, selector) } pub fn validate_logs(&mut self, expected: &[(&'static str, &'static str, &'static str)]) { @@ -707,127 +690,3 @@ mod platform { #[cfg(not(target_family = "unix"))] fn unix_create_symlink(_: &Path, _: &Path) {} } - -#[cfg(test)] -mod test { - use chrono::Local; - - use super::list_and_cleanup::list_of_files; - - use crate::{ - writers::{file_log_writer::config::RotationConfig, FileLogWriterConfig}, - Cleanup, Criterion, FileSpec, Naming, WriteMode, - }; - - use super::State; - - fn write_log_entries(file_spec: &FileSpec, timestamps: bool, rcurrent: bool) { - { - let mut state1 = State::new( - get_flw_config(file_spec.clone(), true), - Some(get_rotation_config(timestamps, rcurrent)), - false, - ); - // write to it to provoke some rotations - for _ in 0..17 { - std::thread::sleep(std::time::Duration::from_millis(60)); - state1 - .write_buffer(b"hello world hello world hello world hello world\n") - .unwrap(); - } - } - - // restart with a new instance of State with append - { - let mut state2 = State::new( - get_flw_config(file_spec.clone(), true), - Some(get_rotation_config(timestamps, rcurrent)), - false, - ); - // write to it to provoke some rotations - for _ in 0..23 { - std::thread::sleep(std::time::Duration::from_millis(60)); - state2 - .write_buffer(b"hello world hello world hello world hello world\n") - .unwrap(); - } - } - - // restart with a new instance of State without append - { - let mut state3 = State::new( - get_flw_config(file_spec.clone(), false), - Some(get_rotation_config(timestamps, rcurrent)), - false, - ); - // write to it to provoke some rotations - for _ in 0..15 { - std::thread::sleep(std::time::Duration::from_millis(60)); - state3 - .write_buffer(b"hello world hello world hello world hello world\n") - .unwrap(); - } - } - } - - #[test] - fn test_timestamps_rcurrent() { - let dir = format!("./log_files/state_unit_tests1-{}", Local::now()); - std::fs::create_dir_all(dir.clone()).unwrap(); - let file_spec = FileSpec::default() - .directory(dir) - .discriminant("ts") - .suppress_timestamp(); - - write_log_entries(&file_spec, true, true); - - // verify that rCURRENT and the intended number of rotated files exist - let pattern = file_spec.as_glob_pattern("_r[0-9]*", Some("log")); - assert_eq!(list_of_files(&pattern).count(), 8); - let pattern = file_spec.as_glob_pattern("_rCURRENT", Some("log")); - assert_eq!(list_of_files(&pattern).count(), 1); - } - - #[test] - fn test_numbers_rcurrent() { - let dir = format!("./log_files/state_unit_tests2-{}", Local::now()); - std::fs::create_dir_all(dir.clone()).unwrap(); - let file_spec = FileSpec::default() - .discriminant("nr") - .directory(dir) - .suppress_timestamp(); - - write_log_entries(&file_spec, false, true); - - // verify that rCURRENT and the intended number of rotated files exist - let pattern = file_spec.as_glob_pattern("_r[0-9]*", Some("log")); - assert_eq!(list_of_files(&pattern).count(), 8); - let pattern = 
file_spec.as_glob_pattern("_rCURRENT", Some("log")); - assert_eq!(list_of_files(&pattern).count(), 1); - } - - fn get_flw_config(file_spec: FileSpec, append: bool) -> FileLogWriterConfig { - // create an instance of State with Naming::Timestamps and Criterion::Size(200) - FileLogWriterConfig { - print_message: false, - append, - write_mode: WriteMode::Direct, - file_spec, - o_create_symlink: None, - line_ending: &[b'\n'], - use_utc: true, - } - } - - fn get_rotation_config(timestamps: bool, _rcurrent: bool) -> RotationConfig { - RotationConfig { - criterion: Criterion::Size(290), - naming: if timestamps { - Naming::Timestamps - } else { - Naming::Numbers - }, - cleanup: Cleanup::Never, - } - } -} diff --git a/src/writers/file_log_writer/state/list_and_cleanup.rs b/src/writers/file_log_writer/state/list_and_cleanup.rs index 5418667..1446fc5 100644 --- a/src/writers/file_log_writer/state/list_and_cleanup.rs +++ b/src/writers/file_log_writer/state/list_and_cleanup.rs @@ -1,33 +1,51 @@ -use super::CleanupThreadHandle; -use crate::{Cleanup, FileSpec}; +use crate::{Cleanup, FileSpec, LogfileSelector}; #[cfg(feature = "compress")] use std::fs::File; use std::{ - iter::Chain, path::PathBuf, - sync::mpsc::Sender, thread::{Builder as ThreadBuilder, JoinHandle}, - vec::IntoIter, }; -pub(super) const INFIX_PATTERN: &str = "_r[0-9]*"; +const INFIX_PATTERN: &str = "_r[0-9]*"; -pub(super) fn list_of_log_and_compressed_files( - file_spec: &FileSpec, -) -> Chain, IntoIter> { - let log_pattern = file_spec.as_glob_pattern(INFIX_PATTERN, None); - let gz_pattern = file_spec.as_glob_pattern(INFIX_PATTERN, Some("gz")); +pub(super) fn existing_log_files(file_spec: &FileSpec, selector: &LogfileSelector) -> Vec { + let mut result = Vec::new(); + if selector.with_plain_files { + let pattern = file_spec.as_glob_pattern(INFIX_PATTERN, file_spec.get_suffix().as_deref()); + result.append(&mut list_of_files(&pattern)); + } + + if selector.with_compressed_files { + let pattern = file_spec.as_glob_pattern(INFIX_PATTERN, Some("gz")); + result.append(&mut list_of_files(&pattern)); + } + + if selector.with_r_current { + let pattern = + file_spec.as_glob_pattern(super::CURRENT_INFIX, file_spec.get_suffix().as_deref()); + result.append(&mut list_of_files(&pattern)); + } + result +} - list_of_files(&log_pattern).chain(list_of_files(&gz_pattern)) +pub(super) fn list_of_log_and_compressed_files(file_spec: &FileSpec) -> Vec { + existing_log_files( + file_spec, + &LogfileSelector::default().with_compressed_files(), + ) } -pub(super) fn list_of_files(pattern: &str) -> std::vec::IntoIter { +pub(super) fn list_of_infix_files() -> Vec { + list_of_files(INFIX_PATTERN) +} +fn list_of_files(pattern: &str) -> Vec { let mut log_files: Vec = glob::glob(pattern) .unwrap(/* failure should be impossible */) .filter_map(Result::ok) .collect(); + log_files.sort_unstable(); // should be no-op, but we don't want to rely on glob doing it log_files.reverse(); - log_files.into_iter() + log_files } pub(super) fn remove_or_compress_too_old_logfiles( @@ -66,7 +84,10 @@ pub(crate) fn remove_or_compress_too_old_logfiles_impl( } }; - for (index, file) in list_of_log_and_compressed_files(file_spec).enumerate() { + for (index, file) in list_of_log_and_compressed_files(file_spec) + .into_iter() + .enumerate() + { if index >= log_limit + compress_limit { // delete (log or log.gz) std::fs::remove_file(file)?; @@ -97,24 +118,37 @@ pub(crate) fn remove_or_compress_too_old_logfiles_impl( const CLEANER: &str = "flexi_logger-fs-cleanup"; -pub(crate) enum 
MessageToCleanupThread { +#[derive(Debug)] +pub(super) struct CleanupThreadHandle { + sender: std::sync::mpsc::Sender, + join_handle: JoinHandle<()>, +} + +enum MessageToCleanupThread { Act, Die, } -pub(crate) fn start_cleanup_thread( +impl CleanupThreadHandle { + pub(super) fn shutdown(self) { + self.sender.send(MessageToCleanupThread::Die).ok(); + self.join_handle.join().ok(); + } +} + +pub(super) fn start_cleanup_thread( cleanup: Cleanup, file_spec: FileSpec, -) -> Result<(Sender, JoinHandle<()>), std::io::Error> { +) -> Result { let (sender, receiver) = std::sync::mpsc::channel(); let builder = ThreadBuilder::new().name(CLEANER.to_string()); #[cfg(not(feature = "dont_minimize_extra_stacks"))] let builder = builder.stack_size(512 * 1024); - Ok(( + Ok(CleanupThreadHandle { sender, - builder.spawn(move || { + join_handle: builder.spawn(move || { while let Ok(MessageToCleanupThread::Act) = receiver.recv() { remove_or_compress_too_old_logfiles_impl(&cleanup, &file_spec).ok(); } })?, - )) + }) } diff --git a/src/writers/file_log_writer/state/timestamps.rs b/src/writers/file_log_writer/state/timestamps.rs index 977f074..7058010 100644 --- a/src/writers/file_log_writer/state/timestamps.rs +++ b/src/writers/file_log_writer/state/timestamps.rs @@ -1,4 +1,4 @@ -use super::list_and_cleanup::{list_of_files, INFIX_PATTERN}; +use super::list_and_cleanup::list_of_infix_files; use super::{get_creation_date, CURRENT_INFIX}; use crate::{writers::FileLogWriterConfig, FileSpec}; use chrono::{DateTime, Local, NaiveDateTime, TimeZone}; @@ -62,10 +62,11 @@ pub(super) fn latest_timestamp_file(config: &FileLogWriterConfig, rotate: bool) Local::now() } else { // find all file paths that fit the pattern - list_of_files(INFIX_PATTERN) + list_of_infix_files() + .into_iter() // retrieve the infix .map(|path| ts_infix_from_path(&path, &config.file_spec)) - // parse infix as date, ignore all files where this fails, + // parse infix as date, ignore all infixes where this fails .filter_map(|infix| timestamp_from_ts_infix(&infix)) // take the newest of these dates .reduce(|acc, e| if acc > e { acc } else { e }) diff --git a/src/writers/file_log_writer/state_handle.rs b/src/writers/file_log_writer/state_handle.rs index b845150..66c65d6 100644 --- a/src/writers/file_log_writer/state_handle.rs +++ b/src/writers/file_log_writer/state_handle.rs @@ -1,7 +1,10 @@ use super::{builder::FileLogWriterBuilder, config::FileLogWriterConfig, state::State}; -use crate::util::{buffer_with, eprint_err, io_err, ErrorCode}; #[cfg(feature = "async")] use crate::util::{ASYNC_FLUSH, ASYNC_SHUTDOWN}; +use crate::{ + util::{buffer_with, eprint_err, io_err, ErrorCode}, + LogfileSelector, +}; use crate::{DeferredNow, FlexiLoggerError, FormatFunction}; use log::Record; #[cfg(feature = "async")] @@ -271,7 +274,7 @@ impl StateHandle { Ok(state.reopen_outputfile()?) 
} - pub(super) fn force_rotation(&self) -> Result<(), FlexiLoggerError> { + pub(super) fn rotate(&self) -> Result<(), FlexiLoggerError> { let mut state = match self { StateHandle::Sync(handle) => handle.am_state.lock(), #[cfg(feature = "async")] @@ -292,7 +295,10 @@ impl StateHandle { Ok(state.config().clone()) } - pub(super) fn existing_log_files(&self) -> Result, FlexiLoggerError> { + pub(super) fn existing_log_files( + &self, + selector: &LogfileSelector, + ) -> Result, FlexiLoggerError> { let state = match self { StateHandle::Sync(handle) => handle.am_state.lock(), #[cfg(feature = "async")] @@ -300,7 +306,7 @@ impl StateHandle { } .map_err(|_| FlexiLoggerError::Poison)?; - Ok(state.existing_log_files()) + Ok(state.existing_log_files(selector)) } pub(super) fn validate_logs(&self, expected: &[(&'static str, &'static str, &'static str)]) { diff --git a/src/writers/log_writer.rs b/src/writers/log_writer.rs index 149d3e4..6c63d8a 100644 --- a/src/writers/log_writer.rs +++ b/src/writers/log_writer.rs @@ -52,6 +52,19 @@ pub trait LogWriter: Sync + Send { Ok(()) } + /// Rotate the current output, if meaningful. + /// + /// This method is called from + /// [`LoggerHandle::trigger_rotation`](crate::LoggerHandle::trigger_rotation) + /// for all registered additional writers. + /// + /// # Errors + /// + /// Depend on registered writers. + fn rotate(&self) -> Result<(), FlexiLoggerError> { + Ok(()) + } + // Takes a vec with three patterns per line that represent the log line, // compares the written log with the expected lines, // and asserts that both are in sync. diff --git a/tests/test_multi_threaded_cleanup_use_utc.rs b/tests/test_multi_threaded_cleanup_use_utc.rs index 3cc856b..1212e2c 100644 --- a/tests/test_multi_threaded_cleanup_use_utc.rs +++ b/tests/test_multi_threaded_cleanup_use_utc.rs @@ -3,8 +3,8 @@ mod test_utils; #[cfg(feature = "compress")] mod d { use flexi_logger::{ - Cleanup, Criterion, DeferredNow, Duplicate, FileSpec, LogSpecification, Logger, Naming, - Record, WriteMode, TS_DASHES_BLANK_COLONS_DOT_BLANK, + Cleanup, Criterion, DeferredNow, Duplicate, FileSpec, LogSpecification, LogfileSelector, + Logger, Naming, Record, WriteMode, TS_DASHES_BLANK_COLONS_DOT_BLANK, }; use glob::glob; use log::*; @@ -54,7 +54,13 @@ mod d { wait_for_workers_to_close(worker_handles); - let log_files = logger.existing_log_files().unwrap(); + let log_files = logger + .existing_log_files( + &LogfileSelector::default() + .with_compressed_files() + .with_r_current(), + ) + .unwrap(); assert_eq!(log_files.len(), NO_OF_LOG_FILES + NO_OF_GZ_FILES + 1); for f in log_files { debug!("Existing log file: {f:?}"); diff --git a/tests/test_multi_threaded_numbers.rs b/tests/test_multi_threaded_numbers.rs index 6ac6317..7341dd8 100644 --- a/tests/test_multi_threaded_numbers.rs +++ b/tests/test_multi_threaded_numbers.rs @@ -1,8 +1,8 @@ mod test_utils; use flexi_logger::{ - Cleanup, Criterion, DeferredNow, Duplicate, FileSpec, LogSpecification, Logger, Naming, Record, - WriteMode, TS_DASHES_BLANK_COLONS_DOT_BLANK, + Cleanup, Criterion, DeferredNow, Duplicate, FileSpec, LogSpecification, LogfileSelector, + Logger, Naming, Record, WriteMode, TS_DASHES_BLANK_COLONS_DOT_BLANK, }; use glob::glob; use log::*; @@ -63,7 +63,13 @@ fn multi_threaded() { .unwrap(); wait_for_workers_to_close(worker_handles); - let log_files = logger.existing_log_files().unwrap(); + let log_files = logger + .existing_log_files( + &LogfileSelector::default() + .with_compressed_files() + .with_r_current(), + ) + .unwrap(); 
assert_eq!(log_files.len(), 17); logger.parse_new_spec("info").unwrap(); for f in log_files {