Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
78 changes: 71 additions & 7 deletions codex-rs/state/src/bin/logs_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ use std::time::Duration;
use anyhow::Context;
use chrono::DateTime;
use clap::Parser;
use clap::ValueEnum;
use codex_state::LogQuery;
use codex_state::LogRow;
use codex_state::StateRuntime;
Expand All @@ -22,9 +23,9 @@ struct Args {
#[arg(long)]
db: Option<PathBuf>,

/// Log level to match exactly (case-insensitive).
#[arg(long)]
level: Option<String>,
/// Minimum log level to include.
#[arg(long, value_enum, ignore_case = true)]
level: Option<LogLevelThreshold>,

/// Start timestamp (RFC3339 or unix seconds).
#[arg(long, value_name = "RFC3339|UNIX")]
Expand Down Expand Up @@ -69,7 +70,7 @@ struct Args {

#[derive(Debug, Clone)]
struct LogFilter {
level_upper: Option<String>,
levels_upper: Vec<String>,
from_ts: Option<i64>,
to_ts: Option<i64>,
module_like: Vec<String>,
Expand All @@ -79,6 +80,28 @@ struct LogFilter {
include_threadless: bool,
}

/// Severity threshold accepted by `--level`.
///
/// Variants are declared from least to most severe; selecting one includes
/// every more-severe level as well (expanded by `levels_upper`). Clap's
/// `ValueEnum` derive restricts CLI input to exactly these canonical names
/// (matched case-insensitively via `ignore_case` on the arg), so aliases
/// such as `warning` or comma-separated lists are rejected at parse time.
#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)]
enum LogLevelThreshold {
    Trace,
    Debug,
    Info,
    Warn,
    Error,
}

impl LogLevelThreshold {
fn levels_upper(self) -> Vec<String> {
let levels = match self {
LogLevelThreshold::Trace => &["TRACE", "DEBUG", "INFO", "WARN", "ERROR"][..],
LogLevelThreshold::Debug => &["DEBUG", "INFO", "WARN", "ERROR"],
LogLevelThreshold::Info => &["INFO", "WARN", "ERROR"],
LogLevelThreshold::Warn => &["WARN", "ERROR"],
LogLevelThreshold::Error => &["ERROR"],
};
levels.iter().map(ToString::to_string).collect()
}
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
let args = Args::parse();
Expand Down Expand Up @@ -137,7 +160,9 @@ fn build_filter(args: &Args) -> anyhow::Result<LogFilter> {
.transpose()
.context("failed to parse --to")?;

let level_upper = args.level.as_ref().map(|level| level.to_ascii_uppercase());
let levels_upper = args
.level
.map_or_else(Vec::new, LogLevelThreshold::levels_upper);
let module_like = args
.module
.iter()
Expand All @@ -158,7 +183,7 @@ fn build_filter(args: &Args) -> anyhow::Result<LogFilter> {
.collect::<Vec<_>>();

Ok(LogFilter {
level_upper,
levels_upper,
from_ts,
to_ts,
module_like,
Expand Down Expand Up @@ -251,7 +276,7 @@ fn to_log_query(
descending: bool,
) -> LogQuery {
LogQuery {
level_upper: filter.level_upper.clone(),
levels_upper: filter.levels_upper.clone(),
from_ts: filter.from_ts,
to_ts: filter.to_ts,
module_like: filter.module_like.clone(),
Expand Down Expand Up @@ -350,3 +375,42 @@ mod formatter {
padded.bold().to_string()
}
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    /// Builds the expected owned, uppercase level list from string slices.
    fn upper(levels: &[&str]) -> Vec<String> {
        levels.iter().map(ToString::to_string).collect()
    }

    #[test]
    fn log_level_threshold_includes_more_severe_levels() {
        // A threshold expands to itself plus everything more severe.
        assert_eq!(LogLevelThreshold::Warn.levels_upper(), upper(&["WARN", "ERROR"]));
        assert_eq!(
            LogLevelThreshold::Trace.levels_upper(),
            upper(&["TRACE", "DEBUG", "INFO", "WARN", "ERROR"])
        );
    }

    #[test]
    fn log_level_rejects_aliases_and_unknown_values() {
        // Only canonical ValueEnum names are accepted — no aliases, no lists.
        for rejected in ["warning", "err", "warn,error"] {
            assert!(
                Args::try_parse_from(["codex-state-logs", "--level", rejected]).is_err(),
                "expected --level {rejected} to be rejected"
            );
        }
    }

    #[test]
    fn log_level_accepts_canonical_values_case_insensitively() {
        let parsed = Args::try_parse_from(["codex-state-logs", "--level", "WARN"])
            .expect("parse uppercase log level");

        assert_eq!(parsed.level, Some(LogLevelThreshold::Warn));
    }
}
2 changes: 1 addition & 1 deletion codex-rs/state/src/model/log.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ pub struct LogRow {

#[derive(Clone, Debug, Default)]
pub struct LogQuery {
pub level_upper: Option<String>,
pub levels_upper: Vec<String>,
pub from_ts: Option<i64>,
pub to_ts: Option<i64>,
pub module_like: Vec<String>,
Expand Down
10 changes: 9 additions & 1 deletion codex-rs/state/src/runtime.rs
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,15 @@ async fn remove_legacy_db_files(
// sidecar-style paths first so the main file is attempted last.
legacy_paths.sort_by_key(|path| std::cmp::Reverse(path.as_os_str().len()));
for legacy_path in legacy_paths {
if let Err(err) = tokio::fs::remove_file(&legacy_path).await {
let mut result = tokio::fs::remove_file(&legacy_path).await;
for _ in 0..3 {
if result.is_ok() {
break;
}
tokio::time::sleep(Duration::from_millis(25)).await;
result = tokio::fs::remove_file(&legacy_path).await;
}
if let Err(err) = result {
warn!(
"failed to remove legacy {db_label} db file {}: {err}",
legacy_path.display(),
Expand Down
98 changes: 94 additions & 4 deletions codex-rs/state/src/runtime/logs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -464,10 +464,15 @@ fn format_feedback_log_line(
}

fn push_log_filters<'a>(builder: &mut QueryBuilder<'a, Sqlite>, query: &'a LogQuery) {
if let Some(level_upper) = query.level_upper.as_ref() {
builder
.push(" AND UPPER(level) = ")
.push_bind(level_upper.as_str());
if !query.levels_upper.is_empty() {
builder.push(" AND UPPER(level) IN (");
{
let mut separated = builder.separated(", ");
for level_upper in &query.levels_upper {
separated.push_bind(level_upper.as_str());
}
}
builder.push(")");
}
if let Some(from_ts) = query.from_ts {
builder.push(" AND ts >= ").push_bind(from_ts);
Expand Down Expand Up @@ -851,6 +856,91 @@ mod tests {
let _ = tokio::fs::remove_dir_all(codex_home).await;
}

#[tokio::test]
async fn query_logs_filters_level_set_without_rewriting_stored_level() {
let codex_home = unique_temp_dir();
let runtime = StateRuntime::init(codex_home.clone(), "test-provider".to_string())
.await
.expect("initialize runtime");

runtime
.insert_logs(&[
LogEntry {
ts: 1,
ts_nanos: 0,
level: "TRACE".to_string(),
target: "cli".to_string(),
message: Some("trace-row".to_string()),
feedback_log_body: Some("trace-row".to_string()),
thread_id: None,
process_uuid: None,
file: Some("main.rs".to_string()),
line: Some(1),
module_path: None,
},
LogEntry {
ts: 2,
ts_nanos: 0,
level: "INFO".to_string(),
target: "cli".to_string(),
message: Some("info-row".to_string()),
feedback_log_body: Some("info-row".to_string()),
thread_id: None,
process_uuid: None,
file: Some("main.rs".to_string()),
line: Some(2),
module_path: None,
},
LogEntry {
ts: 3,
ts_nanos: 0,
level: "warn".to_string(),
target: "cli".to_string(),
message: Some("warn-row".to_string()),
feedback_log_body: Some("warn-row".to_string()),
thread_id: None,
process_uuid: None,
file: Some("main.rs".to_string()),
line: Some(3),
module_path: None,
},
LogEntry {
ts: 4,
ts_nanos: 0,
level: "ERROR".to_string(),
target: "cli".to_string(),
message: Some("error-row".to_string()),
feedback_log_body: Some("error-row".to_string()),
thread_id: None,
process_uuid: None,
file: Some("main.rs".to_string()),
line: Some(4),
module_path: None,
},
])
.await
.expect("insert test logs");

let rows = runtime
.query_logs(&LogQuery {
levels_upper: vec!["WARN".to_string(), "ERROR".to_string()],
..Default::default()
})
.await
.expect("query matching logs");
let actual = rows
.iter()
.map(|row| (row.level.as_str(), row.message.as_deref()))
.collect::<Vec<_>>();

assert_eq!(
actual,
vec![("warn", Some("warn-row")), ("ERROR", Some("error-row"))]
);

let _ = tokio::fs::remove_dir_all(codex_home).await;
}

#[tokio::test]
async fn insert_logs_prunes_old_rows_when_thread_exceeds_size_limit() {
let codex_home = unique_temp_dir();
Expand Down
Loading