Skip to content

Commit

Permalink
use trace!() in tokenizer to reduce log spam
Browse files Browse the repository at this point in the history
html5ever doesn't currently use trace!() anywhere. This patch
downgrades a few of the noisiest debug!() calls to use trace!()
instead.

This seems like a more reasonable log level for a tokenizer.

This makes it easier to configure env_logger when debugging other
code (RUST_LOG=debug rather than RUST_LOG=html5ever=info,debug), and
makes working with minimal wasm logging frameworks such as console_log
nicer (the console_log crate has no way to set different log
levels for different module prefixes).
  • Loading branch information
alsuren committed Feb 16, 2022
1 parent af0f510 commit 3cf2fb1
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions html5ever/src/tokenizer/mod.rs
Expand Up @@ -23,7 +23,7 @@ use self::char_ref::{CharRef, CharRefTokenizer};

use crate::util::str::lower_ascii_letter;

use log::debug;
use log::{debug, trace};
use mac::{_tt_as_expr_hack, format_if, matches};
use markup5ever::{namespace_url, ns, small_char_set};
use std::borrow::Cow::{self, Borrowed};
Expand Down Expand Up @@ -276,7 +276,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
self.emit_error(Cow::Owned(msg));
}

debug!("got character {}", c);
trace!("got character {}", c);
self.current_char = c;
Some(c)
}
Expand Down Expand Up @@ -304,7 +304,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}

let d = input.pop_except_from(set);
debug!("got characters {:?}", d);
trace!("got characters {:?}", d);
match d {
Some(FromSet(c)) => self.get_preprocessed_char(c, input).map(FromSet),

Expand Down Expand Up @@ -605,7 +605,7 @@ macro_rules! shorthand (
// so it's behind a cfg flag.
#[cfg(trace_tokenizer)]
macro_rules! sh_trace ( ( $me:ident : $($cmds:tt)* ) => ({
debug!(" {:s}", stringify!($($cmds)*));
trace!(" {:s}", stringify!($($cmds)*));
shorthand!($me:expr : $($cmds)*);
}));

Expand Down Expand Up @@ -689,7 +689,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
return self.step_char_ref_tokenizer(input);
}

debug!("processing in state {:?}", self.state);
trace!("processing in state {:?}", self.state);
match self.state {
//§ data-state
states::Data => loop {
Expand Down

0 comments on commit 3cf2fb1

Please sign in to comment.