diff --git a/src/tokenizer/interface.rs b/src/tokenizer/interface.rs
index 1e9dd061..22a00490 100644
--- a/src/tokenizer/interface.rs
+++ b/src/tokenizer/interface.rs
@@ -94,9 +94,6 @@ pub enum Token {
     ParseError(Cow<'static, str>),
 }
 
-// FIXME: rust-lang/rust#22629
-unsafe impl Send for Token { }
-
 #[derive(Debug, PartialEq)]
 #[must_use]
 pub enum TokenSinkResult {
diff --git a/tests/tokenizer.rs b/tests/tokenizer.rs
index f8e5cbfd..db99e0a0 100644
--- a/tests/tokenizer.rs
+++ b/tests/tokenizer.rs
@@ -313,7 +313,7 @@ fn unescape_json(js: &Json) -> Json {
     }
 }
 
-fn mk_test(desc: String, input: String, expect: Vec<Token>, opts: TokenizerOpts)
+fn mk_test(desc: String, input: String, expect: Json, opts: TokenizerOpts)
         -> TestDescAndFn {
     TestDescAndFn {
         desc: TestDesc {
@@ -330,7 +330,8 @@ fn mk_test(desc: String, input: String, expect: Vec<Token>, opts: TokenizerOpts)
             // result but the compiler doesn't catch it!
             // Possibly mozilla/rust#12223.
             let output = tokenize(input.clone(), opts.clone());
-            if output != expect {
+            let expect_toks = json_to_tokens(&expect, opts.exact_errors);
+            if output != expect_toks {
                 panic!("\ninput: {:?}\ngot: {:?}\nexpected: {:?}",
                     input, output, expect);
             }
@@ -384,8 +385,7 @@ fn mk_tests(tests: &mut Vec<TestDescAndFn>, filename: &str, js: &Json) {
                 newdesc = format!("{} (exact errors)", newdesc);
             }
 
-            let expect_toks = json_to_tokens(&expect, exact_errors);
-            tests.push(mk_test(newdesc, input.clone(), expect_toks, TokenizerOpts {
+            tests.push(mk_test(newdesc, input.clone(), expect.clone(), TokenizerOpts {
                 exact_errors: exact_errors,
                 initial_state: state,
                 last_start_tag_name: start_tag.clone(),