Skip to content

Commit

Permalink
refactor: Rename module
Browse files Browse the repository at this point in the history
  • Loading branch information
epage committed Apr 17, 2019
1 parent b6aabc9 commit f8d4211
Show file tree
Hide file tree
Showing 4 changed files with 10 additions and 8 deletions.
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

12 changes: 6 additions & 6 deletions benches/tokenize.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,30 +6,30 @@ mod data;

#[bench]
fn tokenize_empty(b: &mut test::Bencher) {
    // Baseline: tokenizing an empty input should be near-zero cost.
    // NOTE: the diff rendering duplicated this call with the pre-rename
    // `identifier::tokenize` path; only the post-rename `tokens::` call is kept.
    b.iter(|| defenestrate::tokens::tokenize(data::EMPTY.as_bytes()).collect::<Vec<_>>());
}

#[bench]
fn tokenize_no_tokens(b: &mut test::Bencher) {
    // Measures scan cost over input that yields no tokens at all
    // (pure rejection path, no per-token allocation).
    // Stale pre-rename `identifier::tokenize` duplicate removed.
    b.iter(|| defenestrate::tokens::tokenize(data::NO_TOKENS.as_bytes()).collect::<Vec<_>>());
}

#[bench]
fn tokenize_single_token(b: &mut test::Bencher) {
    // Smallest non-trivial case: exactly one token in the input.
    // Stale pre-rename `identifier::tokenize` duplicate removed.
    b.iter(|| defenestrate::tokens::tokenize(data::SINGLE_TOKEN.as_bytes()).collect::<Vec<_>>());
}

#[bench]
fn tokenize_sherlock(b: &mut test::Bencher) {
    // Realistic English-prose workload (the Sherlock corpus fixture).
    // Stale pre-rename `identifier::tokenize` duplicate removed.
    b.iter(|| defenestrate::tokens::tokenize(data::SHERLOCK.as_bytes()).collect::<Vec<_>>());
}

#[bench]
fn tokenize_code(b: &mut test::Bencher) {
    // Source-code-shaped input: denser punctuation/identifier boundaries
    // than prose, exercising a different token-split pattern.
    // Stale pre-rename `identifier::tokenize` duplicate removed.
    b.iter(|| defenestrate::tokens::tokenize(data::CODE.as_bytes()).collect::<Vec<_>>());
}

#[bench]
fn tokenize_corpus(b: &mut test::Bencher) {
    // Largest fixture: the full mixed corpus, for throughput measurement.
    // Stale pre-rename `identifier::tokenize` duplicate removed.
    b.iter(|| defenestrate::tokens::tokenize(data::CORPUS.as_bytes()).collect::<Vec<_>>());
}
4 changes: 2 additions & 2 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ extern crate serde_derive;

// Private dictionary implementation; its public items are re-exported below.
mod dict;

// Public API surface of the crate. The diff showed both the removed
// `pub mod identifier;` and its replacement; only the post-rename
// `tokens` module is kept, in alphabetical order.
pub mod report;
pub mod tokens;

// Flatten the dictionary types into the crate root for caller convenience.
pub use crate::dict::*;

Expand All @@ -16,7 +16,7 @@ pub fn process_file(path: &std::path::Path, dictionary: &Dictionary, report: rep
File::open(path)?.read_to_end(&mut buffer)?;
for (line_idx, line) in grep_searcher::LineIter::new(b'\n', &buffer).enumerate() {
let line_num = line_idx + 1;
for token in identifier::tokenize(line) {
for token in tokens::tokenize(line) {
// Correct tokens as-is
if let Some(correction) = dictionary.correct_bytes(token.token) {
let word = String::from_utf8_lossy(token.token);
Expand Down
File renamed without changes.

0 comments on commit f8d4211

Please sign in to comment.