Migrate lexer submodule to separate nix-lexer crate
ebkalderon committed Mar 25, 2020
1 parent 76d99ed commit 2f05fb2
Showing 21 changed files with 74 additions and 58 deletions.
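
For orientation, a minimal sketch of how a consumer drives the lexer once it lives in its own crate, modeled on the tokenize example and snapshot tests updated below; the input string and file name here are illustrative, not part of the commit:

use codespan::Files;

fn main() {
    // Illustrative source; the calls mirror the example program updated in this commit.
    let buffer = String::from("let greeting = \"hello\"; in greeting");

    let mut files = Files::new();
    let file_id = files.add("<example>", &buffer);
    let source = files.source(file_id);

    // `nix_lexer::tokenize` replaces the old `nix_parser2::lexer::tokenize` entry point.
    for token in nix_lexer::tokenize(&source) {
        println!("{}", token.display(&source));
    }
}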
21 changes: 15 additions & 6 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -21,4 +21,4 @@ codegen-units = 1
lto = true

[workspace]
members = ["nix-parser", "nix-parser2"]
members = ["nix-parser", "nix-lexer", "nix-parser2"]
26 changes: 26 additions & 0 deletions nix-lexer/Cargo.toml
@@ -0,0 +1,26 @@
[package]
name = "nix-lexer"
version = "0.1.0"
authors = ["Eyal Kalderon <ebkalderon@gmail.com>"]
edition = "2018"

[features]
default = []
serialization = ["codespan/serialization", "serde"]

[dependencies]
codespan = "0.9.1"
nom = { version = "5.1", default-features = false }
nom_locate = "2.0"
smallvec = "1.2"

serde = { version = "1.0", features = ["derive"], optional = true }

[dev-dependencies]
criterion = "0.3"
nix-parser = { path = "../nix-parser" }
serde_json = "1.0"

[[bench]]
name = "lexer"
harness = false
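
The optional serialization feature above mirrors the one removed from nix-parser2's manifest further down, forwarding codespan's serialization support and pulling in serde. A minimal sketch of what it enables, assuming the token types implement serde::Serialize when the feature is on (the derives themselves are not shown in this diff):

// Sketch only: assumes `nix-lexer` is built with `--features serialization`
// and that `Token` implements `serde::Serialize` under that feature.
fn tokens_to_json(source: &str) -> serde_json::Result<String> {
    let tokens: Vec<_> = nix_lexer::tokenize(source).collect();
    serde_json::to_string_pretty(&tokens)
}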
3 changes: 1 addition & 2 deletions nix-parser2/benches/lexer.rs → nix-lexer/benches/lexer.rs
@@ -1,6 +1,5 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion, Throughput};
use nix_parser::lexer::Lexer;
use nix_parser2::lexer;

const EXAMPLE_FILE: &str = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
@@ -20,7 +19,7 @@ fn lexer(b: &mut Criterion) {
});
group.bench_function("new", move |b| {
b.iter(|| {
let lexer: Vec<_> = lexer::tokenize(module).collect();
let lexer: Vec<_> = nix_lexer::tokenize(module).collect();
black_box(lexer);
});
});
@@ -1,7 +1,6 @@
use std::io::Read;

use codespan::Files;
use nix_parser2::lexer;

fn main() {
let mut buffer = String::new();
@@ -11,7 +10,7 @@ fn main() {
let file_id = files.add("<stdin>", &buffer);
let source = files.source(file_id);

for token in lexer::tokenize(&source) {
for token in nix_lexer::tokenize(&source) {
println!("{}", token.display(&source));
}
}
24 changes: 22 additions & 2 deletions nix-parser2/src/lexer.rs → nix-lexer/src/lib.rs
@@ -1,5 +1,7 @@
//! Low-level lexer for the Nix language.

#![forbid(unsafe_code)]

pub use self::split::split_lines_without_indent;
pub use self::tokens::{DisplayToken, LiteralKind, StringKind, Token, TokenKind};
pub use self::unescape::unescape_str;
@@ -16,15 +18,33 @@ use nom::multi::{many0_count, many1_count, many_till};
use nom::sequence::{delimited, pair, preceded, terminated, tuple};
use smallvec::SmallVec;

use crate::ToSpan;

mod split;
mod tokens;
mod unescape;

type LocatedSpan<'a> = nom_locate::LocatedSpan<&'a str>;
type IResult<'a, T> = nom::IResult<LocatedSpan<'a>, T>;

/// A trait for converting a value to a `codespan::Span`.
///
/// This is helpful for getting spanned types from external crates to interoperate with `codespan`.
pub trait ToSpan {
/// Converts the given value to a `Span`.
fn to_span(&self) -> Span;
}

impl ToSpan for Span {
fn to_span(&self) -> Span {
*self
}
}

impl<'a, T: ToSpan> ToSpan for &'a T {
fn to_span(&self) -> Span {
(*self).to_span()
}
}

impl<'a> ToSpan for LocatedSpan<'a> {
fn to_span(&self) -> Span {
let start = self.location_offset();
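
With this move, the ToSpan trait shown above lives at nix-lexer's crate root rather than in nix-parser2/src/lib.rs (whose copy is deleted near the end of this diff), so downstream spanned types implement it through nix_lexer. A minimal sketch using a hypothetical SpannedIdent type:

use codespan::Span;
use nix_lexer::ToSpan;

// Hypothetical downstream type, used only to illustrate the trait's new home.
struct SpannedIdent {
    name: String,
    span: Span,
}

impl ToSpan for SpannedIdent {
    fn to_span(&self) -> Span {
        self.span
    }
}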
2 changes: 1 addition & 1 deletion nix-parser2/src/lexer/split.rs → nix-lexer/src/split.rs
@@ -12,7 +12,7 @@ use super::LocatedSpan
/// # Examples
///
/// ```
/// use nix_parser2::lexer::split_lines_without_indent;
/// use nix_lexer::split_lines_without_indent;
///
/// let string_literal = "
/// hello world
File renamed without changes.
@@ -11,7 +11,7 @@ use super::tokens::StringKind;
/// Normal strings:
///
/// ```
/// use nix_parser2::lexer::{unescape_str, StringKind};
/// use nix_lexer::{unescape_str, StringKind};
///
/// let unescaped = unescape_str("foo \\${bar} \\n\\r\\t \\' \\^ baz", StringKind::Normal);
/// assert_eq!(unescaped, "foo ${bar} \n\r\t ' ^ baz");
@@ -20,7 +20,7 @@ use super::tokens::StringKind;
/// Indented strings:
///
/// ```
/// use nix_parser2::lexer::{unescape_str, StringKind};
/// use nix_lexer::{unescape_str, StringKind};
///
/// let unescaped = unescape_str("foo ''${bar} ''' ''^ baz", StringKind::Indented);
/// assert_eq!(unescaped, "foo ${bar} ' ''^ baz");
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -1,5 +1,4 @@
use codespan::Files;
use nix_parser2::lexer;

macro_rules! assert_tokens_match {
($expression_file_name:ident) => {
@@ -10,18 +9,18 @@ macro_rules! assert_tokens_match {
stringify!($expression_file_name),
include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/tests/lexer/",
"/tests/",
stringify!($expression_file_name),
".nix"
)),
);

let source = files.source(file_id);
let actual = lexer::tokenize(&source).map(|t| t.display(&source).to_string());
let actual = nix_lexer::tokenize(&source).map(|t| t.display(&source).to_string());

let expected = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/tests/lexer/",
"/tests/",
stringify!($expression_file_name),
".snap"
))
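
For reference, assert_tokens_match! above is invoked once per fixture pair under tests/; a hedged example invocation with a hypothetical fixture name, since the real invocations are truncated in this diff:

// Hypothetical invocation, assuming the macro expands to a test item;
// expects tests/let_binding.nix and tests/let_binding.snap to exist.
assert_tokens_match!(let_binding);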
File renamed without changes.
File renamed without changes.
16 changes: 2 additions & 14 deletions nix-parser2/Cargo.toml
@@ -6,25 +6,13 @@ edition = "2018"

[features]
default = []
serialization = ["codespan/serialization", "serde"]
serialization = ["codespan/serialization", "nix-lexer/serialization"]

[dependencies]
codespan = "0.9.1"
codespan-reporting = "0.9"
lsp-types = "0.73"
nom = { version = "5.1", default-features = false }
nom_locate = "2.0"
nix-lexer = { path = "../nix-lexer" }
smallvec = "1.2"
smol_str = "0.1"
url = "2.1"

serde = { version = "1.0", features = ["derive"], optional = true }

[dev-dependencies]
criterion = "0.3"
nix-parser = { path = "../nix-parser" }
serde_json = "1.0"

[[bench]]
name = "lexer"
harness = false
23 changes: 0 additions & 23 deletions nix-parser2/src/lib.rs
@@ -1,25 +1,2 @@
use codespan::Span;

pub mod ast;
pub mod error;
pub mod lexer;

/// A trait for converting a value to a `codespan::Span`.
///
/// This is helpful for getting spanned types from external crates to interoperate with `codespan`.
pub trait ToSpan {
/// Converts the given value to a `Span`.
fn to_span(&self) -> Span;
}

impl ToSpan for Span {
fn to_span(&self) -> Span {
*self
}
}

impl<'a, T: ToSpan> ToSpan for &'a T {
fn to_span(&self) -> Span {
(*self).to_span()
}
}
1 change: 0 additions & 1 deletion nix-parser2/tests/lib.rs

This file was deleted.
