// simple.rs — integration tests for the rustlex lexer-generator plugin.
#![feature(plugin,main)]
#![plugin(rustlex)]
#[allow(plugin_as_library)]
extern crate rustlex;
use std::io::BufReader;
use self::Token::TokA;
use self::TokenB::TokB;
/// Token type produced by `SimpleLexer` (which declares no `token` type,
/// so it presumably defaults to this `Token` enum — unlike `OtherLexer`
/// below, which names its token type explicitly).
/// `PartialEq`/`Debug` are derived so the tests can compare and print tokens.
#[derive(PartialEq,Debug)]
pub enum Token {
/// Emitted for each 'a' in the input; carries the matched text (`yystr()`).
TokA(String),
}
// rustlex! expands to a `SimpleLexer<R>` struct implementing an iterator of
// tokens over any reader `R`. Comments here are stripped by the Rust lexer
// before the plugin sees the token stream, so they are safe inside the macro.
rustlex! SimpleLexer {
// Named pattern: A matches the single character 'a'.
let A = 'a';
// Catch-all rule: any other character is consumed without emitting a token.
. => |_:&mut SimpleLexer<R>| None
// An 'a' emits TokA wrapping the matched lexeme text.
A => |lexer:&mut SimpleLexer<R>| Some(TokA ( lexer.yystr() ))
}
#[test]
fn test_simple() {
    // Lexing "aa" with SimpleLexer must yield exactly two TokA("a") tokens.
    let expected = vec!(TokA("a".to_string()), TokA("a".to_string()));
    let reader = BufReader::new("aa".as_bytes());
    let mut remaining = expected.iter();
    // Each token the lexer produces must match the next expected one, in order.
    for produced in SimpleLexer::new(reader) {
        assert_eq!(remaining.next(), Some(&produced));
    }
    // And the lexer must not have produced fewer tokens than expected.
    assert!(remaining.next().is_none());
}
/// Token type for `OtherLexer`, selected there via its `token TokenB;`
/// declaration. `PartialEq`/`Debug` are derived for test assertions.
#[derive(PartialEq,Debug)]
pub enum TokenB {
/// Emitted for each 'b' in the input; carries the matched text (`yystr()`).
TokB(String)
}
// Second generated lexer, demonstrating an explicit token-type declaration.
// Comments inside the macro are stripped before the plugin parses it.
rustlex! OtherLexer {
// Explicitly select TokenB as this lexer's token type.
token TokenB;
// Named pattern: B matches the single character 'b'.
let B = 'b';
// Catch-all rule: any other character is consumed without emitting a token.
. => |_:&mut OtherLexer<R>| None
// A 'b' emits TokB wrapping the matched lexeme text.
B => |lexer:&mut OtherLexer<R>| Some(TokB ( lexer.yystr() ))
}
#[test]
fn test_other() {
    // Lexing "bb" with OtherLexer must yield exactly two TokB("b") tokens.
    let expected = vec!(TokB("b".to_string()), TokB("b".to_string()));
    let reader = BufReader::new("bb".as_bytes());
    let mut expect_iter = expected.iter();
    // Every produced token pairs off against the next expected token.
    for got in OtherLexer::new(reader) {
        assert_eq!(expect_iter.next(), Some(&got));
    }
    // No expected tokens may be left over.
    assert!(expect_iter.next().is_none());
}