Skip to content

Commit

Permalink
list
Browse files Browse the repository at this point in the history
  • Loading branch information
takurinton committed Jun 23, 2024
1 parent a707596 commit d75c23e
Showing 1 changed file with 35 additions and 5 deletions.
40 changes: 35 additions & 5 deletions markdown/src/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,13 @@ impl<'a> Lexer<'a> {

match self.ch {
Some('#') => Some(self.read_heading()),
Some('*') => Some(self.read_italic_or_bold()),
Some('*') => {
if self.peek_char() == Some(' ') {
Some(self.read_list())
} else {
Some(self.read_italic_or_bold())
}
}
Some('[') => Some(self.read_link()),
Some('-') if self.peek_char() == Some(' ') => Some(self.read_list()),
Some(_) => Some(self.read_text()),
Expand Down Expand Up @@ -110,11 +116,9 @@ impl<'a> Lexer<'a> {
}

fn read_list(&mut self) -> Token {
self.read_char();
self.read_char();
let start = self.position;
if self.peek_char() == Some(' ') || self.peek_char() == Some('\t') {
// Consume the following space or tab
self.read_char();
}
while let Some(ch) = self.ch {
if ch == '\n' {
break;
Expand Down Expand Up @@ -284,4 +288,30 @@ mod tests {
let token = lexer.next_token();
assert_eq!(token, None);
}

#[test]
fn test_next_token_list() {
    // A line starting with "* " should lex as a single list-item token
    // carrying the text after the marker.
    let mut lexer = Lexer::new("* Item 1\n");
    assert_eq!(
        lexer.next_token(),
        Some(Token::ListItem(String::from("Item 1")))
    );
}

// Ordered-list markers ("1. ") are not recognised by the lexer yet;
// re-enable this test once `read_list` (or a dedicated path) handles them.
// #[test]
// fn test_next_token_ordered_list() {
//     let input = "1. Item 1\n";
//     let mut lexer = Lexer::new(input);
//     let token = lexer.next_token();
//     assert_eq!(token, Some(Token::ListItem("Item 1".to_string())));
// }

#[test]
fn test_next_token_list_multiple() {
    // Consecutive "* " lines should produce one ListItem token per line,
    // in source order.
    let mut lexer = Lexer::new("* Item 1\n* Item 2\n");
    for expected in ["Item 1", "Item 2"] {
        assert_eq!(
            lexer.next_token(),
            Some(Token::ListItem(expected.to_string()))
        );
    }
}
}

0 comments on commit d75c23e

Please sign in to comment.