From d75c23ec065f8d84429749ab250522190960f5f6 Mon Sep 17 00:00:00 2001 From: takurinton Date: Sun, 23 Jun 2024 12:32:59 +0900 Subject: [PATCH] list --- markdown/src/lexer.rs | 40 +++++++++++++++++++++++++++++++++++----- 1 file changed, 35 insertions(+), 5 deletions(-) diff --git a/markdown/src/lexer.rs b/markdown/src/lexer.rs index 5b48399..a15da1d 100644 --- a/markdown/src/lexer.rs +++ b/markdown/src/lexer.rs @@ -44,7 +44,13 @@ impl<'a> Lexer<'a> { match self.ch { Some('#') => Some(self.read_heading()), - Some('*') => Some(self.read_italic_or_bold()), + Some('*') => { + if self.peek_char() == Some(' ') { + Some(self.read_list()) + } else { + Some(self.read_italic_or_bold()) + } + } Some('[') => Some(self.read_link()), Some('-') if self.peek_char() == Some(' ') => Some(self.read_list()), Some(_) => Some(self.read_text()), @@ -110,11 +116,9 @@ impl<'a> Lexer<'a> { } fn read_list(&mut self) -> Token { + self.read_char(); + self.read_char(); let start = self.position; - if self.peek_char() == Some(' ') || self.peek_char() == Some('\t') { - // Consume the following space or tab - self.read_char(); - } while let Some(ch) = self.ch { if ch == '\n' { break; } @@ -284,4 +288,30 @@ mod tests { let token = lexer.next_token(); assert_eq!(token, None); } + + #[test] + fn test_next_token_list() { + let input = "* Item 1\n"; + let mut lexer = Lexer::new(input); + let token = lexer.next_token(); + assert_eq!(token, Some(Token::ListItem("Item 1".to_string()))); + } + + // #[test] + // fn test_next_token_ordered_list() { + // let input = "1. Item 1\n"; + // let mut lexer = Lexer::new(input); + // let token = lexer.next_token(); + // assert_eq!(token, Some(Token::ListItem("Item 1".to_string()))); + // } + + #[test] + fn test_next_token_list_multiple() { + let input = "* Item 1\n* Item 2\n"; + let mut lexer = Lexer::new(input); + let token = lexer.next_token(); + assert_eq!(token, Some(Token::ListItem("Item 1".to_string()))); + let token = lexer.next_token(); + assert_eq!(token, Some(Token::ListItem("Item 2".to_string()))); + } }