Commit

Merge pull request #124 from I-Language-Development/v1.0.0-alpha.5

V1.0.0 alpha.5

Aitareo-Das committed Mar 27, 2024
2 parents d64a172 + ae0df6e commit c49574d
Showing 13 changed files with 194 additions and 57 deletions.
31 changes: 31 additions & 0 deletions crates/compiler/src/lexer.rs
@@ -0,0 +1,31 @@
// lexer.rs

#[derive(Debug, PartialEq)]
enum Token {
    Number(i64),
    Plus,
    Minus,
    Multiply,
    Divide,
}

fn lex(input: &str) -> Vec<Token> {
    let mut tokens = Vec::new();
    // Implement your lexer logic here
    // For simplicity, let's assume input contains only numbers, +, -, *, /
    // You'd need to handle whitespace, error cases, and more complex tokens in a real compiler
    for token_str in input.split_whitespace() {
        match token_str {
            "+" => tokens.push(Token::Plus),
            "-" => tokens.push(Token::Minus),
            "*" => tokens.push(Token::Multiply),
            "/" => tokens.push(Token::Divide),
            _ => {
                if let Ok(num) = token_str.parse::<i64>() {
                    tokens.push(Token::Number(num));
                }
            }
        }
    }
    tokens
}
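
As a quick sanity check (not part of this diff), the `lex` above should turn a simple whitespace-separated expression into the expected token sequence:

#[test]
fn lex_simple_expression() {
    // "10 + 5 * 3" splits on whitespace into five tokens.
    let tokens = lex("10 + 5 * 3");
    assert_eq!(
        tokens,
        vec![
            Token::Number(10),
            Token::Plus,
            Token::Number(5),
            Token::Multiply,
            Token::Number(3),
        ]
    );
}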
23 changes: 23 additions & 0 deletions crates/compiler/src/parser.rs
@@ -0,0 +1,23 @@
// parser.rs

#[derive(Debug)]
enum Expr {
    Number(i64),
    BinaryOp(Box<Expr>, Token, Box<Expr>),
}

fn parse(tokens: &[Token]) -> Expr {
    // Implement your parser logic here
    // For simplicity, assume correct input and only handle + and * operators
    // You'd need to handle operator precedence and other expressions in a real compiler
    // Recursive descent parsing is a common approach
    // Build the AST based on the token stream
    // ...
}

fn main() {
    let input = "10 + 5 * 3";
    let tokens = lex(input);
    let ast = parse(&tokens);
    println!("{:?}", ast);
}
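
As committed, `parse` has no body and never returns an `Expr`, so this file does not compile on its own (it also calls `lex` from lexer.rs without importing it). A minimal sketch of a working `parse` — left-to-right, no operator precedence, panicking on malformed input, and assuming the `Token` enum from lexer.rs is in scope — could look like:

fn parse(tokens: &[Token]) -> Expr {
    // Sketch only: folds tokens left-to-right without operator precedence.
    let mut iterator = tokens.iter();
    let mut expression = match iterator.next() {
        Some(Token::Number(value)) => Expr::Number(*value),
        other => panic!("expected a number, found {other:?}"),
    };
    while let Some(operator) = iterator.next() {
        let right = match iterator.next() {
            Some(Token::Number(value)) => Expr::Number(*value),
            other => panic!("expected a number, found {other:?}"),
        };
        // Rebuild the operator by value, since `Token` does not derive `Clone`.
        let operator = match operator {
            Token::Plus => Token::Plus,
            Token::Minus => Token::Minus,
            Token::Multiply => Token::Multiply,
            Token::Divide => Token::Divide,
            Token::Number(_) => panic!("expected an operator, found a number"),
        };
        expression = Expr::BinaryOp(Box::new(expression), operator, Box::new(right));
    }
    expression
}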
12 changes: 8 additions & 4 deletions crates/lexer/src/error.rs
@@ -43,18 +43,22 @@ pub enum LexerError {
/// An error which will be returned if a mark was invalid for some reason.
/// This can occur when the starting character of a mark is valid, but the character after it is not.
#[error("invalid mark at {location}")]
InvalidMark { location: Location },
InvalidMark { location: Location, error: String },

/// An error which will be returned if an unexpected character is encountered.
/// This is most likely to occur when using Unicode characters, as they are not supported.
#[error("unexpected character `{character}` at {location}")]
UnexpectedCharacter { character: char, location: Location },
UnexpectedCharacter {
character: char,
location: Location,
error: String,
},

/// An error which will be returned if a comment is not terminated by a closing `*/`.
#[error("unterminated comment at {location}")]
UnterminatedComment { location: Location },
UnterminatedComment { location: Location, error: String },

/// An error which will be returned if a string is not terminated by a closing quote or the quote is escaped.
#[error("unterminated string at {location}")]
UnterminatedString { location: Location },
UnterminatedString { location: Location, error: String },
}
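
With the `error` field added to these variants, the rendered annotate-snippets diagnostic travels with the error instead of being printed inside the lexer, so the caller decides when and where to show it. A hypothetical caller (the wrapper function and its name are assumptions, not part of this diff) might handle it like this:

// Hypothetical caller; `lex` returns Result<Vec<Token>, LexerError> (see lex.rs below).
fn lex_or_report(input: &str, file: &str) -> Option<Vec<Token>> {
    match lex(input, file) {
        Ok(tokens) => Some(tokens),
        Err(LexerError::UnexpectedCharacter { error, .. }) => {
            // `error` already contains the fully rendered snippet, so printing it is enough.
            eprintln!("{error}");
            None
        }
        Err(other) => {
            // Fall back to the short thiserror message for any other variant.
            eprintln!("{other}");
            None
        }
    }
}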
15 changes: 9 additions & 6 deletions crates/lexer/src/lex.rs
@@ -189,10 +189,12 @@ pub fn lex(input: &str, file: &str) -> Result<Vec<Token>, LexerError> {
}],
};

let renderer: annotate_snippets::Renderer =
annotate_snippets::Renderer::styled();
eprintln!("{}", renderer.render(snippet));
error = Some(LexerError::InvalidMark { location });
error = Some(LexerError::InvalidMark {
location,
error: annotate_snippets::Renderer::styled()
.render(snippet)
.to_string(),
});
}
} else if character.is_ascii_digit() {
buffer.push(character);
@@ -261,11 +263,12 @@ pub fn lex(input: &str, file: &str) -> Result<Vec<Token>, LexerError> {
}],
};

let renderer: annotate_snippets::Renderer = annotate_snippets::Renderer::styled();
eprintln!("{}", renderer.render(snippet));
error = Some(LexerError::UnexpectedCharacter {
character,
location,
error: annotate_snippets::Renderer::styled()
.render(snippet)
.to_string(),
});
}

18 changes: 1 addition & 17 deletions crates/lexer/src/tokens/keyword.rs
@@ -52,21 +52,17 @@ pub enum Keyword {
Const,
/// The `continue` keyword. Used to continue a loop before all of its code is executed.
Continue,
/// The `default` keyword. Used in combination with the [`match`](`Keyword::Match`) and [`case`](`Keyword::Case`) keywords to match the default case.
Default,
/// The `else` keyword. Used to define the "otherwise" block of an [`if`](`Keyword::If`) statement.
Else,
/// The `finally` keyword. Used in combination with the [`try`](`Keyword::Try`) keyword to execute code even after an exception has been raised.
Finally,
/// The `for` keyword. Used to create a loop over an iterator.
For,
/// The `function` keyword. Will probably be replaced by either `fn` or `func` soon. Used to define a function.
Function, // TODO (ElBe): Replace with "fn" or "func"?
/// The `if` keyword. Used to check whether a condition is true or false and execute code based on that condition.
If,
/// The `import` keyword. Used to import code from other modules.
Import,
/// The `match` keyword. Used in combination with the [`case`](`Keyword::Case`) and [`default`](`Keyword::Default`) keywords.
/// The `match` keyword. Used in combination with the [`case`](`Keyword::Case`) keyword.
Match,
/// The `pub` keyword. Used to export an item out of the current scope.
Pub,
@@ -96,11 +92,9 @@ impl core::fmt::Display for Keyword {
&Self::Class => write!(formatter, "class"),
&Self::Const => write!(formatter, "const"),
&Self::Continue => write!(formatter, "continue"),
&Self::Default => write!(formatter, "default"),
&Self::Else => write!(formatter, "else"),
&Self::Finally => write!(formatter, "finally"),
&Self::For => write!(formatter, "for"),
&Self::Function => write!(formatter, "function"),
&Self::If => write!(formatter, "if"),
&Self::Import => write!(formatter, "import"),
&Self::Match => write!(formatter, "match"),
@@ -157,11 +151,6 @@ impl GetToken for Keyword {
content: "continue".to_owned(),
token_type: TokenType::Keyword(Keyword::Continue),
}),
"default" => Some(Token {
location,
content: "default".to_owned(),
token_type: TokenType::Keyword(Keyword::Default),
}),
"else" => Some(Token {
location,
content: "else".to_owned(),
@@ -177,11 +166,6 @@
content: "for".to_owned(),
token_type: TokenType::Keyword(Keyword::For),
}),
"function" => Some(Token {
location,
content: "function".to_owned(),
token_type: TokenType::Keyword(Keyword::Function),
}),
"if" => Some(Token {
location,
content: "if".to_owned(),
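
With `default` and `function` no longer recognized as keywords, those spellings should presumably fall through to the identifier path instead. A rough expectation, sketched under the assumption that the token's fields are accessible from a test and that nothing else in the lexer special-cases these words:

#[test]
fn function_is_now_an_identifier() {
    // Assumption: with the `function` keyword removed, the word lexes as a plain identifier.
    let tokens = lex("function", "tests/example.il").unwrap();
    assert!(matches!(tokens[0].token_type, TokenType::Identifier));
    assert_eq!(tokens[0].content, "function");
}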
8 changes: 0 additions & 8 deletions crates/lexer/src/tokens/mark.rs
@@ -44,8 +44,6 @@ pub enum Mark {
AddAssign,
/// The `&&` (**logical** and) mark. Used for creating a true/false value based on whether both the left and right tokens are true.
And,
/// The `->` mark. Used for indicating what value will be returned from a function.
Arrow,
/// The `=` mark. Used for assigning a value to a variable.
Assign,
/// The `@` mark. Currently, it has no use, but it's reserved for later usage and will probably be used in the future.
@@ -146,7 +144,6 @@ impl core::fmt::Display for Mark {
&Self::Add => write!(formatter, "+"),
&Self::AddAssign => write!(formatter, "+="),
&Self::And => write!(formatter, "&&"),
&Self::Arrow => write!(formatter, "->"),
&Self::Assign => write!(formatter, "="),
&Self::At => write!(formatter, "@"),
&Self::Bang => write!(formatter, "!"),
@@ -218,11 +215,6 @@ impl GetToken for Mark {
content: "&&".to_owned(),
token_type: TokenType::Mark(Mark::And),
}),
"->" => Some(Token {
location,
content: "->".to_owned(),
token_type: TokenType::Mark(Mark::Arrow),
}),
"=" => Some(Token {
location,
content: "=".to_owned(),
46 changes: 30 additions & 16 deletions crates/lexer/src/tokens/token.rs
@@ -221,6 +221,7 @@ impl TypeDefinition {
.collect::<Vec<char>>();

let help: String = format!("Add `{quote_type}` here");
let file: String = location.file.clone();
let snippet: annotate_snippets::Snippet = annotate_snippets::Snippet {
title: Some(annotate_snippets::Annotation {
id: Some("E0002"),
@@ -231,7 +232,7 @@
slices: vec![annotate_snippets::Slice {
source: line,
line_start: location.line,
origin: Some(&location.file),
origin: Some(&file),
annotations: vec![
annotate_snippets::SourceAnnotation {
range: (location.column - 1, location.column),
@@ -268,14 +269,20 @@

if let Some((_, next_character)) = iterator.next() {
if next_character != quote_type {
let renderer: annotate_snippets::Renderer = annotate_snippets::Renderer::styled();
eprintln!("{}", renderer.render(snippet));
return Err(LexerError::UnterminatedString { location });
return Err(LexerError::UnterminatedString {
location,
error: annotate_snippets::Renderer::styled()
.render(snippet)
.to_string(),
});
}
} else {
let renderer: annotate_snippets::Renderer = annotate_snippets::Renderer::styled();
eprintln!("{}", renderer.render(snippet));
return Err(LexerError::UnterminatedString { location });
return Err(LexerError::UnterminatedString {
location,
error: annotate_snippets::Renderer::styled()
.render(snippet)
.to_string(),
});
}

Ok(Token {
@@ -307,19 +314,22 @@ pub enum TokenType {
Identifier,
/// A token representing a comment, e.g. `// comment`.
Comment,
/// A token representing a comment across multiple lines, e.g. `/* comment */`.
BlockComment,
}

impl core::fmt::Display for TokenType {
#[inline]
#[allow(clippy::pattern_type_mismatch)]
fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
Self::Type(type_name) => write!(formatter, "{type_name}"),
Self::TypeDefinition(type_definition) => write!(formatter, "{type_definition}"),
Self::Keyword(keyword) => write!(formatter, "{keyword}"),
Self::Mark(mark) => write!(formatter, "{mark}"),
Self::Type(type_name) => type_name.fmt(formatter),
Self::TypeDefinition(type_definition) => type_definition.fmt(formatter),
Self::Keyword(keyword) => keyword.fmt(formatter),
Self::Mark(mark) => mark.fmt(formatter),
Self::Identifier => write!(formatter, "identifier"),
Self::Comment => write!(formatter, "comment"),
Self::BlockComment => write!(formatter, "block comment"),
}
}
}
@@ -414,6 +424,7 @@ impl TokenType {
.collect::<Vec<char>>();

if buffer.last() != Some(&'*') {
let file: String = location.file.clone();
let snippet: annotate_snippets::Snippet = annotate_snippets::Snippet {
title: Some(annotate_snippets::Annotation {
id: Some("E0001"),
@@ -424,7 +435,7 @@
slices: vec![annotate_snippets::Slice {
source: line,
line_start: location.line,
origin: Some(&location.file),
origin: Some(&file),
annotations: vec![annotate_snippets::SourceAnnotation {
range: (location.column - 1, line.len() - iterator.clone().count()),
label: "Unterminated comment",
@@ -434,9 +445,12 @@
}],
};

let renderer: annotate_snippets::Renderer = annotate_snippets::Renderer::styled();
eprintln!("{}", renderer.render(snippet));
return Err(LexerError::UnterminatedComment { location });
return Err(LexerError::UnterminatedComment {
location,
error: annotate_snippets::Renderer::styled()
.render(snippet)
.to_string(),
});
}

iterator.next();
Expand All @@ -448,7 +462,7 @@ impl TokenType {
.collect::<String>()
.trim()
.to_owned(),
token_type: TokenType::Comment,
token_type: TokenType::BlockComment,
}));
} else if &buffer.iter().collect::<String>() == "//" {
buffer = line[location.column + 1..].chars().collect::<Vec<char>>();
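
Besides the error plumbing, two things change shape in this file: block comments now get their own `BlockComment` token type instead of reusing `Comment`, and the `Display` implementation delegates to the inner value's `fmt` rather than re-formatting it through `write!`, so the inner type receives the caller's `Formatter` directly. The delegation pattern in isolation, with made-up names purely for illustration:

use core::fmt;

// Illustration only; these names are not part of the I-Language codebase.
enum Inner {
    Number,
}

impl fmt::Display for Inner {
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(formatter, "number")
    }
}

enum Outer {
    Wrapped(Inner),
    Identifier,
}

impl fmt::Display for Outer {
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Reuse the inner Display directly instead of `write!(formatter, "{inner}")`.
            Outer::Wrapped(inner) => inner.fmt(formatter),
            Outer::Identifier => write!(formatter, "identifier"),
        }
    }
}

fn main() {
    assert_eq!(Outer::Wrapped(Inner::Number).to_string(), "number");
    assert_eq!(Outer::Identifier.to_string(), "identifier");
}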
4 changes: 0 additions & 4 deletions crates/lexer/tests/tokens/keyword.rs
@@ -48,11 +48,9 @@ mod tests {
assert_eq!(&format!("{}", Keyword::Class), "class");
assert_eq!(&format!("{}", Keyword::Const), "const");
assert_eq!(&format!("{}", Keyword::Continue), "continue");
assert_eq!(&format!("{}", Keyword::Default), "default");
assert_eq!(&format!("{}", Keyword::Else), "else");
assert_eq!(&format!("{}", Keyword::Finally), "finally");
assert_eq!(&format!("{}", Keyword::For), "for");
assert_eq!(&format!("{}", Keyword::Function), "function");
assert_eq!(&format!("{}", Keyword::If), "if");
assert_eq!(&format!("{}", Keyword::Import), "import");
assert_eq!(&format!("{}", Keyword::Match), "match");
@@ -81,11 +79,9 @@ mod tests {
assert!(generate_test(&location, "class", Keyword::Class));
assert!(generate_test(&location, "const", Keyword::Const));
assert!(generate_test(&location, "continue", Keyword::Continue));
assert!(generate_test(&location, "default", Keyword::Default));
assert!(generate_test(&location, "else", Keyword::Else));
assert!(generate_test(&location, "finally", Keyword::Finally));
assert!(generate_test(&location, "for", Keyword::For));
assert!(generate_test(&location, "function", Keyword::Function));
assert!(generate_test(&location, "if", Keyword::If));
assert!(generate_test(&location, "import", Keyword::Import));
assert!(generate_test(&location, "match", Keyword::Match));
2 changes: 0 additions & 2 deletions crates/lexer/tests/tokens/mark.rs
@@ -44,7 +44,6 @@ mod tests {
assert_eq!(&format!("{}", Mark::Add), "+");
assert_eq!(&format!("{}", Mark::AddAssign), "+=");
assert_eq!(&format!("{}", Mark::And), "&&");
assert_eq!(&format!("{}", Mark::Arrow), "->");
assert_eq!(&format!("{}", Mark::Assign), "=");
assert_eq!(&format!("{}", Mark::At), "@");
assert_eq!(&format!("{}", Mark::Bang), "!");
@@ -104,7 +103,6 @@ mod tests {
assert!(generate_test(&location, "+", Mark::Add));
assert!(generate_test(&location, "+=", Mark::AddAssign));
assert!(generate_test(&location, "&&", Mark::And));
assert!(generate_test(&location, "->", Mark::Arrow));
assert!(generate_test(&location, "=", Mark::Assign));
assert!(generate_test(&location, "@", Mark::At));
assert!(generate_test(&location, "!", Mark::Bang));
1 change: 1 addition & 0 deletions crates/tools/Cargo.toml
@@ -20,6 +20,7 @@ clap-verbosity-flag = { version = "2.1.1", optional = true }
current_locale = { version = "0.1.1", optional = true }
env_logger = "0.11.2"
localizer-rs = { version = "1.2.0", optional = true }
regex = "1.10.4"

log.workspace = true

1 change: 1 addition & 0 deletions crates/tools/src/lib.rs
@@ -30,5 +30,6 @@ pub mod beta;
pub mod iterator;
pub mod logging;
pub mod panic_handler;
pub mod terminal;
#[cfg(feature = "localization")]
pub mod translation;