
Add lexer's ability to create tokens
DavidMacDonald11 committed Jan 22, 2022
1 parent 2694ff7 commit 6a39eb8
Showing 1 changed file with 17 additions and 2 deletions.
19 changes: 17 additions & 2 deletions modules/lexing/lexer.py
@@ -1,5 +1,8 @@
 from position.position import Position
 from position.symbol_position import SymbolPosition
+from tokens.constant import NumericalConstant
+from tokens.punctuator import Operator
+from tokens.punctuator import Punctuator, Punc
 from . import errors

 class Lexer:
@@ -13,7 +16,7 @@ def __init__(self, in_stream):
         self.skip()

     def skip(self):
-        self.symbol = self.in_stream.read()
+        self.symbol = self.in_stream.read_symbol()

     def advance(self):
         self.skip()
@@ -47,4 +50,16 @@ def make_tokens(self):
             self.tokens += [self.take_token()]

     def take_token(self):
-        return ""
+        while self.symbol.isspace():
+            self.advance()
+
+        position = self.new_position()
+
+        for token_type in (Punctuator, NumericalConstant, Operator):
+            if self.symbol in token_type.symbols():
+                token = token_type.construct(self)
+                token.position = position
+
+                return token
+
+        raise errors.UnknownSymbolError(self.take())
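
As a side note, the new take_token dispatches on two classmethods that each token type is assumed to expose: symbols(), listing the characters a token of that type can start with, and construct(lexer), which consumes symbols from the lexer and returns a token instance. A minimal, hypothetical sketch of that interface (the class below is illustrative only and does not exist in the repository):

# Hypothetical token type, illustrating the interface take_token appears to rely on.
class FakeDigitConstant:
    def __init__(self, string):
        self.string = string
        self.position = None              # filled in later by take_token

    @classmethod
    def symbols(cls):
        # Characters a token of this type may begin with.
        return "0123456789"

    @classmethod
    def construct(cls, lexer):
        # Consume symbols from the lexer while they still belong to this token.
        string = ""
        while lexer.symbol.isdigit():
            string += lexer.symbol
            lexer.advance()
        return cls(string)

With a type like this in the tuple take_token iterates over, an input such as "42+" would produce a FakeDigitConstant("42") before the lexer moves on to the following punctuator.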
