Merge 70b937c into 3d434d4
project-eutopia committed Mar 15, 2020
2 parents 3d434d4 + 70b937c commit 847c3b2
Showing 9 changed files with 367 additions and 15 deletions.
11 changes: 6 additions & 5 deletions .travis.yml
@@ -1,8 +1,9 @@
 sudo: false
 language: ruby
+before_install:
+- gem install bundler
 rvm:
-- 2.3.8
-- 2.4.5
-- 2.5.3
-- 2.6.0
-before_install: gem install bundler -v 1.17.1
+- 2.4.9
+- 2.5.7
+- 2.6.5
+- 2.7.0
6 changes: 4 additions & 2 deletions keisan.gemspec
@@ -21,9 +21,11 @@ Gem::Specification.new do |spec|
 
   spec.required_ruby_version = ">= 2.3.0"
 
+  spec.add_dependency "cmath", "~> 1.0"
+
   spec.add_development_dependency "coveralls"
-  spec.add_development_dependency "bundler", "~> 1.14"
-  spec.add_development_dependency "rake", "~> 10.0"
+  spec.add_development_dependency "bundler", "~> 2.0"
+  spec.add_development_dependency "rake", "~> 13.0"
   spec.add_development_dependency "rspec", "~> 3.0"
   spec.add_development_dependency "pry"
   spec.add_development_dependency "pry-stack_explorer"
1 change: 1 addition & 0 deletions lib/keisan.rb
@@ -85,6 +85,7 @@
 require "keisan/tokens/line_separator"
 require "keisan/tokens/unknown"
 
+require "keisan/string_and_group_parser"
 require "keisan/tokenizer"
 
 require "keisan/parsing/component"
195 changes: 195 additions & 0 deletions lib/keisan/string_and_group_parser.rb
@@ -0,0 +1,195 @@
module Keisan
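  # Splits an expression into string portions (quoted text), group portions
  # (text wrapped in balanced braces), and "other" portions, so that quote and
  # brace characters appearing inside strings are not treated as structure.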
  class StringAndGroupParser
    class Portion
      attr_reader :expression, :start_index, :end_index

      def initialize(expression, start_index)
        @expression = expression
        @start_index = start_index
      end
    end

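    # A single- or double-quoted string, consumed from its opening quote
    # through the matching closing quote, with backslash escapes resolved.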
    class StringPortion < Portion
      attr_reader :string

      def initialize(expression, start_index)
        super

        @string = expression[start_index]
        index = start_index + 1

        while index < expression.size
          c = expression[index]
          if c == quote_type
            @string << c
            index += 1
            break
          end

          # escape character
          if c == "\\"
            index += 1
            if index >= expression.size
              raise Keisan::Exceptions::TokenizingError.new("Tokenizing error, no closing quote #{quote_type}")
            end
            c = expression[index]

            case c
            when "\\", '"', "'"
              @string << c
            when "a"
              @string << "\a"
            when "b"
              @string << "\b"
            when "r"
              @string << "\r"
            when "n"
              @string << "\n"
            when "s"
              @string << "\s"
            when "t"
              @string << "\t"
            else
              raise Keisan::Exceptions::TokenizingError.new("Tokenizing error, unknown escape character: \\#{c}")
            end
          else
            @string << c
          end

          index += 1
        end

        @end_index = index

        if @string.size <= 1 || @string[-1] != @string[0]
          raise Keisan::Exceptions::TokenizingError.new("Tokenizing error, no closing quote #{quote_type}")
        end
      end

      def quote_type
        @string[0]
      end

      def size
        string.size
      end

      def to_s
        string
      end
    end
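
For illustration only (not part of the diff), a minimal sketch of how StringPortion consumes a quoted substring, assuming the gem is loaded:

expression = 'x + "a\"b" + y'  # Ruby single-quoted literal; the expression contains \" inside a double-quoted string
portion = Keisan::StringAndGroupParser::StringPortion.new(expression, 4)
portion.string     # => "\"a\"b\""  (surrounding quotes kept, the \" escape resolved to ")
portion.end_index  # => 10          (the index just past the closing quote)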

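    # A balanced (), [] or {} group, parsed recursively with a nested
    # StringAndGroupParser that stops at the matching closing brace.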
    class GroupPortion < Portion
      attr_reader :opening_brace, :closing_brace, :portions, :size

      def initialize(expression, start_index)
        super

        case expression[start_index]
        when OPEN_GROUP_REGEX
          @opening_brace = expression[start_index]
        else
          raise Keisan::Exceptions::TokenizingError.new("Internal error, GroupPortion did not start with brace")
        end

        case opening_brace
        when "("
          @closing_brace = ")"
        when "{"
          @closing_brace = "}"
        when "["
          @closing_brace = "]"
        end

        parser = StringAndGroupParser.new(expression, start_index: start_index + 1, ending_character: closing_brace)
        @portions = parser.portions
        @size = parser.size + 2

        if start_index + size > expression.size || expression[start_index + size - 1] != closing_brace
          raise Keisan::Exceptions::TokenizingError.new("Tokenizing error, group with opening brace #{opening_brace} did not have closing brace")
        end
      end

      def to_s
        opening_brace + portions.map(&:to_s).join + closing_brace
      end
    end
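
Again purely illustrative, not part of the file: a GroupPortion starting at an opening brace spans the whole balanced group, nested groups included.

expression = "1 + (2 * [3, 4]) + 5"
portion = Keisan::StringAndGroupParser::GroupPortion.new(expression, 4)
portion.opening_brace         # => "("
portion.closing_brace         # => ")"
portion.size                  # => 12
portion.to_s                  # => "(2 * [3, 4])"
portion.portions.map(&:to_s)  # => ["2 * ", "[3, 4]"]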

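    # Any other run of characters, collected up to the next quote or brace;
    # this is the text that is later scanned with the ordinary token regex.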
    class OtherPortion < Portion
      attr_reader :string

      def initialize(expression, start_index)
        super

        case expression[start_index]
        when STRING_CHARACTER_REGEX, OPEN_GROUP_REGEX, CLOSED_GROUP_REGEX
          raise Keisan::Exceptions::TokenizingError.new("Internal error, OtherPortion should not have string/braces at start")
        else
          index = start_index + 1
        end

        while index < expression.size
          case expression[index]
          when STRING_CHARACTER_REGEX, OPEN_GROUP_REGEX, CLOSED_GROUP_REGEX
            break
          else
            index += 1
          end
        end

        @end_index = index
        @string = expression[start_index...end_index]
      end

      def size
        string.size
      end

      def to_s
        string
      end
    end
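
An illustrative call, not part of the commit: an OtherPortion simply collects everything up to the next quote or brace.

portion = Keisan::StringAndGroupParser::OtherPortion.new("abc + (1)", 0)
portion.string  # => "abc + "
portion.size    # => 6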

    # An ordered array of "portions", which are the string, group, and other
    # pieces the expression has been split into, in order of appearance.
    attr_reader :portions, :size

    STRING_CHARACTER_REGEX = /["']/
    OPEN_GROUP_REGEX = /[\(\{\[]/
    CLOSED_GROUP_REGEX = /[\)\}\]]/

    # The ending character, when given, is a second stopping condition in
    # addition to reaching the end of the expression.
    def initialize(expression, start_index: 0, ending_character: nil)
      index = start_index
      @portions = []

      while index < expression.size && (ending_character.nil? || expression[index] != ending_character)
        case expression[index]
        when STRING_CHARACTER_REGEX
          portion = StringPortion.new(expression, index)
          index = portion.end_index
          @portions << portion

        when OPEN_GROUP_REGEX
          portion = GroupPortion.new(expression, index)
          index += portion.size
          @portions << portion

        when CLOSED_GROUP_REGEX
          raise Keisan::Exceptions::TokenizingError.new("Tokenizing error, unexpected closing brace #{expression[index]}")

        else
          portion = OtherPortion.new(expression, index)
          index += portion.size
          @portions << portion
        end
      end

      @size = index - start_index
    end

    def to_s
      portions.map(&:to_s).join
    end
  end
end
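
To tie the pieces together, a hypothetical top-level parse (not part of the commit) splits an expression into its portions in order:

parser = Keisan::StringAndGroupParser.new("x + 'a(b' + [1, 2]")
parser.portions.map { |p| p.class.name.split("::").last }
# => ["OtherPortion", "StringPortion", "OtherPortion", "GroupPortion"]
parser.portions.map(&:to_s)
# => ["x + ", "'a(b'", " + ", "[1, 2]"]
parser.to_s
# => "x + 'a(b' + [1, 2]"
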
24 changes: 18 additions & 6 deletions lib/keisan/tokenizer.rb
@@ -1,8 +1,6 @@
 module Keisan
   class Tokenizer
     TOKEN_CLASSES = [
-      Tokens::Group,
-      Tokens::String,
       Tokens::Null,
       Tokens::Boolean,
       Tokens::Word,
@@ -27,8 +25,22 @@ class Tokenizer
 
     def initialize(expression)
       @expression = self.class.normalize_expression(expression)
-      @scan = @expression.scan(TOKEN_REGEX)
-      @tokens = tokenize!
+
+      portions = StringAndGroupParser.new(@expression).portions
+
+      @tokens = portions.inject([]) do |tokens, portion|
+        case portion
+        when StringAndGroupParser::StringPortion
+          tokens << Tokens::String.new(portion.to_s)
+        when StringAndGroupParser::GroupPortion
+          tokens << Tokens::Group.new(portion.to_s)
+        when StringAndGroupParser::OtherPortion
+          scan = portion.to_s.scan(TOKEN_REGEX)
+          tokens += tokenize!(scan)
+        end
+
+        tokens
+      end
     end
 
     def self.normalize_expression(expression)
@@ -46,8 +58,8 @@ def self.remove_comments(expression)
       expression.gsub(/#[^;]*/, "")
     end
 
-    def tokenize!
-      @scan.map do |scan_result|
+    def tokenize!(scan)
+      scan.map do |scan_result|
         i = scan_result.find_index {|token| !token.nil?}
         token_string = scan_result[i]
         token = TOKEN_CLASSES[i].new(token_string)
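
As a rough sketch of the new flow (assuming the rest of the gem behaves as before): string and group portions become String and Group tokens directly, and only the remaining text is scanned with TOKEN_REGEX.

tokenizer = Keisan::Tokenizer.new("[1, 2] + 'a)b'")
tokenizer.tokens.first.class  # => Keisan::Tokens::Group   (from the GroupPortion "[1, 2]")
tokenizer.tokens.last.class   # => Keisan::Tokens::String  (from the StringPortion "'a)b'")
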
5 changes: 3 additions & 2 deletions lib/keisan/tokens/group.rb
@@ -1,12 +1,13 @@
 module Keisan
   module Tokens
     class Group < Token
-      REGEX = /(\((?:[^\[\]\(\)\{\}]*+\g<1>*+)*+\)|\[(?:[^\[\]\(\)\{\}]*+\g<1>*+)*+\]|\{(?:[^\[\]\(\)\{\}]*+\g<1>*+)*+\})/
+      REGEX = /(\(|\)|\[|\]|\{|\})/
 
       attr_reader :sub_tokens
 
       def initialize(string)
-        super
+        @string = string
+        raise Exceptions::InvalidToken.new(string) unless string[0].match(regex) && string[-1].match(regex)
         @sub_tokens = Tokenizer.new(string[1...-1]).tokens
       end
 
6 changes: 6 additions & 0 deletions spec/keisan/calculator_spec.rb
@@ -131,6 +131,12 @@
       end
     end
 
+    describe "unmatched braces inside strings" do
+      it "does not match against actual braces outside strings" do
+        expect(calculator.evaluate("'1'+'2'+(']\n]') + (('3') + '4')")).to eq "12];]34"
+      end
+    end
+
     describe "#simplify" do
       it "allows for undefined variables to still exist and returns a string representation of the expression" do
         expect{calculator.evaluate("0*x+1")}.to raise_error(Keisan::Exceptions::UndefinedVariableError)