Skip to content

Commit

Permalink
Fix tests
Browse files — browse the repository at this point in the history
  • Loading branch information
camertron committed Nov 20, 2019
1 parent 6b79816 commit f91b105
Show file tree
Hide file tree
Showing 5 changed files with 1,656 additions and 33 deletions.
1 change: 0 additions & 1 deletion lib/twitter_cldr/resources/timezone_tests_importer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
module TwitterCldr
module Resources

# This class should be used with JRuby in 1.9 mode
class TimezoneTestsImporter < Importer
TEST_TIME = Time.utc(2019, 11, 17, 0, 0, 0)

Expand Down
46 changes: 22 additions & 24 deletions lib/twitter_cldr/tokenizers/calendars/date_time_tokenizer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -16,20 +16,6 @@ def tokenizer
TokenRecognizer.new(:plaintext, //)
])
end

# Memoized tokenizer for patterns that mix date and time fields.
# It unions the recognizers of the date tokenizer and the time
# tokenizer, then appends a single catch-all plaintext recognizer.
# NOTE(review): this references `data_reader`, which elsewhere in the
# file is instance state (`attr_reader :data_reader`) — confirm the
# scope this definition actually lives in; the surrounding diff
# context is truncated here.
def full_tokenizer
@full_tokenizer ||= begin
# Keep only the non-plaintext recognizers from each side so the
# two per-tokenizer plaintext catch-alls don't both survive.
new_tok = Tokenizer.union(
data_reader.date_reader.tokenizer.tokenizer,
data_reader.time_reader.tokenizer.tokenizer
) do |recognizer|
recognizer.token_type != :plaintext
end

# Re-add one catch-all plaintext recognizer (empty regex matches
# anything left over).
new_tok.recognizers << TokenRecognizer.new(:plaintext, //)
new_tok
end
end
end

attr_reader :data_reader
Expand All @@ -39,25 +25,19 @@ def initialize(data_reader)
end

def tokenize(pattern)
expand_tokens(tokenizer.tokenize(pattern))
expand_tokens(
PatternTokenizer.new(data_reader, tokenizer).tokenize(pattern)
)
end

# Tokenizes mixed date and time pattern strings,
# used to tokenize the additional date format patterns.
def full_tokenize(pattern)
full_tokenizer.tokenize(pattern)
PatternTokenizer.new(data_reader, full_tokenizer).tokenize(pattern)
end

protected

# Instance-level tokenizer: the class-level tokenizer wrapped in a
# PatternTokenizer bound to this instance's data reader. Built once
# and cached for subsequent calls.
def tokenizer
  return @tokenizer if @tokenizer
  @tokenizer = PatternTokenizer.new(data_reader, self.class.tokenizer)
end

# Instance-level mixed date/time tokenizer: the class-level
# full_tokenizer wrapped in a PatternTokenizer bound to this
# instance's data reader. Built once and cached.
def full_tokenizer
  return @full_tokenizer if @full_tokenizer
  @full_tokenizer = PatternTokenizer.new(data_reader, self.class.full_tokenizer)
end

def expand_tokens(tokens)
tokens.inject([]) do |ret, token|
ret + case token.type
Expand All @@ -81,6 +61,24 @@ def expand_time(token)
time_reader.tokenizer.tokenize(time_reader.pattern)
end

# Tokenizer for patterns that mix date and time fields. Unions the
# recognizers of the date and time tokenizers, keeping only their
# non-plaintext recognizers (so the two per-tokenizer plaintext
# catch-alls don't both survive), then appends a single catch-all
# plaintext recognizer (empty regex) at the end.
#
# Fix: memoize in an instance variable instead of the class variable
# @@full_tokenizer. A class variable is shared by every instance (and
# the whole inheritance tree), but the union below depends on this
# instance's data_reader — a shared cache could serve a tokenizer
# built from a different instance's reader.
def full_tokenizer
  @full_tokenizer ||= begin
    new_tok = Tokenizer.union(
      data_reader.date_reader.tokenizer.tokenizer,
      data_reader.time_reader.tokenizer.tokenizer
    ) do |recognizer|
      recognizer.token_type != :plaintext
    end

    new_tok.recognizers << TokenRecognizer.new(:plaintext, //)
    new_tok
  end
end

# Instance-side accessor that simply delegates to the tokenizer
# configured at the class level.
def tokenizer
  klass = self.class
  klass.tokenizer
end

end
end
end
6 changes: 2 additions & 4 deletions lib/twitter_cldr/tokenizers/calendars/date_tokenizer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -24,13 +24,11 @@ def initialize(data_reader)
end

def tokenize(pattern)
tokenizer.tokenize(pattern)
PatternTokenizer.new(data_reader, tokenizer).tokenize(pattern)
end

private

def tokenizer
@tokenizer ||= PatternTokenizer.new(data_reader, self.class.tokenizer)
self.class.tokenizer
end

end
Expand Down
6 changes: 2 additions & 4 deletions lib/twitter_cldr/tokenizers/calendars/time_tokenizer.rb
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,11 @@ def initialize(data_reader)
end

def tokenize(pattern)
tokenizer.tokenize(pattern)
PatternTokenizer.new(data_reader, tokenizer).tokenize(pattern)
end

private

def tokenizer
@tokenizer ||= PatternTokenizer.new(data_reader, self.class.tokenizer)
self.class.tokenizer
end

end
Expand Down
Loading

0 comments on commit f91b105

Please sign in to comment.