Numerous performance improvements for datetimes and timespans
Cameron Dutro committed Jan 24, 2013
1 parent 43165d9 commit b713d9f
Showing 6 changed files with 48 additions and 16 deletions.
lib/twitter_cldr/formatters/calendars/datetime_formatter.rb (10 additions, 1 deletion)
@@ -41,7 +41,12 @@ class DateTimeFormatter < Base
   }

   def initialize(options = {})
-    @tokenizer = TwitterCldr::Tokenizers::DateTimeTokenizer.new(:locale => extract_locale(options), :calendar_type => options[:calendar_type])
+    locale = extract_locale(options)
+    cache_key = TwitterCldr::Utils.compute_cache_key(locale, options[:calendar_type])
+    @tokenizer = tokenizer_cache[cache_key] ||= TwitterCldr::Tokenizers::DateTimeTokenizer.new(
+      :locale => locale,
+      :calendar_type => options[:calendar_type]
+    )
   end

   def result_for_token(token, index, date)
@@ -58,6 +63,10 @@ def self.additional_formats_for(locale)

   protected

+  def tokenizer_cache
+    @@tokenizer_cache ||= {}
+  end
+
   # there is incomplete era data in CLDR for certain locales like Hindi
   # fall back if that happens
   def era(date, pattern, length)
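This hunk memoizes DateTimeTokenizer construction: the formatter computes a cache key from the locale and calendar type and reuses a previously built tokenizer when one exists, instead of building a new tokenizer on every DateTimeFormatter.new. A minimal sketch of that pattern, using hypothetical ExpensiveTokenizer and Formatter classes rather than the real TwitterCldr API:

    # Sketch only: illustrates the cache-key memoization above with stand-in classes.
    class ExpensiveTokenizer
      def initialize(locale, calendar_type)
        @locale = locale
        @calendar_type = calendar_type
        sleep(0.01) # stands in for costly resource loading and parsing
      end
    end

    class Formatter
      def initialize(locale, calendar_type = :gregorian)
        key = [locale, calendar_type]                    # cache key built from the inputs
        @tokenizer = tokenizer_cache[key] ||=
          ExpensiveTokenizer.new(locale, calendar_type)  # constructed once per key
      end

      private

      def tokenizer_cache
        @@tokenizer_cache ||= {}  # class variable: shared by all Formatter instances
      end
    end

    # Only the first Formatter.new for a given locale/calendar pays the construction cost.
    2.times { Formatter.new(:en) }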
lib/twitter_cldr/formatters/plurals/rules.rb (6 additions, 1 deletion)
@@ -30,7 +30,12 @@ def rule_for(number, locale = TwitterCldr.locale)

   def get_resource(locale)
     locale = TwitterCldr.convert_locale(locale)
-    eval(TwitterCldr.get_locale_resource(locale, :plurals)[locale])[locale][:i18n][:plural]
+    cache_key = TwitterCldr::Utils.compute_cache_key(locale)
+    locale_cache[cache_key] ||= eval(TwitterCldr.get_locale_resource(locale, :plurals)[locale])[locale][:i18n][:plural]
   end

+  def locale_cache
+    @locale_cache ||= {}
+  end
+
 end
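get_resource now parses the plural resource once per locale and keeps the result in a hash on the Rules class, so the relatively expensive eval no longer runs on every pluralization lookup. A rough sketch of the same memoization with a hypothetical PluralRulesSketch module and a stubbed load_rules in place of TwitterCldr's resource loader:

    # Sketch only: memoize a per-locale resource that is expensive to parse.
    module PluralRulesSketch
      class << self
        def rule_for(number, locale)
          rules_for(locale).call(number)
        end

        private

        def rules_for(locale)
          locale_cache[locale] ||= load_rules(locale)  # parsed at most once per locale
        end

        def locale_cache
          @locale_cache ||= {}  # instance variable on the module object itself
        end

        def load_rules(locale)
          # Stands in for reading and eval'ing the locale's plural resource file.
          lambda { |n| n == 1 ? :one : :other }
        end
      end
    end

    PluralRulesSketch.rule_for(1, :en)  # loads the resource, caches it, returns :one
    PluralRulesSketch.rule_for(5, :en)  # cache hit, returns :other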
lib/twitter_cldr/tokenizers/base.rb (11 additions, 8 deletions)
@@ -54,10 +54,9 @@ def tokens_for(path, additional_cache_key_params = [])
   end

   def tokens_for_pattern(pattern, path, additional_cache_key_params = [])
-    @@token_cache ||= {}
     cache_key = TwitterCldr::Utils.compute_cache_key(@locale, path.join('.'), type, format || "nil", *additional_cache_key_params)

-    unless @@token_cache.include?(cache_key)
+    unless token_cache.include?(cache_key)
       result = []
       tokens = expand_pattern(pattern)

@@ -69,25 +68,29 @@ def tokens_for_pattern(pattern, path, additional_cache_key_params = [])
         end
       end

-      @@token_cache[cache_key] = result
+      token_cache[cache_key] = result
     end

-    @@token_cache[cache_key]
+    token_cache[cache_key]
   end

   def tokens_with_placeholders_for(key)
-    @@token_cache ||= {}
     cache_key = compute_cache_key(@locale, key, type)

-    unless @@token_cache.include?(cache_key)
+    unless token_cache.include?(cache_key)
       result = []
       tokens = tokenize_pattern(pattern_for(traverse(key)))
       tokens.each do |token|
         result << token
       end
-      token_cache[cache_key] = result
+      token_cache[cache_key] = result
     end
-    @@token_cache[cache_key]
+
+    token_cache[cache_key]
   end

+  def token_cache
+    @@token_cache ||= {}
+  end
+
   def compute_cache_key(*pieces)
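Both tokenizing methods previously initialized @@token_cache inline; they now go through a shared token_cache reader, which keeps the lazy @@token_cache ||= {} initialization in one place. A simplified sketch of that shape (TokenizerSketch and its scan-based tokenizing are illustrative, not the real Base class):

    # Sketch only: one lazily created class-level cache behind a reader method.
    class TokenizerSketch
      def tokens_for(pattern)
        key = [self.class.name, pattern]
        unless token_cache.include?(key)
          token_cache[key] = pattern.scan(/\w+|\W+/)  # stands in for real tokenizing
        end
        token_cache[key]
      end

      private

      # Every instance (and every call site) shares this one hash.
      def token_cache
        @@token_cache ||= {}
      end
    end

    t = TokenizerSketch.new
    t.tokens_for("MMM d, y")  # tokenized and stored in @@token_cache
    t.tokens_for("MMM d, y")  # served from the cache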
@@ -16,7 +16,8 @@ def find_closest(goal_pattern)
     if !goal_pattern || goal_pattern.strip.empty?
       nil
     else
-      rank(goal_pattern).min do |(p1, score1), (p2, score2)|
+      cache_key = TwitterCldr::Utils.compute_cache_key(goal_pattern)
+      pattern_cache[cache_key] ||= rank(goal_pattern).min do |(p1, score1), (p2, score2)|
         score1 <=> score2
       end.first
     end
@@ -28,6 +29,10 @@ def patterns

   protected

+  def pattern_cache
+    @pattern_cache ||= {}
+  end
+
   def separate(pattern_key)
     last_char = ""
     pattern_key.each_char.each_with_index.inject([]) do |ret, (char, index)|
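find_closest now caches the winning pattern per goal pattern, so the rank-and-min scan over every candidate runs only once for each distinct input. A standalone sketch of memoizing that kind of closest-match lookup; the ClosestMatcher class and its toy scoring are made up for illustration and are not the library's rank logic:

    # Sketch only: cache the best-scoring candidate per query string.
    class ClosestMatcher
      def initialize(candidates)
        @candidates = candidates
        @cache = {}  # per-instance, like @pattern_cache above
      end

      def find_closest(goal)
        return nil if goal.nil? || goal.strip.empty?
        @cache[goal] ||= @candidates.min_by { |candidate| score(candidate, goal) }
      end

      private

      # Toy scoring: length difference plus characters that differ position by position.
      def score(candidate, goal)
        (candidate.length - goal.length).abs +
          candidate.chars.zip(goal.chars).count { |a, b| a != b }
      end
    end

    matcher = ClosestMatcher.new(%w[yMd yMMMd MMMd Hm])
    matcher.find_closest("yMMd")  # scored once, result cached
    matcher.find_closest("yMMd")  # cache hit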
lib/twitter_cldr/tokenizers/calendars/datetime_tokenizer.rb (4 additions, 0 deletions)
@@ -116,6 +116,10 @@ def pattern_for(resource)
           resource.is_a?(Hash) ? resource[:pattern] : resource
         end
       end
+
+      def pattern_cache
+        @@pattern_cache ||= {}
+      end
     end
   end
 end
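Note that @@pattern_cache here, like @@token_cache and @@tokenizer_cache above, is a class variable, so every tokenizer instance reads and writes the same hash, while the @pattern_cache and @locale_cache added elsewhere in this commit are instance variables scoped to a single object. A small sketch of that distinction with throwaway class names:

    # Sketch only: class-variable cache vs. per-object instance-variable cache.
    class SharedCache
      def store(key, value)
        (@@cache ||= {})[key] = value  # one hash for every instance
      end

      def fetch(key)
        (@@cache ||= {})[key]
      end
    end

    class PerObjectCache
      def store(key, value)
        (@cache ||= {})[key] = value   # a separate hash per instance
      end

      def fetch(key)
        (@cache ||= {})[key]
      end
    end

    SharedCache.new.store(:greg, "cached pattern")
    SharedCache.new.fetch(:greg)     # => "cached pattern" (state shared across instances)
    PerObjectCache.new.store(:greg, "cached pattern")
    PerObjectCache.new.fetch(:greg)  # => nil (a different object starts with an empty cache)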
lib/twitter_cldr/tokenizers/calendars/timespan_tokenizer.rb (11 additions, 5 deletions)
@@ -58,18 +58,24 @@ def initialize(options = {})
   def tokens(options = {})
     path = full_path(options[:direction], options[:unit], options[:type])
     pluralization = options[:rule] || TwitterCldr::Formatters::Plurals::Rules.rule_for(options[:number], @locale)
+    available = traverse(path)

     case pluralization # sometimes the plural rule will return ":one" when the resource only contains a path with "1"
       when :zero
-        pluralization = 0 if token_exists(path + [0])
+        pluralization = 0 if available.include?(0)
       when :one
-        pluralization = 1 if token_exists(path + [1])
+        pluralization = 1 if available.include?(1)
       when :two
-        pluralization = 2 if token_exists(path + [2])
+        pluralization = 2 if available.include?(2)
     end
-    path << pluralization

-    tokens_with_placeholders_for(path) if token_exists(path)
+    if available.include?(pluralization)
+      path << pluralization
+    else
+      path << available.keys.first
+    end
+
+    tokens_with_placeholders_for(path)
   end

   def token_exists(path)
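The tokens method now calls traverse(path) once and reuses the returned hash both to normalize :zero/:one/:two down to the numeric keys the resource actually contains and to fall back to the first available key, rather than returning nil, when the requested pluralization is missing. A condensed sketch of that key-selection logic against a hand-written stand-in for the unit resource:

    # Sketch only: pick a plural key with a fallback, given the available unit data.
    AVAILABLE = {
      0      => "{0} seconds ago",
      1      => "{0} second ago",
      :other => "{0} seconds ago"
    }

    def plural_key_for(pluralization, available)
      # The plural rule may say :zero/:one/:two while the resource only has 0/1/2.
      case pluralization
        when :zero then pluralization = 0 if available.include?(0)
        when :one  then pluralization = 1 if available.include?(1)
        when :two  then pluralization = 2 if available.include?(2)
      end

      available.include?(pluralization) ? pluralization : available.keys.first
    end

    plural_key_for(:one, AVAILABLE)  # => 1
    plural_key_for(:few, AVAILABLE)  # => 0 (falls back to the first available key)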
