Permalink
Browse files

Numerous performance improvements for datetimes and timespans

  • Loading branch information...
1 parent 43165d9 commit b713d9f09f601ed0db3e27bb3d31f9909a09450f Cameron Dutro committed Jan 24, 2013
@@ -41,7 +41,12 @@ class DateTimeFormatter < Base
}
# Builds (or reuses) the date/time tokenizer for this formatter.
# NOTE(review): tokenizers are now memoized per (locale, calendar_type) in a
# shared cache — presumably DateTimeTokenizer instances are stateless and safe
# to reuse across formatters; TODO confirm, especially under threading.
def initialize(options = {})
- @tokenizer = TwitterCldr::Tokenizers::DateTimeTokenizer.new(:locale => extract_locale(options), :calendar_type => options[:calendar_type])
+ locale = extract_locale(options)
+ # cache key combines locale and calendar type so distinct calendars get distinct tokenizers
+ cache_key = TwitterCldr::Utils.compute_cache_key(locale, options[:calendar_type])
+ @tokenizer = tokenizer_cache[cache_key] ||= TwitterCldr::Tokenizers::DateTimeTokenizer.new(
+ :locale => locale,
+ :calendar_type => options[:calendar_type]
+ )
end
def result_for_token(token, index, date)
@@ -58,6 +63,10 @@ def self.additional_formats_for(locale)
protected
+ # Lazily-initialized, process-wide cache of DateTimeTokenizer instances,
+ # keyed by (locale, calendar_type).
+ # NOTE(review): @@tokenizer_cache is a class variable, shared across the
+ # whole inheritance tree — confirm subclasses of DateTimeFormatter should
+ # share one cache, and that growth is bounded by the locale set.
+ def tokenizer_cache
+ @@tokenizer_cache ||= {}
+ end
+
# there is incomplete era data in CLDR for certain locales like Hindi
# fall back if that happens
def era(date, pattern, length)
@@ -30,7 +30,12 @@ def rule_for(number, locale = TwitterCldr.locale)
# Loads the plural-rule resource for +locale+, memoizing the eval'd result
# so the resource string is parsed at most once per locale.
def get_resource(locale)
locale = TwitterCldr.convert_locale(locale)
- eval(TwitterCldr.get_locale_resource(locale, :plurals)[locale])[locale][:i18n][:plural]
+ # NOTE(review): eval here executes the resource file's contents — acceptable
+ # only because resources ship with the library; never feed untrusted data in.
+ cache_key = TwitterCldr::Utils.compute_cache_key(locale)
+ locale_cache[cache_key] ||= eval(TwitterCldr.get_locale_resource(locale, :plurals)[locale])[locale][:i18n][:plural]
+ end
+
+ # Memo of eval'd plural rules, keyed by computed locale cache key.
+ # NOTE(review): stored in an instance variable (@locale_cache) — presumably
+ # this method lives on a singleton/module level so the cache persists;
+ # TODO confirm against the enclosing class (not visible in this hunk).
+ def locale_cache
+ @locale_cache ||= {}
end
end
@@ -54,10 +54,9 @@ def tokens_for(path, additional_cache_key_params = [])
end
# Tokenizes +pattern+ and memoizes the token list in the shared token cache,
# keyed by (locale, dotted path, type, format, extra params). The commit moves
# cache storage from an inline @@token_cache into the token_cache accessor.
def tokens_for_pattern(pattern, path, additional_cache_key_params = [])
- @@token_cache ||= {}
cache_key = TwitterCldr::Utils.compute_cache_key(@locale, path.join('.'), type, format || "nil", *additional_cache_key_params)
- unless @@token_cache.include?(cache_key)
+ unless token_cache.include?(cache_key)
result = []
tokens = expand_pattern(pattern)
@@ -69,25 +68,29 @@ def tokens_for_pattern(pattern, path, additional_cache_key_params = [])
end
end
- @@token_cache[cache_key] = result
+ token_cache[cache_key] = result
end
- @@token_cache[cache_key]
+ token_cache[cache_key]
end
# Returns the (memoized) placeholder token list for +key+, cached per
# (locale, key, type) in the shared token cache.
def tokens_with_placeholders_for(key)
- @@token_cache ||= {}
cache_key = compute_cache_key(@locale, key, type)
- unless @@token_cache.include?(cache_key)
+ unless token_cache.include?(cache_key)
result = []
tokens = tokenize_pattern(pattern_for(traverse(key)))
tokens.each do |token|
result << token
end
- @@token_cache[cache_key] = result
+ token_cache[cache_key] = result
end
- @@token_cache[cache_key]
+
+ token_cache[cache_key]
+ end
+
+ # Lazily-initialized shared token cache.
+ # NOTE(review): @@token_cache is a class variable shared by every class in
+ # the hierarchy; both tokens_for_pattern and tokens_with_placeholders_for
+ # write into it — confirm their cache keys can never collide.
+ def token_cache
+ @@token_cache ||= {}
end
def compute_cache_key(*pieces)
@@ -16,7 +16,8 @@ def find_closest(goal_pattern)
if !goal_pattern || goal_pattern.strip.empty?
nil
else
- rank(goal_pattern).min do |(p1, score1), (p2, score2)|
+ cache_key = TwitterCldr::Utils.compute_cache_key(goal_pattern)
+ pattern_cache[cache_key] ||= rank(goal_pattern).min do |(p1, score1), (p2, score2)|
score1 <=> score2
end.first
end
@@ -28,6 +29,10 @@ def patterns
protected
+ # Memo of closest-pattern lookups for find_closest, keyed by goal pattern.
+ # Stored per-instance in @pattern_cache.
+ def pattern_cache
+ @pattern_cache ||= {}
+ end
+
def separate(pattern_key)
last_char = ""
pattern_key.each_char.each_with_index.inject([]) do |ret, (char, index)|
@@ -116,6 +116,10 @@ def pattern_for(resource)
resource.is_a?(Hash) ? resource[:pattern] : resource
end
end
+
+ # Shared memo of pattern lookups.
+ # NOTE(review): this variant uses a @@class variable while the other
+ # pattern_cache added in this commit uses an instance variable — confirm
+ # the difference (shared vs. per-instance lifetime) is intentional.
+ def pattern_cache
+ @@pattern_cache ||= {}
+ end
end
end
end
@@ -58,18 +58,24 @@ def initialize(options = {})
# Resolves the token list for a timespan: builds the resource path, picks the
# plural category for options[:number], and reconciles that category against
# the forms the resource actually defines.
def tokens(options = {})
path = full_path(options[:direction], options[:unit], options[:type])
pluralization = options[:rule] || TwitterCldr::Formatters::Plurals::Rules.rule_for(options[:number], @locale)
+ # fetch the available forms once, replacing repeated token_exists lookups
+ available = traverse(path)
case pluralization # sometimes the plural rule will return ":one" when the resource only contains a path with "1"
when :zero
- pluralization = 0 if token_exists(path + [0])
+ pluralization = 0 if available.include?(0)
when :one
- pluralization = 1 if token_exists(path + [1])
+ pluralization = 1 if available.include?(1)
when :two
- pluralization = 2 if token_exists(path + [2])
+ pluralization = 2 if available.include?(2)
end
- path << pluralization
- tokens_with_placeholders_for(path) if token_exists(path)
+ if available.include?(pluralization)
+ path << pluralization
+ else
+ # fall back to the first form the resource defines rather than returning nil
+ # NOTE(review): assumes traverse(path) returns a Hash (uses .keys) and that
+ # it is non-empty — confirm; .keys.first on an empty Hash would yield nil.
+ path << available.keys.first
+ end
+
+ tokens_with_placeholders_for(path)
end
def token_exists(path)

0 comments on commit b713d9f

Please sign in to comment.