Skip to content

Commit

Permalink
Optimize Concurrent::Map#[] on CRuby by letting the backing Hash hand…
Browse files Browse the repository at this point in the history
…le the default_proc

* Before:
  Hash#[]     14.477M (± 2.7%) i/s -     72.882M in   5.038078s
   Map#[]      7.837M (± 1.7%) i/s -     39.947M in   5.098411s
  After:
  Hash#[]     14.340M (± 1.5%) i/s -     72.074M in   5.027414s
   Map#[]      9.840M (± 0.8%) i/s -     50.106M in   5.092390s
  • Loading branch information
eregon committed Jan 27, 2023
1 parent fa9c790 commit ebf63ce
Show file tree
Hide file tree
Showing 5 changed files with 48 additions and 43 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
## Current

* (#989) Optimize `Concurrent::Map#[]` on CRuby by letting the backing Hash handle the `default_proc`.

## Release v1.2.0 (23 Jan 2023)

* (#962) Fix ReentrantReadWriteLock to use the same granularity for locals as for Mutex it uses.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ module Collection
# @!visibility private
class MriMapBackend < NonConcurrentMapBackend

def initialize(options = nil)
super(options)
# Build the CRuby (MRI) Map backend.
#
# @param [Hash, nil] options capacity / load-factor hints forwarded to the
#   superclass (NonConcurrentMapBackend)
# @param [Proc] default_proc optional block computing a default value for a
#   missing key, forwarded to the backing Hash (like Hash#default_proc)
def initialize(options = nil, &default_proc)
super(options, &default_proc)
# Serializes compound write operations (e.g. compute-style updates).
# NOTE(review): plain reads appear to bypass this lock — presumably safe on
# CRuby because single Hash operations are atomic under the GVL; confirm.
@write_lock = Mutex.new
end

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,10 @@ class NonConcurrentMapBackend
# directly without calling each other. This is important because of the
# SynchronizedMapBackend which uses a non-reentrant mutex for performance
# reasons.
def initialize(options = nil)
@backend = {}
# Initialize the non-concurrent Hash-backed storage.
#
# @param [Hash, nil] options validated for known keys when a Hash is given;
#   the values themselves are not used here
# @param [Proc] default_proc optional block computing a default value for a
#   missing key; installed directly as the backing Hash's default_proc so
#   plain reads get default handling at native Hash speed
def initialize(options = nil, &default_proc)
validate_options_hash!(options) if options.kind_of?(::Hash)
@backend = Hash.new(&default_proc)
# Kept separately so copies (initialize_copy) can rebuild a Hash with the
# same default_proc.
@default_proc = default_proc
end

def [](key)
Expand Down Expand Up @@ -55,7 +57,7 @@ def compute_if_present(key)
end

# Compute (and store) a new value for +key+ by yielding the current value
# (or nil when absent) to the caller's block.
# NOTE(review): this span is a diff rendering — the next two lines are the
# pre-change and post-change versions of the SAME statement; only the
# get_or_default form is current. It uses fetch-with-default so reading the
# old value does not trigger the backing Hash's default_proc.
def compute(key)
store_computed_value(key, yield(@backend[key]))
store_computed_value(key, yield(get_or_default(key, nil)))
end

def merge_pair(key, value)
Expand All @@ -67,7 +69,7 @@ def merge_pair(key, value)
end

# Atomically-intended swap: store +value+ under +key+ and return the value
# previously stored (nil when the key was absent).
# NOTE(review): this span is a diff rendering — the next two assignments are
# the pre-change and post-change versions of the SAME statement; only the
# get_or_default form is current. It avoids invoking the backing Hash's
# default_proc when reading the old value.
def get_and_set(key, value)
stored_value = @backend[key]
stored_value = get_or_default(key, nil)
@backend[key] = value
stored_value
end
Expand Down Expand Up @@ -109,13 +111,11 @@ def get_or_default(key, default_value)
@backend.fetch(key, default_value)
end

alias_method :_get, :[]
alias_method :_set, :[]=
private :_get, :_set
private

# Called by dup/clone: give the copy its own empty backing Hash instead of
# sharing the original's storage.
# NOTE(review): this span is a diff rendering — the next two assignments are
# the pre-change and post-change versions of the SAME statement; only the
# Hash.new(&@default_proc) form is current, so copies keep the same
# default_proc as the original.
def initialize_copy(other)
super
@backend = {}
@backend = Hash.new(&@default_proc)
self
end

Expand Down
65 changes: 34 additions & 31 deletions lib/concurrent-ruby/concurrent/map.rb
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,12 @@ class Map < Collection::MapImplementation
# @note Atomic methods taking a block do not allow the `self` instance
# to be used within the block. Doing so will cause a deadlock.

# @!method []=(key, value)
# Set a value with key
# @param [Object] key
# @param [Object] value
# @return [Object] the new value

# @!method compute_if_absent(key)
# Compute and store new value for key if the key is absent.
# @param [Object] key
Expand Down Expand Up @@ -119,41 +125,38 @@ class Map < Collection::MapImplementation
# @return [true, false] true if deleted
# @!macro map.atomic_method

#
def initialize(options = nil, &block)
if options.kind_of?(::Hash)
validate_options_hash!(options)
else
options = nil
end
# NonConcurrentMapBackend handles default_proc natively
unless defined?(Collection::NonConcurrentMapBackend) and self < Collection::NonConcurrentMapBackend

super(options)
@default_proc = block
end
# @param [Hash, nil] options options to set the :initial_capacity or :load_factor. Ignored on some Rubies.
# @param [Proc] default_proc Optional block to compute the default value if the key is not set, like `Hash#default_proc`
def initialize(options = nil, &default_proc)
if options.kind_of?(::Hash)
validate_options_hash!(options)
else
options = nil
end

# Get a value with key
# @param [Object] key
# @return [Object] the value
def [](key)
if value = super # non-falsy value is an existing mapping, return it right away
value
# re-check is done with get_or_default(key, NULL) instead of a simple !key?(key) in order to avoid a race condition, whereby by the time the current thread gets to the key?(key) call
# a key => value mapping might have already been created by a different thread (key?(key) would then return true, this elsif branch wouldn't be taken and an incorrect +nil+ value
# would be returned)
# note: nil == value check is not technically necessary
elsif @default_proc && nil == value && NULL == (value = get_or_default(key, NULL))
@default_proc.call(self, key)
else
value
super(options)
@default_proc = default_proc
end
end

# Set a value with key
# @param [Object] key
# @param [Object] value
# @return [Object] the new value
def []=(key, value)
super
# Get a value with key
# @param [Object] key
# @return [Object] the value
# Get a value with key. Falls back to @default_proc (when set) for keys
# without a mapping, mirroring Hash#default_proc semantics.
# @param [Object] key
# @return [Object] the stored value, or the default_proc's result, or nil
def [](key)
if value = super # non-falsy value is an existing mapping, return it right away
value
# re-check is done with get_or_default(key, NULL) instead of a simple !key?(key) in order to avoid a race condition, whereby by the time the current thread gets to the key?(key) call
# a key => value mapping might have already been created by a different thread (key?(key) would then return true, this elsif branch wouldn't be taken and an incorrect +nil+ value
# would be returned)
# note: nil == value check is not technically necessary
elsif @default_proc && nil == value && NULL == (value = get_or_default(key, NULL))
@default_proc.call(self, key)
else
value
end
end
end

alias_method :get, :[]
Expand Down
4 changes: 2 additions & 2 deletions spec/concurrent/map_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -777,8 +777,8 @@ def key # assert_collision_resistance expects to be able to call .key to get the
end
expect_no_size_change cache do
expect_size_change 1, dupped do
expect(:default_value).to eq dupped[:d]
expect(false).to eq cache.key?(:d)
expect(dupped[:d]).to eq :default_value
expect(cache.key?(:d)).to eq false
end
end
end
Expand Down

0 comments on commit ebf63ce

Please sign in to comment.