
Merge pull request #8258 from Homebrew/dependabot/bundler/Library/Homebrew/concurrent-ruby-1.1.7

build(deps): bump concurrent-ruby from 1.1.6 to 1.1.7 in /Library/Homebrew
MikeMcQuaid committed Aug 10, 2020
2 parents 283583c + 48dd758 commit e72275e
Showing 124 changed files with 122 additions and 86 deletions.
2 changes: 1 addition & 1 deletion Library/Homebrew/Gemfile.lock
@@ -18,7 +18,7 @@ GEM
colorize (0.8.1)
commander (4.5.2)
highline (~> 2.0.0)
concurrent-ruby (1.1.6)
concurrent-ruby (1.1.7)
connection_pool (2.2.3)
diff-lcs (1.4.4)
docile (1.3.2)
23 changes: 17 additions & 6 deletions Library/Homebrew/vendor/bundle/bundler/setup.rb
@@ -3,7 +3,7 @@
ruby_engine = defined?(RUBY_ENGINE) ? RUBY_ENGINE : 'ruby'
ruby_version = RbConfig::CONFIG["ruby_version"]
path = File.expand_path('..', __FILE__)
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/concurrent-ruby-1.1.6/lib/concurrent-ruby"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/concurrent-ruby-1.1.7/lib/concurrent-ruby"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/i18n-1.8.5/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/minitest-5.14.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/thread_safe-0.3.6/lib"
@@ -21,7 +21,10 @@
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/docile-1.3.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/simplecov-html-0.12.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/simplecov-0.18.5/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/codecov-0.2.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/codecov-0.2.5/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/coderay-1.1.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/highline-2.0.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/commander-4.5.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/connection_pool-2.2.3/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/diff-lcs-1.4.4/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/unf_ext-0.0.7.7"
@@ -42,13 +45,17 @@
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ntlm-http-0.1.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/webrobots-0.1.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mechanize-2.7.6/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/method_source-1.0.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/mustache-1.1.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parallel-1.19.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parallel_tests-3.0.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parallel_tests-3.1.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parser-2.7.1.4/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/patchelf-1.1.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/plist-3.5.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rainbow-3.0.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-runtime-0.5.5823/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/parlour-4.0.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/patchelf-1.2.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/plist-3.5.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/pry-0.13.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/extensions/universal-darwin-19/2.6.0/rdiscount-2.2.0.1"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rdiscount-2.2.0.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/regexp_parser-1.7.1/lib"
@@ -62,10 +69,14 @@
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-its-1.3.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-retry-0.6.2/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rspec-wait-0.0.9/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-ast-0.2.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-ast-0.3.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ruby-progressbar-1.10.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/unicode-display_width-1.7.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-0.88.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-performance-1.7.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/rubocop-rspec-1.42.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/ruby-macho-2.2.0/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-static-0.5.5823-universal-darwin-19/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/sorbet-0.5.5823/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/thor-1.0.1/lib"
$:.unshift "#{path}/../#{ruby_engine}/#{ruby_version}/gems/tapioca-0.4.1/lib"
Binary file not shown.

This file was deleted.

@@ -16,7 +16,7 @@ module Concurrent
# operation therefore when two `+=` operations are executed concurrently updates
# may be lost. Use `#concat` instead.
#
# @see http://ruby-doc.org/core-2.2.0/Array.html Ruby standard library `Array`
# @see http://ruby-doc.org/core/Array.html Ruby standard library `Array`

# @!macro internal_implementation_note
ArrayImplementation = case
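
The doc comment in this hunk (only the `@see` URL changes) warns that `+=` on a `Concurrent::Array` is a read followed by a write and can therefore drop concurrent updates, and recommends `#concat` instead. A minimal sketch of the recommended pattern — illustrative only, not part of the diff:

```ruby
require "concurrent"

array = Concurrent::Array.new

# `array += [i]` builds a new array and reassigns the variable, so concurrent
# threads can overwrite each other's result; `#concat` mutates the shared
# array in a single call, which is what the docs above recommend.
threads = 10.times.map do |i|
  Thread.new { 100.times { array.concat([i]) } }
end
threads.each(&:join)

puts array.length # expected: 1000 (with `+=` some appends could be lost)
```
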
@@ -58,26 +58,6 @@ module Concurrent
# end
# ```
#
# When defining a constructor it is critical that the first line be a call to
# `super` with no arguments. The `super` method initializes the background
# thread and other asynchronous components.
#
# ```
# class BackgroundLogger
# include Concurrent::Async
#
# def initialize(level)
# super()
# @logger = Logger.new(STDOUT)
# @logger.level = level
# end
#
# def info(msg)
# @logger.info(msg)
# end
# end
# ```
#
# Mixing this module into a class provides each object two proxy methods:
# `async` and `await`. These methods are thread safe with respect to the
# enclosing object. The former proxy allows methods to be called
@@ -309,6 +289,7 @@ def initialize(delegate)
@delegate = delegate
@queue = []
@executor = Concurrent.global_io_executor
@ruby_pid = $$
end

# Delegates method calls to the wrapped object.
@@ -326,6 +307,7 @@ def method_missing(method, *args, &block)

ivar = Concurrent::IVar.new
synchronize do
reset_if_forked
@queue.push [ivar, method, args, block]
@executor.post { perform } if @queue.length == 1
end
@@ -361,6 +343,13 @@ def perform
end
end
end

def reset_if_forked
if $$ != @ruby_pid
@queue.clear
@ruby_pid = $$
end
end
end
private_constant :AsyncDelegator

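
These async.rb hunks remove a constructor example from the docs and make `AsyncDelegator` fork-aware: the delegator records the current PID (`$$`) at construction and, when a call arrives in a forked child, `reset_if_forked` clears the inherited queue before the new call is enqueued. The public mixin API is unchanged; below is a usage sketch along the lines of the example removed above — illustrative, not part of the commit:

```ruby
require "concurrent"
require "logger"

class BackgroundLogger
  include Concurrent::Async

  def initialize(level)
    super() # must be called first: it sets up the asynchronous machinery
    @logger = Logger.new($stdout)
    @logger.level = level
  end

  def info(msg)
    @logger.info(msg)
  end
end

logger = BackgroundLogger.new(Logger::INFO)
logger.async.info("returns an IVar immediately; runs on a background thread")
logger.await.info("blocks until the call has completed")
sleep 0.1 # give the async call a moment to flush before the script exits
```
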
@@ -28,38 +28,27 @@ class RubyThreadLocalVar < AbstractThreadLocalVar
# But when a Thread is GC'd, we need to drop the reference to its thread-local
# array, so we don't leak memory

# @!visibility private
FREE = []
LOCK = Mutex.new
ARRAYS = {} # used as a hash set
# noinspection RubyClassVariableUsageInspection
@@next = 0
QUEUE = Queue.new
THREAD = Thread.new do
while true
method, i = QUEUE.pop
case method
when :thread_local_finalizer
LOCK.synchronize do
FREE.push(i)
# The cost of GC'ing a TLV is linear in the number of threads using TLVs
# But that is natural! More threads means more storage is used per TLV
# So naturally more CPU time is required to free more storage
ARRAYS.each_value do |array|
array[i] = nil
end
end
when :thread_finalizer
LOCK.synchronize do
# The thread which used this thread-local array is now gone
# So don't hold onto a reference to the array (thus blocking GC)
ARRAYS.delete(i)
end
end
FREE = []
LOCK = Mutex.new
THREAD_LOCAL_ARRAYS = {} # used as a hash set

# synchronize when not on MRI
# on MRI using lock in finalizer leads to "can't be called from trap context" error
# so the code is carefully written to be thread-safe on MRI relying on the GIL

if Concurrent.on_cruby?
# @!visibility private
def self.semi_sync(&block)
block.call
end
else
# @!visibility private
def self.semi_sync(&block)
LOCK.synchronize(&block)
end
end

private_constant :FREE, :LOCK, :ARRAYS, :QUEUE, :THREAD
private_constant :FREE, :LOCK, :THREAD_LOCAL_ARRAYS

# @!macro thread_local_var_method_get
def value
@@ -85,7 +74,7 @@ def value=(value)
# Using Ruby's built-in thread-local storage is faster
unless (array = get_threadlocal_array(me))
array = set_threadlocal_array([], me)
LOCK.synchronize { ARRAYS[array.object_id] = array }
self.class.semi_sync { THREAD_LOCAL_ARRAYS[array.object_id] = array }
ObjectSpace.define_finalizer(me, self.class.thread_finalizer(array.object_id))
end
array[@index] = (value.nil? ? NULL : value)
@@ -95,32 +84,50 @@ def value=(value)
protected

# @!visibility private
# noinspection RubyClassVariableUsageInspection
def allocate_storage
@index = LOCK.synchronize do
FREE.pop || begin
result = @@next
@@next += 1
result
end
end
@index = FREE.pop || next_index

ObjectSpace.define_finalizer(self, self.class.thread_local_finalizer(@index))
end

# @!visibility private
def self.thread_local_finalizer(index)
# avoid error: can't be called from trap context
proc { QUEUE.push [:thread_local_finalizer, index] }
proc do
semi_sync do
# The cost of GC'ing a TLV is linear in the number of threads using TLVs
# But that is natural! More threads means more storage is used per TLV
# So naturally more CPU time is required to free more storage
THREAD_LOCAL_ARRAYS.each_value { |array| array[index] = nil }
# free index has to be published after the arrays are cleared
FREE.push(index)
end
end
end

# @!visibility private
def self.thread_finalizer(id)
# avoid error: can't be called from trap context
proc { QUEUE.push [:thread_finalizer, id] }
proc do
semi_sync do
# The thread which used this thread-local array is now gone
# So don't hold onto a reference to the array (thus blocking GC)
THREAD_LOCAL_ARRAYS.delete(id)
end
end
end

private

# noinspection RubyClassVariableUsageInspection
@@next = 0
# noinspection RubyClassVariableUsageInspection
def next_index
LOCK.synchronize do
result = @@next
@@next += 1
result
end
end

if Thread.instance_methods.include?(:thread_variable_get)

def get_threadlocal_array(thread = Thread.current)
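
The RubyThreadLocalVar rewrite drops the dedicated finalizer `THREAD`/`QUEUE` pair in favour of `semi_sync`: on CRuby the finalizers rely on the GIL (taking a `Mutex` in a finalizer raises "can't be called from trap context"), while other runtimes synchronize on `LOCK`. The public `ThreadLocalVar` behaviour is untouched; a brief sketch of that behaviour, for illustration only:

```ruby
require "concurrent"

var = Concurrent::ThreadLocalVar.new(0) # 0 is the per-thread default

threads = 3.times.map do |i|
  Thread.new do
    var.value = i   # stored in this thread's thread-local array
    sleep 0.01
    var.value       # still i, regardless of the other threads
  end
end

puts threads.map(&:value).inspect # => [0, 1, 2]
puts var.value                    # => 0, the main thread never wrote to it
```
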
@@ -19,7 +19,7 @@ def []=(key, value)
end

def compute_if_absent(key)
if stored_value = _get(key) # fast non-blocking path for the most likely case
if NULL != (stored_value = @backend.fetch(key, NULL)) # fast non-blocking path for the most likely case
stored_value
else
@write_lock.synchronize { super }
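
In this map backend, the fast path of `compute_if_absent` now fetches against the internal `NULL` sentinel instead of testing truthiness, so keys holding `nil` or `false` are also served without taking the write lock. Seen through the public `Concurrent::Map` API the contract is unchanged — a sketch, not part of the diff:

```ruby
require "concurrent"

cache = Concurrent::Map.new

# The block runs only when the key is absent; its result is stored and reused.
first  = cache.compute_if_absent(:feature_enabled) { false }
second = cache.compute_if_absent(:feature_enabled) { raise "never recomputed" }

p first  # => false
p second # => false, now served from the fast path even though it is falsy
```
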
Binary file not shown.
@@ -16,6 +16,9 @@ module Concurrent
# Default maximum number of seconds a thread in the pool may remain idle
# before being reclaimed.

# @!macro thread_pool_executor_constant_default_synchronous
# Default value of the :synchronous option.

# @!macro thread_pool_executor_attr_reader_max_length
# The maximum number of threads that may be created in the pool.
# @return [Integer] The maximum number of threads that may be created in the pool.
@@ -40,6 +43,10 @@
# The number of seconds that a thread may be idle before being reclaimed.
# @return [Integer] The number of seconds that a thread may be idle before being reclaimed.

# @!macro thread_pool_executor_attr_reader_synchronous
# Whether or not a value of 0 for the :max_queue option means the queue must perform direct hand-off rather than act as an unbounded queue.
# @return [true, false]

# @!macro thread_pool_executor_attr_reader_max_queue
# The maximum number of tasks that may be waiting in the work queue at any one time.
# When the queue size reaches `max_queue` subsequent tasks will be rejected in
@@ -21,12 +21,18 @@ class JavaThreadPoolExecutor < JavaExecutorService
# @!macro thread_pool_executor_constant_default_thread_timeout
DEFAULT_THREAD_IDLETIMEOUT = 60

# @!macro thread_pool_executor_constant_default_synchronous
DEFAULT_SYNCHRONOUS = false

# @!macro thread_pool_executor_attr_reader_max_length
attr_reader :max_length

# @!macro thread_pool_executor_attr_reader_max_queue
attr_reader :max_queue

# @!macro thread_pool_executor_attr_reader_synchronous
attr_reader :synchronous

# @!macro thread_pool_executor_method_initialize
def initialize(opts = {})
super(opts)
@@ -94,16 +100,22 @@ def ns_initialize(opts)
max_length = opts.fetch(:max_threads, DEFAULT_MAX_POOL_SIZE).to_i
idletime = opts.fetch(:idletime, DEFAULT_THREAD_IDLETIMEOUT).to_i
@max_queue = opts.fetch(:max_queue, DEFAULT_MAX_QUEUE_SIZE).to_i
@synchronous = opts.fetch(:synchronous, DEFAULT_SYNCHRONOUS)
@fallback_policy = opts.fetch(:fallback_policy, :abort)

raise ArgumentError.new("`synchronous` cannot be set unless `max_queue` is 0") if @synchronous && @max_queue > 0
raise ArgumentError.new("`max_threads` cannot be less than #{DEFAULT_MIN_POOL_SIZE}") if max_length < DEFAULT_MIN_POOL_SIZE
raise ArgumentError.new("`max_threads` cannot be greater than #{DEFAULT_MAX_POOL_SIZE}") if max_length > DEFAULT_MAX_POOL_SIZE
raise ArgumentError.new("`min_threads` cannot be less than #{DEFAULT_MIN_POOL_SIZE}") if min_length < DEFAULT_MIN_POOL_SIZE
raise ArgumentError.new("`min_threads` cannot be more than `max_threads`") if min_length > max_length
raise ArgumentError.new("#{fallback_policy} is not a valid fallback policy") unless FALLBACK_POLICY_CLASSES.include?(@fallback_policy)

if @max_queue == 0
queue = java.util.concurrent.LinkedBlockingQueue.new
if @synchronous
queue = java.util.concurrent.SynchronousQueue.new
else
queue = java.util.concurrent.LinkedBlockingQueue.new
end
else
queue = java.util.concurrent.LinkedBlockingQueue.new(@max_queue)
end
@@ -23,6 +23,9 @@ class RubyThreadPoolExecutor < RubyExecutorService
# @!macro thread_pool_executor_constant_default_thread_timeout
DEFAULT_THREAD_IDLETIMEOUT = 60

# @!macro thread_pool_executor_constant_default_synchronous
DEFAULT_SYNCHRONOUS = false

# @!macro thread_pool_executor_attr_reader_max_length
attr_reader :max_length

@@ -35,6 +38,9 @@ class RubyThreadPoolExecutor < RubyExecutorService
# @!macro thread_pool_executor_attr_reader_max_queue
attr_reader :max_queue

# @!macro thread_pool_executor_attr_reader_synchronous
attr_reader :synchronous

# @!macro thread_pool_executor_method_initialize
def initialize(opts = {})
super(opts)
@@ -114,9 +120,11 @@ def ns_initialize(opts)
@max_length = opts.fetch(:max_threads, DEFAULT_MAX_POOL_SIZE).to_i
@idletime = opts.fetch(:idletime, DEFAULT_THREAD_IDLETIMEOUT).to_i
@max_queue = opts.fetch(:max_queue, DEFAULT_MAX_QUEUE_SIZE).to_i
@synchronous = opts.fetch(:synchronous, DEFAULT_SYNCHRONOUS)
@fallback_policy = opts.fetch(:fallback_policy, :abort)
raise ArgumentError.new("#{@fallback_policy} is not a valid fallback policy") unless FALLBACK_POLICIES.include?(@fallback_policy)

raise ArgumentError.new("`synchronous` cannot be set unless `max_queue` is 0") if @synchronous && @max_queue > 0
raise ArgumentError.new("#{@fallback_policy} is not a valid fallback policy") unless FALLBACK_POLICIES.include?(@fallback_policy)
raise ArgumentError.new("`max_threads` cannot be less than #{DEFAULT_MIN_POOL_SIZE}") if @max_length < DEFAULT_MIN_POOL_SIZE
raise ArgumentError.new("`max_threads` cannot be greater than #{DEFAULT_MAX_POOL_SIZE}") if @max_length > DEFAULT_MAX_POOL_SIZE
raise ArgumentError.new("`min_threads` cannot be less than #{DEFAULT_MIN_POOL_SIZE}") if @min_length < DEFAULT_MIN_POOL_SIZE
@@ -201,6 +209,8 @@ def ns_assign_worker(*args, &task)
#
# @!visibility private
def ns_enqueue(*args, &task)
return false if @synchronous

if !ns_limited_queue? || @queue.size < @max_queue
@queue << [task, args]
true
@@ -73,7 +73,8 @@ class ThreadPoolExecutor < ThreadPoolExecutorImplementation
# @option opts [Symbol] :fallback_policy (:abort) the policy for handling new
# tasks that are received when the queue size has reached
# `max_queue` or the executor has shut down
#
# @option opts [Boolean] :synchronous (DEFAULT_SYNCHRONOUS) whether or not a value of 0
# for :max_queue means the queue must perform direct hand-off rather than act as an unbounded queue.
# @raise [ArgumentError] if `:max_threads` is less than one
# @raise [ArgumentError] if `:min_threads` is less than zero
# @raise [ArgumentError] if `:fallback_policy` is not one of the values specified
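
Taken together, the executor hunks above introduce the `:synchronous` option (default `DEFAULT_SYNCHRONOUS = false`): with `max_queue: 0` it selects direct hand-off — a `java.util.concurrent.SynchronousQueue` on JRuby, and an `ns_enqueue` that refuses to buffer on MRI — instead of an unbounded queue. A minimal sketch of how the new option might be used, illustrative and not part of the commit:

```ruby
require "concurrent"

pool = Concurrent::ThreadPoolExecutor.new(
  min_threads:     2,
  max_threads:     2,
  max_queue:       0,    # required: :synchronous may only be set when max_queue is 0
  synchronous:     true, # hand tasks directly to an idle worker, never buffer them
  fallback_policy: :caller_runs # run in the submitting thread when no worker is free
)

10.times do |i|
  pool.post { sleep 0.05; puts "task #{i}" }
end

pool.shutdown
pool.wait_for_termination
```
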
@@ -10,7 +10,7 @@ module Concurrent
# or writing at a time. This includes iteration methods like `#each`,
# which takes the lock repeatedly when reading an item.
#
# @see http://ruby-doc.org/core-2.2.0/Hash.html Ruby standard library `Hash`
# @see http://ruby-doc.org/core/Hash.html Ruby standard library `Hash`

# @!macro internal_implementation_note
HashImplementation = case
