New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove String#freeze calls #3759

Merged
merged 2 commits into from Feb 17, 2018
File filter...
Filter file types
Jump to file or symbol
Failed to load files and symbols.
+93 −92
Diff settings

Always

Just for now

Copy path View file
@@ -2,6 +2,12 @@

[Sidekiq Changes](https://github.com/mperham/sidekiq/blob/master/Changes.md) | [Sidekiq Pro Changes](https://github.com/mperham/sidekiq/blob/master/Pro-Changes.md) | [Sidekiq Enterprise Changes](https://github.com/mperham/sidekiq/blob/master/Ent-Changes.md)

HEAD
-----------

- Remove `freeze` calls on String constants. This is superfluous with Ruby
2.3+ and `frozen_string_literal: true`. [#3759]

5.1.1
-----------

Copy path View file
@@ -1,4 +1,3 @@
# encoding: utf-8
# frozen_string_literal: true
require 'sidekiq/version'
fail "Sidekiq #{Sidekiq::VERSION} does not support Ruby versions below 2.2.2." if RUBY_PLATFORM != 'java' && RUBY_VERSION < '2.2.2'
@@ -12,7 +11,7 @@
require 'json'

module Sidekiq
NAME = 'Sidekiq'.freeze
NAME = 'Sidekiq'
LICENSE = 'See LICENSE and the LGPL-3.0 for licensing details.'

DEFAULTS = {
@@ -48,7 +47,7 @@ module Sidekiq
"connected_clients" => "9999",
"used_memory_human" => "9P",
"used_memory_peak_human" => "9P"
}.freeze

This comment has been minimized.

@SamSaffron

SamSaffron Feb 16, 2018

note this is a behavior change cause Hash is not a string literal so the hash will no longer be frozen

This comment has been minimized.

@mperham

mperham Feb 17, 2018

Owner

In this case, it's not a concern.

}

def self.❨╯°□°❩╯︵┻━┻
puts "Calm down, yo."
Copy path View file
@@ -1,4 +1,3 @@
# encoding: utf-8
# frozen_string_literal: true
require 'sidekiq'

@@ -51,21 +50,21 @@ def queues
def fetch_stats!
pipe1_res = Sidekiq.redis do |conn|
conn.pipelined do
conn.get('stat:processed'.freeze)
conn.get('stat:failed'.freeze)
conn.zcard('schedule'.freeze)
conn.zcard('retry'.freeze)
conn.zcard('dead'.freeze)
conn.scard('processes'.freeze)
conn.lrange('queue:default'.freeze, -1, -1)
conn.smembers('processes'.freeze)
conn.smembers('queues'.freeze)
conn.get('stat:processed')
conn.get('stat:failed')
conn.zcard('schedule')
conn.zcard('retry')
conn.zcard('dead')
conn.scard('processes')
conn.lrange('queue:default', -1, -1)
conn.smembers('processes')
conn.smembers('queues')
end
end

pipe2_res = Sidekiq.redis do |conn|
conn.pipelined do
pipe1_res[7].each {|key| conn.hget(key, 'busy'.freeze) }
pipe1_res[7].each {|key| conn.hget(key, 'busy') }
pipe1_res[8].each {|queue| conn.llen("queue:#{queue}") }
end
end
@@ -77,7 +76,7 @@ def fetch_stats!
default_queue_latency = if (entry = pipe1_res[6].first)
job = Sidekiq.load_json(entry) rescue {}
now = Time.now.to_f
thence = job['enqueued_at'.freeze] || now
thence = job['enqueued_at'] || now
now - thence
else
0
@@ -119,7 +118,7 @@ def stat(s)
class Queues
def lengths
Sidekiq.redis do |conn|
queues = conn.smembers('queues'.freeze)
queues = conn.smembers('queues')

lengths = conn.pipelined do
queues.each do |queue|
@@ -163,7 +162,7 @@ def date_stat_hash(stat)

while i < @days_previous
date = @start_date - i
datestr = date.strftime("%Y-%m-%d".freeze)
datestr = date.strftime("%Y-%m-%d")
keys << "stat:#{stat}:#{datestr}"
dates << datestr
i += 1
@@ -204,7 +203,7 @@ class Queue
# Return all known queues within Redis.
#
def self.all
Sidekiq.redis { |c| c.smembers('queues'.freeze) }.sort.map { |q| Sidekiq::Queue.new(q) }
Sidekiq.redis { |c| c.smembers('queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
end

attr_reader :name
@@ -273,7 +272,7 @@ def clear
Sidekiq.redis do |conn|
conn.multi do
conn.del(@rname)
conn.srem("queues".freeze, name)
conn.srem("queues", name)
end
end
end
@@ -349,9 +348,9 @@ def display_args
job_args
end
else
if self['encrypt'.freeze]
if self['encrypt']
# no point in showing 150+ bytes of random garbage
args[-1] = '[encrypted data]'.freeze
args[-1] = '[encrypted data]'
end
args
end
Copy path View file
@@ -1,4 +1,3 @@
# encoding: utf-8
# frozen_string_literal: true
$stdout.sync = true

@@ -17,7 +16,7 @@ class CLI
include Singleton unless $TESTING

PROCTITLES = [
proc { 'sidekiq'.freeze },
proc { 'sidekiq' },
proc { Sidekiq::VERSION },
proc { |me, data| data['tag'] },
proc { |me, data| "[#{Processor::WORKER_STATE.size} of #{data['concurrency']} busy]" },
Copy path View file
@@ -68,11 +68,11 @@ def initialize(redis_pool=nil)
#
def push(item)
normed = normalize_item(item)
payload = process_single(item['class'.freeze], normed)
payload = process_single(item['class'], normed)

if payload
raw_push([payload])
payload['jid'.freeze]
payload['jid']
end
end

@@ -89,19 +89,19 @@ def push(item)
Returns an array of the pushed jobs' jids. The number of jobs pushed can be less
# than the number given if the middleware stopped processing for one or more jobs.
def push_bulk(items)
arg = items['args'.freeze].first
arg = items['args'].first
return [] unless arg # no jobs to push
raise ArgumentError, "Bulk arguments must be an Array of Arrays: [[1], [2]]" if !arg.is_a?(Array)

normed = normalize_item(items)
payloads = items['args'.freeze].map do |args|
copy = normed.merge('args'.freeze => args, 'jid'.freeze => SecureRandom.hex(12), 'enqueued_at'.freeze => Time.now.to_f)
result = process_single(items['class'.freeze], copy)
payloads = items['args'].map do |args|
copy = normed.merge('args' => args, 'jid' => SecureRandom.hex(12), 'enqueued_at' => Time.now.to_f)
result = process_single(items['class'], copy)
result ? result : nil
end.compact

raw_push(payloads) if !payloads.empty?
payloads.collect { |payload| payload['jid'.freeze] }
payloads.collect { |payload| payload['jid'] }
end

# Allows sharding of jobs across any number of Redis instances. All jobs
@@ -144,14 +144,14 @@ def push_bulk(items)
# Messages are enqueued to the 'default' queue.
#
def enqueue(klass, *args)
klass.client_push('class'.freeze => klass, 'args'.freeze => args)
klass.client_push('class' => klass, 'args' => args)
end

# Example usage:
# Sidekiq::Client.enqueue_to(:queue_name, MyWorker, 'foo', 1, :bat => 'bar')
#
def enqueue_to(queue, klass, *args)
klass.client_push('queue'.freeze => queue, 'class'.freeze => klass, 'args'.freeze => args)
klass.client_push('queue' => queue, 'class' => klass, 'args' => args)
end

# Example usage:
@@ -162,8 +162,8 @@ def enqueue_to_in(queue, interval, klass, *args)
now = Time.now.to_f
ts = (int < 1_000_000_000 ? now + int : int)

item = { 'class'.freeze => klass, 'args'.freeze => args, 'at'.freeze => ts, 'queue'.freeze => queue }
item.delete('at'.freeze) if ts <= now
item = { 'class' => klass, 'args' => args, 'at' => ts, 'queue' => queue }
item.delete('at') if ts <= now

klass.client_push(item)
end
@@ -188,51 +188,51 @@ def raw_push(payloads)
end

def atomic_push(conn, payloads)
if payloads.first['at'.freeze]
conn.zadd('schedule'.freeze, payloads.map do |hash|
at = hash.delete('at'.freeze).to_s
if payloads.first['at']
conn.zadd('schedule', payloads.map do |hash|
at = hash.delete('at').to_s
[at, Sidekiq.dump_json(hash)]
end)
else
q = payloads.first['queue'.freeze]
q = payloads.first['queue']
now = Time.now.to_f
to_push = payloads.map do |entry|
entry['enqueued_at'.freeze] = now
entry['enqueued_at'] = now
Sidekiq.dump_json(entry)
end
conn.sadd('queues'.freeze, q)
conn.sadd('queues', q)
conn.lpush("queue:#{q}", to_push)
end
end

def process_single(worker_class, item)
queue = item['queue'.freeze]
queue = item['queue']

middleware.invoke(worker_class, item, queue, @redis_pool) do
item
end
end

def normalize_item(item)
raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.has_key?('class'.freeze) && item.has_key?('args'.freeze)
raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.has_key?('class') && item.has_key?('args')
raise(ArgumentError, "Job args must be an Array") unless item['args'].is_a?(Array)
raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item['class'.freeze].is_a?(Class) || item['class'.freeze].is_a?(String)
raise(ArgumentError, "Job 'at' must be a Numeric timestamp") if item.has_key?('at'.freeze) && !item['at'].is_a?(Numeric)
raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item['class'].is_a?(Class) || item['class'].is_a?(String)
raise(ArgumentError, "Job 'at' must be a Numeric timestamp") if item.has_key?('at') && !item['at'].is_a?(Numeric)
#raise(ArgumentError, "Arguments must be native JSON types, see https://github.com/mperham/sidekiq/wiki/Best-Practices") unless JSON.load(JSON.dump(item['args'])) == item['args']

normalized_hash(item['class'.freeze])
normalized_hash(item['class'])
.each{ |key, value| item[key] = value if item[key].nil? }

item['class'.freeze] = item['class'.freeze].to_s
item['queue'.freeze] = item['queue'.freeze].to_s
item['jid'.freeze] ||= SecureRandom.hex(12)
item['created_at'.freeze] ||= Time.now.to_f
item['class'] = item['class'].to_s
item['queue'] = item['queue'].to_s
item['jid'] ||= SecureRandom.hex(12)
item['created_at'] ||= Time.now.to_f
item
end

def normalized_hash(item_class)
if item_class.is_a?(Class)
raise(ArgumentError, "Message must include a Sidekiq::Worker class, not class name: #{item_class.ancestors.inspect}") if !item_class.respond_to?('get_sidekiq_options'.freeze)
raise(ArgumentError, "Message must include a Sidekiq::Worker class, not class name: #{item_class.ancestors.inspect}") if !item_class.respond_to?('get_sidekiq_options')
item_class.get_sidekiq_options
else
Sidekiq.default_worker_options
Copy path View file
@@ -13,7 +13,7 @@ def acknowledge
end

def queue_name
queue.sub(/.*queue:/, ''.freeze)
queue.sub(/.*queue:/, '')
end

def requeue
Copy path View file
@@ -3,7 +3,7 @@ class JobLogger

def call(item, queue)
start = Time.now
logger.info("start".freeze)
logger.info("start")
yield
logger.info("done: #{elapsed(start)} sec")
rescue Exception
Copy path View file
@@ -1,4 +1,3 @@
# encoding: utf-8
# frozen_string_literal: true
require 'sidekiq/manager'
require 'sidekiq/fetch'
@@ -76,13 +75,13 @@ def ❤
Processor::FAILURE.update {|curr| fails = curr; 0 }
Processor::PROCESSED.update {|curr| procd = curr; 0 }

workers_key = "#{key}:workers".freeze
nowdate = Time.now.utc.strftime("%Y-%m-%d".freeze)
workers_key = "#{key}:workers"
nowdate = Time.now.utc.strftime("%Y-%m-%d")
Sidekiq.redis do |conn|
conn.multi do
conn.incrby("stat:processed".freeze, procd)
conn.incrby("stat:processed", procd)
conn.incrby("stat:processed:#{nowdate}", procd)
conn.incrby("stat:failed".freeze, fails)
conn.incrby("stat:failed", fails)
conn.incrby("stat:failed:#{nowdate}", fails)
conn.del(workers_key)
Processor::WORKER_STATE.each_pair do |tid, hash|
Copy path View file
@@ -33,9 +33,9 @@ def self.tid
def self.job_hash_context(job_hash)
# If we're using a wrapper class, like ActiveJob, use the "wrapped"
# attribute to expose the underlying thing.
klass = job_hash['wrapped'.freeze] || job_hash["class".freeze]
bid = job_hash['bid'.freeze]
"#{klass} JID-#{job_hash['jid'.freeze]}#{" BID-#{bid}" if bid}"
klass = job_hash['wrapped'] || job_hash["class"]
bid = job_hash['bid']
"#{klass} JID-#{job_hash['jid']}#{" BID-#{bid}" if bid}"
end

def self.with_job_hash_context(job_hash, &block)
Copy path View file
@@ -1,4 +1,3 @@
# encoding: utf-8
# frozen_string_literal: true
require 'sidekiq/util'
require 'sidekiq/processor'
Copy path View file
@@ -132,9 +132,9 @@ def dispatch(job_hash, queue)
# the Reloader. It handles code loading, db connection management, etc.
# Effectively this block denotes a "unit of work" to Rails.
@reloader.call do
klass = constantize(job_hash['class'.freeze])
klass = constantize(job_hash['class'])
worker = klass.new
worker.jid = job_hash['jid'.freeze]
worker.jid = job_hash['jid']
@retrier.local(worker, pristine, queue) do
yield worker
end
@@ -166,7 +166,7 @@ def process(work)
ack = true
dispatch(job_hash, queue) do |worker|
Sidekiq.server_middleware.invoke(worker, job_hash, queue) do
execute_job(worker, cloned(job_hash['args'.freeze]))
execute_job(worker, cloned(job_hash['args']))
end
end
rescue Sidekiq::Shutdown
@@ -70,7 +70,7 @@ def client_opts(options)
opts.delete(:network_timeout)
end

opts[:driver] ||= 'ruby'.freeze
opts[:driver] ||= 'ruby'

# Issue #3303, redis-rb will silently retry an operation.
# This can lead to duplicate jobs if Sidekiq::Client's LPUSH
Copy path View file
@@ -17,7 +17,7 @@ def enqueue_jobs(now=Time.now.to_f.to_s, sorted_sets=SETS)
# We need to go through the list one at a time to reduce the risk of something
# going wrong between the time jobs are popped from the scheduled queue and when
# they are pushed onto a work queue and losing the jobs.
while job = conn.zrangebyscore(sorted_set, '-inf'.freeze, now, :limit => [0, 1]).first do
while job = conn.zrangebyscore(sorted_set, '-inf', now, :limit => [0, 1]).first do

# Pop item off the queue and add it to the work queue. If the job can't be popped from
# the queue, it's because another process already popped it so we can move on to the
Copy path View file
@@ -21,7 +21,7 @@ def watchdog(last_words)

def safe_thread(name, &block)
Thread.new do
Thread.current['sidekiq_label'.freeze] = name
Thread.current['sidekiq_label'] = name
watchdog(name, &block)
end
end
Oops, something went wrong.
ProTip! Use n and p to navigate between commits in a pull request.