/
job.rb
647 lines (550 loc) · 18.5 KB
/
job.rb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
require 'fugit'
require 'sidekiq'
require 'sidekiq/util'
require 'sidekiq/cron/support'
module Sidekiq
module Cron
class Job
include Util
extend Util
#how long we would like to store informations about previous enqueues
REMEMBER_THRESHOLD = 24 * 60 * 60
LAST_ENQUEUE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S %z'
# Use the exists? method if we're on a newer version of redis.
REDIS_EXISTS_METHOD = Gem.loaded_specs['redis'].version < Gem::Version.new('4.2') ? :exists : :exists?
# Crucial gatekeeper of the whole enqueueing flow: decides whether this
# job should be enqueued for the given time.
# Truthy only when the job is enabled, the current scheduled slot is still
# within the allowed window, the job has not already been enqueued for that
# slot, and the Redis ZADD marking the slot succeeds (which also guards
# against double enqueues from concurrent pollers).
def should_enque? time
  Sidekiq.redis do |conn|
    status == "enabled" &&
      not_past_scheduled_time?(time) &&
      not_enqueued_after?(time) &&
      conn.zadd(job_enqueued_key, formated_enqueue_time(time), formated_last_time(time))
  end
end
# Remove enqueue-history entries older than REMEMBER_THRESHOLD so the
# per-job sorted set in Redis cannot grow without bound.
#
# Fix: dropped the redundant `.to_s` inside the string interpolation
# (interpolation already stringifies); the produced score argument is
# byte-identical. The leading "(" makes the max score exclusive for
# ZREMRANGEBYSCORE.
def remove_previous_enques time
  Sidekiq.redis do |conn|
    conn.zremrangebyscore(job_enqueued_key, 0, "(#{time.to_f - REMEMBER_THRESHOLD}")
  end
end
# Test whether the job is due for +time+; if so, enqueue it and prune
# old enqueue history.
def test_and_enque_for_time! time
  return unless should_enque?(time)

  enque!
  remove_previous_enques(time)
end
# enqueue cron job to its queue and record the enqueue in Redis
def enque! time = Time.now.utc
  @last_enqueue_time = time.strftime(LAST_ENQUEUE_TIME_FORMAT)

  # resolve the configured class; nil when the constant is not loadable
  # in this process
  klass_const =
    begin
      Sidekiq::Cron::Support.constantize(@klass.to_s)
    rescue NameError
      nil
    end

  jid =
    if klass_const
      # prefer the ActiveJob path when the class is an ActiveJob subclass
      # NOTE(review): `.try` relies on ActiveSupport being loaded - confirm
      if defined?(ActiveJob::Base) && klass_const < ActiveJob::Base
        enqueue_active_job(klass_const).try :provider_job_id
      else
        enqueue_sidekiq_worker(klass_const)
      end
    else
      # class not resolvable here - push a raw message hash instead
      if @active_job
        Sidekiq::Client.push(active_job_message)
      else
        Sidekiq::Client.push(sidekiq_worker_message)
      end
    end

  save_last_enqueue_time
  add_jid_history jid
  logger.debug { "enqueued #{@name}: #{@message}" }
end
# Whether this job should go through ActiveJob: either explicitly flagged
# via the 'active_job' option, or its class resolves to an ActiveJob::Base
# subclass. An unresolvable class name counts as "not an ActiveJob".
def is_active_job?
  @active_job || defined?(ActiveJob::Base) && Sidekiq::Cron::Support.constantize(@klass.to_s) < ActiveJob::Base
rescue NameError
  false
end
# Enqueue through ActiveJob so its adapter/callbacks apply;
# returns the ActiveJob instance (enque! reads provider_job_id from it).
def enqueue_active_job(klass_const)
  klass_const.set(queue: @queue).perform_later(*@args)
end

# Enqueue a plain Sidekiq worker on the (possibly prefixed) queue;
# returns the value of perform_async (used as the jid by enque!).
def enqueue_sidekiq_worker(klass_const)
  klass_const.set(queue: queue_name_with_prefix).perform_async(*@args)
end
# Payload for a plain Sidekiq worker push. A String message is assumed to
# be dumped JSON and is decoded first; a Hash passes through untouched.
def sidekiq_worker_message
  return @message unless @message.is_a?(String)

  Sidekiq.load_json(@message)
end
# Queue name for ActiveJob jobs, expanded the same way ActiveJob expands
# queue_name_prefix/queue_name_delimiter. Plain Sidekiq workers get the
# configured queue unchanged.
def queue_name_with_prefix
  return @queue unless is_active_job?

  # delimiter precedence: per-job option, then ActiveJob's setting, then '_'
  if !"#{@active_job_queue_name_delimiter}".empty?
    queue_name_delimiter = @active_job_queue_name_delimiter
  elsif defined?(ActiveJob::Base) && defined?(ActiveJob::Base.queue_name_delimiter) && !ActiveJob::Base.queue_name_delimiter.empty?
    queue_name_delimiter = ActiveJob::Base.queue_name_delimiter
  else
    queue_name_delimiter = '_'
  end

  # prefix precedence: per-job option, then ActiveJob's queue_name_prefix,
  # else no prefix at all
  if !"#{@active_job_queue_name_prefix}".empty?
    queue_name = "#{@active_job_queue_name_prefix}#{queue_name_delimiter}#{@queue}"
  elsif defined?(ActiveJob::Base) && defined?(ActiveJob::Base.queue_name_prefix) && !"#{ActiveJob::Base.queue_name_prefix}".empty?
    queue_name = "#{ActiveJob::Base.queue_name_prefix}#{queue_name_delimiter}#{@queue}"
  else
    queue_name = @queue
  end

  queue_name
end
# ActiveJob loads its jobs from the Sidekiq queue through a wrapper class,
# so the pushed payload wraps the real job class and arguments in the
# structure ActiveJob's SidekiqAdapter expects.
def active_job_message
  wrapped_job = {
    'job_class'  => @klass,
    'job_id'     => SecureRandom.uuid,
    'queue_name' => @queue_name_with_prefix,
    'arguments'  => @args
  }

  {
    'class'       => 'ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper',
    'wrapped'     => @klass,
    'queue'       => @queue_name_with_prefix,
    'description' => @description,
    'args'        => [wrapped_job]
  }
end
# Load cron jobs from a Hash keyed by job name, e.g.:
#
#   {
#     'name_of_job' => {
#       'class' => 'MyClass',
#       'cron'  => '1 * * * *',
#       'args'  => '(OPTIONAL) [Array or Hash]',
#       'description' => '(OPTIONAL) Description of job'
#     },
#     'My super iber cool job' => {
#       'class' => 'SecondClass',
#       'cron'  => '*/5 * * * *'
#     }
#   }
#
# Each job hash gets its key stored under 'name' (the input hashes are
# mutated, matching the original behavior), then everything is delegated
# to .load_from_array. Returns that method's error hash.
def self.load_from_hash hash
  jobs = []
  hash.each do |name, job|
    job['name'] = name
    jobs << job
  end
  load_from_array jobs
end
# Like {#load_from_hash}, but first destroys any jobs that exist in Redis
# and are no longer present in the given hash.
def self.load_from_hash! hash
  destroy_removed_jobs(hash.keys)
  load_from_hash(hash)
end
# Load cron jobs from an Array of job hashes, e.g.:
#
#   [
#     {
#       'name'  => 'name_of_job',
#       'class' => 'MyClass',
#       'cron'  => '1 * * * *',
#       'args'  => '(OPTIONAL) [Array or Hash]',
#       'description' => '(OPTIONAL) Description of job'
#     },
#     {
#       'name'  => 'Cool Job for Second Class',
#       'class' => 'SecondClass',
#       'cron'  => '*/5 * * * *'
#     }
#   ]
#
# Returns a hash of validation errors keyed by job name; empty when every
# job saved successfully.
def self.load_from_array array
  array.each_with_object({}) do |job_data, errors|
    job = new(job_data)
    errors[job.name] = job.errors unless job.save
  end
end
# Like {#load_from_array}, but first destroys any jobs that exist in Redis
# and are no longer listed in the given array.
def self.load_from_array! array
  destroy_removed_jobs(array.map { |job| job["name"] })
  load_from_array(array)
end
# get all cron jobs stored in Redis
def self.all
  job_hashes = nil
  Sidekiq.redis do |conn|
    set_members = conn.smembers(jobs_key)
    # fetch every job hash in a single round trip
    # NOTE(review): using the outer `conn` inside `pipelined` is deprecated
    # in newer redis-rb versions - confirm against the gem version in use
    job_hashes = conn.pipelined do
      set_members.each do |key|
        conn.hgetall(key)
      end
    end
  end
  # drop stale set members whose hash no longer exists
  job_hashes.compact.reject(&:empty?).collect do |h|
    # no need to fetch missing args from redis since we just got this hash from there
    Sidekiq::Cron::Job.new(h.merge(fetch_missing_args: false))
  end
end
# Number of registered cron jobs (cardinality of the jobs set in Redis).
def self.count
  Sidekiq.redis { |conn| conn.scard(jobs_key) }
end
# Find one job by name (a Hash argument carries the name under :name or
# 'name'). Returns a Job instance, or nil when no such job exists.
def self.find name
  name = name[:name] || name['name'] if name.is_a?(Hash)

  output = nil
  Sidekiq.redis do |conn|
    output = Job.new(conn.hgetall(redis_key(name))) if exists?(name)
  end
  output
end
# create and persist a new cron job from an options hash;
# returns false when validation fails (see #save)
def self.create hash
  new(hash).save
end

# destroy job by name (or by a hash containing the name);
# returns false when no such job exists
def self.destroy name
  # if name is hash try to get name from it
  name = name[:name] || name['name'] if name.is_a?(Hash)

  if job = find(name)
    job.destroy
  else
    false
  end
end
attr_accessor :name, :cron, :description, :klass, :args, :message
attr_reader :last_enqueue_time, :fetch_missing_args

# Build a job from an options hash (string or symbol keys accepted).
# Unless 'fetch_missing_args' is explicitly false, missing status and
# last_enqueue_time are backfilled from Redis.
def initialize input_args = {}
  # normalize all keys to strings
  args = Hash[input_args.map{ |k, v| [k.to_s, v] }]
  @fetch_missing_args = args.delete('fetch_missing_args')
  @fetch_missing_args = true if @fetch_missing_args.nil?

  @name = args["name"]
  @cron = args["cron"]
  @description = args["description"] if args["description"]

  # get class from klass or class
  @klass = args["klass"] || args["class"]

  # set status of job
  @status = args['status'] || status_from_redis

  # set last enqueue time - from args or from existing job in Redis
  if args['last_enqueue_time'] && !args['last_enqueue_time'].empty?
    @last_enqueue_time = parse_enqueue_time(args['last_enqueue_time'])
  else
    @last_enqueue_time = last_enqueue_time_from_redis
  end

  # get right arguments for job
  @args = args["args"].nil? ? [] : parse_args( args["args"] )
  # append a timestamp argument when requested
  # NOTE(review): evaluated once here, not at enqueue time - confirm intended
  @args += [Time.now.to_f] if args["date_as_argument"]

  # accept boolean true or truthy strings like "true"/"yes"/"1"
  @active_job = args["active_job"] == true || ("#{args["active_job"]}" =~ (/^(true|t|yes|y|1)$/i)) == 0 || false
  @active_job_queue_name_prefix = args["queue_name_prefix"]
  @active_job_queue_name_delimiter = args["queue_name_delimiter"]

  if args["message"]
    @message = args["message"]
    message_data = Sidekiq.load_json(@message) || {}
    @queue = message_data['queue'] || "default"
  elsif @klass
    message_data = {
      "class" => @klass.to_s,
      "args" => @args,
    }

    # get right data for message
    # only if message wasn't specified before
    klass_data = case @klass
      when Class
        @klass.get_sidekiq_options
      when String
        begin
          Sidekiq::Cron::Support.constantize(@klass).get_sidekiq_options
        rescue Exception => e
          # unknown class - fall back to the default queue
          # NOTE(review): rescuing Exception is very broad - confirm intentional
          {"queue"=>"default"}
        end
    end

    message_data = klass_data.merge(message_data)
    # override queue if set in config
    # only if message is hash - can be string (dumped JSON)
    if args['queue']
      @queue = message_data['queue'] = args['queue']
    else
      @queue = message_data['queue'] || "default"
    end

    @message = message_data
  end

  @queue_name_with_prefix = queue_name_with_prefix
end
# current status string ("enabled" or "disabled")
def status
  @status
end

# mark job disabled and persist immediately
def disable!
  @status = "disabled"
  save
end

# mark job enabled and persist immediately
def enable!
  @status = "enabled"
  save
end

def enabled?
  @status == "enabled"
end

def disabled?
  !enabled?
end
# message pretty-printed as JSON for display purposes;
# returns the raw message when it is not valid JSON
def pretty_message
  JSON.pretty_generate Sidekiq.load_json(message)
rescue JSON::ParserError
  message
end
# status stored in Redis; defaults to "enabled" when absent or when
# fetching missing args is disabled
def status_from_redis
  out = "enabled"
  if fetch_missing_args
    Sidekiq.redis do |conn|
      status = conn.hget redis_key, "status"
      out = status if status
    end
  end
  out
end

# last enqueue time stored in Redis, or nil when absent or unparsable
def last_enqueue_time_from_redis
  out = nil
  if fetch_missing_args
    Sidekiq.redis do |conn|
      # NOTE(review): `rescue nil` hides parse AND connection errors here
      out = parse_enqueue_time(conn.hget(redis_key, "last_enqueue_time")) rescue nil
    end
  end
  out
end
# JID history entries for this job, decoded from JSON (newest pushed
# first via lpush in add_jid_history); nil when the Redis read fails
def jid_history_from_redis
  out =
    Sidekiq.redis do |conn|
      conn.lrange(jid_history_key, 0, -1) rescue nil
    end

  # returns nil if out nil
  out && out.map do |jid_history_raw|
    Sidekiq.load_json jid_history_raw
  end
end
# Export job data to a Hash suitable for persisting in Redis.
# Args and message are serialized to JSON strings unless they already
# are strings (i.e. pre-dumped JSON).
def to_hash
  serialized_args = @args.is_a?(String) ? @args : Sidekiq.dump_json(@args || [])
  serialized_message = @message.is_a?(String) ? @message : Sidekiq.dump_json(@message || {})

  {
    name: @name,
    klass: @klass,
    cron: @cron,
    description: @description,
    args: serialized_args,
    message: serialized_message,
    status: @status,
    active_job: @active_job,
    queue_name_prefix: @active_job_queue_name_prefix,
    queue_name_delimiter: @active_job_queue_name_delimiter,
    last_enqueue_time: @last_enqueue_time,
  }
end
# validation errors collected by the most recent #valid? call
def errors
  @errors ||= []
end

# Validate name, cron expression (via Fugit) and class presence.
# Repopulates #errors and returns true when the job is valid.
def valid?
  # clear previous errors
  @errors = []
  errors << "'name' must be set" if @name.nil? || @name.size == 0

  if @cron.nil? || @cron.size == 0
    errors << "'cron' must be set"
  else
    begin
      # cache the parsed cron for later scheduling calculations
      @parsed_cron = Fugit.do_parse_cron(@cron)
    rescue => e
      errors << "'cron' -> #{@cron.inspect} -> #{e.class}: #{e.message}"
    end
  end

  errors << "'klass' (or class) must be set" unless klass_valid

  errors.empty?
end
# True when @klass is a Class, or a non-empty String naming one.
# Anything else yields nil; callers (#valid?) only rely on truthiness.
def klass_valid
  if @klass.is_a?(Class)
    true
  elsif @klass.is_a?(String)
    !@klass.empty?
  end
end
# add job to cron jobs
# input:
#   name: (string) - name of job
#   cron: (string: '* * * * *' - cron specification when to run job
#   class: (string|class) - which class to perform
# optional input:
#   queue: (string) - which queue to use for enquing (will override class queue)
#   args: (array|hash|nil) - arguments for perform method
#
# Returns false when the job is invalid; otherwise persists the job and
# returns the (truthy) logger result.
def save
  # if job is invalid return false
  return false unless valid?

  Sidekiq.redis do |conn|
    # add to set of all jobs
    conn.sadd self.class.jobs_key, redis_key

    # add informations for this job!
    conn.hmset redis_key, *hash_to_redis(to_hash)

    # add information about last time! - don't enque right after scheduler poller starts!
    time = Time.now.utc
    conn.zadd(job_enqueued_key, time.to_f.to_s, formated_last_time(time).to_s) unless conn.public_send(REDIS_EXISTS_METHOD, job_enqueued_key)
  end
  logger.info { "Cron Jobs - added job with name: #{@name}" }
end
# persist @last_enqueue_time into the job hash in Redis
def save_last_enqueue_time
  Sidekiq.redis do |conn|
    # update last enqueue time
    conn.hset redis_key, 'last_enqueue_time', @last_enqueue_time
  end
end

# Prepend a {jid:, enqueued:} record to the job's JID history list and
# trim the list to the configured size (:cron_history_size, default 10).
def add_jid_history(jid)
  jid_history = {
    jid: jid,
    enqueued: @last_enqueue_time
  }
  # ltrim bounds are zero-indexed and inclusive, hence the -1
  @history_size ||= (Sidekiq.options[:cron_history_size] || 10).to_i - 1
  Sidekiq.redis do |conn|
    conn.lpush jid_history_key,
               Sidekiq.dump_json(jid_history)
    # keep the history bounded in a FIFO manner
    conn.ltrim jid_history_key, 0, @history_size
  end
end
# Remove this job from cron jobs: unregister it from the jobs set, then
# delete its enqueue history, JID history and the stored job hash.
def destroy
  Sidekiq.redis do |conn|
    conn.srem self.class.jobs_key, redis_key
    [job_enqueued_key, jid_history_key, redis_key].each do |key|
      conn.del key
    end
  end
  logger.info { "Cron Jobs - deleted job with name: #{@name}" }
end
# Remove every cron job from Redis.
def self.destroy_all!
  all.each(&:destroy)
  logger.info { "Cron Jobs - deleted all jobs" }
end
# remove "removed jobs" between current jobs and new jobs
def self.destroy_removed_jobs new_job_names
current_job_names = Sidekiq::Cron::Job.all.map(&:name)
removed_job_names = current_job_names - new_job_names
removed_job_names.each { |j| Sidekiq::Cron::Job.destroy(j) }
removed_job_names
end
# Parse cron specification '* * * * *' and return the time (UTC) the
# last run should have been performed, relative to +now+.
def last_time now = Time.now.utc
  parsed_cron.previous_time(now.utc).utc
end

# last scheduled time as an epoch-float string (used as the ZSET score)
def formated_enqueue_time now = Time.now.utc
  last_time(now).getutc.to_f.to_s
end

# last scheduled time as an ISO8601 string (used as the ZSET member)
def formated_last_time now = Time.now.utc
  last_time(now).getutc.iso8601
end
# true when a job hash with the given name exists in Redis
def self.exists? name
  out = false
  Sidekiq.redis do |conn|
    out = conn.public_send(REDIS_EXISTS_METHOD, redis_key(name))
  end
  out
end

def exists?
  self.class.exists? @name
end

# sort key that orders enabled jobs before disabled ones, then by name
def sort_name
  "#{status == "enabled" ? 0 : 1}_#{name}".downcase
end
private

# cached Fugit cron object (may already be populated by #valid?)
def parsed_cron
  @parsed_cron ||= Fugit.parse_cron(@cron)
end

# true when the job has not yet been enqueued for the current
# scheduled slot
def not_enqueued_after?(time)
  @last_enqueue_time.nil? || @last_enqueue_time.to_i < last_time(time).to_i
end
# Normalize the configured job arguments into an Array.
# Strings (as stored in Redis) are treated as dumped JSON and decoded,
# falling back to a single-element string array when decoding fails;
# Hashes are wrapped in an array; Arrays pass through untouched;
# anything else is splatted into an array.
def parse_args(args)
  if args.is_a?(Array)
    args
  elsif args.is_a?(Hash)
    [args]
  elsif args.is_a?(String)
    begin
      Sidekiq.load_json(args)
    rescue JSON::ParserError
      [*args]
    end
  else
    [*args]
  end
end
# Parse a stored enqueue-time string back into a UTC Time, trying the
# canonical format first and falling back to generic date parsing for
# legacy/foreign formats.
# NOTE(review): DateTime is a legacy API - kept as-is to preserve exact
# parsing behavior.
def parse_enqueue_time(timestamp)
  DateTime.strptime(timestamp, LAST_ENQUEUE_TIME_FORMAT).to_time.utc
rescue ArgumentError
  DateTime.parse(timestamp).to_time.utc
end
# Only allow enqueueing within 60 seconds of the scheduled slot; this
# prevents firing for slots missed while the process was down.
def not_past_scheduled_time?(current_time)
  last_cron_time = parsed_cron.previous_time(current_time).utc
  # or could it be?
  #last_cron_time = last_time(current_time)
  return false if (current_time.to_i - last_cron_time.to_i) > 60
  true
end
# Redis key for set of all cron jobs
def self.jobs_key
  "cron_jobs"
end

# Redis key for storing one cron job
def self.redis_key name
  "cron_job:#{name}"
end

# Redis key for storing this cron job
def redis_key
  self.class.redis_key @name
end

# Redis key for storing one cron job run times
# (when poller added job to queue)
def self.job_enqueued_key name
  "cron_job:#{name}:enqueued"
end

# Redis key for one cron job's JID history list
def self.jid_history_key name
  "cron_job:#{name}:jid_history"
end

# instance-level shortcut for job_enqueued_key
def job_enqueued_key
  self.class.job_enqueued_key @name
end

# instance-level shortcut for jid_history_key
def jid_history_key
  self.class.jid_history_key @name
end
# Flatten a Hash into the [key, value, key, value, ...] array form that
# redis.hmset expects.
def hash_to_redis hash
  hash.flat_map { |key, value| [key, value] }
end
end
end
end