From f209c5ab45b321928adfad3cdb98515b54ddc069 Mon Sep 17 00:00:00 2001 From: Anthony Truskinger Date: Tue, 7 Jun 2016 00:10:35 +1000 Subject: [PATCH 1/5] Feature: Analysis Jobs integration complete. Analysis Jobs' models and controllers were previously set up. This commit introduces changes that make the jobs workflow functionally complete. Also included is a comprehensive suite of API level specs that simulates and tests many full-lifecycle scenarios of the analysis jobs system. This is a combination of 14 commits that were squashed when cleaning up this work. - Integration test definitions for AnalysisJobs/Items - Started implementing tests - Implementation work - Got :new integration tests working - Made a fake resque job - Made job notification emails - Fixed faulty update_status_timestamps method and fixed associated tests - Hardened state transition test for AnalysisJobsItem - more work on processing stage - almost got prepare working - Begun adding aasm workflow to analysis_jobs_items - Got most of processing tests working - Added extra scripts field to support arbitrary options for analysis_action - Work on processing analysis_jobs_items - Fixed strong params definition for both AnalysisJobs and AnalysisJobsItems - 75% of unit tests complete - Added timecop for mocking time in tests (later removed this gem as well - it wasn't needed) - All creating an analysis job request specs now work! :tada: - Added support for deleting analysis jobs. Analysis jobs can only be deleted when their status is processing, suspended, or completed. 
- Also added tests for AnalysisJobs and AnalysisJobsItems --- Gemfile | 7 + Gemfile.lock | 7 +- app/controllers/analysis_jobs_controller.rb | 36 +- .../analysis_jobs_items_controller.rb | 41 +- app/mailers/analysis_job_mailer.rb | 45 + app/models/ability.rb | 2 +- app/models/analysis_job.rb | 450 ++++-- app/models/analysis_jobs_item.rb | 317 +++-- app/models/script.rb | 4 +- .../analysis_job_complete_message.haml | 28 + .../analysis_job_new_message.haml | 28 + .../analysis_job_retry_message.haml | 29 + config/environments/development.rb | 2 +- config/environments/test.rb | 2 +- ...0614230504_analysis_jobs_column_changes.rb | 10 + ...20160712051359_add_extra_scripts_fields.rb | 7 + ...ancel_started_at_to_analysis_jobs_items.rb | 5 + db/structure.sql | 16 +- lib/modules/aasm_helpers.rb | 24 + lib/validators/existence_validator.rb | 2 + spec/acceptance/analysis_jobs_items_spec.rb | 11 +- .../analysis_jobs_controller_spec.rb | 160 --- spec/factories/analysis_job_factory.rb | 4 +- spec/factories/analysis_jobs_item_factory.rb | 4 +- spec/factories/script_factory.rb | 1 + spec/helpers/resque_helper.rb | 21 +- spec/lib/creation.rb | 25 +- spec/models/analysis_job_spec.rb | 104 +- spec/models/analysis_jobs_item_spec.rb | 174 +-- spec/requests/analysis_jobs_spec.rb | 1255 +++++++++++++++++ 30 files changed, 2246 insertions(+), 575 deletions(-) create mode 100644 app/mailers/analysis_job_mailer.rb create mode 100644 app/views/analysis_jobs_mailer/analysis_job_complete_message.haml create mode 100644 app/views/analysis_jobs_mailer/analysis_job_new_message.haml create mode 100644 app/views/analysis_jobs_mailer/analysis_job_retry_message.haml create mode 100644 db/migrate/20160614230504_analysis_jobs_column_changes.rb create mode 100644 db/migrate/20160712051359_add_extra_scripts_fields.rb create mode 100644 db/migrate/20160726014747_add_cancel_started_at_to_analysis_jobs_items.rb create mode 100644 lib/modules/aasm_helpers.rb delete mode 100644 
spec/controllers/analysis_jobs_controller_spec.rb create mode 100644 spec/requests/analysis_jobs_spec.rb diff --git a/Gemfile b/Gemfile index 088a3f85..cb7dc65c 100644 --- a/Gemfile +++ b/Gemfile @@ -98,6 +98,9 @@ gem 'enumerize', '~> 1.0' gem 'uuidtools', '~> 2.1.5' gem 'acts_as_paranoid', git: 'https://github.com/ActsAsParanoid/acts_as_paranoid.git', branch: :master, ref: 'ab31723bc1' +# for state machines +gem 'aasm' + # SETTINGS # ------------------------------------- gem 'settingslogic', '~> 2.0.9' @@ -204,4 +207,8 @@ end group :test do gem 'webmock', '~> 1.24.0' gem 'shoulda-matchers', '< 3.0.0', require: false + + gem 'rspec-mocks', '~>3.4.1' + + gem 'timecop' end \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index b7d6b2c5..497d7f7b 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -52,6 +52,7 @@ GIT GEM remote: https://rubygems.org/ specs: + aasm (4.11.0) actionmailer (4.2.6) actionpack (= 4.2.6) actionview (= 4.2.6) @@ -430,6 +431,7 @@ GEM thread (0.2.2) thread_safe (0.3.5) tilt (2.0.2) + timecop (0.8.1) timeliness (0.3.8) tins (1.6.0) tzinfo (1.2.2) @@ -458,6 +460,7 @@ PLATFORMS ruby DEPENDENCIES + aasm activesupport-json_encoder! acts_as_paranoid! baw-audio-tools! 
@@ -510,6 +513,7 @@ DEPENDENCIES resque-status (~> 0.5.0) role_model (~> 0.8.1) rspec (~> 3.4.0) + rspec-mocks (~> 3.4.1) rspec-rails (~> 3.4.0) rspec_api_documentation (~> 4.7.0) rubocop (~> 0.39.0) @@ -521,6 +525,7 @@ DEPENDENCIES simplecov (~> 0.11.1) therubyracer (~> 0.12.1) thin (~> 1.6.3) + timecop tzinfo (~> 1.2.2) tzinfo-data (~> 1.2016) uglifier (>= 1.3.0) @@ -529,4 +534,4 @@ DEPENDENCIES zonebie BUNDLED WITH - 1.12.0.rc + 1.12.5 diff --git a/app/controllers/analysis_jobs_controller.rb b/app/controllers/analysis_jobs_controller.rb index 968b2ec7..e03476dc 100644 --- a/app/controllers/analysis_jobs_controller.rb +++ b/app/controllers/analysis_jobs_controller.rb @@ -41,11 +41,12 @@ def create do_set_attributes(analysis_job_create_params) do_authorize_instance + # runs first step in analysis_job workflow (`initialize_workflow`) and then saves if @analysis_job.save # now create and enqueue job items (which updates status attributes again) # needs to be called after save as it makes use of the analysis_job id. - @analysis_job.begin_work(current_user) + @analysis_job.prepare! respond_create_success else @@ -60,7 +61,16 @@ def update do_load_resource do_authorize_instance - if @analysis_job.update_attributes(analysis_job_update_params) + parameters = analysis_job_update_params + + # allow the API to transition this analysis job to a new state. + # Used for suspending and resuming an analysis_job + if parameters.key?(:overall_status) + @analysis_job.transition_to_state(parameters[:overall_status].to_sym) + parameters = parameters.except(:overall_status) + end + + if @analysis_job.update_attributes(parameters) respond_show else respond_change_fail @@ -74,13 +84,23 @@ def destroy do_load_resource do_authorize_instance - @analysis_job.destroy - add_archived_at_header(@analysis_job) + # only allow deleting from suspended or completed states + can_delete = @analysis_job.completed? || @analysis_job.suspended? 
- # TODO: delete pending analysis jobs from worker message queue - # TODO: change all pending analysis_job_items to :cancelled + # also allow from processing, since we can suspend + if @analysis_job.processing? && @analysis_job.may_suspend? + can_delete = true + @analysis_job.suspend! + end - respond_destroy + unless can_delete + respond_error(:conflict, "Cannot be deleted while `overall_status` is `#{@analysis_job.overall_status}`") + else + @analysis_job.destroy + add_archived_at_header(@analysis_job) + + respond_destroy + end end # GET|POST /analysis_jobs/filter @@ -127,7 +147,7 @@ def analysis_job_create_params def analysis_job_update_params # Only name and description can be updated via API. # Other properties are updated by the processing system. - params.require(:analysis_job).permit(:name, :description) + params.require(:analysis_job).permit(:name, :description, :overall_status) end def get_analysis_jobs diff --git a/app/controllers/analysis_jobs_items_controller.rb b/app/controllers/analysis_jobs_items_controller.rb index da0a9681..3ff99ef2 100644 --- a/app/controllers/analysis_jobs_items_controller.rb +++ b/app/controllers/analysis_jobs_items_controller.rb @@ -73,13 +73,47 @@ def update end do_load_resource + do_get_analysis_job do_authorize_instance - if @analysis_jobs_item.update_attributes(analysis_jobs_item_update_params) + parameters = analysis_jobs_item_update_params + desired_state = parameters[:status].to_sym + + client_cancelled = desired_state == :cancelled + should_cancel = @analysis_jobs_item.may_confirm_cancel?(desired_state) + valid_transition = @analysis_jobs_item.may_transition_to_state(desired_state) + + if valid_transition + @analysis_jobs_item.transition_to_state(desired_state) + elsif should_cancel + @analysis_jobs_item.confirm_cancel(desired_state) + end + + saved = @analysis_jobs_item.save + + if saved + # update progress statistics and check if job has been completed + @analysis_job.check_progress + end + + if should_cancel && 
!client_cancelled && saved + # If someone tried to :cancelling-->:working instead of :cancelling-->:cancelled then it is an error + # However if client :cancelled when we expected :cancelling-->:cancelled then well behaved + respond_error( + :unprocessable_entity, + "This entity has been cancelled - can not set new state to `#{desired_state}`") + + elsif !valid_transition + respond_error( + :unprocessable_entity, + "Cannot transition from `#{@analysis_jobs_item.status}` to `#{desired_state}`") + elsif saved respond_show else respond_change_fail end + + end # GET|POST /analysis_jobs/:analysis_job_id/audio_recordings/ @@ -105,6 +139,7 @@ def filter def analysis_jobs_item_update_params # Only status can be updated via API # Other properties are updated by the model/initial processing system + #:analysis_job_id, :audio_recording_id, :format params.require(:analysis_jobs_item).permit(:status) end @@ -130,7 +165,9 @@ def do_get_opts end def do_get_analysis_job - @analysis_job = AnalysisJob.find(@analysis_job_id) unless @is_system_job + # We use with deleted here because we always needs to be able to load AnalysisJobsItem even if the parent + # AnalysisJob has been deleted. 
+ @analysis_job = AnalysisJob.with_deleted.find(@analysis_job_id) unless @is_system_job end def get_query diff --git a/app/mailers/analysis_job_mailer.rb b/app/mailers/analysis_job_mailer.rb new file mode 100644 index 00000000..d009dd29 --- /dev/null +++ b/app/mailers/analysis_job_mailer.rb @@ -0,0 +1,45 @@ +class AnalysisJobMailer < ActionMailer::Base + default from: Settings.mailer.emails.sender_address + + # @param [AnalysisJob] analysis_job + # @param [ActionDispatch::Request] rails_request + def new_job_message(analysis_job, rails_request) + send_message(analysis_job, rails_request, 'New analysis job', 'analysis_job_new_message') + end + + # @param [AnalysisJob] analysis_job + # @param [ActionDispatch::Request] rails_request + def completed_job_message(analysis_job, rails_request) + send_message(analysis_job, rails_request, 'Completed analysis job', 'analysis_job_complete_message') + end + + # @param [AnalysisJob] analysis_job + # @param [ActionDispatch::Request] rails_request + def retry_job_message(analysis_job, rails_request) + send_message(analysis_job, rails_request, 'Retrying analysis job', 'analysis_job_retry_message') + end + + private + + # Construct the email. 
+ # @param [AnalysisJob] analysis_job + # @param [ActionDispatch::Request] rails_request + # @param [string] subject_prefix + # @param [string] template_name + def send_message(analysis_job, rails_request, subject_prefix, template_name) + user_emails = User.find([analysis_job.creator_id, analysis_job.updater_id]).map {|u| u.email }.uniq + + @info = { + analysis_job: analysis_job, + datestamp: Time.zone.now.utc.iso8601 + } + + # email gets sent to required recipients (creator, updater, admins) + mail( + to: user_emails.concat(Settings.mailer.emails.required_recipients).uniq, + subject: "#{Settings.mailer.emails.email_prefix} [#{subject_prefix}] #{@info[:analysis_job].name}.", + template_path: 'analysis_jobs_mailer', + template_name: template_name) + end + +end \ No newline at end of file diff --git a/app/models/ability.rb b/app/models/ability.rb index 1440b0d2..77c7fbaf 100644 --- a/app/models/ability.rb +++ b/app/models/ability.rb @@ -103,7 +103,7 @@ def for_harvester # omitted: :new, :create, # applied by default: :index, :show, :filter - can [ :update ], AnalysisJobsItem + can [ :show, :update ], AnalysisJobsItem end def to_project(user) diff --git a/app/models/analysis_job.rb b/app/models/analysis_job.rb index 0b3442d1..d006919b 100644 --- a/app/models/analysis_job.rb +++ b/app/models/analysis_job.rb @@ -1,9 +1,13 @@ class AnalysisJob < ActiveRecord::Base - extend Enumerize - # ensures that creator_id, updater_id, deleter_id are set include UserChange + # allow a state machine to work with this class + include AASM + include AASMHelpers + + OVERALL_PROGRESS_REFRESH_SECONDS = 30.0 + belongs_to :creator, class_name: 'User', foreign_key: :creator_id, inverse_of: :created_analysis_jobs belongs_to :updater, class_name: 'User', foreign_key: :updater_id, inverse_of: :updated_analysis_jobs belongs_to :deleter, class_name: 'User', foreign_key: :deleter_id, inverse_of: :deleted_analysis_jobs @@ -17,10 +21,6 @@ class AnalysisJob < ActiveRecord::Base acts_as_paranoid 
validates_as_paranoid - # store progress as json in a text column - # this stores jason in the form - # { queued: 5, working: 10, successful: 4, failed: 3, total: 22} - serialize :overall_progress, JSON # association validations validates :script, existence: true @@ -38,30 +38,6 @@ class AnalysisJob < ActiveRecord::Base validates :started_at, allow_blank: true, allow_nil: true, timeliness: {on_or_before: lambda { Time.zone.now }, type: :datetime} validates :overall_data_length_bytes, presence: true, numericality: {only_integer: true, greater_than_or_equal_to: 0} - # job status values - completed just means all processing has finished, whether it succeeds or not. - AVAILABLE_JOB_STATUS_SYMBOLS = [:new, :preparing, :processing, :suspended, :completed] - AVAILABLE_JOB_STATUS = AVAILABLE_JOB_STATUS_SYMBOLS.map { |item| item.to_s } - - AVAILABLE_JOB_STATUS_DISPLAY = [ - {id: :new, name: 'New'}, - {id: :preparing, name: 'Preparing'}, - {id: :processing, name: 'Processing'}, - {id: :suspended, name: 'Suspended'}, - {id: :completed, name: 'Completed'}, - ] - - enumerize :overall_status, in: AVAILABLE_JOB_STATUS, predicates: true - - # - # State transition map - # - # :new → :preparing → :processing → :completed - # ⇅ - # :suspended - # - - after_initialize :initialise_job_tracking, if: Proc.new { |analysis_job| analysis_job.new_record? } - def self.filter_settings fields = [ @@ -107,163 +83,329 @@ def self.filter_settings } end + # + # public methods + # + + def self.batch_size + 1000 + end + + # Intended to be called when we know a change in status has happened - + # i.e. when an analysis_jobs_item status has changed. + # Disabled caching idea because it looks like a premature optimization. + def check_progress + #if (Time.zone.now - analysis_job.overall_progress_modified_at) > OVERALL_PROGRESS_REFRESH_SECONDS + + + # https://github.com/ActsAsParanoid/acts_as_paranoid/issues/45 + was_deleted = deleted? 
+ if was_deleted + old_paranoid_value = self.paranoid_value + self.paranoid_value = nil + end + + # skip validations and callbacks, and do not set updated_at, save value to database + update_columns(update_job_progress) + + # finally, after each update, check if we can finally finish the job! + if self.may_complete? + self.complete! + end + + self.paranoid_value = old_paranoid_value if was_deleted + end + # analysis_job lifecycle: - # 1. when a new analysis job is created, the required attributes will be initialised by `initialise_job_tracking` - # the new analysis job can be saved at this point (and is saved if created via create action on controller), - # but it has not been started and no resque jobs have been enqueued - # 2. Start an analysis job by calling `begin_work`. Calling `begin_work` when :overall_status is not 'new' or analysis job has not been saved is an error - # If :overall_status is 'new', the analysis job will immediately transition to 'preparing' status, then create and enqueue resque jobs. - # 3. Once all resque jobs have been enqeued, the analysis job will transition to 'processing' status. - # 4. resque jobs will update the analysis job as resque jobs change states using `update_job_progress` - # TODO more... - - - # Update status and modified timestamp if changes are made. Does not persist changes. - # @param [Symbol, String] status - # @return [void] - def update_job_status(status) - current_status = self.overall_status.blank? ? 'new' : self.overall_status.to_s - new_status = status.blank? ? current_status : status.to_s + # 1. When a new analysis job is created, the state will be `before_save`. + # The required attributes will be initialised by `initialize_workflow` and state will be transitioned to `new`. + # The new analysis job should be saved at this point (and is saved if created via create action on controller). + # Note: no AnalysisJobsItems have been made and no resque jobs have been enqueued. + # 2. Then the job must be prepared. 
Currently synchronous but designed to be asynchronous. + # Do this by calling `prepare` which will transition from `new` to `preparing`. + # Note: AnalysisJobsItems are enqueued progressively here. Some actions by be processed and even completed + # before the AnalysisJob even finishes preparing! + # 3. Transition to `processing` + # Note: the processing transition should be automatic + # 3. Once all resque jobs have been enqueued, the analysis job will transition to 'processing' status. + # 4. Resque jobs will update the analysis job (via analysis jobs items) as resque jobs change states. + # `check_progress` is used to update progress and also is the callback that checks for job completion. + # 5. When all items have finished processing, the job transitions to `completed`. Users are notified with an email + # and the job is marked as completed. + # + # Additional states: + # - Jobs can transition between processing and suspended. When suspended all analysis jobs items are marked as + # `cancelling`. When resumed, all `cancelling` or `cancelled` items are re-added back to the queue. + # Note: We can't add or remove items from the message queue, which is why we need the cancelling/cancelled + # distinction. + # - Jobs can be retried. In this case, the failed items (only) are re-queued and the job is set to `processing` + # + # State transition map + # + # :before_save → :new → :preparing → :processing → :completed + # ↑ ↓ ↑ | + # | :suspended | + # ---------------------------- + # + aasm column: :overall_status, no_direct_assignment: true, whiny_persistence: true do + # We don't need to explicitly set display_name - they get humanized by default + state :before_save, {initial: true, display_name: 'Before save'} + state :new, enter: [:initialise_job_tracking, :update_job_progress] + state :preparing, enter: :send_preparing_email, after_enter: [:prepare_job, :process!] 
+ state :processing, enter: [:update_job_progress] + state :suspended, enter: [:suspend_job, :update_job_progress] + + # completed just means all processing has finished, whether it succeeds or not. + state :completed, enter: [:update_job_progress], after_enter: :send_completed_email + + event :initialize_workflow, unless: :has_status_timestamp? do + transitions from: :before_save, to: :new + end + + # we send email here because initialize_workflow does not guarantee that an id is set. + event :prepare, guard: :has_id? do + transitions from: :new, to: :preparing + end + + # you shouldn't need to call process manually + event :process, guard: :are_all_enqueued? do + transitions from: :preparing, to: :processing, unless: :all_job_items_completed? + transitions from: :preparing, to: :completed, guard: :all_job_items_completed? + end + + event :suspend do + transitions from: :processing, to: :suspended + end + + # Guard against race conditions. Do not allow to resume if there are still analysis_jobs_items that are :queued. + event :resume do + transitions from: :suspended, to: :processing, guard: :are_all_cancelled?, after: :resume_job + end + + # https://github.com/aasm/aasm/issues/324 + # event :api_update do + # transitions from: :processing, to: :suspended + # transitions from: :suspended, to: :processing, guard: :are_all_cancelled? + # end + + event :complete do + transitions from: :processing, to: :completed, guard: :all_job_items_completed? + end + + # retry just the failures + event :retry do + transitions from: :completed, to: :processing, + guard: :are_any_job_items_failed?, after: [:retry_job, :send_retry_email] + end - self.overall_status = new_status - self.overall_status_modified_at = Time.zone.now if current_status != new_status || self.overall_status_modified_at.blank? + after_all_transitions :update_status_timestamp end - # Update progress and modified timestamp if changes are made. Does not persist changes. 
- # @param [Integer] queued_count - # @param [Integer] working_count - # @param [Integer] successful_count - # @param [Integer] failed_count - # @return [void] - def update_job_progress(queued_count = nil, working_count = nil, successful_count = nil, failed_count = nil) - current_progress = self.overall_progress - - current_queued_count = current_progress.blank? ? 0 : current_progress['queued'].to_i - current_working_count = current_progress.blank? ? 0 : current_progress['working'].to_i - current_successful_count = current_progress.blank? ? 0 : current_progress['successful'].to_i - current_failed_count = current_progress.blank? ? 0 : current_progress['failed'].to_i - - new_queued_count = queued_count.blank? ? current_queued_count : queued_count.to_i - new_working_count = working_count.blank? ? current_working_count : working_count.to_i - new_successful_count = successful_count.blank? ? current_successful_count : successful_count.to_i - new_failed_count = failed_count.blank? ? current_failed_count : failed_count.to_i - - calculated_total = new_queued_count + new_working_count + new_successful_count + new_failed_count - - new_progress = { - queued: new_queued_count, - working: new_working_count, - successful: new_successful_count, - failed: new_failed_count, - total: calculated_total, - } + # job status values + AVAILABLE_JOB_STATUS_SYMBOLS = self.aasm.states.map(&:name) + AVAILABLE_JOB_STATUS = AVAILABLE_JOB_STATUS_SYMBOLS.map { |item| item.to_s } + + AVAILABLE_JOB_STATUS_DISPLAY = self.aasm.states.map { |x| [x.name, x.display_name] }.to_h - self.overall_progress = new_progress - self.overall_progress_modified_at = Time.zone.now if current_progress != new_progress || self.overall_progress_modified_at.blank? + # hook active record callbacks into state machine + before_validation(on: :create) do + # if valid? is called twice, then overall_status already == :new and this will fail. So add extra may_*? check. + initialize_workflow if may_initialize_workflow? 
end - def create_payload(audio_recording) - # common payload info - command_format = self.script.executable_command.to_s - file_executable = '' - copy_paths = [] - config_string = self.custom_settings.to_s - job_id = self.id.to_i + private - { - command_format: command_format, + # + # guards for the state machine + # - # TODO: where do file_executable and copy_paths come from? - file_executable: file_executable, - copy_paths: copy_paths, + def has_status_timestamp? + !self.overall_status_modified_at.nil? + end - config: config_string, - job_id: job_id, + def has_id? + !self.id.nil? + end - uuid: audio_recording.uuid, - id: audio_recording.id, - datetime_with_offset: audio_recording.recorded_date.iso8601(3), - original_format: audio_recording.original_format_calculated - } + def are_all_enqueued? + self.overall_count == AnalysisJobsItem.for_analysis_job(self.id).count + end + + def are_all_cancelled? + AnalysisJobsItem.queued_for_analysis_job(self.id).count == 0 + end + + def all_job_items_completed? + AnalysisJobsItem.for_analysis_job(self.id).count == AnalysisJobsItem.completed_for_analysis_job(self.id).count + end + + def are_any_job_items_failed? + AnalysisJobsItem.failed_for_analysis_job(self.id).count > 0 + end + + # + # callbacks for the state machine + # + + def initialise_job_tracking + self.overall_count = 0 + self.overall_duration_seconds = 0 + self.overall_data_length_bytes = 0 + end + + def send_preparing_email + AnalysisJobMailer.new_job_message(self, nil).deliver_now end # Create payloads from audio recordings extracted from saved search. - # @param [User] user - # @return [Array] payloads - def begin_work(user) - user = Access::Core.validate_user(user) - - # ensure status is 'new' and analysis job has been saved - if self.overall_status != 'new' || !self.persisted? - msg_status = self.overall_status == 'new' ? '' : " Status must be 'new', but was '#{self.overall_status}'." - msg_saved = self.persisted? ? '' : ' Analysis job has not been saved.' 
- fail CustomErrors::AnalysisJobStartError.new("Analysis job cannot start.#{msg_status}#{msg_saved}") - end + # This method saves persists changes. + def prepare_job + # test: self.creator + # test: status == preparing + user = creator # TODO add logging and timing # TODO This may need to be an async operation itself depending on how fast it runs # counters - count = 0 - duration_seconds_sum = 0 - data_length_bytes_sum = 0 - queued_count = 0 - failed_count = 0 - - # update status and started timestamp - update_job_status('preparing') - self.started_at = Time.zone.now if self.started_at.blank? + options = { + count: 0, + duration_seconds_sum: 0, + data_length_bytes_sum: 0, + queued_count: 0, + failed_count: 0, + results: [], + analysis_job: self + } + + self.started_at = Time.zone.now self.save! # query associated saved_search to get audio_recordings query = self.saved_search.audio_recordings_extract(user) - # create one payload per each audio_recording - results = [] - query.find_each(batch_size: 1000) do |audio_recording| - payload = create_payload(audio_recording) + options = query + .find_in_batches(batch_size: AnalysisJob.batch_size) + .reduce(options, &self.method(:prepare_analysis_job_item)) - # update counters - count = count + 1 - duration_seconds_sum = duration_seconds_sum + audio_recording.duration_seconds - data_length_bytes_sum = data_length_bytes_sum + audio_recording.data_length_bytes - - # Enqueue payloads representing audio recordings from saved search to asynchronous processing queue. 
- result = nil - error = nil - - begin - result = BawWorkers::Analysis::Action.action_enqueue(payload) - queued_count = queued_count + 1 - rescue => e - error = e - Rails.logger.error "An error occurred when enqueuing an analysis job item: #{e}" - failed_count = failed_count + 1 - end - results.push({payload: payload, result: result, error: error}) - end - - # update counters, status, progress - update_job_status('processing') # don't update progress - resque jobs may already be processing or completed # the resque jobs can do the updating - self.overall_count = count - self.overall_duration_seconds = duration_seconds_sum - self.overall_data_length_bytes = data_length_bytes_sum + + self.overall_count = options[:count] + self.overall_duration_seconds = options[:duration_seconds_sum] + self.overall_data_length_bytes = options[:data_length_bytes_sum] self.save! - results + options[:results] end - private + # Suspends an analysis job by cancelling all queued items + def suspend_job + # get items + query = AnalysisJobsItem.queued_for_analysis_job(id) - def initialise_job_tracking - update_job_status('new') - update_job_progress - self.overall_count = 0 if self.overall_count.blank? - self.overall_duration_seconds = 0 if self.overall_duration_seconds.blank? - self.overall_data_length_bytes = 0 if self.overall_data_length_bytes.blank? + # batch update + query.find_in_batches(batch_size: AnalysisJob.batch_size) do |items| + items.each do |item| + # transition to cancelled state and save + item.cancel! + end + end end + + # Resumes an analysis job by converting cancelling or cancelled items to queued items + def resume_job + # get items + query = AnalysisJobsItem.cancelled_for_analysis_job(id) + + # batch update + query.find_in_batches(batch_size: AnalysisJob.batch_size) do |items| + items.each do |item| + # transition to cancelled state and save + item.retry! 
+ end + end + end + + def send_completed_email + AnalysisJobMailer.completed_job_message(self, nil).deliver_now + end + + # Retry an analysis job by re-enqueuing all failed items + def retry_job + # get items + query = AnalysisJobsItem.failed_for_analysis_job(id) + + # batch update + query.find_in_batches(batch_size: AnalysisJob.batch_size) do |items| + items.each do |item| + # transition to cancelled state and save + item.retry! + end + end + end + + def send_retry_email + AnalysisJobMailer.retry_job_message(self, nil).deliver_now + end + + # Update status timestamp whenever a transition occurs. Does not persist changes - happens before aasm save. + def update_status_timestamp + self.overall_status_modified_at = Time.zone.now + end + + # Update progress and modified timestamp if changes are made. Does NOT persist changes. + # This callback happens after a model is saved in a new state or loaded from the database. + # We want all transactions to finish before we update the progresses - since they run a query themselves. 
+ # @return [void] + def update_job_progress + defaults = AnalysisJobsItem::AVAILABLE_ITEM_STATUS.product([0]).to_h + statuses = AnalysisJobsItem.where(analysis_job_id: self.id).group(:status).count + statuses[:total] = statuses.map(&:last).reduce(:+) || 0 + + statuses = defaults.merge(statuses) + + self.overall_progress = statuses + self.overall_progress_modified_at = Time.zone.now + + {overall_progress: self.overall_progress, overall_progress_modified_at: self.overall_progress_modified_at} + end + + # + # other methods + # + + # Process a batch of audio_recordings + def prepare_analysis_job_item(options, audio_recordings) + audio_recordings.each do |audio_recording| + + # update counters + options[:count] += 1 + options[:duration_seconds_sum] += audio_recording.duration_seconds + options[:data_length_bytes_sum] += audio_recording.data_length_bytes + + + # create new analysis jobs item + item = AnalysisJobsItem.new + item.analysis_job = self + item.audio_recording = audio_recording + + item.save! + + # now try to transition to queued state (and save if transition successful) + success = item.queue! 
+ raise 'Could not queue AnalysisJobItem' unless success + + options[:queued_count] += 1 if item.enqueue_results[:result] + options[:failed_count] += 1 if item.enqueue_results[:error] + + options[:results].push(item.enqueue_results) + end + + options + end + + end diff --git a/app/models/analysis_jobs_item.rb b/app/models/analysis_jobs_item.rb index 6ab93d88..554c81db 100644 --- a/app/models/analysis_jobs_item.rb +++ b/app/models/analysis_jobs_item.rb @@ -1,6 +1,9 @@ class AnalysisJobsItem < ActiveRecord::Base - extend Enumerize + # allow a state machine to work with this class + include AASM + include AASMHelpers + SYSTEM_JOB_ID = 'system' belongs_to :analysis_job, inverse_of: :analysis_jobs_items belongs_to :audio_recording, inverse_of: :analysis_jobs_items @@ -26,61 +29,6 @@ class AnalysisJobsItem < ActiveRecord::Base allow_blank: true, allow_nil: true, timeliness: {on_or_before: lambda { Time.zone.now }, type: :datetime} - # item status values - timed out is a special failure case where the worker never reports back - AVAILABLE_ITEM_STATUS_SYMBOLS = [:new, :queued, :working, :successful, :failed, :timed_out, :cancelled] - AVAILABLE_ITEM_STATUS = AVAILABLE_ITEM_STATUS_SYMBOLS.map { |item| item.to_s } - - AVAILABLE_JOB_STATUS_DISPLAY = [ - {id: :new, name: 'New'}, - {id: :queued, name: 'Queued'}, - {id: :working, name: 'Working'}, - {id: :successful, name: 'Successful'}, - {id: :failed, name: 'Failed'}, - {id: :timed_out, name: 'Timed out'}, - {id: :cancelled, name: 'Cancelled'}, - ] - - enumerize :status, in: AVAILABLE_ITEM_STATUS, predicates: true, default: :new - - SYSTEM_JOB_ID = 'system' - - # - # State transition map - # --> :successful - # | - # :new → :queued → :working ----> :failed - # | | | | - # | | | --> :timed_out - # | | | - # --------------------------> :cancelled - # - # IFF #retry implemented (not currently the case), then also: - # - # :failed ---------> :queued - # | - # :timed_out --- - - def status=(new_status) - old_status = self.status - - 
# don't let enumerize set the default value when selecting nil from the database - new_status = nil if !new_record? && new_status == :new.to_s && old_status == nil - - super(new_status) - - update_status_timestamps(self.status, old_status) - - end - - def analysis_job_id - super_id = super() - - # When fake records are returned from system_query their analysis_job_id's are nil. - # This patch ensures serialized system records have the analysis_job_id set to 'system' - good practice for - # RESTful object. E.g. routing to a resource is done with analysis_job_id; having it set simplifies client logic. - !new_record? && id.nil? ? SYSTEM_JOB_ID : super_id - end - def self.filter_settings(is_system = false) fields = [ @@ -156,33 +104,29 @@ def self.filter_settings(is_system = false) settings end - # Update status and modified timestamp if changes are made. Does not persist changes. - # @param [Symbol] new_status - # @param [Symbol] old_status - # @return [void] - def update_status_timestamps(new_status, old_status) - return if new_status == old_status - - case - #when new_status == :new and old_status == :new - # created_at = Time.zone.now # - created_at handled by rails - when (new_status == :queued && old_status == :new) - self.queued_at = Time.zone.now - when (new_status == :working && old_status == :queued) - self.work_started_at = Time.zone.now - when (new_status == :successful && old_status == :working), - (new_status == :failed and old_status == :working), - (new_status == :timed_out && old_status == :working) - self.completed_at = Time.zone.now - when (new_status == :cancelled && old_status == :new), - (new_status == :cancelled && old_status == :queued), - (new_status == :cancelled && old_status == :working) - self.completed_at = Time.zone.now - else - fail "AnalysisJobItem#status: Invalid state transition from #{ old_status } to #{ new_status }" - end + # + # scopes + # + + def self.for_analysis_job(analysis_job_id) + where(analysis_job_id: analysis_job_id) + 
end + + def self.completed_for_analysis_job(analysis_job_id) + where(analysis_job_id: analysis_job_id, status: COMPLETED_ITEM_STATUS_SYMBOLS) + end + + def self.failed_for_analysis_job(analysis_job_id) + where(analysis_job_id: analysis_job_id, status: FAILED_ITEM_STATUS_SYMBOLS) end + def self.queued_for_analysis_job(analysis_job_id) + queued.where(analysis_job_id: analysis_job_id) + end + + def self.cancelled_for_analysis_job(analysis_job_id) + where(analysis_job_id: analysis_job_id, status: [:cancelling, :cancelled]) + end # Scoped query for getting fake analysis_jobs. # @return [ActiveRecord::Relation] @@ -218,16 +162,223 @@ def self.system_query query_without_deleted end + # + # public methods + # + + attr_reader :enqueue_results + + def status=(new_status) + old_status = self.status + + # don't let enumerize set the default value when selecting nil from the database + new_status = nil if !new_record? && new_status == :new.to_s && old_status == nil + + super(new_status) + end + + def analysis_job_id + super_id = super() + + # When fake records are returned from system_query their analysis_job_id's are nil. + # This patch ensures serialized system records have the analysis_job_id set to 'system' - good practice for + # RESTful object. E.g. routing to a resource is done with analysis_job_id; having it set simplifies client logic. + !new_record? && id.nil? ? SYSTEM_JOB_ID : super_id + end + + + # + # State transition map + # --> :successful + # | + # :new → :queued → :working ----> :failed + # | | + # | --> :timed_out + # | + # ----> :cancelling --> :cancelled + # + # Retry an item: + # + # :failed ---------> :queued + # | + # :timed_out --- + # + # During cancellation: + # + # :cancelling --> :queued (same queue_id) + # + # After cancellation: + # + # :cancelled --> :queued (new queue_id) + # + # Avoid race conditions for cancellation: an item can always finish! 
+ # + # :cancelling ⊕ :cancelled ----> :successful ⊕ :failed ⊕ :timed_out + # + aasm column: :status, no_direct_assignment: true, whiny_persistence: true do + state :new, initial: true + state :queued, before_enter: :add_to_queue, enter: :set_queued_at + state :working, enter: :set_work_started_at + state :successful, enter: :set_completed_at + state :failed, enter: :set_completed_at + state :timed_out, enter: :set_completed_at + state :cancelling, enter: :set_cancel_started_at + state :cancelled, enter: :set_completed_at + + event :queue, guards: [:not_system_job] do + transitions from: :new, to: :queued + end + + event :work, guards: [:not_system_job] do + transitions from: :queued, to: :working + end + + event :succeed, guards: [:not_system_job] do + transitions from: :working, to: :successful + transitions from: [:cancelling, :cancelled], to: :successful + end + + event :fail, guards: [:not_system_job] do + transitions from: :working, to: :failed + transitions from: [:cancelling, :cancelled], to: :failed + end + + event :time_out, guards: [:not_system_job] do + transitions from: :working, to: :timed_out + transitions from: [:cancelling, :cancelled], to: :timed_out + end + + # https://github.com/aasm/aasm/issues/324 + # event :complete, guards: [:not_system_job] do + # transitions from: :working, to: :successful + # transitions from: :working, to: :failed + # transitions from: :working, to: :timed_out + # transitions from: [:cancelling, :cancelled], to: :successful + # transitions from: [:cancelling, :cancelled], to: :failed + # transitions from: [:cancelling, :cancelled], to: :timed_out + # end + + + event :cancel, guards: [:not_system_job] do + transitions from: :queued, to: :cancelling + end + + event :confirm_cancel, guards: [:not_system_job] do + transitions from: :cancelling, to: :cancelled + end + + event :retry, guards: [:not_system_job] do + transitions from: [:failed, :timed_out], to: :queued + transitions from: :cancelled, to: :queued + transitions 
from: :cancelling, to: :queued + end + end + + + # item status values - timed out is a special failure case where the worker never reports back + AVAILABLE_ITEM_STATUS_SYMBOLS = self.aasm.states.map(&:name) + AVAILABLE_ITEM_STATUS = AVAILABLE_ITEM_STATUS_SYMBOLS.map { |item| item.to_s } + + AVAILABLE_ITEM_STATUS_DISPLAY = self.aasm.states.map { |x| [x.name, x.display_name] }.to_h + + COMPLETED_ITEM_STATUS_SYMBOLS = [:successful, :failed, :timed_out, :cancelled] + FAILED_ITEM_STATUS_SYMBOLS = [:failed, :timed_out, :cancelled] + private + # + # state machine guards + # + + def not_system_job + analysis_job_id != SYSTEM_JOB_ID + end + + + # + # state machine callbacks + # + + # Enqueue payloads representing audio recordings from saved search to asynchronous processing queue. + def add_to_queue + if !queue_id.blank? && cancelling? + # cancelling items already have a valid job payload on the queue - do not add again + return + end + + payload = AnalysisJobsItem::create_action_payload(analysis_job, audio_recording) + + result = nil + error = nil + + begin + result = BawWorkers::Analysis::Action.action_enqueue(payload) + + # the assumption here is that result is a unique identifier that we can later use to interrogate the message queue + self.queue_id = result + rescue => e + error = e + # TODO: swallowing exception? 
seems bad + Rails.logger.error "An error occurred when enqueuing an analysis job item: #{e}" + end + + @enqueue_results = {result: result, error: error} + end + + def set_queued_at + self.queued_at = Time.zone.now + end + + def set_work_started_at + self.work_started_at = Time.zone.now + end + + def set_cancel_started_at + self.cancel_started_at = Time.zone.now + end + + def set_completed_at + self.completed_at = Time.zone.now + end + + # + # other methods + # + + def self.create_action_payload(analysis_job, audio_recording) + # common payload info + command_format = analysis_job.script.executable_command.to_s + config_string = analysis_job.custom_settings.to_s + job_id = analysis_job.id.to_i + + # get base options for analysis from the script + # Options invariant to the AnalysisJob are stuck in here, like: + # - file_executable + # - copy_paths + payload = analysis_job.script.analysis_action_params.dup + + # merge base + payload.merge({ + command_format: command_format, + + config: config_string, + job_id: job_id, + + uuid: audio_recording.uuid, + id: audio_recording.id, + datetime_with_offset: audio_recording.recorded_date.iso8601(3), + original_format: audio_recording.original_format_calculated + }) + end + def is_new_when_created - unless status == :new + unless status == :new.to_s errors.add(:status, 'must be new when first created') end end def has_queue_id_when_needed - if !(status == :new) && queue_id.blank? + if !(status == :new.to_s) && queue_id.blank? 
errors.add(:queue_id, 'A queue_id must be provided when status is not new') end end diff --git a/app/models/script.rb b/app/models/script.rb index ba4bc38a..e02342a5 100644 --- a/app/models/script.rb +++ b/app/models/script.rb @@ -87,10 +87,10 @@ def all_versions def self.filter_settings { valid_fields: [:id, :group_id, :name, :description, :analysis_identifier, :executable_settings_media_type, - :version, :created_at, :creator_id, :is_last_version, :is_first_version], + :version, :created_at, :creator_id, :is_last_version, :is_first_version, :analysis_action_params], render_fields: [:id, :group_id, :name, :description, :analysis_identifier, :executable_settings, :executable_settings_media_type, :version, :created_at, :creator_id], - text_fields: [:name, :description, :analysis_identifier, :executable_settings_media_type], + text_fields: [:name, :description, :analysis_identifier, :executable_settings_media_type, :analysis_action_params], custom_fields: lambda { |item, user| virtual_fields = { is_last_version: item.is_last_version?, diff --git a/app/views/analysis_jobs_mailer/analysis_job_complete_message.haml b/app/views/analysis_jobs_mailer/analysis_job_complete_message.haml new file mode 100644 index 00000000..3806e346 --- /dev/null +++ b/app/views/analysis_jobs_mailer/analysis_job_complete_message.haml @@ -0,0 +1,28 @@ +%p + Hello, + +%p + You are receiving this email because your analysis job has completed. + + +%p + Your analysis job ( + = @info[:analysis_job][:name] + ) was processed by + = Settings.organisation_names.organisation_name + \. + +%p + You can access the results of your analysis job by clicking + =link_to "here", analysis_job_url(@info[:analysis_job]) + \. 
+ +Additional information: +%p + Date stamp: + = @info[:datestamp] +%p + Analysis Job ID: #{ @info[:analysis_job][:id] } + +Regards, +%br= Settings.organisation_names.organisation_name \ No newline at end of file diff --git a/app/views/analysis_jobs_mailer/analysis_job_new_message.haml b/app/views/analysis_jobs_mailer/analysis_job_new_message.haml new file mode 100644 index 00000000..425fce07 --- /dev/null +++ b/app/views/analysis_jobs_mailer/analysis_job_new_message.haml @@ -0,0 +1,28 @@ +%p + Hello, + +%p + You are receiving this email because you started a new analysis job. + + +%p< + Your analysis job + =@info[:analysis_job][:name] + ) will be processed by + =Settings.organisation_names.organisation_name + \. + +%p< + You can get progress updates on your analysis job by clicking + =link_to "here", analysis_job_url(@info[:analysis_job]) + \. + +Additional information: +%p< + Date stamp: + =@info[:datestamp] +%p + Analysis Job ID: #{ @info[:analysis_job][:id] } + +Regards, +%br= Settings.organisation_names.organisation_name \ No newline at end of file diff --git a/app/views/analysis_jobs_mailer/analysis_job_retry_message.haml b/app/views/analysis_jobs_mailer/analysis_job_retry_message.haml new file mode 100644 index 00000000..baa30dcc --- /dev/null +++ b/app/views/analysis_jobs_mailer/analysis_job_retry_message.haml @@ -0,0 +1,29 @@ +%p + Hello, + +%p + You are receiving this email because your analysis job has been scheduled to be retried. + Failed items in the analysis job have been restarted. + + +%p + Your analysis job ( + = @info[:analysis_job][:name] + ) was processed by + = Settings.organisation_names.organisation_name + \. + +%p + You can get progress updates on your analysis job by clicking + =link_to "here", analysis_job_url(@info[:analysis_job]) + \. 
+ +Additional information: +%p + Date stamp: + = @info[:datestamp] +%p + Analysis Job ID: #{ @info[:analysis_job][:id] } + +Regards, +%br= Settings.organisation_names.organisation_name \ No newline at end of file diff --git a/config/environments/development.rb b/config/environments/development.rb index 81df641d..ad1a8f17 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -52,7 +52,7 @@ # config.action_controller.action_on_unpermitted_parameters property in your environment files. # If set to :log the unpermitted attributes will be logged, if set to :raise an exception will # be raised. - config.action_controller.action_on_unpermitted_parameters = :log + config.action_controller.action_on_unpermitted_parameters = :raise # Raises error for missing translations config.action_view.raise_on_missing_translations = true diff --git a/config/environments/test.rb b/config/environments/test.rb index 6aebcc5b..5e27769a 100644 --- a/config/environments/test.rb +++ b/config/environments/test.rb @@ -49,7 +49,7 @@ # config.action_controller.action_on_unpermitted_parameters property in your environment files. # If set to :log the unpermitted attributes will be logged, if set to :raise an exception will # be raised. 
- config.action_controller.action_on_unpermitted_parameters = :log + config.action_controller.action_on_unpermitted_parameters = :raise # Raises error for missing translations config.action_view.raise_on_missing_translations = true diff --git a/db/migrate/20160614230504_analysis_jobs_column_changes.rb b/db/migrate/20160614230504_analysis_jobs_column_changes.rb new file mode 100644 index 00000000..a0201003 --- /dev/null +++ b/db/migrate/20160614230504_analysis_jobs_column_changes.rb @@ -0,0 +1,10 @@ +class AnalysisJobsColumnChanges < ActiveRecord::Migration + def change + + # playing with the state machines, we don't want defaults set + change_column_default :analysis_jobs, :overall_status, nil + + # make use of new JSON support in rails + change_column :analysis_jobs, :overall_progress, :JSON, {cast_as: 'json'} + end +end diff --git a/db/migrate/20160712051359_add_extra_scripts_fields.rb b/db/migrate/20160712051359_add_extra_scripts_fields.rb new file mode 100644 index 00000000..dc847f8a --- /dev/null +++ b/db/migrate/20160712051359_add_extra_scripts_fields.rb @@ -0,0 +1,7 @@ +class AddExtraScriptsFields < ActiveRecord::Migration + def change + + # extra options to be passed to actions as part of hash when creating actions + add_column :scripts, :analysis_action_params, :JSON + end +end diff --git a/db/migrate/20160726014747_add_cancel_started_at_to_analysis_jobs_items.rb b/db/migrate/20160726014747_add_cancel_started_at_to_analysis_jobs_items.rb new file mode 100644 index 00000000..fceb8037 --- /dev/null +++ b/db/migrate/20160726014747_add_cancel_started_at_to_analysis_jobs_items.rb @@ -0,0 +1,5 @@ +class AddCancelStartedAtToAnalysisJobsItems < ActiveRecord::Migration + def change + add_column :analysis_jobs_items, :cancel_started_at, :datetime, null: true + end +end diff --git a/db/structure.sql b/db/structure.sql index 30836b4a..881c5b3c 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -52,9 +52,9 @@ CREATE TABLE analysis_jobs ( description text, 
saved_search_id integer NOT NULL, started_at timestamp without time zone, - overall_status character varying DEFAULT 'new'::character varying NOT NULL, + overall_status character varying NOT NULL, overall_status_modified_at timestamp without time zone NOT NULL, - overall_progress text NOT NULL, + overall_progress json NOT NULL, overall_progress_modified_at timestamp without time zone NOT NULL, overall_count integer NOT NULL, overall_duration_seconds numeric(14,4) NOT NULL, @@ -94,7 +94,8 @@ CREATE TABLE analysis_jobs_items ( created_at timestamp without time zone NOT NULL, queued_at timestamp without time zone, work_started_at timestamp without time zone, - completed_at timestamp without time zone + completed_at timestamp without time zone, + cancel_started_at timestamp without time zone ); @@ -474,7 +475,8 @@ CREATE TABLE scripts ( created_at timestamp without time zone NOT NULL, executable_command text NOT NULL, executable_settings text NOT NULL, - executable_settings_media_type character varying(255) DEFAULT 'text/plain'::character varying + executable_settings_media_type character varying(255) DEFAULT 'text/plain'::character varying, + analysis_action_params json ); @@ -1820,3 +1822,9 @@ INSERT INTO schema_migrations (version) VALUES ('20160226130353'); INSERT INTO schema_migrations (version) VALUES ('20160420030414'); +INSERT INTO schema_migrations (version) VALUES ('20160614230504'); + +INSERT INTO schema_migrations (version) VALUES ('20160712051359'); + +INSERT INTO schema_migrations (version) VALUES ('20160726014747'); + diff --git a/lib/modules/aasm_helpers.rb b/lib/modules/aasm_helpers.rb new file mode 100644 index 00000000..6f24956a --- /dev/null +++ b/lib/modules/aasm_helpers.rb @@ -0,0 +1,24 @@ +# TODO: I'm pretty sure there is a better way to do this? 
+module AASMHelpers + + def may_transition_to_state(state) + events = events_for_transition_to_state(state) + + events.size == 1 + end + + def transition_to_state(state) + events = events_for_transition_to_state(state) + + raise "Cannot transition to next state (#{events.size} possible events found)" if events.size != 1 + + self.public_send(events[0].name, state) + end + + def events_for_transition_to_state(state) + # find the event in the state machine to fire + self.aasm.events.find_all do |event| + event.transitions_to_state?(state) + end + end +end \ No newline at end of file diff --git a/lib/validators/existence_validator.rb b/lib/validators/existence_validator.rb index 635622e6..d8207fa4 100644 --- a/lib/validators/existence_validator.rb +++ b/lib/validators/existence_validator.rb @@ -1,3 +1,5 @@ +# WARNING: this validator reloads associations if they are not loaded whenever the model is saved. +# This causes very inefficient behaviour - particularly for updates. class ExistenceValidator < ActiveModel::EachValidator # Required for Rails 4.2 diff --git a/spec/acceptance/analysis_jobs_items_spec.rb b/spec/acceptance/analysis_jobs_items_spec.rb index d78136b3..c48cf79a 100644 --- a/spec/acceptance/analysis_jobs_items_spec.rb +++ b/spec/acceptance/analysis_jobs_items_spec.rb @@ -65,10 +65,9 @@ def create_root_dir(dir = 'system') let!(:harvester_token) { Creation::Common.create_user_token(harvester_user) } let(:body_attributes) { - FactoryGirl.attributes_for(:analysis_jobs_item, - analysis_job_id: analysis_job.id, - audio_recording_id: audio_recording.id - ).to_json + { + 'status': 'queued' + }.to_json } ################################ @@ -261,14 +260,14 @@ def create_root_dir(dir = 'system') }) end - patch '/analysis_jobs/:analysis_job_id/audio_recordings/:audio_recording_id' do + put '/analysis_jobs/:analysis_job_id/audio_recordings/:audio_recording_id' do analysis_jobs_items_id_param analysis_jobs_items_body_params let(:audio_recording_id) { 
analysis_jobs_item.audio_recording_id } let(:analysis_job_id) { analysis_job.id } let(:raw_post) { body_attributes } let(:authentication_token) { harvester_token } - standard_request_options(:patch, 'UPDATE (as harvester)', :ok, { + standard_request_options(:put, 'UPDATE (as harvester)', :ok, { expected_json_path: 'data/analysis_job_id' }) end diff --git a/spec/controllers/analysis_jobs_controller_spec.rb b/spec/controllers/analysis_jobs_controller_spec.rb deleted file mode 100644 index 289c4eca..00000000 --- a/spec/controllers/analysis_jobs_controller_spec.rb +++ /dev/null @@ -1,160 +0,0 @@ -#require 'rails_helper' -# -## This spec was generated by rspec-rails when you ran the scaffold generator. -## It demonstrates how one might use RSpec to specify the controller code that -## was generated by Rails when you ran the scaffold generator. -## -## It assumes that the implementation code is generated by the rails scaffold -## generator. If you are using any extension libraries to generate different -## controller code, this generated spec may or may not pass. -## -## It only uses APIs available in rails and/or rspec-rails. There are a number -## of tools you can use to make these specs even more expressive, but we're -## sticking to rails and rspec-rails APIs to keep things simple and stable. -## -## Compared to earlier versions of this generator, there is very limited use of -## stubs and message expectations in this spec. Stubs are only used when there -## is no simpler way to get a handle on the object needed for the example. -## Message expectations are only used when there is no simpler way to specify -## that an instance is receiving a specific message. -# -#describe JobsController do -# -# # This should return the minimal set of attributes required to create a valid -# # Job. As you add validations to Job, be sure to -# # adjust the attributes here as well. 
-# let(:valid_attributes) { { "name" => "MyString" } } -# -# # This should return the minimal set of values that should be in the session -# # in order to pass any filters (e.g. authentication) defined in -# # JobsController. Be sure to keep this updated too. -# let(:valid_session) { {} } -# -# describe "GET index" do -# it "assigns all jobs as @jobs" do -# job = Job.create! valid_attributes -# get :index, {}, valid_session -# assigns(:jobs).should eq([job]) -# end -# end -# -# describe "GET show" do -# it "assigns the requested job as @job" do -# job = Job.create! valid_attributes -# get :show, {:id => job.to_param}, valid_session -# assigns(:job).should eq(job) -# end -# end -# -# describe "GET new" do -# it "assigns a new job as @job" do -# get :new, {}, valid_session -# assigns(:job).should be_a_new(Job) -# end -# end -# -# describe "GET edit" do -# it "assigns the requested job as @job" do -# job = Job.create! valid_attributes -# get :edit, {:id => job.to_param}, valid_session -# assigns(:job).should eq(job) -# end -# end -# -# describe "POST create" do -# describe "with valid params" do -# it "creates a new Job" do -# expect { -# post :create, {:job => valid_attributes}, valid_session -# }.to change(Job, :count).by(1) -# end -# -# it "assigns a newly created job as @job" do -# post :create, {:job => valid_attributes}, valid_session -# assigns(:job).should be_a(Job) -# assigns(:job).should be_persisted -# end -# -# it "redirects to the created job" do -# post :create, {:job => valid_attributes}, valid_session -# response.should redirect_to(Job.last) -# end -# end -# -# describe "with invalid params" do -# it "assigns a newly created but unsaved job as @job" do -# # Trigger the behavior that occurs when invalid params are submitted -# Job.any_instance.stub(:save).and_return(false) -# post :create, {:job => { "name" => "invalid value" }}, valid_session -# assigns(:job).should be_a_new(Job) -# end -# -# it "re-renders the 'new' template" do -# # Trigger the 
behavior that occurs when invalid params are submitted -# Job.any_instance.stub(:save).and_return(false) -# post :create, {:job => { "name" => "invalid value" }}, valid_session -# response.should render_template("new") -# end -# end -# end -# -# describe "PUT update" do -# describe "with valid params" do -# it "updates the requested job" do -# job = Job.create! valid_attributes -# # Assuming there are no other jobs in the database, this -# # specifies that the Job created on the previous line -# # receives the :update_attributes message with whatever params are -# # submitted in the request. -# Job.any_instance.should_receive(:update_attributes).with({ "name" => "MyString" }) -# put :update, {:id => job.to_param, :job => { "name" => "MyString" }}, valid_session -# end -# -# it "assigns the requested job as @job" do -# job = Job.create! valid_attributes -# put :update, {:id => job.to_param, :job => valid_attributes}, valid_session -# assigns(:job).should eq(job) -# end -# -# it "redirects to the job" do -# job = Job.create! valid_attributes -# put :update, {:id => job.to_param, :job => valid_attributes}, valid_session -# response.should redirect_to(job) -# end -# end -# -# describe "with invalid params" do -# it "assigns the job as @job" do -# job = Job.create! valid_attributes -# # Trigger the behavior that occurs when invalid params are submitted -# Job.any_instance.stub(:save).and_return(false) -# put :update, {:id => job.to_param, :job => { "name" => "invalid value" }}, valid_session -# assigns(:job).should eq(job) -# end -# -# it "re-renders the 'edit' template" do -# job = Job.create! 
valid_attributes -# # Trigger the behavior that occurs when invalid params are submitted -# Job.any_instance.stub(:save).and_return(false) -# put :update, {:id => job.to_param, :job => { "name" => "invalid value" }}, valid_session -# response.should render_template("edit") -# end -# end -# end -# -# describe "DELETE destroy" do -# it "destroys the requested job" do -# job = Job.create! valid_attributes -# expect { -# delete :destroy, {:id => job.to_param}, valid_session -# }.to change(Job, :count).by(-1) -# end -# -# it "redirects to the jobs list" do -# job = Job.create! valid_attributes -# delete :destroy, {:id => job.to_param}, valid_session -# response.should redirect_to(jobs_url) -# end -# end -# -#end diff --git a/spec/factories/analysis_job_factory.rb b/spec/factories/analysis_job_factory.rb index 2d1ab10a..a417c52e 100644 --- a/spec/factories/analysis_job_factory.rb +++ b/spec/factories/analysis_job_factory.rb @@ -14,7 +14,9 @@ overall_data_length_bytes 1024 started_at { Time.zone.now } - overall_status_modified_at { Time.zone.now } + + # should be set by the workflow + #overall_status_modified_at { Time.zone.now } overall_progress_modified_at { Time.zone.now } end diff --git a/spec/factories/analysis_jobs_item_factory.rb b/spec/factories/analysis_jobs_item_factory.rb index 780bc038..837dd594 100644 --- a/spec/factories/analysis_jobs_item_factory.rb +++ b/spec/factories/analysis_jobs_item_factory.rb @@ -5,8 +5,10 @@ audio_recording queue_id { SecureRandom.uuid } - status "new" + # Handled by workflow + #status "new" + #queue_id { SecureRandom.uuid } end end \ No newline at end of file diff --git a/spec/factories/script_factory.rb b/spec/factories/script_factory.rb index 3debc205..fccabcab 100644 --- a/spec/factories/script_factory.rb +++ b/spec/factories/script_factory.rb @@ -10,6 +10,7 @@ sequence(:executable_command) { |n| "executable command #{n}"} sequence(:executable_settings) { |n| "executable settings #{n}"} sequence(:executable_settings_media_type) 
{ |n| 'text/plain' } + sequence(:analysis_action_params) { |n| {custom_setting: n}} creator diff --git a/spec/helpers/resque_helper.rb b/spec/helpers/resque_helper.rb index 9ab76b65..6086c9f4 100644 --- a/spec/helpers/resque_helper.rb +++ b/spec/helpers/resque_helper.rb @@ -23,7 +23,8 @@ def without_forking # @param [Boolean] verbose # @param [Boolean] fork # @return [Array] worker, job -def emulate_resque_worker(queue, verbose, fork) +# @param [String] override_class - specify to switch what type actually processes the payload at the last minute. Useful for testing. +def emulate_resque_worker(queue, verbose, fork, override_class = nil) queue = queue || 'test_queue' worker = Resque::Worker.new(queue) @@ -49,9 +50,23 @@ def worker.done_working else job = worker.reserve - finished_job = worker.perform(job) - job = finished_job + + if !job.nil? + job.payload["class"] = override_class if override_class + finished_job = worker.perform(job) + job = finished_job + end end [worker, job] +end + +class FakeJob + + def self.perform(*job_args) + { + job_args: job_args, + result: Time.now + } + end end \ No newline at end of file diff --git a/spec/lib/creation.rb b/spec/lib/creation.rb index 6865ec0a..19f282d3 100644 --- a/spec/lib/creation.rb +++ b/spec/lib/creation.rb @@ -28,6 +28,22 @@ def create_entire_hierarchy prepare_analysis_jobs_item end + # create audio recordings and all parent entities + def create_audio_recordings_hierarchy + prepare_users + + prepare_project + + # available after permission system is upgraded + #prepare_permission_owner + prepare_permission_writer + prepare_permission_reader + + prepare_site + + prepare_audio_recording + end + def prepare_users let!(:admin_user) { FactoryGirl.create(:admin) } let!(:admin_token) { Common.create_user_token(admin_user) } @@ -182,8 +198,13 @@ def create_audio_event_comment(creator, audio_event) FactoryGirl.create(:comment, creator: creator, audio_event: audio_event) end - def create_saved_search(creator, project) - 
saved_search = FactoryGirl.create(:saved_search, creator: creator) + def create_saved_search(creator, project, stored_query = nil) + if stored_query.nil? + saved_search = FactoryGirl.create(:saved_search, creator: creator) + else + saved_search = FactoryGirl.create(:saved_search, creator: creator, stored_query: stored_query) + end + saved_search.projects << project saved_search.save! saved_search diff --git a/spec/models/analysis_job_spec.rb b/spec/models/analysis_job_spec.rb index 4b58e537..1904615f 100644 --- a/spec/models/analysis_job_spec.rb +++ b/spec/models/analysis_job_spec.rb @@ -1,5 +1,6 @@ require 'rails_helper' require 'helpers/resque_helper' +require 'aasm/rspec' describe AnalysisJob, type: :model do it 'has a valid factory' do @@ -11,9 +12,6 @@ it { is_expected.to belong_to(:updater).with_foreign_key(:updater_id) } it { is_expected.to belong_to(:deleter).with_foreign_key(:deleter_id) } - # .with_predicates(true).with_multiple(false) - it { is_expected.to enumerize(:overall_status).in(*AnalysisJob::AVAILABLE_JOB_STATUS) } - it { is_expected.to validate_presence_of(:name) } it 'is invalid without a name' do expect(build(:analysis_job, name: nil)).not_to be_valid @@ -25,10 +23,11 @@ end it 'should ensure name is unique (case-insensitive)' do create(:analysis_job, name: 'There ain\'t room enough in this town for two of us sonny!') - as2 = build(:analysis_job, name: 'THERE AIN\'T ROOM ENOUGH IN THIS TOWN FOR TWO OF US SONNY!') - expect(as2).not_to be_valid - expect(as2.valid?).to be_falsey - expect(as2.errors[:name].size).to eq(1) + aj2 = build(:analysis_job, name: 'THERE AIN\'T ROOM ENOUGH IN THIS TOWN FOR TWO OF US SONNY!') + + expect(aj2).not_to be_valid + expect(aj2.valid?).to be_falsey + expect(aj2.errors[:name].size).to eq(1) end it 'fails validation when script is nil' do @@ -55,66 +54,77 @@ expect(build(:analysis_job, saved_search: nil)).not_to be_valid end - context 'job items' do - it 'extracts the correct payloads' do - project_1 = 
create(:project) - user = project_1.creator - site_1 = create(:site, projects: [project_1], creator: user) - create(:audio_recording, site: site_1, creator: user, uploader: user) + describe 'state machine' do + let(:analysis_job) { + create(:analysis_job) + } - project_2 = create(:project, creator: user) - site_2 = create(:site, projects: [project_2], creator: user) - audio_recording_2 = create(:audio_recording, site: site_2, creator: user, uploader: user) + it 'defines the initialize_workflow event (allows before_save->new)' do + analysis_job = build(:analysis_job, overall_status_modified_at: nil) + + expect(analysis_job).to transition_from(:before_save).to(:new).on_event(:initialize_workflow) + end - ss = create(:saved_search, creator: user, stored_query: {id: {in: [audio_recording_2.id]}}) - s = create(:script, creator: user, verified: true) + it 'can\'t transition to new twice' do + analysis_job = build(:analysis_job, overall_status_modified_at: nil) - aj = create(:analysis_job, creator: user, script: s, saved_search: ss) + analysis_job.initialize_workflow - payload = aj.create_payload(audio_recording_2) - result = aj.begin_work(user) - - # ensure result is as expected - expect(result.size).to eq(1) - expect(result[0].is_a?(Hash)).to be_truthy - expect(result[0][:payload][:command_format]).to eq(aj.script.executable_command) - expect(result[0][:error]).to be_blank - expect(result[0][:payload]).to eq(payload) - expect(result[0][:result].is_a?(String)).to be_truthy - expect(result[0][:result].size).to eq(32) + expect(analysis_job).to_not allow_event(:initialize_workflow) end - it 'enqueues and processes payloads' do - project_1 = create(:project) - user = project_1.creator - site_1 = create(:site, projects: [project_1], creator: user) + it 'calls initialize_workflow when created' do + analysis_job = build(:analysis_job) - create(:audio_recording, site: site_1, creator: user, uploader: user) + allow(analysis_job).to receive(:save!).and_call_original + 
allow(analysis_job).to receive(:initialize_workflow).and_call_original - project_2 = create(:project, creator: user) - site_2 = create(:site, projects: [project_2], creator: user) - audio_recording_2 = create(:audio_recording, site: site_2, creator: user, uploader: user) + analysis_job.save! - ss = create(:saved_search, creator: user, stored_query: {id: {in: [audio_recording_2.id]}}) - s = create(:script, creator: user, verified: true) + expect(analysis_job).to have_received(:save!) + expect(analysis_job).to have_received(:initialize_workflow).once + end - aj = create(:analysis_job, creator: user, script: s, saved_search: ss) + it 'defines the prepare event' do + allow(analysis_job).to receive(:process!).and_return(nil) - result = aj.begin_work(user) + expect(analysis_job).to transition_from(:new).to(:preparing).on_event(:prepare) + end - queue_name = Settings.actions.analysis.queue + it 'defines the process event' do + allow(analysis_job).to receive(:all_job_items_completed?).and_return(false) + expect(analysis_job).to transition_from(:preparing).to(:processing).on_event(:process) + end - expect(Resque.size(queue_name)).to eq(1) - worker, job = emulate_resque_worker(queue_name, false, true) - expect(Resque.size(queue_name)).to eq(0) + it 'defines the process event - and complete if all items are done!' 
do + allow(analysis_job).to receive(:all_job_items_completed?).and_return(true) + expect(analysis_job).to transition_from(:preparing).to(:completed).on_event(:process) + end + + it 'defines the suspend event' do + expect(analysis_job).to transition_from(:processing).to(:suspended).on_event(:suspend) + end - #expect(BawWorkers::ResqueApi.jobs.inspect).to eq(1) + it 'defines the resume event' do + expect(analysis_job).to transition_from(:suspended).to(:processing).on_event(:resume) + end + it 'defines the complete event' do + expect(analysis_job).to transition_from(:processing).to(:completed).on_event(:complete) + end + it 'defines the retry event - which does not work if all items successful' do + allow(analysis_job).to receive(:are_any_job_items_failed?).and_return(false) + allow(analysis_job).to receive(:retry_job).and_return(nil) + expect(analysis_job).not_to allow_event(:retry) end + it 'defines the retry event' do + allow(analysis_job).to receive(:are_any_job_items_failed?).and_return(true) + expect(analysis_job).to transition_from(:completed).to(:processing).on_event(:retry) + end end end \ No newline at end of file diff --git a/spec/models/analysis_jobs_item_spec.rb b/spec/models/analysis_jobs_item_spec.rb index 0b0cc200..3a6cc183 100644 --- a/spec/models/analysis_jobs_item_spec.rb +++ b/spec/models/analysis_jobs_item_spec.rb @@ -1,5 +1,6 @@ require 'rails_helper' require 'helpers/resque_helper' +require 'aasm/rspec' describe AnalysisJobsItem, type: :model do let!(:analysis_jobs_item) { create(:analysis_jobs_item) } @@ -36,11 +37,6 @@ it { should validate_uniqueness_of(:queue_id) } - it { - is_expected.to enumerize(:status) - .in(*AnalysisJobsItem::AVAILABLE_ITEM_STATUS_SYMBOLS) - .with_default(:new) - } it 'does not allow dates greater than now for created_at' do analysis_jobs_item.created_at = Time.zone.now + 1.day @@ -62,99 +58,50 @@ expect(analysis_jobs_item).not_to be_valid end - describe 'analysis_jobs_item state transitions' do - [ - # old, new, 
[]shouldPass|shouldPassAndUpdateField] - [:new, :new, true], - [:new, :queued, :queued_at], - [:new, :working, false], - [:new, :successful, false], - [:new, :failed, false], - [:new, :timed_out, false], - [:new, :cancelled, :completed_at], - [:new, nil, false], - [:queued, :new, false], - [:queued, :queued, true], - [:queued, :working, :work_started_at], - [:queued, :successful, false], - [:queued, :failed, false], - [:queued, :timed_out, false], - [:queued, :cancelled, :completed_at], - [:queued, nil, false], - [:working, :new, false], - [:working, :queued, false], - [:working, :working, true], - [:working, :successful, :completed_at], - [:working, :failed, :completed_at], - [:working, :timed_out, :completed_at], - [:working, :cancelled, :completed_at], - [:working, nil, false], - [:successful, :new, false], - [:successful, :queued, false], - [:successful, :working, false], - [:successful, :successful, true], - [:successful, :failed, false], - [:successful, :timed_out, false], - [:successful, :cancelled, false], - [:successful, nil, false], - [:failed, :new, false], - [:failed, :queued, false], - [:failed, :working, false], - [:failed, :successful, false], - [:failed, :failed, true], - [:failed, :timed_out, false], - [:failed, :cancelled, false], - [:failed, nil, false], - [:timed_out, :new, false], - [:timed_out, :queued, false], - [:timed_out, :working, false], - [:timed_out, :successful, false], - [:timed_out, :failed, false], - [:timed_out, :timed_out, true], - [:timed_out, :cancelled, false], - [:timed_out, nil, false], - [:cancelled, :new, false], - [:cancelled, :queued, false], - [:cancelled, :working, false], - [:cancelled, :successful, false], - [:cancelled, :failed, false], - [:cancelled, :timed_out, false], - [:cancelled, :cancelled, true], - [:cancelled, nil, false], - [nil, :new, false], - [nil, :queued, false], - [nil, :working, false], - [nil, :successful, false], - [nil, :failed, false], - [nil, :timed_out, false], - [nil, :cancelled, false], - # 
if all the other combinations hold true this case will never happen anyway. - # note: nil is a valid value for system queries, but we should never be able to - # transition the model to nil (other cases). - [nil, nil, true] - ].each do |test_case| - - it "tests state transition #{ test_case[0].to_s }→#{ test_case[1].to_s }" do - - analysis_jobs_item.write_attribute(:status, test_case[0]) - - date_field = test_case[2] - if date_field - if date_field.is_a? Symbol - first_date = analysis_jobs_item[date_field] - end - - analysis_jobs_item.status = test_case[1] - - expect(analysis_jobs_item.status == test_case[1]).to be true - expect(analysis_jobs_item[date_field]).to_not eq(first_date) if date_field.is_a? Symbol - else - expect { - analysis_jobs_item.status = test_case[1] - }.to raise_error - - end - end + describe 'state machine' do + let(:analysis_jobs_item) { + create(:analysis_jobs_item) + } + + it 'defines the queue event' do + expect(analysis_jobs_item).to transition_from(:new).to(:queued).on_event(:queue) + end + + it 'defines the work event' do + expect(analysis_jobs_item).to transition_from(:queued).to(:working).on_event(:work) + end + + it 'defines the succeed event' do + expect(analysis_jobs_item).to transition_from(:working).to(:successful).on_event(:succeed) + expect(analysis_jobs_item).to transition_from(:cancelling).to(:successful).on_event(:succeed) + expect(analysis_jobs_item).to transition_from(:cancelled).to(:successful).on_event(:succeed) + end + + it 'defines the fail event' do + expect(analysis_jobs_item).to transition_from(:working).to(:failed).on_event(:fail) + expect(analysis_jobs_item).to transition_from(:cancelling).to(:failed).on_event(:fail) + expect(analysis_jobs_item).to transition_from(:cancelled).to(:failed).on_event(:fail) + end + + it 'defines the time_out event' do + expect(analysis_jobs_item).to transition_from(:working).to(:timed_out).on_event(:time_out) + expect(analysis_jobs_item).to 
transition_from(:cancelling).to(:timed_out).on_event(:time_out) + expect(analysis_jobs_item).to transition_from(:cancelled).to(:timed_out).on_event(:time_out) + end + + it 'defines the cancel event' do + expect(analysis_jobs_item).to transition_from(:queued).to(:cancelling).on_event(:cancel) + end + + it 'defines the confirm_cancel event' do + expect(analysis_jobs_item).to transition_from(:cancelling).to(:cancelled).on_event(:confirm_cancel) + end + + it 'defines the retry event' do + expect(analysis_jobs_item).to transition_from(:failed).to(:queued).on_event(:retry) + expect(analysis_jobs_item).to transition_from(:timed_out).to(:queued).on_event(:retry) + expect(analysis_jobs_item).to transition_from(:cancelling).to(:queued).on_event(:retry) + expect(analysis_jobs_item).to transition_from(:cancelled).to(:queued).on_event(:retry) end end @@ -293,4 +240,35 @@ end end end + + context 'job items' do + + it 'extracts the correct payloads' do + project_1 = create(:project) + user = project_1.creator + site_1 = create(:site, projects: [project_1], creator: user) + + create(:audio_recording, site: site_1, creator: user, uploader: user) + + project_2 = create(:project, creator: user) + site_2 = create(:site, projects: [project_2], creator: user) + audio_recording_2 = create(:audio_recording, site: site_2, creator: user, uploader: user) + + ss = create(:saved_search, creator: user, stored_query: {id: {in: [audio_recording_2.id]}}) + s = create(:script, creator: user, verified: true) + + aj = create(:analysis_job, creator: user, script: s, saved_search: ss) + + payload = AnalysisJobsItem.create_action_payload(aj, audio_recording_2) + + # ensure result is as expected + expect(payload.is_a?(Hash)).to be_truthy + expect(payload[:command_format]).to eq(aj.script.executable_command) + expect(payload[:uuid]).to eq(audio_recording_2.uuid) + expect(payload[:id]).to eq(audio_recording_2.id) + expect(payload[:datetime_with_offset]).to eq(audio_recording_2.recorded_date.iso8601(3)) + 
expect(payload[:job_id]).to eq(aj.id) + end + + end end \ No newline at end of file diff --git a/spec/requests/analysis_jobs_spec.rb b/spec/requests/analysis_jobs_spec.rb new file mode 100644 index 00000000..7f7dd860 --- /dev/null +++ b/spec/requests/analysis_jobs_spec.rb @@ -0,0 +1,1255 @@ +require 'rails_helper' +require 'helpers/resque_helper' +require 'rspec/mocks' + +# +# Integration testing of AnalysisJobsController controller. +# API level tests should be put in acceptance/analysis_jobs_spec.rb +# + + +describe "Analysis Jobs" do + + create_audio_recordings_hierarchy + + # https://github.com/aasm/aasm#testing + + # create another 5 recordings + let!(:other_recordings) { + [ + Creation::Common.create_audio_recording(writer_user, writer_user, site), + Creation::Common.create_audio_recording(writer_user, writer_user, site), + Creation::Common.create_audio_recording(writer_user, writer_user, site), + Creation::Common.create_audio_recording(writer_user, writer_user, site), + Creation::Common.create_audio_recording(writer_user, writer_user, site) + ] + } + + let!(:saved_search) { + Creation::Common.create_saved_search(writer_user, project, { + 'site_id': {'eq': site.id} + }) + } + + let!(:script) { + FactoryGirl.create( + :script, + creator: admin_user, + executable_command: 'echo "<{file_executable}>" audio2csv /source:"<{file_source}>" /config:"<{file_config}>" /tempdir:"<{dir_temp}>" /output:"<{dir_output}>"', + analysis_action_params: { + file_executable: './AnalysisPrograms/AnalysisPrograms.exe', + copy_paths: [ + './programs/AnalysisPrograms/Logs/log.txt' + ] + }) + } + + before(:all) do + @queue_name = Settings.actions.analysis.queue + + # process one fake job so that the queue exists! 
+ options = { + queue: @queue_name, + verbose: true, + fork: false + } + @worker, job = emulate_resque_worker_with_job(FakeJob, {an_argument: 3}, options) + end + + before(:each) do + @env ||= {} + @env['HTTP_AUTHORIZATION'] = writer_token + + @analysis_job_url = "/analysis_jobs" + end + + def get_queue_count + Resque.size(@queue_name) + end + + def get_failed_queue_count + Resque::Failure.count + end + + def get_items_count(status = nil) + predicates = {analysis_job_id: @analysis_job.id} + predicates[:status] = status if status + AnalysisJobsItem.where(predicates).count + end + + def create_analysis_job + + valid_attributes = { + analysis_job: { + script_id: script.id, + saved_search_id: saved_search.id, + name: 'Analysis Job #' + Time.now.to_s, + custom_settings: script.executable_settings, + description: 'Description...' + } + } + + post @analysis_job_url, valid_attributes, @env + + if response.status != 201 + fail "Failed to create test AnalysisJob, status: #{response.status}" + end + + body = JSON.parse(response.body) + item = AnalysisJob.find(body['data']['id']) + + item + end + + def create_analysis_job_direct + analysis_job = AnalysisJob.new + analysis_job.script_id = script.id + analysis_job.saved_search_id = saved_search.id + analysis_job.name = 'Analysis Job #' + Time.now.to_s + analysis_job.custom_settings = script.executable_settings + analysis_job.description = 'Description...' + analysis_job.creator = writer_user + + analysis_job.save! 
+ + analysis_job + end + + def update_analysis_job(analysis_job_id, overall_status) + update_route = "/analysis_jobs/#{analysis_job_id}" + + @env['HTTP_AUTHORIZATION'] = writer_token + + put update_route, { + analysis_job: { + overall_status: overall_status + } + }, @env + + if response.status != 200 + fail "Failed to update AnalysisJob, status: #{response.status}" + end + end + + def destroy_analysis_job(analysis_job_id) + destroy_route = "/analysis_jobs/#{analysis_job_id}" + + @env['HTTP_AUTHORIZATION'] = writer_token + + delete destroy_route, {}, @env + + [response.status, response] + end + + def test_stats(analysis_job, expected = {}) + opts = { + overall_count: 0, + overall_duration_seconds: 0, + overall_data_length_bytes: 0, + overall_progress: { + queued: 0, + working: 0, + successful: 0, + failed: 0, + total: 0, + cancelled: 0, + cancelling: 0, + new: 0, + timed_out: 0 + } + } + + expected = opts.deep_merge(expected) + + expected.each do |key, value| + + value = value.stringify_keys if value.is_a?(Hash) + + actual = analysis_job[key] + expect(actual).to eq(value), "expected #{actual} for key #{key}, got #{value}" + end + end + + def test_stats_for(analysis_job, expected, n) + opts = { + overall_count: n, + overall_duration_seconds: (n * 60000), + overall_data_length_bytes: (n * 3800), + + } + + expected = opts.deep_merge(expected) + test_stats(analysis_job, expected) + end + + def get_analysis_job_item(analysis_job_id, audio_recording_id) + route = "/analysis_jobs/#{analysis_job_id}/audio_recordings/#{audio_recording_id}" + + # only the harvester user can update job items! 
+ @env['HTTP_AUTHORIZATION'] = harvester_token + + get route, {}, @env + + if response.status != 200 + fail RuntimeError, "Failed to get AnalysisJobItem, status: #{response.status}" + end + + JSON.parse(response.body) + end + + def update_analysis_job_item(analysis_job_id, audio_recording_id, status) + update_route = "/analysis_jobs/#{analysis_job_id}/audio_recordings/#{audio_recording_id}" + + # only the harvester user can update job items! + @env['HTTP_AUTHORIZATION'] = harvester_token + + put update_route, { + analysis_jobs_item: { + status: status + } + }, @env + + if response.status != 200 + fail RuntimeError, "Failed to update AnalysisJobItem, status: #{response.status}" + end + end + + class FakeAnalysisJob + include ActionDispatch::Integration::RequestHelpers + + # super hacky way to inject test-context into this class + class_attribute :test_instance + + def self.perform(resque_id, params) + analysis_params = params['analysis_params'] + mock_result = analysis_params['mock_result'] || 'successful' + skip_completion = analysis_params['skip_completion'] == true + good_cancel_behaviour = analysis_params['good_cancel_behaviour'] == true + + analysis_job_id = analysis_params['job_id'] + audio_recording_id = analysis_params['id'] + + # simulate the working call + if good_cancel_behaviour + status = test_instance.get_analysis_job_item(analysis_job_id, audio_recording_id)['data']['status'] + if status == "cancelling" + test_instance.update_analysis_job_item(analysis_job_id, audio_recording_id, 'cancelled') + return + else + test_instance.update_analysis_job_item(analysis_job_id, audio_recording_id, 'working') + end + else + test_instance.update_analysis_job_item(analysis_job_id, audio_recording_id, 'working') + end + + # do some work + work = { + resque_id: resque_id, + params: params, + result: Time.now + } + + unless skip_completion + # simulate the complete call + test_instance.update_analysis_job_item(analysis_job_id, audio_recording_id, mock_result) + + if 
mock_result == 'failed' + fail 'Fake analysis job failing on purpose' + end + end + end + + def self.on_failure(*args) + test_instance.job_perform_failure = args + end + end + + attr_writer :job_perform_failure + + def perform_job() + @job_perform_failure = nil + + # execute the next available job + FakeAnalysisJob.test_instance = self + emulate_resque_worker(@queue_name, true, false, "FakeAnalysisJob") + + if @job_perform_failure && @job_perform_failure[0].message != 'Fake analysis job failing on purpose' + + fail 'job perform failed: ' + @job_perform_failure.to_s + end + end + + describe 'Creating an analysis job' do + + describe 'status: "new"' do + + before(:each) do + + # stub the prepare_job method so that it is a no-op + # so we can test :new in isolation + allow_any_instance_of(AnalysisJob).to receive(:prepare!).and_return([]) + allow_any_instance_of(AnalysisJob).to receive(:prepare).and_return([]) + + @analysis_job = create_analysis_job + end + + it 'should be :new after just being created' do + expect(@analysis_job.new?).to be_truthy + end + + it 'has the correct progress statistics' do + test_stats(@analysis_job) + end + + it 'ensures no new jobs exist in the message queue' do + expect(get_queue_count).to eq(0) + expect(get_failed_queue_count).to eq(0) + end + + it 'ensures no new AnalysisJobsItems exist' do + expect(get_items_count).to eq(0) + end + + end + + describe 'status: "preparing"' do + + + before(:each) do + ActionMailer::Base.deliveries.clear + + # stub batch-size so we so batches that are relevant to our test case. + allow(AnalysisJob).to receive(:batch_size).and_return(2) + + # don't allow automatic transition to :processing state + allow_any_instance_of(AnalysisJob).to receive(:process!).and_wrap_original do |m, *args| + m.receiver.send(:update_job_progress) + m.receiver.save! 
+ end + + @analysis_job = create_analysis_job + end + + it 'emails the user when the job is starting to prepare' do + mail = ActionMailer::Base.deliveries.last + + expect(mail['to'].to_s).to include(writer_user.email) + expect(mail['subject'].value).to include(@analysis_job.name) + expect(mail['subject'].value).to include('New analysis job') + expect(mail.body.raw_source).to include('ID: ' + @analysis_job.id.to_s) + expect(mail.body.raw_source).to include('localhost:3000/analysis_jobs/' + @analysis_job.id.to_s) + end + + + it 'should be :preparing when preparing' do + expect(@analysis_job.preparing?).to be_truthy + end + + it 'has the correct progress statistics' do + test_stats_for(@analysis_job, { + overall_progress: { + queued: 6, + working: 0, + successful: 0, + failed: 0, + total: 6 + } + }, 6) + end + + it 'ensures new job items exist in the message queue' do + expect(get_queue_count).to eq(6) + expect(get_failed_queue_count).to eq(0) + + end + + it 'ensures new AnalysisJobsItems exist' do + expect(get_items_count).to eq(6) + end + + end + + describe 'distributed patterns, "preparing" -> "completed"' do + + it 'allows for some jobs items to have be started while still preparing' do + + allow(AnalysisJob).to receive(:batch_size).and_return(2) + + @analysis_job = create_analysis_job_direct + + # hook in and run one job for every batch + # we're simulating a distributed system here + # for each two recordings added, process one afterwards + # i.e. prepare 1,2; run 1; prepare 3,4; run 2; prepare 5,6; run 3; + allow(@analysis_job).to receive(:prepare_analysis_job_item).and_wrap_original do |m, *args| + result = m.call(*args) + + # what ever is next on the queue ( but just 1) + perform_job + + result + end + + # actually kick off the prepare + @analysis_job.prepare! 
+ + expect(get_queue_count).to eq(3) + expect(get_failed_queue_count).to eq(0) + + expect(get_items_count).to eq(6) + + test_stats_for(@analysis_job, { + overall_progress: { + queued: 3, + working: 0, + successful: 3, + failed: 0, + total: 6 + } + }, 6) + + expect(@analysis_job.processing?).to be_truthy + end + + + it 'allows for all jobs items to have complete while still preparing - and to skip `processing` completely' do + + allow(AnalysisJob).to receive(:batch_size).and_return(2) + + @analysis_job = create_analysis_job_direct + + # hook in and run one job for every batch + # we're simulating a distributed system here + # for each two recordings added, process one afterwards + # i.e. prepare 1,2; run 1; prepare 3,4; run 2; prepare 5,6; run 3; + allow(@analysis_job).to receive(:prepare_analysis_job_item).and_wrap_original do |m, *args| + result = m.call(*args) + + # what ever is next on the queue - two should have just been added, so do two + perform_job + perform_job + + result + end + + # actually kick off the prepare + @analysis_job.prepare! 
+ + expect(get_queue_count).to eq(0) + expect(get_failed_queue_count).to eq(0) + + expect(get_items_count).to eq(6) + + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + working: 0, + successful: 6, + failed: 0, + total: 6 + } + }, 6) + + expect(@analysis_job.completed?).to be_truthy + end + + end + + describe 'status: "processing"' do + + before(:each) do + ActionMailer::Base.deliveries.clear + + # here we simulate a system with a variety of sub-statuses + states = [ + [true, nil], # 'working' + [false, 'successful'], + [false, 'timed_out'], + [false, 'failed'], + # the following two aren't run + [false, 'successful'], + [true, nil] + ] + + allow(AnalysisJobsItem).to receive(:create_action_payload).and_wrap_original do |m, *args| + result = m.call(*args) + + mock_behaviour = states.shift + + # augment the hash + result[:skip_completion] = mock_behaviour[0] + result[:mock_result] = mock_behaviour[1] + + result + end + + @analysis_job = create_analysis_job + + # pause time + #Timecop.freeze + + + # start 4/6 of actions + (1..4).each { perform_job } + end + + it 'should be :processing when processing' do + expect(@analysis_job.processing?).to be_truthy + end + + it 'ensures some all jobs exist in the message queue' do + expect(get_queue_count).to eq(2) + expect(get_failed_queue_count).to eq(1) + + end + + it 'ensures all AnalysisJobsItems exist' do + expect(get_items_count).to eq(6) + end + + it 'correctly reports progress statistics' do + # it 'allows for jobs items to have all different states' + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 2, + working: 1, + successful: 1, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + + + end + + it 'invalidates cached progress statistics every whenever an analysis_job_item is updated' do + updated_at = @analysis_job.updated_at + + # stats shouldn't have changed + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 2, + working: 
1, + successful: 1, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + expect(@analysis_job.updated_at).to eq(updated_at) + + # complete a job to finish + perform_job + + # reload to see the change + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 1, + working: 1, + successful: 2, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + expect(@analysis_job.updated_at).to eq(updated_at) + + + # start another job - do not complete (see `states` in before(:each)) + perform_job + + # reload to see the change + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + working: 2, + successful: 2, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + expect(@analysis_job.updated_at).to eq(updated_at) + end + + end + + describe 'status: "suspended"' do + + before(:each) do + ActionMailer::Base.deliveries.clear + + # here we simulate a system with a variety of sub-statuses + # [skip_completion, mock_result, good_cancel_behaviour] + states = [ + [true, nil], # 'working' + [false, 'successful'], + [false, 'timed_out'], + [false, 'failed'], + # the following two aren't run + [false, 'successful', true], + [true, 'successful'] + ] + + @working_audio_recording_id + + allow(AnalysisJobsItem).to receive(:create_action_payload).and_wrap_original do |m, *args| + result = m.call(*args) + + mock_behaviour = states.shift + + # augment the hash + result[:skip_completion] = mock_behaviour[0] + result[:mock_result] = mock_behaviour[1] + result[:good_cancel_behaviour] = mock_behaviour[2] || false + + @working_audio_recording_id = result[:id] if @working_audio_recording_id.nil? 
+ + result + end + + @analysis_job = create_analysis_job + + + # start 4/6 of actions + (1..4).each { perform_job } + + expect(get_queue_count).to eq(2) + expect(get_failed_queue_count).to eq(1) + expect(get_items_count).to eq(6) + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 2, + working: 1, + successful: 1, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + + update_analysis_job(@analysis_job.id, 'suspended') + @analysis_job.reload + end + + + it 'should be :suspended when suspended' do + expect(@analysis_job.suspended?).to be_truthy + end + + it 'DOES NOTHING to the items in to the message queue' do + expect(get_queue_count).to eq(2) + expect(get_failed_queue_count).to eq(1) + end + + it 'ensures all AnalysisJobsItems that are :queued are reset to :cancelling' do + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 0, + cancelling: 2, + working: 1, + successful: 1, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + + expect(get_items_count).to eq(6) + expect(get_items_count('cancelling')).to eq(2) + end + + it 'will let job items finish (i.e. 
race conditions)' do + update_analysis_job_item(@analysis_job.id, @working_audio_recording_id, 'successful') + + + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 0, + cancelling: 2, + working: 0, + successful: 2, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + end + + it 'It confirms cancellation when the job tries to run (:cancelling --> :cancelled)' do + perform_job + + expect { + perform_job + }.to raise_error(RuntimeError, /Failed to update AnalysisJobItem, status: 422/) + + # no more jobs are left, this should be a no-op + perform_job + + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 2, + cancelling: 0, + working: 1, + successful: 1, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + end + + it 'can not resume when items are still queued' do + item = AnalysisJobsItem.first + # skip callbacks, validations, ActiveModelBase, etc... + item.update_column(:status, 'queued') + + @analysis_job.reload + + expect { + @analysis_job.resume + }.to raise_error(AASM::InvalidTransition) + end + + describe 'status: "suspended"→"processing"' do + + before(:each) do + # reset our mocks, or else create_payload will be mocked again! 
+ RSpec::Mocks.space.proxy_for(AnalysisJobsItem).reset + + expect(get_items_count).to eq(6) + expect(get_items_count('cancelling')).to eq(2) + + @old_items = AnalysisJobsItem.where(status: 'cancelling').to_a + + # perform one outlying job (--> :cancelled), leave one :cancelling + perform_job + + expect(get_queue_count).to eq(1) + expect(get_failed_queue_count).to eq(1) + + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 1, + cancelling: 1, + working: 1, + successful: 1, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + + # resume job + update_analysis_job(@analysis_job.id, 'processing') + + @analysis_job.reload + end + + it 'should be :processing when processing' do + expect(@analysis_job.processing?).to be_truthy + end + + it 'ADDs all :cancelled jobs items in to the message queue' do + expect(get_queue_count).to eq(2) + expect(get_failed_queue_count).to eq(1) + end + + it 'ensures all AnalysisJobsItems that are :cancelled are reset to :queued (and get updated job ids)' do + expect(get_items_count).to eq(6) + expect(get_items_count('cancelled')).to eq(0) + expect(get_items_count('cancelling')).to eq(0) + expect(get_items_count('queued')).to eq(2) + + + # expect first item (which was cancelled to get a new queue_id) + expect(@old_items[0].queue_id).to_not eq(AnalysisJobsItem.find(@old_items[0].id).queue_id) + + # expect second one to have its old id + expect(@old_items[1].queue_id).to eq(AnalysisJobsItem.find(@old_items[1].id).queue_id) + end + + it 'has the correct progress statistics' do + test_stats_for(@analysis_job, { + overall_progress: { + queued: 2, + working: 1, + successful: 1, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + end + + end + + end + + describe 'status: "completed"' do + before(:each) do + ActionMailer::Base.deliveries.clear + + # here we simulate a system with a variety of sub-statuses + # [skip_completion, mock_result, good_cancel_behaviour] + states = [ + [false, 
'successful'], + [false, 'successful'], + [false, 'timed_out'], + [false, 'failed'], + [false, 'successful'], + [false, 'successful'] + ] + + allow(AnalysisJobsItem).to receive(:create_action_payload).and_wrap_original do |m, *args| + result = m.call(*args) + + mock_behaviour = states.shift + + # augment the hash + result[:skip_completion] = mock_behaviour[0] + result[:mock_result] = mock_behaviour[1] + + result + end + + @analysis_job = create_analysis_job + + + # start all actions - and complete the job! + (1..6).each { perform_job } + + @analysis_job.reload + end + + + it 'should be :completed when completed' do + expect(@analysis_job.completed?).to be_truthy + end + + it 'ensures NO jobs exist in the message queue' do + expect(get_queue_count).to eq(0) + expect(get_failed_queue_count).to eq(1) + end + + it 'ensures all AnalysisJobsItems have one of the completed statuses' do + expect(get_items_count).to eq(6) + expect(AnalysisJobsItem.completed_for_analysis_job(@analysis_job.id).count).to eq(6) + end + + it 'correctly updates progress statistics' do + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 0, + cancelling: 0, + working: 0, + successful: 4, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + end + + it 'emails the user when the job is complete' do + mail = ActionMailer::Base.deliveries.last + + expect(mail['to'].to_s).to include(writer_user.email) + expect(mail['subject'].value).to include(@analysis_job.name) + expect(mail['subject'].value).to include('Completed analysis job') + expect(mail.body.raw_source).to include('ID: ' + @analysis_job.id.to_s) + expect(mail.body.raw_source).to include('localhost:3000/analysis_jobs/' + @analysis_job.id.to_s) + end + + describe 'retrying failed items, status: "completed"→"processing"' do + + before(:each) do + ActionMailer::Base.deliveries.clear + + # reset our mocks, or else create_payload will be mocked again! 
+ RSpec::Mocks.space.proxy_for(AnalysisJobsItem).reset + + # fake a cancelled item + item = AnalysisJobsItem.for_analysis_job(@analysis_job.id).where(status: 'successful').first + item.update_column(:status, 'cancelled') + @analysis_job.check_progress + + @old_items = AnalysisJobsItem.where(status: ['timed_out', 'failed', 'cancelled']).to_a + + expect(get_queue_count).to eq(0) + expect(get_failed_queue_count).to eq(1) + + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 1, + cancelling: 0, + working: 0, + successful: 3, + failed: 1, + timed_out: 1, + total: 6 + } + }, 6) + + # resume job + update_analysis_job(@analysis_job.id, 'processing') + + @analysis_job.reload + end + + it 'should be :processing when processing' do + expect(@analysis_job.processing?).to be_truthy + end + + it 'ADDs all :cancelled jobs items in to the message queue' do + expect(get_queue_count).to eq(3) + expect(get_failed_queue_count).to eq(1) + end + + it 'ensures all AnalysisJobsItems that are failed are reset to :queued (and get updated job ids)' do + expect(get_items_count).to eq(6) + expect(get_items_count('cancelled')).to eq(0) + expect(get_items_count('cancelling')).to eq(0) + expect(get_items_count('queued')).to eq(3) + + @old_items.each do |item| + expect(item.queue_id).to_not eq(AnalysisJobsItem.find(item.id).queue_id) + end + end + + it 'has the correct progress statistics' do + test_stats_for(@analysis_job, { + overall_progress: { + queued: 3, + working: 0, + successful: 3, + failed: 0, + timed_out: 0, + total: 6 + } + }, 6) + end + + it 'emails the user when the job has been retried' do + mail = ActionMailer::Base.deliveries.last + + expect(mail['to'].to_s).to include(writer_user.email) + expect(mail['subject'].value).to include(@analysis_job.name) + expect(mail['subject'].value).to include('Retrying analysis job') + expect(mail.body.raw_source).to include('ID: ' + @analysis_job.id.to_s) + expect(mail.body.raw_source).to 
include('localhost:3000/analysis_jobs/' + @analysis_job.id.to_s) + end + + it 'can still complete!' do + perform_job + perform_job + perform_job + + @analysis_job.reload + expect(@analysis_job.completed?).to be_truthy + expect(get_queue_count).to eq(0) + expect(get_failed_queue_count).to eq(1) + + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + working: 0, + successful: 6, + failed: 0, + timed_out: 0, + total: 6 + } + }, 6) + + mail = ActionMailer::Base.deliveries.last + + expect(mail['to'].to_s).to include(writer_user.email) + expect(mail['subject'].value).to include(@analysis_job.name) + expect(mail['subject'].value).to include('Completed analysis job') + expect(mail.body.raw_source).to include('ID: ' + @analysis_job.id.to_s) + expect(mail.body.raw_source).to include('localhost:3000/analysis_jobs/' + @analysis_job.id.to_s) + end + + end + + end + + + end + + describe 'Deleting an analysis job' do + + describe 'status: "new"' do + + before(:each) do + @analysis_job = create_analysis_job_direct + + expect(@analysis_job.new?).to be_truthy + end + + it 'cannot be deleted while new' do + status_code, response = destroy_analysis_job(@analysis_job.id) + + expect(status_code).to be(409) + expect(response.body).to include('Cannot be deleted while `overall_status` is `new`') + end + + end + + describe 'status: "preparing"' do + + before(:each) do + @analysis_job = create_analysis_job_direct + + # don't allow automatic transition to :processing state + allow_any_instance_of(AnalysisJob).to receive(:process!).and_wrap_original do |m, *args| + m.receiver.send(:update_job_progress) + m.receiver.save! + end + + @analysis_job.prepare! 
+ + expect(@analysis_job.preparing?).to be_truthy + end + + it 'cannot be deleted while preparing' do + status_code, response = destroy_analysis_job(@analysis_job.id) + + expect(status_code).to be(409) + expect(response.body).to include('Cannot be deleted while `overall_status` is `preparing`') + end + + end + + describe 'status: "processing"' do + + before(:each) do + @analysis_job = create_analysis_job + + expect(@analysis_job.processing?).to be_truthy + + status_code, response = destroy_analysis_job(@analysis_job.id) + expect(status_code).to eq(204) + + @analysis_job.reload + end + + it 'should be :suspended when deleted' do + expect(@analysis_job.suspended?).to be_truthy + end + + end + + describe 'status: "suspended"' do + before(:each) do + ActionMailer::Base.deliveries.clear + + @analysis_job = create_analysis_job + + expect(@analysis_job.processing?).to be_truthy + + status_code, response = destroy_analysis_job(@analysis_job.id) + expect(status_code).to eq(204) + + @analysis_job.reload + end + + it 'should be :suspended when deleted' do + expect(@analysis_job.suspended?).to be_truthy + end + + it 'DOES NOT change the message queue' do + expect(get_queue_count).to eq(6) + end + + it 'cancels all remaining AnalysisJobsItems that exist' do + expect(get_items_count).to eq(6) + expect(get_items_count('cancelled')).to eq(0) + expect(get_items_count('cancelling')).to eq(6) + end + + it 'correctly reports progress statistics' do + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 0, + cancelling: 6, + working: 0, + successful: 0, + failed: 0, + timed_out: 0, + total: 6 + } + }, 6) + end + + it 'will let job items finish (race conditions)' do + # fake the first job as working already + item = AnalysisJobsItem.first + # skip validations, save directly to database + item.update_column(:status, 'working') + + update_analysis_job_item(@analysis_job.id, item.audio_recording_id, 'successful') + + item.reload + expect(item.successful?).to 
be_truthy + + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 0, + cancelling: 5, + working: 0, + successful: 1, + failed: 0, + timed_out: 0, + total: 6 + } + }, 6) + + expect(@analysis_job.deleted?).to eq(true) + end + + it 'will let job items cancel (race conditions, message queue depletion)' do + expect { + perform_job + }.to raise_error(RuntimeError, /Failed to update AnalysisJobItem, status: 422/) + + # side test, it can also get the status of an item even if parent job deleted + item = get_analysis_job_item(@analysis_job.id, AnalysisJobsItem.first.audio_recording_id) + expect(item['data']['status']).to eq('cancelled') + + expect(get_items_count).to eq(6) + expect(get_items_count('cancelled')).to eq(1) + expect(get_items_count('cancelling')).to eq(5) + + expect(get_queue_count).to eq(5) + expect(get_failed_queue_count).to eq(1) + + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 1, + cancelling: 5, + working: 0, + successful: 0, + failed: 0, + timed_out: 0, + total: 6 + } + }, 6) + + expect(@analysis_job.deleted?).to eq(true) + end + + it 'will let all job items complete (message queue depletion) - but it wont transition to :complete!' 
do + (1..6).each { + expect { + perform_job + }.to raise_error(RuntimeError, /Failed to update AnalysisJobItem, status: 422/) + } + + + expect(get_items_count).to eq(6) + expect(get_items_count('cancelled')).to eq(6) + expect(get_items_count('cancelling')).to eq(0) + + expect(get_queue_count).to eq(0) + expect(get_failed_queue_count).to eq(6) + + @analysis_job.reload + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 6, + cancelling: 0, + working: 0, + successful: 0, + failed: 0, + timed_out: 0, + total: 6 + } + }, 6) + + expect(@analysis_job.suspended?).to be_truthy + expect(@analysis_job.deleted?).to eq(true) + + mail = ActionMailer::Base.deliveries.last + expect(mail['subject'].value).to_not include('Completed analysis job') + end + + end + + describe 'status: "completed"' do + + before(:each) do + ActionMailer::Base.deliveries.clear + + @analysis_job = create_analysis_job + + (1..6).each { + perform_job + } + + status_code, response = destroy_analysis_job(@analysis_job.id) + expect(status_code).to eq(204) + + @analysis_job.reload + + expect(@analysis_job.deleted?).to eq(true) + end + + it 'should be :completed when deleted' do + expect(@analysis_job.completed?).to be_truthy + end + + it 'should have no job items to remove from the message queue' do + expect(get_queue_count).to eq(0) + expect(get_failed_queue_count).to eq(0) + end + + it 'ensures there are no non-completed AnalysisJobsItems statuses' do + expect(get_items_count).to eq(6) + expect(get_items_count('successful')).to eq(6) + end + + it 'correctly updates progress statistics' do + test_stats_for(@analysis_job, { + overall_progress: { + queued: 0, + cancelled: 0, + cancelling: 0, + working: 0, + successful: 6, + failed: 0, + timed_out: 0, + total: 6 + } + }, 6) + end + + end + + end + +end + + + From 288878b3c7a580a3e643a3b031726e55d0f02d82 Mon Sep 17 00:00:00 2001 From: Anthony Truskinger Date: Fri, 29 Jul 2016 17:03:56 +1000 Subject: [PATCH 2/5] Updated 
acts_as_paranoid gem --- Gemfile | 6 ++++-- Gemfile.lock | 12 +++++------- spec/lib/filter/query_spec.rb | 34 ++++++++++++++++++---------------- 3 files changed, 27 insertions(+), 25 deletions(-) diff --git a/Gemfile b/Gemfile index cb7dc65c..534d76e3 100644 --- a/Gemfile +++ b/Gemfile @@ -96,7 +96,8 @@ gem 'jc-validates_timeliness', '~> 3.1.1' gem 'enumerize', '~> 1.0' gem 'uuidtools', '~> 2.1.5' -gem 'acts_as_paranoid', git: 'https://github.com/ActsAsParanoid/acts_as_paranoid.git', branch: :master, ref: 'ab31723bc1' +gem 'acts_as_paranoid', git: 'https://github.com/ActsAsParanoid/acts_as_paranoid.git', branch: :master, ref: 'c2db19554ddaedcac0a2b8d6a0563dea83c972c5' + # for state machines gem 'aasm' @@ -210,5 +211,6 @@ group :test do gem 'rspec-mocks', '~>3.4.1' - gem 'timecop' + # use to mock time in tests - currently not needed + #gem 'timecop' end \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 497d7f7b..923cf077 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,12 +1,12 @@ GIT remote: https://github.com/ActsAsParanoid/acts_as_paranoid.git - revision: ab31723bc11c82bac144cfabd4631b26307bfde1 - ref: ab31723bc1 + revision: c2db19554ddaedcac0a2b8d6a0563dea83c972c5 + ref: c2db19554ddaedcac0a2b8d6a0563dea83c972c5 branch: master specs: - acts_as_paranoid (0.5.0.beta2) - activerecord (~> 4.0) - activesupport (~> 4.0) + acts_as_paranoid (0.5.0.rc1) + activerecord (>= 4.0, < 5.1) + activesupport (>= 4.0, < 5.1) GIT remote: https://github.com/QutBioacoustics/baw-audio-tools.git @@ -431,7 +431,6 @@ GEM thread (0.2.2) thread_safe (0.3.5) tilt (2.0.2) - timecop (0.8.1) timeliness (0.3.8) tins (1.6.0) tzinfo (1.2.2) @@ -525,7 +524,6 @@ DEPENDENCIES simplecov (~> 0.11.1) therubyracer (~> 0.12.1) thin (~> 1.6.3) - timecop tzinfo (~> 1.2.2) tzinfo-data (~> 1.2016) uglifier (>= 1.3.0) diff --git a/spec/lib/filter/query_spec.rb b/spec/lib/filter/query_spec.rb index 3d2734ad..2e615731 100644 --- a/spec/lib/filter/query_spec.rb +++ 
b/spec/lib/filter/query_spec.rb @@ -345,7 +345,7 @@ def create_filter(params) } complex_result = "SELECT\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"site_id\" \ FROM\"audio_recordings\" \ -WHERE(\"audio_recordings\".\"deleted_at\"ISNULL) \ +WHERE\"audio_recordings\".\"deleted_at\"ISNULL \ AND\"audio_recordings\".\"site_id\"=5 \ ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" compare_filter_sql(request_body_obj, complex_result) @@ -370,7 +370,7 @@ def create_filter(params) "SELECT\"audio_recordings\".\"id\", \ \"audio_recordings\".\"duration_seconds\" \ FROM\"audio_recordings\" \ -WHERE(\"audio_recordings\".\"deleted_at\"ISNULL) \ +WHERE\"audio_recordings\".\"deleted_at\"ISNULL \ AND\"audio_recordings\".\"site_id\"=5 \ ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" compare_filter_sql(request_body_obj, complex_result) @@ -596,8 +596,8 @@ def create_filter(params) "SELECT\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"site_id\",\"audio_recordings\".\"duration_seconds\",\"audio_recordings\".\"media_type\" \ FROM\"audio_recordings\" \ INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\" \ -AND(\"sites\".\"deleted_at\"ISNULL) \ -WHERE(\"audio_recordings\".\"deleted_at\"ISNULL) \ +AND\"sites\".\"deleted_at\"ISNULL \ +WHERE\"audio_recordings\".\"deleted_at\"ISNULL \ AND(EXISTS( \ SELECT1FROM\"projects_sites\" \ WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\" \ @@ -691,7 +691,7 @@ def create_filter(params) complex_result = "SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"duration_seconds\" \ FROM\"audio_recordings\" \ -WHERE(\"audio_recordings\".\"deleted_at\"ISNULL) \ +WHERE\"audio_recordings\".\"deleted_at\"ISNULL \ AND\"audio_recordings\".\"id\"IN( \ SELECT\"audio_recordings\".\"id\" \ FROM\"audio_recordings\" \ @@ -718,7 +718,7 @@ def create_filter(params) user_id = user.id complex_result_2 = - 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"duration_seconds\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_recordings\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\"))))AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"sites\"ON\"audio_recordings\".\"site_id\"=\"sites\".\"id\"WHERE\"sites\".\"id\"=5)AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"audio_events\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"WHERE\"audio_events\".\"is_reference\"='t')AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"audio_events\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"LEFTOUTERJOIN\"audio_events_tags\"ON\"audio_events\".\"id\"=\"audio_events_tags\".\"audio_event_id\"LEFTOUTERJOIN\"tags\"ON\"audio_events_tags\".\"tag_id\"=\"tags\".\"id\"WHERE\"tags\".\"text\"ILIKE'%koala%')ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" + 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"duration_seconds\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_recordings\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\"))))AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"sites\"ON\"audio_recordings\".\"site_id\"=\"sites\".\"id\"WHERE\"sites\".\"id\"=5)AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"audio_events\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"WHERE\"audio_events\".\"is_reference\"='t')AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"audio_events\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"LEFTOUTERJOIN\"audio_events_tags\"ON\"audio_events\".\"id\"=\"audio_events_tags\".\"audio_event_id\"LEFTOUTERJOIN\"tags\"ON\"audio_events_tags\".\"tag_id\"=\"tags\".\"id\"WHERE\"tags\".\"text\"ILIKE'%koala%')ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" filter_query = Filter::Query.new( request_body_obj, @@ -762,7 +762,7 @@ def create_filter(params) \"audio_recordings\".\"recorded_date\", \ \"audio_recordings\".\"created_at\" \ FROM\"audio_recordings\" \ -WHERE(\"audio_recordings\".\"deleted_at\"ISNULL) \ +WHERE\"audio_recordings\".\"deleted_at\"ISNULL \ AND(\"audio_recordings\".\"id\"IN \ (SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\" \ 
LEFTOUTERJOIN\"sites\"ON\"audio_recordings\".\"site_id\"=\"sites\".\"id\" \ @@ -779,7 +779,7 @@ def create_filter(params) user_id = user.id complex_result_2 = - "SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"site_id\",\"audio_recordings\".\"duration_seconds\",\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"created_at\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_recordings\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\"))))AND(\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"sites\"ON\"audio_recordings\".\"site_id\"=\"sites\".\"id\"LEFTOUTERJOIN\"projects_sites\"ON\"sites\".\"id\"=\"projects_sites\".\"site_id\"LEFTOUTERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects\".\"id\"<123456)AND\"audio_recordings\".\"duration_seconds\"!=40)ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT20OFFSET0" + 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"site_id\",\"audio_recordings\".\"duration_seconds\",\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"created_at\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_recordings\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\"))))AND(\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"sites\"ON\"audio_recordings\".\"site_id\"=\"sites\".\"id\"LEFTOUTERJOIN\"projects_sites\"ON\"sites\".\"id\"=\"projects_sites\".\"site_id\"LEFTOUTERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects\".\"id\"<123456)AND\"audio_recordings\".\"duration_seconds\"!=40)ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT20OFFSET0" filter_query = Filter::Query.new( request_body_obj, @@ -825,7 +825,8 @@ def create_filter(params) "analysis_jobs_items"."created_at", "analysis_jobs_items"."queued_at", "analysis_jobs_items"."work_started_at", - "analysis_jobs_items"."completed_at" + "analysis_jobs_items"."completed_at", + "analysis_jobs_items"."cancel_started_at" FROM "analysis_jobs_items" RIGHT OUTER JOIN "audio_recordings" "tmp_audio_recordings_generator" @@ -833,10 +834,10 @@ def create_filter(params) ) analysis_jobs_items INNER JOIN "audio_recordings" ON "audio_recordings"."id" = "analysis_jobs_items"."audio_recording_id" - AND ("audio_recordings"."deleted_at" IS NULL) + AND "audio_recordings"."deleted_at" IS NULL INNER JOIN 
"sites" ON "sites"."id" = "audio_recordings"."site_id" - AND ("sites"."deleted_at" IS NULL) + AND "sites"."deleted_at" IS NULL WHERE (EXISTS(SELECT 1 FROM "projects_sites" WHERE "sites"."id" = "projects_sites"."site_id" AND EXISTS((SELECT 1 @@ -860,13 +861,14 @@ def create_filter(params) "analysis_jobs_items"."created_at", "analysis_jobs_items"."queued_at", "analysis_jobs_items"."work_started_at", - "analysis_jobs_items"."completed_at" + "analysis_jobs_items"."completed_at", + "analysis_jobs_items"."cancel_started_at" FROM "analysis_jobs_items" RIGHT OUTER JOIN "audio_recordings" "tmp_audio_recordings_generator" ON "tmp_audio_recordings_generator"."id" IS NULL) analysis_jobs_items INNER JOIN "audio_recordings" ON "audio_recordings"."id" = "analysis_jobs_items"."audio_recording_id" - AND ("audio_recordings"."deleted_at" IS NULL)) analysis_jobs_items + AND "audio_recordings"."deleted_at" IS NULL) analysis_jobs_items LEFT OUTER JOIN "audio_recordings" ON "analysis_jobs_items"."audio_recording_id" = "audio_recordings"."id" WHERE "audio_recordings"."duration_seconds" >= 60000.0) @@ -909,7 +911,7 @@ def create_filter(params) ) expected_sql = - 
"SELECT\"audio_events\".\"id\",\"audio_events\".\"audio_recording_id\",\"audio_events\".\"start_time_seconds\",\"audio_events\".\"end_time_seconds\",\"audio_events\".\"low_frequency_hertz\",\"audio_events\".\"high_frequency_hertz\",\"audio_events\".\"is_reference\",\"audio_events\".\"creator_id\",\"audio_events\".\"updated_at\",\"audio_events\".\"created_at\"FROM\"audio_events\"INNERJOIN\"audio_recordings\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"AND(\"audio_recordings\".\"deleted_at\"ISNULL)INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_events\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\")))OREXISTS(SELECT1FROM\"audio_events\"\"ae_ref\"WHERE\"ae_ref\".\"deleted_at\"ISNULLAND\"ae_ref\".\"is_reference\"='t'AND\"ae_ref\".\"id\"=\"audio_events\".\"id\"))AND((\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")>3)ORDERBY\"audio_events\".\"created_at\"DESCLIMIT25OFFSET0" + 
"SELECT\"audio_events\".\"id\",\"audio_events\".\"audio_recording_id\",\"audio_events\".\"start_time_seconds\",\"audio_events\".\"end_time_seconds\",\"audio_events\".\"low_frequency_hertz\",\"audio_events\".\"high_frequency_hertz\",\"audio_events\".\"is_reference\",\"audio_events\".\"creator_id\",\"audio_events\".\"updated_at\",\"audio_events\".\"created_at\"FROM\"audio_events\"INNERJOIN\"audio_recordings\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"AND\"audio_recordings\".\"deleted_at\"ISNULLINNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_events\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\")))OREXISTS(SELECT1FROM\"audio_events\"\"ae_ref\"WHERE\"ae_ref\".\"deleted_at\"ISNULLAND\"ae_ref\".\"is_reference\"='t'AND\"ae_ref\".\"id\"=\"audio_events\".\"id\"))AND((\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")>3)ORDERBY\"audio_events\".\"created_at\"DESCLIMIT25OFFSET0" expect(filter_query.query_full.to_sql.gsub(/\s+/, '')).to eq(expected_sql.gsub(/\s+/, '')) @@ -940,7 +942,7 @@ def create_filter(params) ) expected_sql = - 
"SELECT\"audio_events\".\"id\",\"audio_events\".\"audio_recording_id\",\"audio_events\".\"start_time_seconds\",\"audio_events\".\"end_time_seconds\",\"audio_events\".\"low_frequency_hertz\",\"audio_events\".\"high_frequency_hertz\",\"audio_events\".\"is_reference\",\"audio_events\".\"creator_id\",\"audio_events\".\"updated_at\",\"audio_events\".\"created_at\"FROM\"audio_events\"INNERJOIN\"audio_recordings\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"AND(\"audio_recordings\".\"deleted_at\"ISNULL)INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_events\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\")))OREXISTS(SELECT1FROM\"audio_events\"\"ae_ref\"WHERE\"ae_ref\".\"deleted_at\"ISNULLAND\"ae_ref\".\"is_reference\"='t'AND\"ae_ref\".\"id\"=\"audio_events\".\"id\"))AND((\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")>3)ORDERBY(\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")ASCLIMIT25OFFSET0" + 
"SELECT\"audio_events\".\"id\",\"audio_events\".\"audio_recording_id\",\"audio_events\".\"start_time_seconds\",\"audio_events\".\"end_time_seconds\",\"audio_events\".\"low_frequency_hertz\",\"audio_events\".\"high_frequency_hertz\",\"audio_events\".\"is_reference\",\"audio_events\".\"creator_id\",\"audio_events\".\"updated_at\",\"audio_events\".\"created_at\"FROM\"audio_events\"INNERJOIN\"audio_recordings\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"AND\"audio_recordings\".\"deleted_at\"ISNULLINNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_events\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\")))OREXISTS(SELECT1FROM\"audio_events\"\"ae_ref\"WHERE\"ae_ref\".\"deleted_at\"ISNULLAND\"ae_ref\".\"is_reference\"='t'AND\"ae_ref\".\"id\"=\"audio_events\".\"id\"))AND((\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")>3)ORDERBY(\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")ASCLIMIT25OFFSET0" expect(filter_query.query_full.to_sql.gsub(/\s+/, '')).to eq(expected_sql.gsub(/\s+/, '')) @@ -968,7 +970,7 @@ def create_filter(params) ) expected_sql = - 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"uuid\",\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"site_id\",\"audio_recordings\".\"duration_seconds\",\"audio_recordings\".\"sample_rate_hertz\",\"audio_recordings\".\"channels\",\"audio_recordings\".\"bit_rate_bps\",\"audio_recordings\".\"media_type\",\"audio_recordings\".\"data_length_bytes\",\"audio_recordings\".\"status\",\"audio_recordings\".\"created_at\",\"audio_recordings\".\"updated_at\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_recordings\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\"))))AND(\"audio_recordings\".\"recorded_date\"+CAST(\"audio_recordings\".\"duration_seconds\"||'seconds'asinterval)<'2016-03-01T02:00:00')AND(\"audio_recordings\".\"recorded_date\"+CAST(\"audio_recordings\".\"duration_seconds\"||'seconds'asinterval)>'2016-03-01T01:50:00')ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" + 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"uuid\",\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"site_id\",\"audio_recordings\".\"duration_seconds\",\"audio_recordings\".\"sample_rate_hertz\",\"audio_recordings\".\"channels\",\"audio_recordings\".\"bit_rate_bps\",\"audio_recordings\".\"media_type\",\"audio_recordings\".\"data_length_bytes\",\"audio_recordings\".\"status\",\"audio_recordings\".\"created_at\",\"audio_recordings\".\"updated_at\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_recordings\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"WHERE\"sites\".\"id\"=\"projects_sites\".\"site_id\"ANDEXISTS((SELECT1FROM\"projects\"WHERE\"projects\".\"deleted_at\"ISNULLAND\"projects\".\"creator_id\"=#{user_id}AND\"projects_sites\".\"project_id\"=\"projects\".\"id\"UNIONALLSELECT1FROM\"permissions\"WHERE\"permissions\".\"user_id\"=#{user_id}AND\"permissions\".\"level\"IN('reader','writer','owner')AND\"projects_sites\".\"project_id\"=\"permissions\".\"project_id\"))))AND(\"audio_recordings\".\"recorded_date\"+CAST(\"audio_recordings\".\"duration_seconds\"||'seconds'asinterval)<'2016-03-01T02:00:00')AND(\"audio_recordings\".\"recorded_date\"+CAST(\"audio_recordings\".\"duration_seconds\"||'seconds'asinterval)>'2016-03-01T01:50:00')ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" expect(filter_query.query_full.to_sql.gsub(/\s+/, '')).to eq(expected_sql.gsub(/\s+/, '')) From c1cd2b8779f4db4dfc482aaf6c6f8324e58f9f56 Mon Sep 17 00:00:00 2001 From: Anthony Truskinger Date: Fri, 29 Jul 2016 17:12:54 +1000 Subject: [PATCH 3/5] Fixed unit tests. Bugs related to strong params. Changed strong params from :log to :raise for DEV and TEST environments in a previous commit. Now fixed up the related tests. 
--- .../analysis_jobs_items_controller.rb | 2 +- .../audio_recordings_controller.rb | 8 ++- app/controllers/saved_searches_controller.rb | 8 ++- app/controllers/user_accounts_controller.rb | 2 +- spec/acceptance/analysis_jobs_spec.rb | 64 +++++++++++++++++-- spec/acceptance/saved_searches_spec.rb | 8 ++- spec/helpers/acceptance_spec_helper.rb | 2 +- 7 files changed, 77 insertions(+), 17 deletions(-) diff --git a/app/controllers/analysis_jobs_items_controller.rb b/app/controllers/analysis_jobs_items_controller.rb index 3ff99ef2..80104f8e 100644 --- a/app/controllers/analysis_jobs_items_controller.rb +++ b/app/controllers/analysis_jobs_items_controller.rb @@ -399,7 +399,7 @@ def normalised_name(path) end def normalise_path(path) - analysis_base_paths = BawWorkers::Config.analysis_cache_helper.existing_dirs + analysis_base_paths = BawWorkers::Config.analysis_cache_helper.possible_dirs matching_base_path = analysis_base_paths.select { |abp| path.start_with?(abp) } if matching_base_path.size == 1 path_without_base = path.gsub(/#{matching_base_path[0].gsub('/', '\/')}\/[^\/]+\/[^\/]+\/[^\/]+\/?/, '') diff --git a/app/controllers/audio_recordings_controller.rb b/app/controllers/audio_recordings_controller.rb index 314f06b7..305ea1b7 100644 --- a/app/controllers/audio_recordings_controller.rb +++ b/app/controllers/audio_recordings_controller.rb @@ -295,12 +295,14 @@ def audio_recording_params ] # can't permit arbitrary hash - # https://github.com/rails/rails/issues/9454#issuecomment-14167664 + # ~~https://github.com/rails/rails/issues/9454#issuecomment-14167664~~ + # http://stackoverflow.com/questions/19172893/rails-hashes-with-unknown-keys-and-strong-parameters/24752108#24752108 # http://guides.rubyonrails.org/action_controller_overview.html#more-examples # add arbitrary hash for notes manually + properties = params[:audio_recording].delete(:notes) params.require(:audio_recording).permit(*permitted_attributes).tap do |allowed_params| - if params[:audio_recording][:notes] 
- allowed_params[:notes] = params[:audio_recording][:notes] + if properties + allowed_params[:notes] = properties end end end diff --git a/app/controllers/saved_searches_controller.rb b/app/controllers/saved_searches_controller.rb index 4f45c9f9..3a0aeb89 100644 --- a/app/controllers/saved_searches_controller.rb +++ b/app/controllers/saved_searches_controller.rb @@ -75,11 +75,13 @@ def filter def saved_search_params # can't permit arbitrary hash - # https://github.com/rails/rails/issues/9454#issuecomment-14167664 + # ~~https://github.com/rails/rails/issues/9454#issuecomment-14167664~~ + # http://stackoverflow.com/questions/19172893/rails-hashes-with-unknown-keys-and-strong-parameters/24752108#24752108 # add arbitrary hash for stored_query manually + properties = params[:saved_search].delete(:stored_query) params.require(:saved_search).permit(:id, :name, :description).tap do |allowed_params| - if params[:saved_search][:stored_query] - allowed_params[:stored_query] = params[:saved_search][:stored_query] + if properties + allowed_params[:stored_query] = properties end end end diff --git a/app/controllers/user_accounts_controller.rb b/app/controllers/user_accounts_controller.rb index 816c2d07..addb1ef1 100644 --- a/app/controllers/user_accounts_controller.rb +++ b/app/controllers/user_accounts_controller.rb @@ -229,7 +229,7 @@ def user_params def user_update_params params.require(:user).permit( - :id, :user_name, :email, + :id, :user_name, :email, :tzinfo_tz, :password, :password_confirmation, :roles_mask, :image) end diff --git a/spec/acceptance/analysis_jobs_spec.rb b/spec/acceptance/analysis_jobs_spec.rb index 10f016a2..082e1a93 100644 --- a/spec/acceptance/analysis_jobs_spec.rb +++ b/spec/acceptance/analysis_jobs_spec.rb @@ -27,7 +27,22 @@ def analysis_jobs_body_params create_entire_hierarchy - let(:body_attributes) { FactoryGirl.attributes_for(:analysis_job, script_id: script.id, saved_search_id: saved_search.id).to_json } + let(:body_attributes) { + FactoryGirl + 
.attributes_for(:analysis_job, script_id: script.id, saved_search_id: saved_search.id) + .except(:started_at, :overall_progress, + :overall_progress_modified_at, :overall_count, + :overall_duration_seconds, :overall_data_length_bytes) + .to_json + } + + let(:body_attributes_update) { + FactoryGirl + .attributes_for(:analysis_job, script_id: script.id, saved_search_id: saved_search.id) + .slice(:name, :description) + .to_json + } + ################################ # INDEX @@ -218,7 +233,7 @@ def analysis_jobs_body_params } }.to_json } - let!(:preparation_create){ + let!(:preparation_create) { project = Creation::Common.create_project(writer_user) script = FactoryGirl.create(:script, creator: writer_user, id: 999899) @@ -238,7 +253,7 @@ def analysis_jobs_body_params analysis_jobs_id_param analysis_jobs_body_params let(:id) { analysis_job.id } - let(:raw_post) { body_attributes } + let(:raw_post) { body_attributes_update } let(:authentication_token) { admin_token } standard_request_options(:put, 'UPDATE (as admin)', :ok, {expected_json_path: 'data/saved_search_id'}) end @@ -247,7 +262,7 @@ def analysis_jobs_body_params analysis_jobs_id_param analysis_jobs_body_params let(:id) { analysis_job.id } - let(:raw_post) { body_attributes } + let(:raw_post) { body_attributes_update } let(:authentication_token) { admin_token } standard_request_options(:patch, 'UPDATE (as admin)', :ok, {expected_json_path: 'data/saved_search_id'}) end @@ -256,7 +271,7 @@ def analysis_jobs_body_params analysis_jobs_id_param analysis_jobs_body_params let(:id) { analysis_job.id } - let(:raw_post) { body_attributes } + let(:raw_post) { body_attributes_update } let(:authentication_token) { writer_token } standard_request_options(:put, 'UPDATE (as writer)', :ok, {expected_json_path: 'data/saved_search_id'}) end @@ -311,18 +326,53 @@ def analysis_jobs_body_params # DESTROY ################################ + def mock_processing_state(opts) + analysis_job.update_columns( + overall_status: 'processing', + 
overall_status_modified_at: Time.zone.now, + started_at: Time.zone.now + ) + end + delete '/analysis_jobs/:id' do analysis_jobs_id_param let(:id) { analysis_job.id } let(:authentication_token) { admin_token } - standard_request_options(:delete, 'DESTROY (as admin)', :no_content, {expected_response_has_content: false, expected_response_content_type: nil}) + standard_request_options( + :delete, + 'DESTROY (as admin)', + :no_content, + { + expected_response_has_content: false, + expected_response_content_type: nil + }, + &:mock_processing_state + ) end delete '/analysis_jobs/:id' do analysis_jobs_id_param let(:id) { analysis_job.id } let(:authentication_token) { writer_token } - standard_request_options(:delete, 'DESTROY (as writer)', :no_content, {expected_response_has_content: false, expected_response_content_type: nil}) + standard_request_options( + :delete, + 'DESTROY (as writer, when [:processing|:suspended|:complete])', + :no_content, + { + expected_response_has_content: false, + expected_response_content_type: nil + }, + &:mock_processing_state) + end + + delete '/analysis_jobs/:id' do + analysis_jobs_id_param + let(:id) { analysis_job.id } + let(:authentication_token) { writer_token } + standard_request_options(:delete, 'DESTROY (as writer, when [:new|:preparing])', :conflict, { + expected_json_path: 'meta/error/details', + response_body_content: '"message":"Conflict"' + }) end delete '/analysis_jobs/:id' do diff --git a/spec/acceptance/saved_searches_spec.rb b/spec/acceptance/saved_searches_spec.rb index b9e3aa5d..de29d6bc 100644 --- a/spec/acceptance/saved_searches_spec.rb +++ b/spec/acceptance/saved_searches_spec.rb @@ -24,7 +24,13 @@ } } let(:example_stored_query) { {uuid: {eq: audio_recording.uuid}} } - let(:post_attributes) { {name: 'saved search name', stored_query: example_stored_query} } + let(:post_attributes) { + { + name: 'saved search name', + description: 'I\'m a description!', + stored_query: example_stored_query + } + } context 'list' do diff 
--git a/spec/helpers/acceptance_spec_helper.rb b/spec/helpers/acceptance_spec_helper.rb index 9746727d..67945a83 100644 --- a/spec/helpers/acceptance_spec_helper.rb +++ b/spec/helpers/acceptance_spec_helper.rb @@ -43,7 +43,7 @@ def standard_request_options(http_method, description, expected_status, opts = { # allow for modification of opts, provide context so let and let! values can be accessed if opts_mod - opts_mod.call self, opts + opts_mod.call(self, opts) end expected_error_class = opts[:expected_error_class] From 04bac092e7fdab71dcb4075dbec31d538a5e3832 Mon Sep 17 00:00:00 2001 From: Anthony Truskinger Date: Wed, 3 Aug 2016 19:52:14 +1000 Subject: [PATCH 4/5] Analysis Jobs Item Integration - Code review --- CHANGELOG.md | 4 +++- .../analysis_jobs_items_controller.rb | 5 +--- app/models/analysis_job.rb | 23 +++++++++---------- app/models/analysis_jobs_item.rb | 6 ++--- spec/factories/script_factory.rb | 8 ++++++- 5 files changed, 25 insertions(+), 21 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f2e9b92e..a5ddb77d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,9 @@ # Changelog ## Unreleased - + - 2016-08-03 + - Feature: Analysis Jobs items integration. Analysis jobs have been setup and their complete workflows tested and + integrated. 
See [#300](https://github.com/QutBioacoustics/baw-server/pull/300) - 2016-06-11 - Feature: Analysis Jobs item tracking - basic support added for tracking each item in an analysis job [#290](https://github.com/QutBioacoustics/baw-server/pull/290) - Enhancement: Filter::Query can now have it's base query customised [931066b](https://github.com/QutBioacoustics/baw-server/commit/931066b5eb30925e34e659bdff91e5b55abce764) diff --git a/app/controllers/analysis_jobs_items_controller.rb b/app/controllers/analysis_jobs_items_controller.rb index 80104f8e..73bac061 100644 --- a/app/controllers/analysis_jobs_items_controller.rb +++ b/app/controllers/analysis_jobs_items_controller.rb @@ -112,8 +112,6 @@ def update else respond_change_fail end - - end # GET|POST /analysis_jobs/:analysis_job_id/audio_recordings/ @@ -139,7 +137,6 @@ def filter def analysis_jobs_item_update_params # Only status can be updated via API # Other properties are updated by the model/initial processing system - #:analysis_job_id, :audio_recording_id, :format params.require(:analysis_jobs_item).permit(:status) end @@ -165,7 +162,7 @@ def do_get_opts end def do_get_analysis_job - # We use with deleted here because we always needs to be able to load AnalysisJobsItem even if the parent + # We use with deleted here because we always need to be able to load AnalysisJobsItem even if the parent # AnalysisJob has been deleted. @analysis_job = AnalysisJob.with_deleted.find(@analysis_job_id) unless @is_system_job end diff --git a/app/models/analysis_job.rb b/app/models/analysis_job.rb index d006919b..5481e09a 100644 --- a/app/models/analysis_job.rb +++ b/app/models/analysis_job.rb @@ -97,7 +97,6 @@ def self.batch_size def check_progress #if (Time.zone.now - analysis_job.overall_progress_modified_at) > OVERALL_PROGRESS_REFRESH_SECONDS - # https://github.com/ActsAsParanoid/acts_as_paranoid/issues/45 was_deleted = deleted? 
if was_deleted @@ -123,19 +122,20 @@ def check_progress # Note: no AnalysisJobsItems have been made and no resque jobs have been enqueued. # 2. Then the job must be prepared. Currently synchronous but designed to be asynchronous. # Do this by calling `prepare` which will transition from `new` to `preparing`. - # Note: AnalysisJobsItems are enqueued progressively here. Some actions by be processed and even completed + # Note: AnalysisJobsItems are enqueued progressively here. Some actions may be processed and even completed # before the AnalysisJob even finishes preparing! # 3. Transition to `processing` # Note: the processing transition should be automatic - # 3. Once all resque jobs have been enqueued, the analysis job will transition to 'processing' status. - # 4. Resque jobs will update the analysis job (via analysis jobs items) as resque jobs change states. + # Once all resque jobs have been enqueued, the analysis job will transition to 'processing' status. + # 4. Resque jobs will update the analysis job (via analysis jobs items) as resque jobs change states. # `check_progress` is used to update progress and also is the callback that checks for job completion. # 5. When all items have finished processing, the job transitions to `completed`. Users are notified with an email # and the job is marked as completed. # # Additional states: # - Jobs can transition between processing and suspended. When suspended all analysis jobs items are marked as - # `cancelling`. When resumed, all `cancelling` or `cancelled` items are re-added back to the queue. + # `cancelling`. When resumed, all `cancelling` items are marked as `queued` again and all `cancelled` items are + # re-added back to the queue. # Note: We can't add or remove items from the message queue, which is why we need the cancelling/cancelled # distinction. # - Jobs can be retried. 
In this case, the failed items (only) are re-queued and the job is set to `processing` @@ -261,11 +261,10 @@ def send_preparing_email # Create payloads from audio recordings extracted from saved search. # This method saves persists changes. def prepare_job - # test: self.creator - # test: status == preparing user = creator - # TODO add logging and timing + Rails.logger.info 'AnalysisJob::prepare_job: Begin.' + # TODO This may need to be an async operation itself depending on how fast it runs # counters @@ -298,7 +297,7 @@ def prepare_job self.overall_data_length_bytes = options[:data_length_bytes_sum] self.save! - options[:results] + Rails.logger.info "AnalysisJob::prepare_job: Complete. Queued: #{options[:queued_count]}" end # Suspends an analysis job by cancelling all queued items @@ -323,7 +322,7 @@ def resume_job # batch update query.find_in_batches(batch_size: AnalysisJob.batch_size) do |items| items.each do |item| - # transition to cancelled state and save + # transition to queued state and save item.retry! end end @@ -341,7 +340,7 @@ def retry_job # batch update query.find_in_batches(batch_size: AnalysisJob.batch_size) do |items| items.each do |item| - # transition to cancelled state and save + # transition to queued state and save item.retry! end end @@ -357,7 +356,7 @@ def update_status_timestamp end # Update progress and modified timestamp if changes are made. Does NOT persist changes. - # This callback happens after a model is saved in a new state or loaded from the database. + # This callback happens after the AnalysisJob transitions to a new state or when `check_progress` is called. # We want all transactions to finish before we update the progresses - since they run a query themselves. 
# @return [void] def update_job_progress diff --git a/app/models/analysis_jobs_item.rb b/app/models/analysis_jobs_item.rb index 554c81db..44460359 100644 --- a/app/models/analysis_jobs_item.rb +++ b/app/models/analysis_jobs_item.rb @@ -317,9 +317,9 @@ def add_to_queue # the assumption here is that result is a unique identifier that we can later use to interrogate the message queue self.queue_id = result rescue => e - error = e - # TODO: swallowing exception? seems bad + # Note: exception used to be swallowed. We might need better error handling here later on. Rails.logger.error "An error occurred when enqueuing an analysis job item: #{e}" + raise e end @enqueue_results = {result: result, error: error} @@ -355,7 +355,7 @@ def self.create_action_payload(analysis_job, audio_recording) # Options invariant to the AnalysisJob are stuck in here, like: # - file_executable # - copy_paths - payload = analysis_job.script.analysis_action_params.dup + payload = analysis_job.script.analysis_action_params.dup.deep_symbolize_keys # merge base payload.merge({ diff --git a/spec/factories/script_factory.rb b/spec/factories/script_factory.rb index fccabcab..69efe709 100644 --- a/spec/factories/script_factory.rb +++ b/spec/factories/script_factory.rb @@ -10,7 +10,13 @@ sequence(:executable_command) { |n| "executable command #{n}"} sequence(:executable_settings) { |n| "executable settings #{n}"} sequence(:executable_settings_media_type) { |n| 'text/plain' } - sequence(:analysis_action_params) { |n| {custom_setting: n}} + sequence(:analysis_action_params) { |n| { + file_executable: './AnalysisPrograms/AnalysisPrograms.exe', + copy_paths: [ + './programs/AnalysisPrograms/Logs/log.txt' + ], + custom_setting: n + }} creator From 6791dadfe1f6c9a52fb20ae64bcc70743f27840e Mon Sep 17 00:00:00 2001 From: Anthony Truskinger Date: Fri, 5 Aug 2016 10:37:23 +1000 Subject: [PATCH 5/5] Fixed bugs introduced by merge conflict The merge conflict in ef0155e7a74ba0d7c5716a3793af3323eddb2cfe was massive. 
For clarity split commit into two parts, the basic merge conflict, and then the bugs that it produced (only exposed when running tests). - WARNING: Strong params now throws an exception for all parameter payloads in dev and test. However, payloads with invalid params will simply be filtered out in staging and production environments. - Fixed bug in `rails_helper.rb` which did not reseed the database after truncating it in request specs - Updated version of acts_as_paranoid no longer adds unnecessary parentheses - fixed hard coded SQL tests - Made the `permissions_controller.rb` tests more robust to strong params hardening. They will now work even when `action_on_unpermitted_parameters` is set to `:raise` --- app/controllers/permissions_controller.rb | 5 +++-- config/environments/production.rb | 8 ++++++++ config/environments/staging.rb | 8 ++++++++ spec/acceptance/analysis_jobs_spec.rb | 11 ++--------- spec/lib/filter/query_spec.rb | 10 +++++----- spec/models/analysis_jobs_item_spec.rb | 2 +- spec/rails_helper.rb | 5 +++++ 7 files changed, 32 insertions(+), 17 deletions(-) diff --git a/app/controllers/permissions_controller.rb b/app/controllers/permissions_controller.rb index 7d5d821f..a9b42d3b 100644 --- a/app/controllers/permissions_controller.rb +++ b/app/controllers/permissions_controller.rb @@ -100,13 +100,14 @@ def permission_params end def update_permissions_params - params.permit(project_wide: [:logged_in, :anonymous], per_user: [:none, :reader, :writer, :owner]) + params.slice(:project_wide, :per_user).permit(project_wide: [:logged_in, :anonymous], per_user: [:none, :reader, :writer, :owner]) end def update_permissions + return nil if !params.include?(:project_wide) && !params.include?(:per_user) + request_params = update_permissions_params - return nil if !request_params.include?(:project_wide) && !request_params.include?(:per_user) if request_params.include?(:project_wide) && request_params[:project_wide].include?(:logged_in) permission = 
Permission.where(project: @project, user: nil, allow_logged_in: true, allow_anonymous: false).first permission = Permission.new(project: @project, user: nil, allow_logged_in: true, allow_anonymous: false) if permission.blank? diff --git a/config/environments/production.rb b/config/environments/production.rb index 7b2de35d..1273a9ef 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -54,6 +54,14 @@ # Use a different logger for distributed setups. # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new) + # By default parameter keys that are not explicitly permitted will be logged in the development + # and test environment. In other environments these parameters will simply be filtered out + # and ignored. Additionally, this behaviour can be changed by changing the + # config.action_controller.action_on_unpermitted_parameters property in your environment files. + # If set to :log the unpermitted attributes will be logged, if set to :raise an exception will + # be raised. + #config.action_controller.action_on_unpermitted_parameters = :raise + # Use a different cache store in production. # config.cache_store = :mem_cache_store diff --git a/config/environments/staging.rb b/config/environments/staging.rb index e5be7d86..6e94fff6 100644 --- a/config/environments/staging.rb +++ b/config/environments/staging.rb @@ -50,6 +50,14 @@ # Use a different logger for distributed setups. # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new) + # By default parameter keys that are not explicitly permitted will be logged in the development + # and test environment. In other environments these parameters will simply be filtered out + # and ignored. Additionally, this behaviour can be changed by changing the + # config.action_controller.action_on_unpermitted_parameters property in your environment files. + # If set to :log the unpermitted attributes will be logged, if set to :raise an exception will + # be raised. 
+ #config.action_controller.action_on_unpermitted_parameters = :raise + # Use a different cache store in production. # config.cache_store = :mem_cache_store diff --git a/spec/acceptance/analysis_jobs_spec.rb b/spec/acceptance/analysis_jobs_spec.rb index de9fb18a..272a593b 100644 --- a/spec/acceptance/analysis_jobs_spec.rb +++ b/spec/acceptance/analysis_jobs_spec.rb @@ -129,7 +129,7 @@ def body_params end get '/analysis_jobs/:id' do - analysis_jobs_id_param + id_params let(:id) { 'system' } let(:authentication_token) { admin_token } standard_request_options(:get, 'SHOW system (as admin)', :not_implemented, { @@ -390,7 +390,7 @@ def mock_processing_state(opts) end delete '/analysis_jobs/:id' do - analysis_jobs_id_param + id_params let(:id) { analysis_job.id } let(:authentication_token) { writer_token } standard_request_options(:delete, 'DESTROY (as writer, when [:new|:preparing])', :conflict, { @@ -399,13 +399,6 @@ def mock_processing_state(opts) }) end - delete '/analysis_jobs/:id' do - id_params - let(:id) { analysis_job.id } - let(:authentication_token) { writer_token } - standard_request_options(:delete, 'DESTROY (as writer)', :no_content, {expected_response_has_content: false, expected_response_content_type: nil}) - end - delete '/analysis_jobs/:id' do id_params let(:id) { analysis_job.id } diff --git a/spec/lib/filter/query_spec.rb b/spec/lib/filter/query_spec.rb index c63582c8..4c9822cd 100644 --- a/spec/lib/filter/query_spec.rb +++ b/spec/lib/filter/query_spec.rb @@ -738,7 +738,7 @@ def create_filter(params) user_id = user.id complex_result_2 = - 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"duration_seconds\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_recordings\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t'))))AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"sites\"ON\"audio_recordings\".\"site_id\"=\"sites\".\"id\"WHERE\"sites\".\"id\"=5)AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"audio_events\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"WHERE\"audio_events\".\"is_reference\"='t')AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"audio_events\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"LEFTOUTERJOIN\"audio_events_tags\"ON\"audio_events\".\"id\"=\"audio_events_tags\".\"audio_event_id\"LEFTOUTERJOIN\"tags\"ON\"audio_events_tags\".\"tag_id\"=\"tags\".\"id\"WHERE\"tags\".\"text\"ILIKE'%koala%')ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" + 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"duration_seconds\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_recordings\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t'))))AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"sites\"ON\"audio_recordings\".\"site_id\"=\"sites\".\"id\"WHERE\"sites\".\"id\"=5)AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"audio_events\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"WHERE\"audio_events\".\"is_reference\"='t')AND\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"audio_events\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"LEFTOUTERJOIN\"audio_events_tags\"ON\"audio_events\".\"id\"=\"audio_events_tags\".\"audio_event_id\"LEFTOUTERJOIN\"tags\"ON\"audio_events_tags\".\"tag_id\"=\"tags\".\"id\"WHERE\"tags\".\"text\"ILIKE'%koala%')ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" filter_query = Filter::Query.new( request_body_obj, @@ -799,7 +799,7 @@ def create_filter(params) user_id = user.id complex_result_2 = - 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"site_id\",\"audio_recordings\".\"duration_seconds\",\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"created_at\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_recordings\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t'))))AND(\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"sites\"ON\"audio_recordings\".\"site_id\"=\"sites\".\"id\"LEFTOUTERJOIN\"projects_sites\"ON\"sites\".\"id\"=\"projects_sites\".\"site_id\"LEFTOUTERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects\".\"id\"<123456)AND\"audio_recordings\".\"duration_seconds\"!=40)ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT20OFFSET0" + 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"site_id\",\"audio_recordings\".\"duration_seconds\",\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"created_at\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_recordings\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t'))))AND(\"audio_recordings\".\"id\"IN(SELECT\"audio_recordings\".\"id\"FROM\"audio_recordings\"LEFTOUTERJOIN\"sites\"ON\"audio_recordings\".\"site_id\"=\"sites\".\"id\"LEFTOUTERJOIN\"projects_sites\"ON\"sites\".\"id\"=\"projects_sites\".\"site_id\"LEFTOUTERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects\".\"id\"<123456)AND\"audio_recordings\".\"duration_seconds\"!=40)ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT20OFFSET0" filter_query = Filter::Query.new( request_body_obj, @@ -932,7 +932,7 @@ def create_filter(params) ) expected_sql = - 
"SELECT\"audio_events\".\"id\",\"audio_events\".\"audio_recording_id\",\"audio_events\".\"start_time_seconds\",\"audio_events\".\"end_time_seconds\",\"audio_events\".\"low_frequency_hertz\",\"audio_events\".\"high_frequency_hertz\",\"audio_events\".\"is_reference\",\"audio_events\".\"creator_id\",\"audio_events\".\"updated_at\",\"audio_events\".\"created_at\"FROM\"audio_events\"INNERJOIN\"audio_recordings\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"AND(\"audio_recordings\".\"deleted_at\"ISNULL)INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_events\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t')))OREXISTS(SELECT1FROM\"audio_events\"\"ae_ref\"WHERE\"ae_ref\".\"deleted_at\"ISNULLAND\"ae_ref\".\"is_reference\"='t'AND\"ae_ref\".\"id\"=\"audio_events\".\"id\"))AND((\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")>3)ORDERBY\"audio_events\".\"created_at\"DESCLIMIT25OFFSET0" + 
"SELECT\"audio_events\".\"id\",\"audio_events\".\"audio_recording_id\",\"audio_events\".\"start_time_seconds\",\"audio_events\".\"end_time_seconds\",\"audio_events\".\"low_frequency_hertz\",\"audio_events\".\"high_frequency_hertz\",\"audio_events\".\"is_reference\",\"audio_events\".\"creator_id\",\"audio_events\".\"updated_at\",\"audio_events\".\"created_at\"FROM\"audio_events\"INNERJOIN\"audio_recordings\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"AND\"audio_recordings\".\"deleted_at\"ISNULLINNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_events\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t')))OREXISTS(SELECT1FROM\"audio_events\"\"ae_ref\"WHERE\"ae_ref\".\"deleted_at\"ISNULLAND\"ae_ref\".\"is_reference\"='t'AND\"ae_ref\".\"id\"=\"audio_events\".\"id\"))AND((\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")>3)ORDERBY\"audio_events\".\"created_at\"DESCLIMIT25OFFSET0" comparison_ignore_spaces(filter_query.query_full.to_sql, expected_sql) @@ -962,7 +962,7 @@ def create_filter(params) ) expected_sql = - 
"SELECT\"audio_events\".\"id\",\"audio_events\".\"audio_recording_id\",\"audio_events\".\"start_time_seconds\",\"audio_events\".\"end_time_seconds\",\"audio_events\".\"low_frequency_hertz\",\"audio_events\".\"high_frequency_hertz\",\"audio_events\".\"is_reference\",\"audio_events\".\"creator_id\",\"audio_events\".\"updated_at\",\"audio_events\".\"created_at\"FROM\"audio_events\"INNERJOIN\"audio_recordings\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"AND(\"audio_recordings\".\"deleted_at\"ISNULL)INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_events\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t')))OREXISTS(SELECT1FROM\"audio_events\"\"ae_ref\"WHERE\"ae_ref\".\"deleted_at\"ISNULLAND\"ae_ref\".\"is_reference\"='t'AND\"ae_ref\".\"id\"=\"audio_events\".\"id\"))AND((\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")>3)ORDERBY(\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")ASCLIMIT25OFFSET0" + 
"SELECT\"audio_events\".\"id\",\"audio_events\".\"audio_recording_id\",\"audio_events\".\"start_time_seconds\",\"audio_events\".\"end_time_seconds\",\"audio_events\".\"low_frequency_hertz\",\"audio_events\".\"high_frequency_hertz\",\"audio_events\".\"is_reference\",\"audio_events\".\"creator_id\",\"audio_events\".\"updated_at\",\"audio_events\".\"created_at\"FROM\"audio_events\"INNERJOIN\"audio_recordings\"ON\"audio_recordings\".\"id\"=\"audio_events\".\"audio_recording_id\"AND\"audio_recordings\".\"deleted_at\"ISNULLINNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_events\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t')))OREXISTS(SELECT1FROM\"audio_events\"\"ae_ref\"WHERE\"ae_ref\".\"deleted_at\"ISNULLAND\"ae_ref\".\"is_reference\"='t'AND\"ae_ref\".\"id\"=\"audio_events\".\"id\"))AND((\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")>3)ORDERBY(\"audio_events\".\"end_time_seconds\"-\"audio_events\".\"start_time_seconds\")ASCLIMIT25OFFSET0" comparison_ignore_spaces(filter_query.query_full.to_sql, expected_sql) @@ -990,7 +990,7 @@ def create_filter(params) ) expected_sql = - 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"uuid\",\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"site_id\",\"audio_recordings\".\"duration_seconds\",\"audio_recordings\".\"sample_rate_hertz\",\"audio_recordings\".\"channels\",\"audio_recordings\".\"bit_rate_bps\",\"audio_recordings\".\"media_type\",\"audio_recordings\".\"data_length_bytes\",\"audio_recordings\".\"status\",\"audio_recordings\".\"created_at\",\"audio_recordings\".\"updated_at\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND(\"sites\".\"deleted_at\"ISNULL)WHERE(\"audio_recordings\".\"deleted_at\"ISNULL)AND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t'))))AND(\"audio_recordings\".\"recorded_date\"+CAST(\"audio_recordings\".\"duration_seconds\"||'seconds'asinterval)<'2016-03-01T02:00:00')AND(\"audio_recordings\".\"recorded_date\"+CAST(\"audio_recordings\".\"duration_seconds\"||'seconds'asinterval)>'2016-03-01T01:50:00')ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" + 
"SELECT\"audio_recordings\".\"id\",\"audio_recordings\".\"uuid\",\"audio_recordings\".\"recorded_date\",\"audio_recordings\".\"site_id\",\"audio_recordings\".\"duration_seconds\",\"audio_recordings\".\"sample_rate_hertz\",\"audio_recordings\".\"channels\",\"audio_recordings\".\"bit_rate_bps\",\"audio_recordings\".\"media_type\",\"audio_recordings\".\"data_length_bytes\",\"audio_recordings\".\"status\",\"audio_recordings\".\"created_at\",\"audio_recordings\".\"updated_at\"FROM\"audio_recordings\"INNERJOIN\"sites\"ON\"sites\".\"id\"=\"audio_recordings\".\"site_id\"AND\"sites\".\"deleted_at\"ISNULLWHERE\"audio_recordings\".\"deleted_at\"ISNULLAND(EXISTS(SELECT1FROM\"projects_sites\"INNERJOIN\"projects\"ON\"projects_sites\".\"project_id\"=\"projects\".\"id\"WHERE\"projects_sites\".\"site_id\"=\"sites\".\"id\"ANDEXISTS(SELECT1FROM\"permissions\"WHERE\"permissions\".\"level\"IN('owner','writer','reader')AND\"projects\".\"id\"=\"permissions\".\"project_id\"AND\"projects\".\"deleted_at\"ISNULLAND(\"permissions\".\"user_id\"=#{user_id}OR\"permissions\".\"allow_logged_in\"='t'))))AND(\"audio_recordings\".\"recorded_date\"+CAST(\"audio_recordings\".\"duration_seconds\"||'seconds'asinterval)<'2016-03-01T02:00:00')AND(\"audio_recordings\".\"recorded_date\"+CAST(\"audio_recordings\".\"duration_seconds\"||'seconds'asinterval)>'2016-03-01T01:50:00')ORDERBY\"audio_recordings\".\"recorded_date\"DESCLIMIT25OFFSET0" comparison_ignore_spaces(filter_query.query_full.to_sql, expected_sql) diff --git a/spec/models/analysis_jobs_item_spec.rb b/spec/models/analysis_jobs_item_spec.rb index 4fa43f17..23f6d69d 100644 --- a/spec/models/analysis_jobs_item_spec.rb +++ b/spec/models/analysis_jobs_item_spec.rb @@ -12,7 +12,7 @@ it 'cannot be created when status is not new' do expect { create(:analysis_jobs_item, status: nil) - }.to raise_error(RuntimeError, /AnalysisJobItem#status: Invalid state transition/) + }.to raise_error(AASM::NoDirectAssignmentError, /direct assignment of AASM column has 
been disabled/) end it 'created_at should be set by rails' do diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb index 8655f6ef..52be442a 100644 --- a/spec/rails_helper.rb +++ b/spec/rails_helper.rb @@ -181,7 +181,12 @@ end config.after(:each) do + is_truncating = DatabaseCleaner.connections[0].strategy.class == DatabaseCleaner::ActiveRecord::Truncation + DatabaseCleaner.clean + + Rails.application.load_seed if is_truncating + #Bullet.perform_out_of_channel_notifications if Bullet.enable? && Bullet.notification? #Bullet.end_request if Bullet.enable?