
Commit

Don't try to split up analysis code by node. Delete the analysis directory.
anyaelena committed Aug 5, 2018
1 parent 70838e1 commit ba78275
Showing 2 changed files with 62 additions and 70 deletions.
67 changes: 0 additions & 67 deletions server/app/lib/web_node/resque/analysis.rb

This file was deleted.

65 changes: 62 additions & 3 deletions server/app/models/analysis.rb
@@ -95,9 +95,6 @@ class Analysis
after_create :verify_uuid
before_destroy :queue_delete_files

# TODO dynamically include as appropriate, depending on environment
include WebNode::Resque::Analysis

def self.status_states
%w(na init queued started completed)
end
@@ -357,6 +354,40 @@ def delayed_job_ids
jobs.map { |v| v[:delayed_job_ids] }
end

# Path to the analysis directory on the osdata volume. Used on the web node by the Resque initialize- and finalize-analysis jobs.
def shared_directory_path
"#{APP_CONFIG['server_asset_path']}/analyses/#{id}"
end
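
# --- Editor's note (illustrative, not part of this commit) ---
# The resulting path depends on the deployment's APP_CONFIG['server_asset_path']
# setting. For example, if that setting were a hypothetical "/srv/osdata":
#
#   analysis = Analysis.find(analysis_id)
#   analysis.shared_directory_path
#   #=> "/srv/osdata/analyses/<analysis id>"
# --- end note ---
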
# Unpack the analysis zip into the osdata volume for use by the background and web nodes.
# The specific use case is running the analysis initialization and finalization scripts.
# Currently only used with Resque; called by the ResqueJobs::InitializeAnalysis job.
# Runs on the web node.
def run_initialization
# Unpack the seed zip file into osdata, then run initialize.sh if present.
# Extract the zip, retrying up to extract_max_count times on failure.
extract_count = 0
extract_max_count = 3
logger.info "Running analysis initialization scripts"
logger.info "Extracting seed zip #{seed_zip.path} to #{shared_directory_path}"
begin
Timeout.timeout(180) do
extract_count += 1
OpenStudio::Workflow.extract_archive(seed_zip.path, shared_directory_path)
end
rescue => e
retry if extract_count < extract_max_count
raise "Extraction of the seed.zip file failed #{extract_max_count} times with error #{e.message}"
end
run_script_with_args "initialize"
end
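
# --- Editor's sketch (illustrative, not part of this commit) ---
# The ResqueJobs::InitializeAnalysis job referenced above is not shown in this
# diff, so its exact interface is an assumption. A minimal web-node job that
# wraps run_initialization could look roughly like this:
#
#   module ResqueJobs
#     class InitializeAnalysis
#       @queue = :analysis_wrappers # queue name is an assumption
#
#       def self.perform(analysis_id)
#         analysis = Analysis.find(analysis_id)
#         analysis.run_initialization
#       end
#     end
#   end
# --- end sketch ---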

# Runs on the web node.
def run_finalization
logger.info "Running analysis finalization scripts"
run_script_with_args "finalize"
end

protected

# Queue up the task to delete all the files in the background
@@ -368,6 +399,8 @@ def queue_delete_files
Delayed::Job.enqueue DjJobs::DeleteAnalysis.new(analysis_dir)
elsif Rails.application.config.job_manager == :resque
Resque.enqueue(ResqueJobs::DeleteAnalysis, analysis_dir)
# AP: does this double delete indicate that we are duplicating the unzip??
Resque.enqueue(ResqueJobs::DeleteAnalysis, shared_directory_path)
else
raise 'Rails.application.config.job_manager must be set to :resque or :delayed_job'
end
@@ -381,4 +414,30 @@ def verify_uuid
self.uuid = id if uuid.nil?
save!
end

def run_script_with_args script_name
dir_path = "#{shared_directory_path}/scripts/analysis"
# paths to check for args and script files
args_path = "#{dir_path}/#{script_name}.args"
script_path = "#{dir_path}/#{script_name}.sh"
log_path = "#{dir_path}/#{script_name}.log"

logger.info "Checking for presence of args file at #{args_path}"
args = nil
if File.file? args_path
args = Utility::Oss.load_args args_path
logger.info " args loaded from file #{args_path}: #{args}"
end

logger.info "Checking for presence of script file at #{script_path}"
if File.file? script_path
# TODO: how long should the timeout be?
# SCRIPT_PATH - path to where the scripts were extracted
# HOST_URL - URL of the server
# RAILS_ROOT - location of rails
Utility::Oss.run_script(script_path, 4.hours, {'SCRIPT_PATH' => dir_path, 'ANALYSIS_ID' => id, 'HOST_URL' => APP_CONFIG['os_server_host_url'], 'RAILS_ROOT' => Rails.root.to_s, 'ANALYSIS_DIRECTORY' => shared_directory_path}, args, logger, log_path)
end
end
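
# --- Editor's sketch (illustrative, not part of this commit) ---
# Utility::Oss.run_script is not shown in this diff, so its behavior is an
# assumption. Conceptually it runs the shell script with the given environment
# variables, appends output to the log file, and enforces the timeout. A
# simplified stand-in, ignoring logging details and child-process cleanup,
# might be roughly:
#
#   require 'timeout'
#
#   def run_shell_script(script_path, timeout_seconds, env, args, log_path)
#     cmd = ([script_path] + Array(args)).join(' ')
#     Timeout.timeout(timeout_seconds) do
#       system(env, "#{cmd} >> #{log_path} 2>&1")
#     end
#   end
# --- end sketch ---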

end
