Commit

Fixes work_upload_edit_service_specs
hectorcorrea committed Mar 30, 2023
1 parent 79b0ebb commit ea233a0
Showing 1 changed file with 32 additions and 50 deletions.
82 changes: 32 additions & 50 deletions spec/services/work_upload_edit_service_spec.rb
@@ -47,20 +47,15 @@
   end
   let(:s3_data) { [s3_file1, s3_file2] }
 
-  # before do
-  #   stub_request(:put, /#{bucket_url}/).to_return(status: 200)
-  #   work.pre_curation_uploads.attach(uploaded_file)
-  #   stub_request(:delete, attachment_url).to_return(status: 200)
-  # end
-
-  context "When no uploads changes are in the params" do
-    let(:params) { { "work_id" => "" }.with_indifferent_access }
+  context "When no uploads changes are requested" do
+    let(:added_files) { [] }
+    let(:deleted_files) { [] }
 
     it "returns all existing files" do
       fake_s3_service = stub_s3(data: s3_data, bucket_url: bucket_url)
 
       upload_service = described_class.new(work, user)
-      updated_work = upload_service.update_precurated_file_list(params)
+      updated_work = upload_service.update_precurated_file_list(added_files, deleted_files)
       filenames = updated_work.pre_curation_uploads_fast.map(&:filename)
       expect(filenames).to eq(s3_data.map(&:filename))
       expect(fake_s3_service).not_to have_received(:delete_s3_object)
@@ -69,18 +64,35 @@
   end
 
   context "When upload additions are in the params" do
-    # this is not possible at the moment, but should be
+    let(:added_files) { [uploaded_file3] }
+    let(:deleted_files) { [] }
+
+    it "returns all existing files plus the new one" do
+      fake_s3_service = stub_s3(bucket_url: bucket_url)
+      allow(fake_s3_service).to receive(:client_s3_files).and_return(s3_data, s3_data + [s3_file3])
+
+      upload_service = described_class.new(work, user)
+      updated_work = upload_service.update_precurated_file_list(added_files, deleted_files)
+      expect(updated_work.pre_curation_uploads_fast.map(&:filename).sort).to eq([s3_file1.key, s3_file2.key, s3_file3.key].sort)
+      expect(fake_s3_service).not_to have_received(:delete_s3_object)
+
+      # it logs the addition (and no delete)
+      activity_log = JSON.parse(work.work_activity.first.message)
+      expect(activity_log.find { |log| log["action"] == "added" && log["filename"] == s3_file3.filename_display }).not_to be nil
+      expect(activity_log.find { |log| log["action"] == "deleted" }).to be nil
+    end
   end
 
-  context "When upload removals are in the params" do
-    let(:params) { { "work_id" => "", "deleted_uploads" => { s3_data[0].filename => "1" } }.with_indifferent_access }
+  context "When upload removals are requested" do
+    let(:added_files) { [] }
+    let(:deleted_files) { [s3_data[0].filename] }
 
     it "returns all existing files except the deleted one" do
       fake_s3_service = stub_s3(bucket_url: bucket_url)
       allow(fake_s3_service).to receive(:client_s3_files).and_return(s3_data, [s3_file2])
 
       upload_service = described_class.new(work, user)
-      updated_work = upload_service.update_precurated_file_list(params)
+      updated_work = upload_service.update_precurated_file_list(added_files, deleted_files)
       expect(updated_work.pre_curation_uploads_fast.map(&:filename)).to eq([s3_file2.key])
       expect(fake_s3_service).to have_received(:delete_s3_object).with(s3_file1.key).once
 
@@ -91,46 +103,15 @@
     end
   end
 
-  context "When upload replacements are in the params" do
-    let(:attachment_url) { "#{bucket_url}#{work.doi}/#{work.id}/us_covid_2020.csv" }
-    let(:s3_file4) do
-      FactoryBot.build(:s3_file, work: work,
-                                 filename: "#{work.doi}/#{work.id}/datacite_basic.xml",
-                                 last_modified: Time.parse("2022-04-21T18:30:07.000Z"),
-                                 size: 12_739,
-                                 checksum: "abc567")
-    end
-
-    let(:params) { { "work_id" => "", "replaced_uploads" => { work.pre_curation_uploads_fast.last.key => uploaded_file4 } }.with_indifferent_access }
-
-    it "replaces the correct file" do
-      fake_s3_service = stub_s3(bucket_url: bucket_url)
-      # TODO: why do I need the first set of files twice. Maybe a memo is not getting set properly?
-      allow(fake_s3_service).to receive(:client_s3_files).and_return([s3_file1, s3_file2, s3_file3], [s3_file1, s3_file2, s3_file3], [s3_file1, s3_file3, s3_file4])
-      upload_service = described_class.new(work, user)
-      updated_work = upload_service.update_precurated_file_list(params)
-      list = updated_work.pre_curation_uploads_fast
-
-      # remeber order of the files will be alphabetical
-      expect(list.map(&:filename)).to eq([s3_file4.key, s3_file3.key, s3_file1.key])
-      expect(fake_s3_service).to have_received(:delete_s3_object).with(s3_file2.key).once
-
-      # it logs the activity
-      activity_log = JSON.parse(work.work_activity.first.message)
-      expect(activity_log.find { |log| log["action"] == "deleted" && log["filename"] == s3_file2.key }).not_to be nil
-      expect(activity_log.find { |log| log["action"] == "added" && log["filename"] == "datacite_basic.xml" }).not_to be nil
-    end
-  end
-
   context "When replacing all uploads is the params" do
-    let(:params) { { "work_id" => "", "pre_curation_uploads" => [uploaded_file2, uploaded_file3] }.with_indifferent_access }
+    let(:added_files) { [uploaded_file2, uploaded_file3] }
+    let(:deleted_files) { [s3_file1.key] }
 
    it "replaces all the files" do
      fake_s3_service = stub_s3(bucket_url: bucket_url)
-      # TODO: why do I need the first set of files twice. Maybe a memo is not getting set properly?
-      allow(fake_s3_service).to receive(:client_s3_files).and_return([s3_file1], [s3_file1], [s3_file2, s3_file3])
+      allow(fake_s3_service).to receive(:client_s3_files).and_return([s3_file1], [s3_file2, s3_file3])
      upload_service = described_class.new(work, user)
-      updated_work = upload_service.update_precurated_file_list(params)
+      updated_work = upload_service.update_precurated_file_list(added_files, deleted_files)
      list = updated_work.reload.pre_curation_uploads_fast
      expect(list.map(&:filename)).to eq([s3_file3.key, s3_file2.key])
      expect(fake_s3_service).to have_received(:delete_s3_object).with(s3_file1.key).once
@@ -144,14 +125,15 @@
   end
 
   context "When replacing all uploads in the params, but some overlap" do
-    let(:params) { { "work_id" => "", "pre_curation_uploads" => [uploaded_file2, uploaded_file3] }.with_indifferent_access }
+    let(:added_files) { [uploaded_file2, uploaded_file3] }
+    let(:deleted_files) { [s3_file1.key, s3_file2.key] }
 
    it "replaces all the files" do
      fake_s3_service = stub_s3(data: s3_data, bucket_url: bucket_url)
 
      # upload the two new files
      upload_service = described_class.new(work, user)
-      updated_work = upload_service.update_precurated_file_list(params)
+      updated_work = upload_service.update_precurated_file_list(added_files, deleted_files)
      filenames = updated_work.reload.pre_curation_uploads.map { |attachment| attachment.filename.to_s }
      expect(filenames).to eq([uploaded_file2.original_filename, uploaded_file3.original_filename])

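For reference, the updated specs drive the service through explicit added_files/deleted_files arguments instead of a controller params hash. Below is a minimal, hypothetical sketch of a service exposing that interface, inferred from the specs alone; apart from the update_precurated_file_list(added_files, deleted_files) signature and the pre_curation_uploads / delete_s3_object calls visible in the diff, every name is an assumption, not this repository's code.

# Hypothetical sketch only -- inferred from the specs above, not this repo's code.
class WorkUploadEditService
  def initialize(work, current_user)
    @work = work
    @current_user = current_user
  end

  # added_files:   newly uploaded files to attach to the work
  # deleted_files: S3 keys of existing pre-curation uploads to remove
  # Returns the work so callers can re-read pre_curation_uploads_fast.
  def update_precurated_file_list(added_files, deleted_files)
    deleted_files.each { |key| s3_service.delete_s3_object(key) }
    added_files.each { |file| @work.pre_curation_uploads.attach(file) }
    # The real service also records a JSON work-activity entry per change,
    # which the specs read back via work.work_activity.first.message.
    @work
  end

  private

  # Assumed accessor; the specs replace this collaborator via stub_s3.
  def s3_service
    @work.s3_query_service
  end
end

Taking the two lists directly keeps these specs independent of how the controller parses the deleted_uploads/replaced_uploads form parameters, which is what the removed contexts had been exercising.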