Merge pull request #411 from alphagov/switch-global-export-to-s3
Upload exported CSV file to S3 rather than attaching it
benjamineskola committed Feb 4, 2020
2 parents f648ba7 + 3b3dcac commit 72726af
Showing 7 changed files with 88 additions and 23 deletions.
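In outline, the global export worker now writes the generated CSV to S3 via a shared S3FileUploader and emails a link to the file instead of attaching its contents. A condensed sketch of the new flow, pieced together from the diffs below (variable names follow the worker code; this is not a verbatim excerpt):

    # Generate the CSV, upload it, then email a pointer to the uploaded
    # object rather than attaching the CSV body to the message.
    s3_file = S3FileUploader.save_file_to_s3(filename, contents)
    GlobalExportNotification.notification_email(notification_email, s3_file.key).deliver_now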
7 changes: 3 additions & 4 deletions app/mailers/global_export_notification.rb
@@ -1,10 +1,9 @@
 class GlobalExportNotification < ApplicationMailer
-  def notification_email(notification_email, filename, csv_contents)
-    attachments[filename] = csv_contents
-
+  def notification_email(notification_email, url)
+    @url = url
     mail(
       to: notification_email,
-      subject: "[GOV.UK Feedback Explorer] Your global export is attached",
+      subject: "[GOV.UK Feedback Explorer] Your global export is ready",
     )
   end
 end
4 changes: 3 additions & 1 deletion — notification email text template (file path not shown in this capture)
@@ -1,5 +1,7 @@
 Hi,
 
-Your CSV file is attached.
+Use the link below to download your CSV file from Feedback Explorer.
+
+<%= @url %>
 
 - Feedex
20 changes: 3 additions & 17 deletions app/workers/generate_feedback_csv_worker.rb
@@ -1,3 +1,5 @@
+require "s3_file_uploader"
+
 class GenerateFeedbackCsvWorker
   include Sidekiq::Worker
 
@@ -10,26 +12,10 @@ def perform(*args)
     end
 
     csv = feedback_export_request.generate_csv
-    self.class.save_file_to_s3(feedback_export_request.filename, csv)
+    S3FileUploader.save_file_to_s3(feedback_export_request.filename, csv)
 
     feedback_export_request.touch(:generated_at)
 
     ExportNotification.notification_email(feedback_export_request.notification_email, feedback_export_request.url).deliver_now
   end
-
-  def self.save_file_to_s3(filename, csv)
-    connection = Fog::Storage.new(
-      provider: "AWS",
-      region: ENV["AWS_REGION"],
-      aws_access_key_id: ENV["AWS_ACCESS_KEY_ID"],
-      aws_secret_access_key: ENV["AWS_SECRET_ACCESS_KEY"],
-    )
-
-    directory = connection.directories.get(ENV["AWS_S3_BUCKET_NAME"])
-
-    directory.files.create(
-      key: filename,
-      body: csv,
-    )
-  end
 end
6 changes: 5 additions & 1 deletion app/workers/generate_global_export_csv_worker.rb
@@ -1,3 +1,5 @@
+require "s3_file_uploader"
+
 class GenerateGlobalExportCsvWorker
   include Sidekiq::Worker
 
@@ -8,6 +10,8 @@ def perform(export_params)
       export_params["exclude_spam"],
     ).call
 
-    GlobalExportNotification.notification_email(export_params["notification_email"], filename, contents).deliver_now
+    s3_file = S3FileUploader.save_file_to_s3(filename, contents)
+
+    GlobalExportNotification.notification_email(export_params["notification_email"], s3_file.key).deliver_now
   end
 end
17 changes: 17 additions & 0 deletions lib/s3_file_uploader.rb
@@ -0,0 +1,17 @@
class S3FileUploader
  def self.save_file_to_s3(filename, csv)
    connection = Fog::Storage.new(
      provider: "AWS",
      region: ENV["AWS_REGION"],
      aws_access_key_id: ENV["AWS_ACCESS_KEY_ID"],
      aws_secret_access_key: ENV["AWS_SECRET_ACCESS_KEY"],
    )

    directory = connection.directories.get(ENV["AWS_S3_BUCKET_NAME"])

    directory.files.create(
      key: filename,
      body: csv,
    )
  end
end
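For reference, directory.files.create returns a Fog file object, so save_file_to_s3 hands back more than just the key. A minimal usage sketch, assuming the AWS_* environment variables are set and that the fog-aws file object supports signed URLs via #url (worth verifying against the gem version in use):

    require "s3_file_uploader"

    # Upload a small CSV and derive a time-limited download link from the
    # Fog file object that save_file_to_s3 returns.
    s3_file = S3FileUploader.save_file_to_s3("example.csv", "date,report_count\n")

    s3_file.key                   # => "example.csv"
    s3_file.url(Time.now + 3600)  # signed URL, valid for an hour (assumption: fog-aws signed URLs)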
19 changes: 19 additions & 0 deletions spec/mailers/global_export_notification_spec.rb
@@ -0,0 +1,19 @@
require "rails_helper"

RSpec.describe GlobalExportNotification, type: :mailer do
describe "notification_email" do
subject(:mail) { GlobalExportNotification.notification_email("foo@example.com", "http://www.example.com/foo.csv") }

it "is sent from the no reply address" do
expect(mail.from).to eq ["inside-government@digital.cabinet-office.gov.uk"]
end

it "is sent to the correct recipient" do
expect(mail.to).to eq ["foo@example.com"]
end

it "contains the URL" do
expect(mail.body).to include "http://www.example.com/foo.csv"
end
end
end
38 changes: 38 additions & 0 deletions spec/workers/generate_global_export_csv_worker_spec.rb
@@ -0,0 +1,38 @@
require "rails_helper"
require "date"

describe GenerateGlobalExportCsvWorker, type: :worker do
subject(:worker) { described_class.new }
before do
Fog.mock!
ENV["AWS_REGION"] = "eu-west-1"
ENV["AWS_ACCESS_KEY_ID"] = "test"
ENV["AWS_SECRET_ACCESS_KEY"] = "test"
ENV["AWS_S3_BUCKET_NAME"] = "test-bucket"

# Create an S3 bucket so the code being tested can find it
connection = Fog::Storage.new(
provider: "AWS",
region: ENV["AWS_REGION"],
aws_access_key_id: ENV["AWS_ACCESS_KEY_ID"],
aws_secret_access_key: ENV["AWS_SECRET_ACCESS_KEY"],
)
@directory = connection.directories.get(ENV["AWS_S3_BUCKET_NAME"]) || connection.directories.create(key: ENV["AWS_S3_BUCKET_NAME"])
end

it "has the expected filename" do
from_date = "2019-01-01"
to_date = "2019-12-31"
described_class.new.perform(
"from_date" => from_date,
"to_date" => to_date,
"notification_email" => "inside-government@digital.cabinet-office.gov.uk",
)

file = @directory.files.get("feedex_#{from_date}T00:00:00Z_#{to_date}T23:59:59Z.csv")
expect(file).not_to be nil
rows = file.body.split("\n")
expect(rows.count).to eq 1
expect(rows.first).to eq "date,report_count"
end
end
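The same Fog.mock! setup would also support a more focused spec for the uploader itself. A hedged sketch of what such a spec could look like — this file is not part of the commit, and the example key and body are illustrative:

    require "rails_helper"
    require "s3_file_uploader"

    describe S3FileUploader do
      before do
        Fog.mock!
        ENV["AWS_REGION"] = "eu-west-1"
        ENV["AWS_ACCESS_KEY_ID"] = "test"
        ENV["AWS_SECRET_ACCESS_KEY"] = "test"
        ENV["AWS_S3_BUCKET_NAME"] = "test-bucket"

        # The mocked bucket must exist before the uploader looks it up.
        connection = Fog::Storage.new(
          provider: "AWS",
          region: ENV["AWS_REGION"],
          aws_access_key_id: ENV["AWS_ACCESS_KEY_ID"],
          aws_secret_access_key: ENV["AWS_SECRET_ACCESS_KEY"],
        )
        @directory = connection.directories.get(ENV["AWS_S3_BUCKET_NAME"]) || connection.directories.create(key: ENV["AWS_S3_BUCKET_NAME"])
      end

      it "stores the file under the given key" do
        described_class.save_file_to_s3("example.csv", "date,report_count\n")

        file = @directory.files.get("example.csv")
        expect(file).not_to be nil
        expect(file.body).to eq "date,report_count\n"
      end
    end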
