Skip to content

Commit

Permalink
DEV: Switch to new ExportUserArchive job
Browse files Browse the repository at this point in the history
We now use the newly created job class from the previous commit.
  • Loading branch information
riking committed Aug 28, 2020
1 parent a8560d7 commit 7bf199b
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 123 deletions.
7 changes: 6 additions & 1 deletion app/controllers/export_csv_controller.rb
Expand Up @@ -6,7 +6,12 @@ class ExportCsvController < ApplicationController

def export_entity
guardian.ensure_can_export_entity!(export_params[:entity])
Jobs.enqueue(:export_csv_file, entity: export_params[:entity], user_id: current_user.id, args: export_params[:args])

if export_params[:entity] == 'user_archive'
Jobs.enqueue(:export_user_archive, user_id: current_user.id, args: export_params[:args])
else
Jobs.enqueue(:export_csv_file, entity: export_params[:entity], user_id: current_user.id, args: export_params[:args])
end
StaffActionLogger.new(current_user).log_entity_export(export_params[:entity])
render json: success_json
rescue Discourse::InvalidAccess
Expand Down
79 changes: 1 addition & 78 deletions app/jobs/regular/export_csv_file.rb
Expand Up @@ -12,8 +12,6 @@ class ExportCsvFile < ::Jobs::Base
attr_accessor :entity

HEADER_ATTRS_FOR ||= HashWithIndifferentAccess.new(
user_archive: ['topic_title', 'categories', 'is_pm', 'post', 'like_count', 'reply_count', 'url', 'created_at'],
user_archive_profile: ['location', 'website', 'bio', 'views'],
user_list: ['id', 'name', 'username', 'email', 'title', 'created_at', 'last_seen_at', 'last_posted_at', 'last_emailed_at', 'trust_level', 'approved', 'suspended_at', 'suspended_till', 'silenced_till', 'active', 'admin', 'moderator', 'ip_address', 'staged', 'secondary_emails'],
user_stats: ['topics_entered', 'posts_read_count', 'time_read', 'topic_count', 'post_count', 'likes_given', 'likes_received'],
user_profile: ['location', 'website', 'views'],
Expand All @@ -31,17 +29,14 @@ def execute(args)
@current_user = User.find_by(id: args[:user_id])

entities = [{ name: @entity }]
entities << { name: "user_archive_profile" } if @entity === "user_archive"

entities.each do |entity|
entity[:method] = :"#{entity[:name]}_export"
raise Discourse::InvalidParameters.new(:entity) unless respond_to?(entity[:method])

@timestamp ||= Time.now.strftime("%y%m%d-%H%M%S")
entity[:filename] =
if entity[:name] == "user_archive" || entity[:name] === "user_archive_profile"
"#{entity[:name].dasherize}-#{@current_user.username}-#{@timestamp}"
elsif entity[:name] == "report" && @extra[:name].present?
if entity[:name] == "report" && @extra[:name].present?
"#{@extra[:name].dasherize}-#{@timestamp}"
else
"#{entity[:name].dasherize}-#{@timestamp}"
Expand Down Expand Up @@ -108,30 +103,6 @@ def execute(args)
end
end

# Streams one export row per post authored by @current_user, oldest first,
# including soft-deleted posts (`with_deleted`). Each post is rendered to a
# row array by get_user_archive_fields. Returns an Enumerator when called
# without a block.
def user_archive_export
  return enum_for(:user_archive_export) unless block_given?

  posts = Post.includes(topic: :category)
    .where(user_id: @current_user.id)
    .select(:topic_id, :post_number, :raw, :like_count, :reply_count, :created_at)
    .order(:created_at)
    .with_deleted

  posts.each { |post| yield get_user_archive_fields(post) }
end

# Streams the current user's profile columns (location, website, bio_raw,
# views) as export rows via get_user_archive_profile_fields. Returns an
# Enumerator when called without a block.
def user_archive_profile_export
  return enum_for(:user_archive_profile_export) unless block_given?

  profiles = UserProfile
    .where(user_id: @current_user.id)
    .select(:location, :website, :bio_raw, :views)

  profiles.each { |profile| yield get_user_archive_profile_fields(profile) }
end

def user_list_export
return enum_for(:user_list_export) unless block_given?

Expand Down Expand Up @@ -341,54 +312,6 @@ def add_group_names(user, user_info_array)
user_info_array
end

# Builds the CSV row for a single post, ordered per
# HEADER_ATTRS_FOR['user_archive']. Falls back to looking the topic up among
# deleted topics; returns an empty array when no topic can be found at all.
def get_user_archive_fields(user_archive)
  user_archive_array = []
  topic_data = user_archive.topic
  user_archive = user_archive.as_json
  topic_data = Topic.with_deleted.find_by(id: user_archive['topic_id']) if topic_data.nil?
  return user_archive_array if topic_data.nil?

  # Build the id => Category map once per job instance. The original rebuilt
  # it from the database on every call, i.e. once per exported post
  # (O(categories * posts)); categories do not need to be re-read mid-export.
  @all_categories ||= Category.all.to_h { |category| [category.id, category] }

  # Walk parent_category_id links upward, then reverse so the breadcrumb
  # renders root-first, e.g. "parent|child|grandchild".
  categories = "-"
  if topic_data.category_id && category = @all_categories[topic_data.category_id]
    categories = [category.name]
    while category.parent_category_id && category = @all_categories[category.parent_category_id]
      categories << category.name
    end
    categories = categories.reverse.join("|")
  end

  is_pm = topic_data.archetype == "private_message" ? I18n.t("csv_export.boolean_yes") : I18n.t("csv_export.boolean_no")
  url = "#{Discourse.base_url}/t/#{topic_data.slug}/#{topic_data.id}/#{user_archive['post_number']}"

  # Merge topic-derived columns into the post attributes so a single hash
  # can be projected through the header list below.
  topic_hash = { "post" => user_archive['raw'], "topic_title" => topic_data.title, "categories" => categories, "is_pm" => is_pm, "url" => url }
  user_archive.merge!(topic_hash)

  HEADER_ATTRS_FOR['user_archive'].each do |attr|
    user_archive_array.push(user_archive[attr])
  end

  user_archive_array
end

# Projects a UserProfile record onto the 'user_archive_profile' header order.
# The exported 'bio' column is sourced from the underlying bio_raw attribute;
# every other header name maps directly to an attribute of the same name.
def get_user_archive_profile_fields(user_profile)
  HEADER_ATTRS_FOR['user_archive_profile'].map do |attr|
    source_column = attr == 'bio' ? 'bio_raw' : attr
    user_profile.attributes[source_column]
  end
end

def get_staff_action_fields(staff_action)
staff_action_array = []

Expand Down
1 change: 1 addition & 0 deletions app/jobs/regular/export_user_archive.rb
Expand Up @@ -7,6 +7,7 @@ class ExportUserArchive < ::Jobs::Base
sidekiq_options retry: false

attr_accessor :current_user
# note: contents provided entirely by user
attr_accessor :extra

COMPONENTS ||= %w(
Expand Down
51 changes: 12 additions & 39 deletions spec/jobs/export_csv_file_spec.rb
Expand Up @@ -5,26 +5,30 @@
describe Jobs::ExportCsvFile do

context '#execute' do
fab!(:user) { Fabricate(:user, username: "john_doe") }
let(:other_user) { Fabricate(:user) }
let(:admin) { Fabricate(:admin) }
let(:action_log) { StaffActionLogger.new(admin).log_revoke_moderation(other_user) }

it 'raises an error when the entity is missing' do
expect { Jobs::ExportCsvFile.new.execute(user_id: user.id) }.to raise_error(Discourse::InvalidParameters)
expect { Jobs::ExportCsvFile.new.execute(user_id: admin.id) }.to raise_error(Discourse::InvalidParameters)
end

it 'works' do
action_log

begin
expect do
Jobs::ExportCsvFile.new.execute(
user_id: user.id,
entity: "user_archive"
user_id: admin.id,
entity: "staff_action"
)
end.to change { Upload.count }.by(1)

system_message = user.topics_allowed.last
system_message = admin.topics_allowed.last

expect(system_message.title).to eq(I18n.t(
"system_messages.csv_export_succeeded.subject_template",
export_title: "User Archive"
export_title: "Staff Action"
))

upload = system_message.first_post.uploads.first
Expand All @@ -42,44 +46,13 @@
zip_file.each { |entry| files << entry.name }
end

expect(files.size).to eq(2)
expect(files.size).to eq(1)
ensure
user.uploads.each(&:destroy!)
admin.uploads.each(&:destroy!)
end
end
end

context '#user_archive_export' do
let(:user) { Fabricate(:user) }

# Three-level category hierarchy (category > subcategory > subsubcategory)
# used to exercise the parent-category walk in get_user_archive_fields.
let(:category) { Fabricate(:category_with_definition) }
let(:subcategory) { Fabricate(:category_with_definition, parent_category_id: category.id) }
let(:subsubcategory) { Fabricate(:category_with_definition, parent_category_id: subcategory.id) }

it 'works with sub-sub-categories' do
# Allow three levels of nesting so the third-level category is valid.
SiteSetting.max_category_nesting = 3
topic = Fabricate(:topic, category: subsubcategory)
post = Fabricate(:post, topic: topic, user: user)

exporter = Jobs::ExportCsvFile.new
exporter.current_user = User.find_by(id: user.id)

# Collect the yielded rows; exactly one post was created above.
rows = []
exporter.user_archive_export { |row| rows << row }

expect(rows.length).to eq(1)

# Zip header names with the row values for readable, name-based assertions.
first_row = Jobs::ExportCsvFile::HEADER_ATTRS_FOR['user_archive'].zip(rows[0]).to_h

expect(first_row["topic_title"]).to eq(topic.title)
# Categories render root-first, pipe-separated.
expect(first_row["categories"]).to eq("#{category.name}|#{subcategory.name}|#{subsubcategory.name}")
expect(first_row["is_pm"]).to eq(I18n.t("csv_export.boolean_no"))
expect(first_row["post"]).to eq(post.raw)
expect(first_row["like_count"]).to eq(0)
expect(first_row["reply_count"]).to eq(0)
end
end

context '.report_export' do

let(:user) { Fabricate(:admin) }
Expand Down
9 changes: 4 additions & 5 deletions spec/requests/export_csv_controller_spec.rb
Expand Up @@ -8,21 +8,20 @@
before { sign_in(user) }

describe "#export_entity" do
# NOTE(review): the scraped span contained both the pre-change and
# post-change diff lines interleaved (ExportCsvFile and ExportUserArchive
# expectations duplicated). Reconciled to the post-change version, matching
# the commit's intent of routing user_archive exports to the new job.
it "enqueues user archive job" do
  post "/export_csv/export_entity.json", params: { entity: "user_archive" }
  expect(response.status).to eq(200)
  # user_archive requests go to the dedicated ExportUserArchive job.
  expect(Jobs::ExportUserArchive.jobs.size).to eq(1)

  job_data = Jobs::ExportUserArchive.jobs.first["args"].first
  expect(job_data["user_id"]).to eq(user.id)
end

# NOTE(review): the scraped span contained both pre- and post-change diff
# lines (ExportCsvFile vs ExportUserArchive job-count expectations).
# Reconciled to the post-change version.
it "should not enqueue export job if rate limit is reached" do
  # A pre-existing recent export record for this user trips the rate limit.
  UserExport.create(file_name: "user-archive-codinghorror-150116-003249", user_id: user.id)
  post "/export_csv/export_entity.json", params: { entity: "user_archive" }
  expect(response.status).to eq(422)
  expect(Jobs::ExportUserArchive.jobs.size).to eq(0)
end

it "returns 404 when normal user tries to export admin entity" do
Expand Down

0 comments on commit 7bf199b

Please sign in to comment.