Merge pull request #310 from QutBioacoustics/fix-exclude-deleted-annotations-from-csv-download

exclude deleted annotations from csv download
atruskie committed Dec 11, 2016
2 parents bb785cf + 67ff093 commit 10f58b4
Showing 4 changed files with 161 additions and 6 deletions.
4 changes: 4 additions & 0 deletions Gemfile
@@ -96,6 +96,10 @@ gem 'jc-validates_timeliness', '~> 3.1.1'

gem 'enumerize', '~> 2.0'
gem 'uuidtools', '~> 2.1.5'

# Note: if other modifications are made to the default_scope
# there are manually constructed queries that need to be updated to match
# (search for ':deleted_at' to find the relevant places)
gem 'acts_as_paranoid', git: 'https://github.com/ActsAsParanoid/acts_as_paranoid.git', branch: :master, ref: 'c2db19554ddaedcac0a2b8d6a0563dea83c972c5'


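The note added to the Gemfile above matters because acts_as_paranoid implements soft deletion through a default scope, which ordinary ActiveRecord relations pick up automatically but hand-built Arel queries (such as csv_query in app/models/audio_event.rb below) do not. A minimal sketch of that difference, assuming the gem's usual behaviour:

# acts_as_paranoid effectively installs something like:
#   default_scope { where(deleted_at: nil) }
# so a normal relation already excludes soft-deleted rows:
AudioEvent.all.to_sql
# => SELECT "audio_events".* FROM "audio_events" WHERE "audio_events"."deleted_at" IS NULL

# A query built directly with Arel bypasses that default scope,
# so the same predicate must be added by hand:
audio_events = AudioEvent.arel_table
manual_query = audio_events
  .where(audio_events[:deleted_at].eq(nil)) # keep in sync with the default_scope
  .project(audio_events[:id])

That is the synchronisation the comment warns about: any change to the default_scope has to be mirrored wherever ':deleted_at' appears in manually constructed queries.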
3 changes: 2 additions & 1 deletion app/controllers/application_controller.rb
@@ -86,7 +86,8 @@ def user_signed_in?
# @return [void]
def add_archived_at_header(model)
if model.respond_to?(:deleted_at) && !model.deleted_at.blank?
response.headers['X-Archived-At'] = model.deleted_at.httpdate # must be a string, can't just pass a Date or Time
# response header must be a string, can't just pass a Date or Time
response.headers['X-Archived-At'] = model.deleted_at.httpdate
end
end

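For reference, Time#httpdate (from Ruby's standard time library) is what converts the deleted_at timestamp into the RFC 1123 string an HTTP header expects. An illustrative example with a made-up timestamp:

require 'time'

deleted_at = Time.utc(2016, 12, 11, 3, 30, 0)
deleted_at.httpdate
# => "Sun, 11 Dec 2016 03:30:00 GMT"

# hence the controller assigns the formatted string rather than the Time object:
# response.headers['X-Archived-At'] = model.deleted_at.httpdate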
32 changes: 31 additions & 1 deletion app/models/audio_event.rb
@@ -114,8 +114,24 @@ def self.filter_settings

# Project audio events to the format for CSV download
# @return [Arel::Nodes::Node] audio event csv query
# @param [User] user
# @param [Project] project
# @param [Site] site
# @param [AudioRecording] audio_recording
# @param [Float] start_offset
# @param [Float] end_offset
# @param [String] timezone_name
# @return [Arel:SelectManager]
def self.csv_query(user, project, site, audio_recording, start_offset, end_offset, timezone_name)

# Note: if other modifications are made to the default_scope (like acts_as_paranoid does),
# manually constructed queries like this need to be updated to match
# (search for ':deleted_at' to find the relevant places)

# Note: tried using Arel from ActiveRecord
# e.g. AudioEvent.all.ast.cores[0].wheres
# but was more trouble to use than directly constructing Arel

audio_events = AudioEvent.arel_table
users = User.arel_table
audio_recordings = AudioRecording.arel_table
@@ -158,6 +174,7 @@ def self.csv_query(user, project, site, audio_recording, start_offset, end_offset, timezone_name)
projects_aggregate =
projects_sites
.join(projects).on(projects[:id].eq(projects_sites[:project_id]))
.where(projects[:deleted_at].eq(nil))
.where(projects_sites[:site_id].eq(sites[:id]))
.project(projects_agg)

@@ -190,9 +207,12 @@ def self.csv_query(user, project, site, audio_recording, start_offset, end_offset, timezone_name)

query =
audio_events
.where(audio_events[:deleted_at].eq(nil))
.join(users).on(users[:id].eq(audio_events[:creator_id]))
.join(audio_recordings).on(audio_recordings[:id].eq(audio_events[:audio_recording_id]))
.where(audio_recordings[:deleted_at].eq(nil))
.join(sites).on(sites[:id].eq(audio_recordings[:site_id]))
.where(sites[:deleted_at].eq(nil))
.order(audio_events[:id].desc)
.project(
audio_events[:id].as('audio_event_id'),
@@ -246,15 +266,25 @@ def self.csv_query(user, project, site, audio_recording, start_offset, end_offset, timezone_name)
.as('library_url'),
)

# ensure deleted projects are not included
site_ids_for_live_project_ids = projects
.where(projects[:deleted_at].eq(nil))
.join(projects_sites).on(projects[:id].eq(projects_sites[:project_id]))
.where(sites[:id].eq(projects_sites[:site_id]))
.project(sites[:id]).distinct

query = query.where(sites[:id].in(site_ids_for_live_project_ids))


if user
query = query.where(users[:id].eq(user.id))
end

if project

site_ids = sites
.join(projects_sites).on(sites[:id].eq(projects_sites[:site_id]))
.join(projects).on(projects[:id].eq(projects_sites[:project_id]))
.where(projects[:deleted_at].eq(nil))
.where(projects[:id].eq(project.id))
.project(sites[:id]).distinct

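The fix in csv_query follows one pattern throughout: every paranoid table gets an explicit deleted_at IS NULL predicate, and sites that belong only to deleted projects are excluded with an IN subquery. A condensed sketch of how that subquery composes in Arel, mirroring (not replacing) the code above; outer_predicate is just an illustrative local name:

projects       = Project.arel_table
sites          = Site.arel_table
projects_sites = Arel::Table.new(:projects_sites)

# site ids belonging to at least one live (non-deleted) project;
# correlated with the outer query through sites[:id]
site_ids_for_live_project_ids = projects
  .where(projects[:deleted_at].eq(nil))
  .join(projects_sites).on(projects[:id].eq(projects_sites[:project_id]))
  .where(sites[:id].eq(projects_sites[:site_id]))
  .project(sites[:id]).distinct

# attached to the main query this renders roughly as:
#   AND "sites"."id" IN (SELECT DISTINCT "sites"."id" FROM "projects"
#     INNER JOIN "projects_sites" ON ... WHERE "projects"."deleted_at" IS NULL ...)
outer_predicate = sites[:id].in(site_ids_for_live_project_ids)

The expected SQL strings in the spec below assert exactly this shape, alongside the direct IS NULL checks on audio_events, audio_recordings, and sites.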
128 changes: 124 additions & 4 deletions spec/models/audio_event_spec.rb
@@ -60,7 +60,9 @@
to_char("audio_events"."created_at" + INTERVAL '0 seconds', 'YYYY-MM-DD') AS event_created_at_date_utc_00_00,
to_char("audio_events"."created_at" + INTERVAL '0 seconds', 'HH24:MI:SS') AS event_created_at_time_utc_00_00,
to_char("audio_events"."created_at" + INTERVAL '0 seconds', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS event_created_at_datetime_utc_00_00,
(SELECT string_agg(CAST("projects"."id" as varchar) || ':' || "projects"."name", '|') FROM "projects_sites" INNER JOIN "projects" ON "projects"."id" = "projects_sites"."project_id" WHERE "projects_sites"."site_id" = "sites"."id") projects,
(SELECT string_agg(CAST("projects"."id" as varchar) || ':' || "projects"."name", '|') FROM "projects_sites" INNER JOIN "projects" ON "projects"."id" = "projects_sites"."project_id" WHERE
"projects"."deleted_at" IS NULL AND
"projects_sites"."site_id" = "sites"."id") projects,
"sites"."id" AS site_id, "sites"."name" AS site_name,
to_char("audio_recordings"."recorded_date" + CAST("audio_events"."start_time_seconds" || ' seconds' as interval) + INTERVAL '0 seconds', 'YYYY-MM-DD') AS event_start_date_utc_00_00,
to_char("audio_recordings"."recorded_date" + CAST("audio_events"."start_time_seconds" || ' seconds' as interval) + INTERVAL '0 seconds', 'HH24:MI:SS') AS event_start_time_utc_00_00,
@@ -84,10 +86,17 @@
INNER JOIN "users" ON "users"."id" = "audio_events"."creator_id"
INNER JOIN "audio_recordings" ON "audio_recordings"."id" = "audio_events"."audio_recording_id"
INNER JOIN "sites" ON "sites"."id" = "audio_recordings"."site_id"
WHERE
"audio_events"."deleted_at" IS NULL AND
"audio_recordings"."deleted_at" IS NULL AND
"sites"."deleted_at" IS NULL AND
"sites"."id" IN
(SELECT DISTINCT "sites"."id" FROM "projects" INNER JOIN "projects_sites" ON "projects"."id" = "projects_sites"."project_id"
WHERE "projects"."deleted_at" IS NULL AND "sites"."id" = "projects_sites"."site_id")
ORDER BY "audio_events"."id" DESC
eos

expect(query.to_sql).to eq(sql.gsub("\n", ' ').trim(' ', ''))
expect(query.to_sql.gsub("\n", ' ').gsub(' ', '')).to eq(sql.gsub("\n", ' ').gsub(' ', ''))

end

@@ -106,7 +115,9 @@
to_char("audio_events"."created_at" + INTERVAL '36000 seconds', 'YYYY-MM-DD') AS event_created_at_date_brisbane_10_00,
to_char("audio_events"."created_at" + INTERVAL '36000 seconds', 'HH24:MI:SS') AS event_created_at_time_brisbane_10_00,
to_char("audio_events"."created_at" + INTERVAL '36000 seconds', 'YYYY-MM-DD"T"HH24:MI:SS"+10:00"') AS event_created_at_datetime_brisbane_10_00,
(SELECT string_agg(CAST("projects"."id" as varchar) || ':' || "projects"."name", '|') FROM "projects_sites" INNER JOIN "projects" ON "projects"."id" = "projects_sites"."project_id" WHERE "projects_sites"."site_id" = "sites"."id") projects,
(SELECT string_agg(CAST("projects"."id" as varchar) || ':' || "projects"."name", '|') FROM "projects_sites" INNER JOIN "projects" ON "projects"."id" = "projects_sites"."project_id" WHERE
"projects"."deleted_at" IS NULL AND
"projects_sites"."site_id" = "sites"."id") projects,
"sites"."id" AS site_id, "sites"."name" AS site_name,
to_char("audio_recordings"."recorded_date" + CAST("audio_events"."start_time_seconds" || ' seconds' as interval) + INTERVAL '36000 seconds', 'YYYY-MM-DD') AS event_start_date_brisbane_10_00,
to_char("audio_recordings"."recorded_date" + CAST("audio_events"."start_time_seconds" || ' seconds' as interval) + INTERVAL '36000 seconds', 'HH24:MI:SS') AS event_start_time_brisbane_10_00,
@@ -130,10 +141,119 @@
INNER JOIN "users" ON "users"."id" = "audio_events"."creator_id"
INNER JOIN "audio_recordings" ON "audio_recordings"."id" = "audio_events"."audio_recording_id"
INNER JOIN "sites" ON "sites"."id" = "audio_recordings"."site_id"
WHERE
"audio_events"."deleted_at" IS NULL AND
"audio_recordings"."deleted_at" IS NULL AND
"sites"."deleted_at" IS NULL AND
"sites"."id" IN
(SELECT DISTINCT "sites"."id" FROM "projects" INNER JOIN "projects_sites" ON "projects"."id" = "projects_sites"."project_id"
WHERE "projects"."deleted_at" IS NULL AND "sites"."id" = "projects_sites"."site_id")
ORDER BY "audio_events"."id" DESC
eos

expect(query.to_sql).to eq(sql.gsub("\n", ' ').trim(' ', ''))
expect(query.to_sql.gsub("\n", ' ').gsub(' ', '')).to eq(sql.gsub("\n", ' ').gsub(' ', ''))

end

it 'excludes deleted projects, sites, audio_recordings, and audio_events from annotation download' do
user = FactoryGirl.create(:user, user_name: 'owner user checking excluding deleted items in annotation download')

# create combinations of deleted and not deleted for project, site, audio_recording, audio_event
expected_audio_recording = nil
(0..1).each do |project_n|
project = FactoryGirl.create(:project, creator: user)
project.destroy if project_n == 1

(0..1).each do |site_n|
site = FactoryGirl.create(:site, :with_lat_long, creator: user)
site.projects << project
site.save!
site.destroy if site_n == 1

(0..1).each do |audio_recording_n|
audio_recording = FactoryGirl.create(:audio_recording, :status_ready, creator: user, uploader: user, site: site)
audio_recording.destroy if audio_recording_n == 1

(0..1).each do |audio_event_n|
audio_event = FactoryGirl.create(:audio_event, creator: user, audio_recording: audio_recording)
audio_event.destroy if audio_event_n == 1
if project_n == 0 && site_n == 0 && audio_recording_n == 0 && audio_event_n == 0
expected_audio_recording = audio_event
end

end
end
end
end

# check that AudioEvent.csv_query returns only non-deleted items
query = AudioEvent.csv_query(nil, nil, nil, nil, nil, nil, nil)
query_sql = query.to_sql
formatted_annotations = AudioEvent.connection.select_all(query_sql)

expect(Project.with_deleted.count).to eq(2)
expect(Project.count).to eq(1)
expect(Project.only_deleted.count).to eq(1)

expect(Site.with_deleted.count).to eq(4)
expect(Site.count).to eq(2)
expect(Site.only_deleted.count).to eq(2)

expect(AudioRecording.with_deleted.count).to eq(8)
expect(AudioRecording.count).to eq(4)
expect(AudioRecording.only_deleted.count).to eq(4)

expect(AudioEvent.with_deleted.count).to eq(16)
expect(AudioEvent.count).to eq(8)
expect(AudioEvent.only_deleted.count).to eq(8)

expected_audio_events = [expected_audio_recording.id.to_s]
actual_audio_event_ids = formatted_annotations.map { |item| item['audio_event_id'] }

expect(actual_audio_event_ids).to eq(expected_audio_events)

end

it 'ensures only one instance of each audio event in annotation download' do
user = FactoryGirl.create(:user, user_name: 'owner user checking audio event uniqueness in annotation download')

# create 2 of everything for project, site, audio_recording, audio_event
(0..1).each do
project = FactoryGirl.create(:project, creator: user)

(0..1).each do
site = FactoryGirl.create(:site, :with_lat_long, creator: user)
site.projects << project
site.save!

(0..1).each do
audio_recording = FactoryGirl.create(:audio_recording, :status_ready, creator: user, uploader: user, site: site)

(0..1).each do
FactoryGirl.create(:audio_event, creator: user, audio_recording: audio_recording)

end
end
end
end

# check that AudioEvent.csv_query returns unique audio events
query = AudioEvent.csv_query(nil, nil, nil, nil, nil, nil, nil)
query_sql = query.to_sql
formatted_annotations = AudioEvent.connection.select_all(query_sql)

expect(Project.with_deleted.count).to eq(2)
expect(Site.with_deleted.count).to eq(2 * 2)
expect(AudioRecording.with_deleted.count).to eq(2 * 2 * 2)
expect(AudioEvent.with_deleted.count).to eq(2 * 2 * 2 * 2)

actual_audio_event_ids = formatted_annotations.map { |item| item['audio_event_id'] }

expect(actual_audio_event_ids.count).to eq(2 * 2 * 2 * 2)

expected_audio_event_ids = actual_audio_event_ids.uniq

expect(actual_audio_event_ids).to eq(expected_audio_event_ids)

end

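A quick sanity check on the counts the two specs assert: each nesting level in the loops creates one kept and one soft-deleted record (or two kept records in the uniqueness spec), so totals double at every level, and only the fully live chain survives csv_query's filters. A small sketch of that arithmetic:

# deletion spec: one kept + one soft-deleted record per nesting level
with_deleted_counts = (1..4).map { |depth| 2**depth }
# => [2, 4, 8, 16]  projects, sites, audio_recordings, audio_events (with_deleted)
live_counts = with_deleted_counts.map { |n| n / 2 }
# => [1, 2, 4, 8]   matches Project.count, Site.count, AudioRecording.count, AudioEvent.count

# only the chain where nothing was deleted reaches the download,
# so exactly one audio_event_id is expected among the CSV rows;
# in the uniqueness spec all 16 events are live and each id must appear once.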
