diff --git a/db/migrate/20190404132157_add_implicit_varchar_uuid_cast.rb b/db/migrate/20190404132157_add_implicit_varchar_uuid_cast.rb deleted file mode 100644 index 2c874a7f1..000000000 --- a/db/migrate/20190404132157_add_implicit_varchar_uuid_cast.rb +++ /dev/null @@ -1,25 +0,0 @@ -class AddImplicitVarcharUuidCast < ActiveRecord::Migration[5.2] - def up - if on_postgresql? - ActiveRecord::Base.connection.execute <<~SQL - CREATE CAST (varchar AS uuid) - WITH INOUT - AS IMPLICIT - SQL - end - end - - def down - if on_postgresql? - ActiveRecord::Base.connection.execute <<~SQL - DROP CAST (varchar AS uuid) - SQL - end - end - - private - - def on_postgresql? - ActiveRecord::Base.connection.adapter_name == 'PostgreSQL' - end -end diff --git a/extra/dynflow-debug.sh b/extra/dynflow-debug.sh index 184f29503..b7bd544cc 100755 --- a/extra/dynflow-debug.sh +++ b/extra/dynflow-debug.sh @@ -31,9 +31,9 @@ add_files /var/log/foreman/dynflow_executor*.output* # Foreman Tasks fast export (Postgresql only; for HTML version use foreman-rake foreman_tasks:export_tasks) if [ "$FOREMAN_DB_ADAPTER" == "postgresql" ]; then - export_csv "select dynflow_execution_plans.* from foreman_tasks_tasks join dynflow_execution_plans on (foreman_tasks_tasks.external_id = dynflow_execution_plans.uuid) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_execution_plans.csv" - export_csv "select dynflow_actions.* from foreman_tasks_tasks join dynflow_actions on (foreman_tasks_tasks.external_id = dynflow_actions.execution_plan_uuid) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_actions.csv" - export_csv "select dynflow_steps.* from foreman_tasks_tasks join dynflow_steps on (foreman_tasks_tasks.external_id = dynflow_steps.execution_plan_uuid) where foreman_tasks_tasks.started_at > 
'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_steps.csv" + export_csv "select dynflow_execution_plans.* from foreman_tasks_tasks join dynflow_execution_plans on (foreman_tasks_tasks.external_id = dynflow_execution_plans.uuid::varchar) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_execution_plans.csv" + export_csv "select dynflow_actions.* from foreman_tasks_tasks join dynflow_actions on (foreman_tasks_tasks.external_id = dynflow_actions.execution_plan_uuid::varchar) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_actions.csv" + export_csv "select dynflow_steps.* from foreman_tasks_tasks join dynflow_steps on (foreman_tasks_tasks.external_id = dynflow_steps.execution_plan_uuid::varchar) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_steps.csv" export_csv "select * from dynflow_schema_info" "$DIR/dynflow_schema_info.csv" export_csv "select * from foreman_tasks_tasks limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/foreman_tasks_tasks.csv" fi diff --git a/lib/foreman_tasks/cleaner.rb b/lib/foreman_tasks/cleaner.rb index 89a0116fe..4832556ad 100644 --- a/lib/foreman_tasks/cleaner.rb +++ b/lib/foreman_tasks/cleaner.rb @@ -192,9 +192,15 @@ def with_batches(source, name) end def orphaned_dynflow_tasks + dynflow_plan_uuid_attribute = "dynflow_execution_plans.uuid" + if ActiveRecord::Base.connection.adapter_name == 'PostgreSQL' + # typecast the UUID attribute for Postgres + dynflow_plan_uuid_attribute += "::varchar" + end + db = ForemanTasks.dynflow.world.persistence.adapter.db db.fetch("select dynflow_execution_plans.uuid from dynflow_execution_plans left join "\ - 
"foreman_tasks_tasks on (dynflow_execution_plans.uuid = foreman_tasks_tasks.external_id) "\ + "foreman_tasks_tasks on (#{dynflow_plan_uuid_attribute} = foreman_tasks_tasks.external_id) "\ "where foreman_tasks_tasks.id IS NULL") end diff --git a/test/unit/cleaner_test.rb b/test/unit/cleaner_test.rb index c021f5284..7ea66274a 100644 --- a/test/unit/cleaner_test.rb +++ b/test/unit/cleaner_test.rb @@ -32,6 +32,27 @@ class TasksTest < ActiveSupport::TestCase .find_execution_plans(filters: { 'uuid' => tasks_to_keep.map(&:external_id) }).size.must_equal tasks_to_keep.size end + describe "#orphaned_dynflow_tasks" do + # We can't use transactional tests because we're using Sequel for the cleanup query + self.use_transactional_tests = false + before do + skip "Sqlite is running testing Dynflow DB in memory" if ActiveRecord::Base.connection.adapter_name == 'SQLite' + @existing_task = FactoryBot.create(:dynflow_task, :user_create_task) + @missing_task = FactoryBot.create(:dynflow_task, :user_create_task) + @cleaner = ForemanTasks::Cleaner.new(filter: "id ^ (#{@existing_task.id}, #{@missing_task.id})") + @missing_task.destroy + end + + after do + @cleaner.delete if @cleaner + end + + it 'is able to find orphaned execution plans (without corresponding task object)' do + assert(@cleaner.orphaned_dynflow_tasks.any? { |t| t[:uuid] == @missing_task.external_id }) + assert_not(@cleaner.orphaned_dynflow_tasks.any? { |t| t[:uuid] == @existing_task.external_id }) + end + end + it 'deletes all tasks matching the filter when the time limit is not specified' do cleaner = ForemanTasks::Cleaner.new(:filter => 'label = "Actions::User::Create"') tasks_to_delete = [FactoryBot.create(:dynflow_task, :user_create_task),