Refs #22602 - explicit instead of implicit UUID casting
In a production setup, we were getting:

     create cast must be owner of type character varying

The problem is that this cast is global: we might not want it in every
case, and we don't have the permissions to create it.
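For context, the two approaches differ roughly as sketched below. The create cast statement is an assumed reconstruction of what the deleted add_implicit_varchar_uuid_cast migration executed (its contents are not shown in this diff), while the select mirrors the explicit casts this commit adds to dynflow-debug.sh:

    -- database-wide implicit cast (assumed shape of the deleted migration's SQL);
    -- PostgreSQL only lets the owner of one of the involved types create it,
    -- hence the "must be owner of type character varying" error above
    create cast (varchar as uuid) with inout as implicit;

    -- explicit per-query cast used by this commit instead; needs no special privileges
    select dynflow_execution_plans.*
    from foreman_tasks_tasks
    join dynflow_execution_plans
      on (foreman_tasks_tasks.external_id = dynflow_execution_plans.uuid::varchar);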
iNecas committed May 7, 2019
1 parent 94a0733 commit 23590d2
Showing 4 changed files with 31 additions and 29 deletions.
25 changes: 0 additions & 25 deletions db/migrate/20190404132157_add_implicit_varchar_uuid_cast.rb

This file was deleted.

6 changes: 3 additions & 3 deletions extra/dynflow-debug.sh
@@ -31,9 +31,9 @@ add_files /var/log/foreman/dynflow_executor*.output*
 # Foreman Tasks fast export (Postgresql only; for HTML version use foreman-rake foreman_tasks:export_tasks)
 
 if [ "$FOREMAN_DB_ADAPTER" == "postgresql" ]; then
-  export_csv "select dynflow_execution_plans.* from foreman_tasks_tasks join dynflow_execution_plans on (foreman_tasks_tasks.external_id = dynflow_execution_plans.uuid) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_execution_plans.csv"
-  export_csv "select dynflow_actions.* from foreman_tasks_tasks join dynflow_actions on (foreman_tasks_tasks.external_id = dynflow_actions.execution_plan_uuid) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_actions.csv"
-  export_csv "select dynflow_steps.* from foreman_tasks_tasks join dynflow_steps on (foreman_tasks_tasks.external_id = dynflow_steps.execution_plan_uuid) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_steps.csv"
+  export_csv "select dynflow_execution_plans.* from foreman_tasks_tasks join dynflow_execution_plans on (foreman_tasks_tasks.external_id = dynflow_execution_plans.uuid::varchar) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_execution_plans.csv"
+  export_csv "select dynflow_actions.* from foreman_tasks_tasks join dynflow_actions on (foreman_tasks_tasks.external_id = dynflow_actions.execution_plan_uuid::varchar) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_actions.csv"
+  export_csv "select dynflow_steps.* from foreman_tasks_tasks join dynflow_steps on (foreman_tasks_tasks.external_id = dynflow_steps.execution_plan_uuid::varchar) where foreman_tasks_tasks.started_at > 'now'::timestamp - '${DYNFLOW_EXPORT_MONTHS:-1} months'::interval limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/dynflow_steps.csv"
   export_csv "select * from dynflow_schema_info" "$DIR/dynflow_schema_info.csv"
   export_csv "select * from foreman_tasks_tasks limit ${DYNFLOW_EXPORT_LIMIT:-100000}" "$DIR/foreman_tasks_tasks.csv"
 fi
8 changes: 7 additions & 1 deletion lib/foreman_tasks/cleaner.rb
@@ -192,9 +192,15 @@ def with_batches(source, name)
   end
 
   def orphaned_dynflow_tasks
+    dynflow_plan_uuid_attribute = "dynflow_execution_plans.uuid"
+    if ActiveRecord::Base.connection.adapter_name == 'PostgreSQL'
+      # typecast the UUID attribute for Postgres
+      dynflow_plan_uuid_attribute += "::varchar"
+    end
+
     db = ForemanTasks.dynflow.world.persistence.adapter.db
     db.fetch("select dynflow_execution_plans.uuid from dynflow_execution_plans left join "\
-             "foreman_tasks_tasks on (dynflow_execution_plans.uuid = foreman_tasks_tasks.external_id) "\
+             "foreman_tasks_tasks on (#{dynflow_plan_uuid_attribute} = foreman_tasks_tasks.external_id) "\
              "where foreman_tasks_tasks.id IS NULL")
   end

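With this change, orphaned_dynflow_tasks on PostgreSQL issues a query along these lines (reflowed here for readability; on other adapters the ::varchar cast is simply dropped):

    select dynflow_execution_plans.uuid
    from dynflow_execution_plans
    left join foreman_tasks_tasks
      on (dynflow_execution_plans.uuid::varchar = foreman_tasks_tasks.external_id)
    where foreman_tasks_tasks.id IS NULL;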
21 changes: 21 additions & 0 deletions test/unit/cleaner_test.rb
@@ -32,6 +32,27 @@ class TasksTest < ActiveSupport::TestCase
         .find_execution_plans(filters: { 'uuid' => tasks_to_keep.map(&:external_id) }).size.must_equal tasks_to_keep.size
     end
 
+    describe "#orphaned_dynflow_tasks" do
+      # We can't use transactional tests because we're using Sequel for the cleanup query
+      self.use_transactional_tests = false
+      before do
+        skip "Sqlite is running the testing Dynflow DB in memory" if ActiveRecord::Base.connection.adapter_name == 'SQLite'
+        @existing_task = FactoryBot.create(:dynflow_task, :user_create_task)
+        @missing_task = FactoryBot.create(:dynflow_task, :user_create_task)
+        @cleaner = ForemanTasks::Cleaner.new(filter: "id ^ (#{@existing_task.id}, #{@missing_task.id})")
+        @missing_task.destroy
+      end
+
+      after do
+        @cleaner.delete if @cleaner
+      end
+
+      it 'is able to find orphaned execution plans (without corresponding task object)' do
+        assert(@cleaner.orphaned_dynflow_tasks.any? { |t| t[:uuid] == @missing_task.external_id })
+        assert_not(@cleaner.orphaned_dynflow_tasks.any? { |t| t[:uuid] == @existing_task.external_id })
+      end
+    end
+
     it 'deletes all tasks matching the filter when the time limit is not specified' do
       cleaner = ForemanTasks::Cleaner.new(:filter => 'label = "Actions::User::Create"')
       tasks_to_delete = [FactoryBot.create(:dynflow_task, :user_create_task),
