fix(migration): migrate queue parameters
In #10840, some queue parameter types were changed so that their values are more
restricted than in the original schema. This commit adds a data migration that
updates any existing values that would not be valid under the new schema. The
migration is tested with an integration test rather than through the upgrade test
logic, because the latter does not currently support testing upgrades in the way
that this change would require.
hanshuebner committed Jun 5, 2023
1 parent 750ba97 commit 331dfdc
Showing 5 changed files with 117 additions and 0 deletions.
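
For illustration (an editorial sketch, not part of this commit), the coercions described in the commit message amount to rounding the integer-typed queue sizes and clamping the retry delays into the range 0.001 to 1000000 accepted by the new schema. In Lua terms, roughly:

-- Illustrative Lua equivalent of the coercions performed by the SQL migration
-- in queue_parameter_migration_340.lua below; the helper names are hypothetical.
local function migrate_queue_values(queue)
  local function round(n) return math.floor(n + 0.5) end
  local function clamp(n) return math.min(math.max(n, 0.001), 1000000) end
  if queue.max_batch_size then queue.max_batch_size = round(queue.max_batch_size) end
  if queue.max_entries then queue.max_entries = round(queue.max_entries) end
  if queue.max_bytes then queue.max_bytes = round(queue.max_bytes) end
  if queue.initial_retry_delay then queue.initial_retry_delay = clamp(queue.initial_retry_delay) end
  if queue.max_retry_delay then queue.max_retry_delay = clamp(queue.max_retry_delay) end
  return queue
end
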
6 changes: 6 additions & 0 deletions kong/db/migrations/core/020_330_to_340.lua
@@ -0,0 +1,6 @@
local queue_parameter_migration_340 = require('kong.db.migrations.core.queue_parameter_migration_340')
return {
postgres = {
up = queue_parameter_migration_340,
}
}
1 change: 1 addition & 0 deletions kong/db/migrations/core/init.lua
@@ -17,4 +17,5 @@ return {
"017_300_to_310",
"018_310_to_320",
"019_320_to_330",
"020_330_to_340",
}
23 changes: 23 additions & 0 deletions kong/db/migrations/core/queue_parameter_migration_340.lua
@@ -0,0 +1,23 @@
-- This data migration updates queue parameters so that they conform to the changes made in https://github.com/Kong/kong/pull/10840
-- The migration lives in a separate file so that it can be tested easily
return [[
update plugins
set config = jsonb_set(config, '{queue, max_batch_size}', to_jsonb(round((config->'queue'->>'max_batch_size')::numeric)))
where config->'queue'->>'max_batch_size' is not null;
update plugins
set config = jsonb_set(config, '{queue, max_entries}', to_jsonb(round((config->'queue'->>'max_entries')::numeric)))
where config->'queue'->>'max_entries' is not null;
update plugins
set config = jsonb_set(config, '{queue, max_bytes}', to_jsonb(round((config->'queue'->>'max_bytes')::numeric)))
where config->'queue'->>'max_bytes' is not null;
update plugins
set config = jsonb_set(config, '{queue, initial_retry_delay}', to_jsonb(least(greatest((config->'queue'->>'initial_retry_delay')::numeric, 0.001), 1000000)))
where config->'queue'->>'initial_retry_delay' is not null;
update plugins
set config = jsonb_set(config, '{queue, max_retry_delay}', to_jsonb(least(greatest((config->'queue'->>'max_retry_delay')::numeric, 0.001), 1000000)))
where config->'queue'->>'max_retry_delay' is not null;
]]
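
Because this module returns only the raw SQL string, it can be executed directly through a database connector, which is how the spec below exercises it. A minimal sketch, assuming a Postgres-backed test database initialized via helpers.get_db_utils() as in the spec:

local helpers = require "spec.helpers"
local queue_parameter_migration_340 = require "kong.db.migrations.core.queue_parameter_migration_340"

-- Run the raw migration SQL through the connector (mirrors run_migration()
-- in the spec below).
local _, db = helpers.get_db_utils()
local ok, err = db.connector:query(queue_parameter_migration_340)
assert(ok, "running queue parameter migration failed: " .. (err or ""))
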
82 changes: 82 additions & 0 deletions spec/03-plugins/02-queue_parameter_migration_340_spec.lua
@@ -0,0 +1,82 @@
local cjson = require "cjson"
local tablex = require "pl.tablex"
local helpers = require "spec.helpers"
local Schema = require "kong.db.schema"
local queue_schema = Schema.new(require "kong.tools.queue_schema")
local queue_parameter_migration_340 = require "kong.db.migrations.core.queue_parameter_migration_340"

describe("Kong Gateway 3.4 queue parameter migration", function()
local db

local function load_queue_config()
local rows, err = db.connector:query([[SELECT config->>'queue' AS queue_config FROM plugins]])
assert(rows, "SQL query for queue config failed: " .. (err or ""))
return cjson.decode(rows[1].queue_config)
end

local sane_queue_config

lazy_setup(function()
-- Create a service to make sure that our database is initialized properly.
local bp
bp, db = helpers.get_db_utils()

db:truncate()

bp.plugins:insert{
name = "http-log",
config = {
http_endpoint = "http://example.com",
}
}

sane_queue_config = load_queue_config()
end)

local function update_plugin_queue_config(queue_config)
local query = string.format([[
UPDATE plugins
SET config = jsonb_set(config, '{queue}', '%s'::jsonb)
WHERE config->'queue' IS NOT NULL]],
cjson.encode(queue_config))
local ok, err = db.connector:query(query)
assert(ok, "SQL query " .. query .. " failed: " .. (err or ""))
end

local function validate_queue_config()
local queue_config = load_queue_config()
assert(queue_schema:validate(queue_config))
return queue_config
end

local function run_migration()
local ok, err = db.connector:query(queue_parameter_migration_340)
assert(ok, "Running migration failed: " .. (err or ""))
end

local function test_one_parameter(key, value, migrated_value)
local queue_config = tablex.deepcopy(sane_queue_config)
queue_config[key] = value
update_plugin_queue_config(queue_config)
run_migration()
local migrated_queue_config = validate_queue_config()
assert.equals(migrated_value, migrated_queue_config[key])
end

it("parameters that were previously unrestricted migrated to conform to the restricions", function()
test_one_parameter("max_batch_size", 120, 120)
test_one_parameter("max_batch_size", 120.20, 120)
test_one_parameter("max_entries", 203, 203)
test_one_parameter("max_entries", 203.20, 203)
test_one_parameter("max_bytes", 304, 304)
test_one_parameter("max_bytes", 303.9, 304)
test_one_parameter("initial_retry_delay", -2000, 0.001)
test_one_parameter("initial_retry_delay", 0.001, 0.001)
test_one_parameter("initial_retry_delay", 1000000, 1000000)
test_one_parameter("initial_retry_delay", 39999999, 1000000)
test_one_parameter("max_retry_delay", -2000, 0.001)
test_one_parameter("max_retry_delay", 0.001, 0.001)
test_one_parameter("max_retry_delay", 1000000, 1000000)
test_one_parameter("max_retry_delay", 39999999, 1000000)
end)
end)
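
As a usage note (not part of the commit), the integration spec above can be run on its own against a Postgres test database with Kong's busted wrapper, assuming a standard development environment:

bin/busted spec/03-plugins/02-queue_parameter_migration_340_spec.lua
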
5 changes: 5 additions & 0 deletions spec/05-migration/db/migrations/core/020_330_to_340_spec.lua
@@ -0,0 +1,5 @@
local uh = require "spec/upgrade_helpers"

describe("database migration", function()
-- This is a placeholder at this point
end)
