From 0d0e740fd962b2748cd7c378240af69cca7b6097 Mon Sep 17 00:00:00 2001 From: Morgan Tocker Date: Fri, 1 May 2026 20:27:18 -0600 Subject: [PATCH 1/3] test(#781): migrate pkg/table tests from assert.* to require.* Part of #779. Setup-path errors now fail-fast instead of cascading into confusing follow-on assertion failures. Co-Authored-By: Claude Opus 4.7 (1M context) --- pkg/table/chunk_test.go | 7 +- pkg/table/chunker_composite_test.go | 262 +++++++++++++-------------- pkg/table/chunker_multi_test.go | 2 +- pkg/table/chunker_optimistic_test.go | 107 +++++------ pkg/table/chunker_test.go | 19 +- pkg/table/datum_test.go | 101 ++++++----- pkg/table/tableinfo_test.go | 79 ++++---- 7 files changed, 291 insertions(+), 286 deletions(-) diff --git a/pkg/table/chunk_test.go b/pkg/table/chunk_test.go index ba04c9ea..6551a49c 100644 --- a/pkg/table/chunk_test.go +++ b/pkg/table/chunk_test.go @@ -4,6 +4,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestChunk2String(t *testing.T) { @@ -143,7 +144,7 @@ func TestWatermarkAboveClause(t *testing.T) { // Build a watermark JSON: chunk with upper bound id=100 watermark := `{"Key":["id"],"ChunkSize":1000,"LowerBound":{"Value":["50"],"Inclusive":true},"UpperBound":{"Value":["100"],"Inclusive":false}}` clause, err := WatermarkAboveClause(ti, watermark) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` > 100", clause) // Composite key @@ -153,7 +154,7 @@ func TestWatermarkAboveClause(t *testing.T) { watermark2 := `{"Key":["tenant_id","item_id"],"ChunkSize":1000,"LowerBound":{"Value":["1","50"],"Inclusive":true},"UpperBound":{"Value":["2","100"],"Inclusive":false}}` clause2, err := WatermarkAboveClause(ti2, watermark2) - assert.NoError(t, err) + require.NoError(t, err) assert.Contains(t, clause2, "`tenant_id`") assert.Contains(t, clause2, "`item_id`") // Should be a row constructor comparison: ((tenant_id > 2) OR (tenant_id = 2 AND item_id > 100)) @@ 
-161,5 +162,5 @@ func TestWatermarkAboveClause(t *testing.T) { // Invalid JSON _, err = WatermarkAboveClause(ti, "not-json") - assert.Error(t, err) + require.Error(t, err) } diff --git a/pkg/table/chunker_composite_test.go b/pkg/table/chunker_composite_test.go index f5081efb..51a369e5 100644 --- a/pkg/table/chunker_composite_test.go +++ b/pkg/table/chunker_composite_test.go @@ -30,7 +30,7 @@ func TestCompositeChunkerCompositeBinary(t *testing.T) { testutils.RunSQL(t, `INSERT INTO composite_binary_t1 (a, b, c) SELECT UUID(), UUID(), 1 FROM composite_binary_t1 a JOIN composite_binary_t1 b JOIN composite_binary_t1 c LIMIT 1000000`) //nolint: dupword db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -38,7 +38,7 @@ func TestCompositeChunkerCompositeBinary(t *testing.T) { }() t1 := NewTableInfo(db, "test", "composite_binary_t1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) // Assert that the types are correct. 
assert.Equal(t, []string{"varbinary", "varbinary"}, t1.keyColumnsMySQLTp) @@ -46,18 +46,18 @@ func TestCompositeChunkerCompositeBinary(t *testing.T) { assert.Equal(t, binaryType, t1.keyDatums[1]) chunker, err := NewChunker(t1, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) assert.IsType(t, &chunkerComposite{}, chunker) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) chunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.NotContains(t, "`a` >= ", chunk.String()) // first chunk is special upperBound := chunk.UpperBound.Value chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) previousUpperBound := upperBound upperBound = chunk.UpperBound.Value require.NotEqual(t, previousUpperBound, upperBound) @@ -72,7 +72,7 @@ func TestCompositeChunkerCompositeBinary(t *testing.T) { ) chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) previousUpperBound = upperBound upperBound = chunk.UpperBound.Value require.NotEqual(t, previousUpperBound, upperBound) @@ -88,7 +88,7 @@ func TestCompositeChunkerCompositeBinary(t *testing.T) { // Test it advances again chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) previousUpperBound = upperBound upperBound = chunk.UpperBound.Value require.NotEqual(t, previousUpperBound, upperBound) @@ -135,7 +135,7 @@ func TestCompositeChunkerBinary(t *testing.T) { testutils.RunSQL(t, `INSERT INTO composite_t1 (pk, a, b) SELECT UUID(), 1, 1 FROM composite_t1 a JOIN composite_t1 b JOIN composite_t1 c LIMIT 1000000`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -143,32 +143,32 @@ func TestCompositeChunkerBinary(t *testing.T) { }() t1 := NewTableInfo(db, "test", "composite_t1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, 
t1.SetInfo(t.Context())) // Assert that the types are correct. assert.Equal(t, []string{"varbinary"}, t1.keyColumnsMySQLTp) assert.Equal(t, binaryType, t1.keyDatums[0]) chunker, err := NewChunker(t1, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) assert.IsType(t, &chunkerComposite{}, chunker) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) chunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.NotContains(t, "`pk` >= ", chunk.String()) // first chunk is special upperBound := chunk.UpperBound.Value[0].String() chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) previousUpperBound := upperBound upperBound = chunk.UpperBound.Value[0].String() require.NotEqual(t, previousUpperBound, upperBound) assert.Equal(t, fmt.Sprintf("`pk` >= %s AND `pk` < %s", previousUpperBound, upperBound), chunk.String()) chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) previousUpperBound = upperBound upperBound = chunk.UpperBound.Value[0].String() require.NotEqual(t, previousUpperBound, upperBound) @@ -176,7 +176,7 @@ func TestCompositeChunkerBinary(t *testing.T) { // Test it advances again chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) previousUpperBound = upperBound upperBound = chunk.UpperBound.Value[0].String() require.NotEqual(t, previousUpperBound, upperBound) @@ -215,7 +215,7 @@ func TestCompositeChunkerInt(t *testing.T) { testutils.RunSQL(t, "ALTER TABLE compositeint_t1 CHANGE COLUMN pk pk int NOT NULL") //nolint: dupword db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -223,31 +223,31 @@ func TestCompositeChunkerInt(t *testing.T) { }() t1 := NewTableInfo(db, "test", "compositeint_t1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) // Assert 
that the types are correct. assert.Equal(t, []string{"int"}, t1.keyColumnsMySQLTp) assert.Equal(t, signedType, t1.keyDatums[0]) chunker, err := NewChunker(t1, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) assert.IsType(t, &chunkerComposite{}, chunker) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) // This might get messy if different versions skip // auto_inc values differently. chunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` < 1008", chunk.String()) // first chunk is special chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` >= 1008 AND `pk` < 2032", chunk.String()) chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` >= 2032 AND `pk` < 3033", chunk.String()) totalChunks := 3 // 3 so far @@ -278,7 +278,7 @@ func TestCompositeLowWatermark(t *testing.T) { testutils.RunSQL(t, `INSERT INTO compositewatermark_t1 (pk, a, b) SELECT NULL, 1, 1 FROM compositewatermark_t1 a JOIN compositewatermark_t1 b JOIN compositewatermark_t1 c LIMIT 10000`) testutils.RunSQL(t, `INSERT INTO compositewatermark_t1 (pk, a, b) SELECT NULL, 1, 1 FROM compositewatermark_t1 a JOIN compositewatermark_t1 b JOIN compositewatermark_t1 c LIMIT 10000`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -286,7 +286,7 @@ func TestCompositeLowWatermark(t *testing.T) { }() t1 := NewTableInfo(db, "test", "compositewatermark_t1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) chunker := &chunkerComposite{ Ti: t1, @@ -296,7 +296,7 @@ func TestCompositeLowWatermark(t *testing.T) { } _, err = chunker.Next() assert.Error(t, err) // not open yet - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) assert.Error(t, 
chunker.Open()) // double open should fail _, err = chunker.GetLowWatermark() @@ -304,7 +304,7 @@ func TestCompositeLowWatermark(t *testing.T) { assert.Equal(t, StartingChunkSize, int(chunker.chunkSize)) chunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` < 1008", chunk.String()) // first chunk _, err = chunker.GetLowWatermark() assert.Error(t, err) // no feedback yet. @@ -315,75 +315,75 @@ func TestCompositeLowWatermark(t *testing.T) { assert.Error(t, err) // there has been feedback, but watermark is not ready after first chunk. chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` >= 1008 AND `pk` < 2032", chunk.String()) chunker.Feedback(chunk, time.Second, 1) assert.Equal(t, 100, int(chunker.chunkSize)) // usually requires 10 feedbacks, but changed because >5x target watermark, err := chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) // The watermark can be divided into the chunkJSON and the rows. 
var compositeWM compositeWatermark - assert.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) + require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1008\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2032\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` >= 2032 AND `pk` < 2133", chunk.String()) chunker.Feedback(chunk, time.Second, 1) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) - assert.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) + require.NoError(t, err) + require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":100,\"LowerBound\":{\"Value\": [\"2032\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2133\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunkAsync1, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` >= 2133 AND `pk` < 2144", chunkAsync1.String()) chunkAsync2, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` >= 2144 AND `pk` < 2155", chunkAsync2.String()) chunkAsync3, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` >= 2155 AND `pk` < 2166", chunkAsync3.String()) chunker.Feedback(chunkAsync2, time.Second, 1) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) - assert.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) + require.NoError(t, err) + require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":100,\"LowerBound\":{\"Value\": [\"2032\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2133\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunker.Feedback(chunkAsync3, time.Second, 1) watermark, err = 
chunker.GetLowWatermark() - assert.NoError(t, err) - assert.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) + require.NoError(t, err) + require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":100,\"LowerBound\":{\"Value\": [\"2032\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2133\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunker.Feedback(chunkAsync1, time.Second, 1) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) - assert.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) + require.NoError(t, err) + require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":10,\"LowerBound\":{\"Value\": [\"2155\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2166\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`pk` >= 2166 AND `pk` < 2177", chunk.String()) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) - assert.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) + require.NoError(t, err) + require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":10,\"LowerBound\":{\"Value\": [\"2155\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2166\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunker.Feedback(chunk, time.Second, 1) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) - assert.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) + require.NoError(t, err) + require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":10,\"LowerBound\":{\"Value\": [\"2166\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2177\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) // Give enough feedback that the chunk size recalculation runs. 
assert.Equal(t, 10, int(chunker.chunkSize)) for range 50 { chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) if chunk.ChunkSize != 10 { break // feedback has worked } @@ -410,7 +410,7 @@ func TestCompositeSmallTable(t *testing.T) { testutils.RunSQL(t, `INSERT INTO compositesmall_t1 (pk, a, b) SELECT UUID(), 1, 1 FROM compositesmall_t1 a JOIN compositesmall_t1 b JOIN compositesmall_t1 c LIMIT 10`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -418,18 +418,18 @@ func TestCompositeSmallTable(t *testing.T) { }() t1 := NewTableInfo(db, "test", "compositesmall_t1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) chunker, err := NewChunker(t1, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) assert.IsType(t, &chunkerComposite{}, chunker) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) chunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "1=1", chunk.String()) // small chunk - assert.NoError(t, chunker.Close()) + require.NoError(t, chunker.Close()) } func TestSetKey(t *testing.T) { @@ -454,7 +454,7 @@ func TestSetKey(t *testing.T) { testutils.RunSQL(t, `INSERT INTO setkey_t1 SELECT NULL, 1, 1, 'PENDING', NOW(), NOW() FROM setkey_t1 a JOIN setkey_t1 b JOIN setkey_t1 c LIMIT 10000`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -463,19 +463,19 @@ func TestSetKey(t *testing.T) { // Test SetKey with PrimaryKey t1 := NewTableInfo(db, "test", "setkey_t1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) chunkerPK := &chunkerComposite{ Ti: t1, ChunkerTarget: 100 * time.Millisecond, logger: 
slog.Default(), } err = chunkerPK.SetKey("PRIMARY", "id < 1008") - assert.NoError(t, err) - assert.NoError(t, chunkerPK.Open()) + require.NoError(t, err) + require.NoError(t, chunkerPK.Open()) _, err = chunkerPK.Next() - assert.NoError(t, err) - assert.NoError(t, chunkerPK.Close()) + require.NoError(t, err) + require.NoError(t, chunkerPK.Close()) chunker := &chunkerComposite{ Ti: t1, @@ -483,15 +483,15 @@ func TestSetKey(t *testing.T) { logger: slog.Default(), } err = chunker.SetKey("s", "status = 'ARCHIVED' AND updated_at < NOW() - INTERVAL 1 DAY") - assert.NoError(t, err) - assert.NoError(t, chunker.Open()) + require.NoError(t, err) + require.NoError(t, chunker.Open()) chunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) // Because there are zero rows with status archived or updated_at that old, // it returns 1 chunk with 1=1 and the original condition. assert.Equal(t, "1=1 AND (status = 'ARCHIVED' AND updated_at < NOW() - INTERVAL 1 DAY)", chunk.String()) - assert.NoError(t, chunker.Close()) + require.NoError(t, chunker.Close()) // If I reset again with a different condition it should range as chunks. chunker = &chunkerComposite{ @@ -500,30 +500,30 @@ func TestSetKey(t *testing.T) { logger: slog.Default(), } err = chunker.SetKey("s", "status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY") - assert.NoError(t, err) - assert.NoError(t, chunker.Open()) + require.NoError(t, err) + require.NoError(t, chunker.Open()) chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "((`status` < \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` < 1008)) AND (status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY)", chunk.String()) // Check a chunk with both a lowerbound and upper bound. 
chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "((`status` > \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` >= 1008)) AND ((`status` < \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` < 2032)) AND (status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY)", chunk.String()) // repeat ~10 more times without calling Feedback() for range 8 { _, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) } chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "((`status` > \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` >= 10040)) AND (status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY)", chunk.String()) _, err = chunker.Next() assert.ErrorIs(t, err, ErrTableIsRead) - assert.NoError(t, chunker.Close()) + require.NoError(t, chunker.Close()) // Test other index types. for _, index := range []string{"u", "su", "ui"} { @@ -533,10 +533,10 @@ func TestSetKey(t *testing.T) { logger: slog.Default(), } err = chunker.SetKey(index, "updated_at < NOW() - INTERVAL 1 DAY") - assert.NoError(t, err) - assert.NoError(t, chunker.Open()) + require.NoError(t, err) + require.NoError(t, chunker.Open()) chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "1=1 AND (updated_at < NOW() - INTERVAL 1 DAY)", chunk.String()) // check the key parts are correct. 
@@ -548,7 +548,7 @@ func TestSetKey(t *testing.T) { case "ui": assert.Equal(t, []string{"updated_at", "id"}, chunker.chunkKeys) } - assert.NoError(t, chunker.Close()) + require.NoError(t, chunker.Close()) } } @@ -590,7 +590,7 @@ func TestSetKeyCompositeKeyMerge(t *testing.T) { INDEX dnc (dob,name,city) )`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -598,14 +598,14 @@ func TestSetKeyCompositeKeyMerge(t *testing.T) { }() t1 := NewTableInfo(db, "test", "setkeycomposite_t1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) chunker := &chunkerComposite{ Ti: t1, ChunkerTarget: 100 * time.Millisecond, logger: slog.Default(), } err = chunker.SetKey("dnc", "") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, []string{"dob", "name", "city", "ssn"}, chunker.chunkKeys) } @@ -625,7 +625,7 @@ func TestCompositeChunkerReset(t *testing.T) { testutils.RunSQL(t, `INSERT INTO compositereset_t1 (pk, a, b) SELECT NULL, 1, 1 FROM compositereset_t1 a JOIN compositereset_t1 b JOIN compositereset_t1 c LIMIT 5000`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -634,7 +634,7 @@ func TestCompositeChunkerReset(t *testing.T) { // Create table info and chunker t1 := NewTableInfo(db, "test", "compositereset_t1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) chunker := &chunkerComposite{ Ti: t1, @@ -649,7 +649,7 @@ func TestCompositeChunkerReset(t *testing.T) { assert.ErrorIs(t, err, ErrChunkerNotOpen) // Open the chunker - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) // Capture initial state after opening initialChunkPtrs := len(chunker.chunkPtrs) // Should be 0 (empty slice) @@ -659,15 
+659,15 @@ func TestCompositeChunkerReset(t *testing.T) { // Process some chunks to change the state chunk1, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Contains(t, chunk1.String(), "`pk` <") // first chunk chunk2, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Contains(t, chunk2.String(), "`pk` >=") // second chunk has bounds chunk3, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Contains(t, chunk3.String(), "`pk` >=") // third chunk has bounds // Give feedback to advance watermark and change state @@ -683,12 +683,12 @@ func TestCompositeChunkerReset(t *testing.T) { // Verify watermark exists watermark, err := chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) assert.NotEmpty(t, watermark) // Now reset the chunker err = chunker.Reset() - assert.NoError(t, err) + require.NoError(t, err) // Verify state is reset to initial values assert.Len(t, chunker.chunkPtrs, initialChunkPtrs, "chunkPtrs should be reset to initial value (empty slice)") @@ -712,15 +712,15 @@ func TestCompositeChunkerReset(t *testing.T) { // Verify that after reset, the chunker produces the same sequence as a fresh chunker resetChunk1, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, chunk1.String(), resetChunk1.String(), "First chunk after reset should match original first chunk") resetChunk2, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, chunk2.String(), resetChunk2.String(), "Second chunk after reset should match original second chunk") resetChunk3, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, chunk3.String(), resetChunk3.String(), "Third chunk after reset should match original third chunk") // Test that reset works even with more complex state changes @@ -728,19 +728,19 @@ func TestCompositeChunkerReset(t *testing.T) { // The chunk size should 
change due to panic factor _, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) // The chunk size might be reduced due to the slow feedback // Reset again err = chunker.Reset() - assert.NoError(t, err) + require.NoError(t, err) // Verify chunk size is back to initial value assert.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value even after dynamic changes") // Verify we can still get the same first chunk finalResetChunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, chunk1.String(), finalResetChunk.String(), "First chunk after second reset should still match original") // Test with custom key and where condition @@ -753,24 +753,24 @@ func TestCompositeChunkerReset(t *testing.T) { // Set a custom key and where condition err = chunker2.SetKey("PRIMARY", "a = 1") - assert.NoError(t, err) - assert.NoError(t, chunker2.Open()) + require.NoError(t, err) + require.NoError(t, chunker2.Open()) // Get a chunk with the custom condition customChunk, err := chunker2.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Contains(t, customChunk.String(), "a = 1") // Should contain the where condition // Reset and verify the custom condition is preserved err = chunker2.Reset() - assert.NoError(t, err) + require.NoError(t, err) resetCustomChunk, err := chunker2.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, customChunk.String(), resetCustomChunk.String(), "Custom chunk should match after reset") - assert.NoError(t, chunker2.Close()) - assert.NoError(t, chunker.Close()) + require.NoError(t, chunker2.Close()) + require.NoError(t, chunker.Close()) } // TestCompositeChunkerWatermarkOptimizations tests KeyAboveHighWatermark and KeyBelowLowWatermark @@ -808,7 +808,7 @@ func TestCompositeChunkerWatermarkOptimizations(t *testing.T) { SELECT 3, n, 1 FROM seq`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, 
err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -827,10 +827,10 @@ func TestCompositeChunkerWatermarkOptimizations(t *testing.T) { require.Equal(t, 1000, countA1, "Expected 1000 rows for a=1") tbl := NewTableInfo(db, "test", "compositewatermarkopt_t1") - assert.NoError(t, tbl.SetInfo(t.Context())) + require.NoError(t, tbl.SetInfo(t.Context())) chunker, err := NewChunker(tbl, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) comp := chunker.(*chunkerComposite) // Before opening, everything is above high watermark @@ -838,7 +838,7 @@ func TestCompositeChunkerWatermarkOptimizations(t *testing.T) { assert.True(t, comp.KeyAboveHighWatermark(100)) assert.False(t, comp.KeyBelowLowWatermark(1)) // watermark not ready - assert.NoError(t, comp.Open()) + require.NoError(t, comp.Open()) // After opening but before first chunk, key=1 should still be above assert.True(t, comp.KeyAboveHighWatermark(1)) @@ -926,7 +926,7 @@ func TestCompositeChunkerWatermarkOptimizations(t *testing.T) { assert.True(t, comp.KeyBelowLowWatermark(1)) assert.True(t, comp.KeyBelowLowWatermark(100)) - assert.NoError(t, comp.Close()) + require.NoError(t, comp.Close()) } // TestCompositeChunkerWatermarkNonNumeric tests that watermark optimizations @@ -959,7 +959,7 @@ func TestCompositeChunkerWatermarkNonNumeric(t *testing.T) { SELECT CONCAT('key', LPAD(n, 5, '0')), n FROM seq`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -974,13 +974,13 @@ func TestCompositeChunkerWatermarkNonNumeric(t *testing.T) { t.Logf("VARCHAR test: %d rows inserted", count) tbl := NewTableInfo(db, "test", "compositewatermarknn_t1") - assert.NoError(t, tbl.SetInfo(t.Context())) + require.NoError(t, tbl.SetInfo(t.Context())) chunker, err := NewChunker(tbl, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) comp 
:= chunker.(*chunkerComposite) - assert.NoError(t, comp.Open()) + require.NoError(t, comp.Open()) // Get first chunk chunk1, err := comp.Next() @@ -1005,7 +1005,7 @@ func TestCompositeChunkerWatermarkNonNumeric(t *testing.T) { // Verify the watermark value is what we expect assert.Equal(t, upperVal, watermarkUpper) - assert.NoError(t, comp.Close()) + require.NoError(t, comp.Close()) } // TestCompositeChunkerWatermarkDateTime tests that watermark optimizations @@ -1038,7 +1038,7 @@ func TestCompositeChunkerWatermarkDateTime(t *testing.T) { SELECT DATE_ADD('2024-01-01 00:00:00', INTERVAL n HOUR), n FROM seq`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -1053,13 +1053,13 @@ func TestCompositeChunkerWatermarkDateTime(t *testing.T) { t.Logf("DATETIME test: %d rows inserted", count) tbl := NewTableInfo(db, "test", "compositewatermarkdt_t1") - assert.NoError(t, tbl.SetInfo(t.Context())) + require.NoError(t, tbl.SetInfo(t.Context())) chunker, err := NewChunker(tbl, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) comp := chunker.(*chunkerComposite) - assert.NoError(t, comp.Open()) + require.NoError(t, comp.Open()) // Get first chunk chunk1, err := comp.Next() @@ -1084,7 +1084,7 @@ func TestCompositeChunkerWatermarkDateTime(t *testing.T) { // Verify the watermark value is what we expect assert.Equal(t, upperVal, watermarkUpper) - assert.NoError(t, comp.Close()) + require.NoError(t, comp.Close()) } // TestCompositeChunkerCollationDifference demonstrates how Go's lexicographic comparison @@ -1133,7 +1133,7 @@ func TestCompositeChunkerCollationDifference(t *testing.T) { testutils.RunSQL(t, "INSERT INTO compositecollation_t1 VALUES ('test', 9003)") db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close 
db: %v", err) @@ -1149,7 +1149,7 @@ func TestCompositeChunkerCollationDifference(t *testing.T) { // Verify MySQL's collation order vs Go's lexicographic order var mysqlOrder []string rows, err := db.QueryContext(t.Context(), "SELECT DISTINCT name FROM compositecollation_t1 ORDER BY name LIMIT 20") - assert.NoError(t, err) + require.NoError(t, err) for rows.Next() { var name string assert.NoError(t, rows.Scan(&name)) @@ -1163,13 +1163,13 @@ func TestCompositeChunkerCollationDifference(t *testing.T) { // (uppercase letters have lower byte values than lowercase in ASCII) tbl := NewTableInfo(db, "test", "compositecollation_t1") - assert.NoError(t, tbl.SetInfo(t.Context())) + require.NoError(t, tbl.SetInfo(t.Context())) chunker, err := NewChunker(tbl, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) comp := chunker.(*chunkerComposite) - assert.NoError(t, comp.Open()) + require.NoError(t, comp.Open()) // Get first chunk to establish the watermark chunk1, err := comp.Next() @@ -1232,7 +1232,7 @@ func TestCompositeChunkerCollationDifference(t *testing.T) { t.Logf(" - Safety: Checksum phase uses full table scans without watermarks") } - assert.NoError(t, comp.Close()) + require.NoError(t, comp.Close()) } // TestCompositeChunkerWatermarkWithOutOfOrderCompletion tests that watermark @@ -1274,7 +1274,7 @@ func TestCompositeChunkerWatermarkWithOutOfOrderCompletion(t *testing.T) { SELECT n FROM seq`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -1282,13 +1282,13 @@ func TestCompositeChunkerWatermarkWithOutOfOrderCompletion(t *testing.T) { }() tbl := NewTableInfo(db, "test", "compositewatermarkooo_t1") - assert.NoError(t, tbl.SetInfo(t.Context())) + require.NoError(t, tbl.SetInfo(t.Context())) chunker, err := NewChunker(tbl, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) comp := 
chunker.(*chunkerComposite) - assert.NoError(t, comp.Open()) + require.NoError(t, comp.Open()) // Get three chunks - need this many to test out-of-order completion chunk1, err := comp.Next() @@ -1345,7 +1345,7 @@ func TestCompositeChunkerWatermarkWithOutOfOrderCompletion(t *testing.T) { // Now chunk3 range should be below watermark assert.True(t, comp.KeyBelowLowWatermark(chunk3Lower)) - assert.NoError(t, comp.Close()) + require.NoError(t, comp.Close()) } // TestCompositeChunkerCheckpointHighPtr verifies that after OpenAtWatermark, @@ -1385,7 +1385,7 @@ func TestCompositeChunkerCheckpointHighPtr(t *testing.T) { ) SELECT n, 1 FROM seq`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -1393,13 +1393,13 @@ func TestCompositeChunkerCheckpointHighPtr(t *testing.T) { }() srcTable := NewTableInfo(db, "test", "composite_ckpt_src") - assert.NoError(t, srcTable.SetInfo(t.Context())) + require.NoError(t, srcTable.SetInfo(t.Context())) dstTable := NewTableInfo(db, "test", "composite_ckpt_dst") - assert.NoError(t, dstTable.SetInfo(t.Context())) + require.NoError(t, dstTable.SetInfo(t.Context())) chunker, err := NewChunker(srcTable, ChunkerConfig{NewTable: dstTable}) - assert.NoError(t, err) + require.NoError(t, err) comp := chunker.(*chunkerComposite) // Before OpenAtWatermark: everything should be "above" (no chunks dispatched) @@ -1407,7 +1407,7 @@ func TestCompositeChunkerCheckpointHighPtr(t *testing.T) { // Simulate a watermark at a=200 — the copier had reached this point before interruption. 
watermark := `{"ChunkJSON":"{\"Key\":[\"a\",\"b\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\":[\"100\",\"1\"],\"Inclusive\":true},\"UpperBound\":{\"Value\":[\"200\",\"1\"],\"Inclusive\":false}}","RowsCopied":200}` - assert.NoError(t, comp.OpenAtWatermark(watermark)) + require.NoError(t, comp.OpenAtWatermark(watermark)) // checkpointHighPtr should now be set to the max value of the destination // table's first PK column (a=500, since dstTable has rows up to a=499). @@ -1430,5 +1430,5 @@ func TestCompositeChunkerCheckpointHighPtr(t *testing.T) { // Key a=999 is well above — safe to discard. assert.True(t, comp.KeyAboveHighWatermark(999)) - assert.NoError(t, comp.Close()) + require.NoError(t, comp.Close()) } diff --git a/pkg/table/chunker_multi_test.go b/pkg/table/chunker_multi_test.go index fe000e65..420f2bc8 100644 --- a/pkg/table/chunker_multi_test.go +++ b/pkg/table/chunker_multi_test.go @@ -436,7 +436,7 @@ func TestMultiChunkerReset(t *testing.T) { assert.ErrorIs(t, err, ErrChunkerNotOpen) // Open the chunker - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) // Capture initial state after opening initialRowsCopied1, initialChunksCopied1, _ := mock1.Progress() diff --git a/pkg/table/chunker_optimistic_test.go b/pkg/table/chunker_optimistic_test.go index 258834ec..7f2a1847 100644 --- a/pkg/table/chunker_optimistic_test.go +++ b/pkg/table/chunker_optimistic_test.go @@ -8,6 +8,7 @@ import ( "github.com/block/spirit/pkg/testutils" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestOptimisticChunkerBasic(t *testing.T) { @@ -32,44 +33,44 @@ func TestOptimisticChunkerBasic(t *testing.T) { } chunker.SetDynamicChunking(false) - assert.NoError(t, t1.PrimaryKeyIsMemoryComparable()) + require.NoError(t, t1.PrimaryKeyIsMemoryComparable()) t1.keyColumnsMySQLTp[0] = "varchar" t1.keyDatums[0] = unknownType assert.Error(t, t1.PrimaryKeyIsMemoryComparable()) t1.keyColumnsMySQLTp[0] = "bigint" t1.keyDatums[0] = signedType 
- assert.NoError(t, t1.PrimaryKeyIsMemoryComparable()) + require.NoError(t, t1.PrimaryKeyIsMemoryComparable()) assert.Equal(t, "`t1`", t1.QuotedTableName) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) assert.Error(t, chunker.Open()) // can't open twice. assert.True(t, chunker.KeyAboveHighWatermark(1)) // we haven't started copying. _, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.True(t, chunker.KeyAboveHighWatermark(100)) // we are at 1 _, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.False(t, chunker.KeyAboveHighWatermark(100)) // we are at 1001 for range 999 { _, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) } // The last chunk. _, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) _, err = chunker.Next() assert.Error(t, err) // err: table is read. assert.Equal(t, "table is read", err.Error()) - assert.NoError(t, chunker.Close()) + require.NoError(t, chunker.Close()) } func TestLowWatermark(t *testing.T) { @@ -83,7 +84,7 @@ func TestLowWatermark(t *testing.T) { t1.KeyIsAutoInc = true t1.Columns = []string{"id", "name"} - assert.NoError(t, t1.PrimaryKeyIsMemoryComparable()) + require.NoError(t, t1.PrimaryKeyIsMemoryComparable()) chunker := &chunkerOptimistic{ Ti: t1, ChunkerTarget: ChunkerDefaultTarget, @@ -92,13 +93,13 @@ func TestLowWatermark(t *testing.T) { } chunker.SetDynamicChunking(false) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) _, err := chunker.GetLowWatermark() assert.Error(t, err) chunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` < 1", chunk.String()) // first chunk _, err = chunker.GetLowWatermark() assert.Error(t, err) // no feedback yet. @@ -107,11 +108,11 @@ func TestLowWatermark(t *testing.T) { assert.Error(t, err) // there has been feedback, but watermark is not ready after first chunk. 
chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` >= 1 AND `id` < 1001", chunk.String()) // first chunk chunker.Feedback(chunk, time.Second, 1) watermark, err := chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"1001\"],\"Inclusive\":false}}", watermark) // Check key w.r.t. watermark @@ -121,59 +122,59 @@ func TestLowWatermark(t *testing.T) { assert.False(t, chunker.KeyBelowLowWatermark(1001)) chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` >= 1001 AND `id` < 2001", chunk.String()) // first chunk // Check KeyBelowLowWatermark before and after feedback. assert.False(t, chunker.KeyBelowLowWatermark(1001)) chunker.Feedback(chunk, time.Second, 1) assert.True(t, chunker.KeyBelowLowWatermark(1001)) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2001\"],\"Inclusive\":false}}", watermark) chunkAsync1, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` >= 2001 AND `id` < 3001", chunkAsync1.String()) assert.False(t, chunker.KeyBelowLowWatermark(2001)) chunkAsync2, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` >= 3001 AND `id` < 4001", chunkAsync2.String()) assert.False(t, chunker.KeyBelowLowWatermark(2001)) chunkAsync3, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` >= 4001 AND `id` < 5001", chunkAsync3.String()) assert.False(t, chunker.KeyBelowLowWatermark(2001)) chunker.Feedback(chunkAsync2, time.Second, 1) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) + 
require.NoError(t, err) assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2001\"],\"Inclusive\":false}}", watermark) chunker.Feedback(chunkAsync3, time.Second, 1) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2001\"],\"Inclusive\":false}}", watermark) assert.False(t, chunker.KeyBelowLowWatermark(2001)) chunker.Feedback(chunkAsync1, time.Second, 1) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"4001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"5001\"],\"Inclusive\":false}}", watermark) assert.True(t, chunker.KeyBelowLowWatermark(2001)) assert.True(t, chunker.KeyBelowLowWatermark(5000)) chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` >= 5001 AND `id` < 6001", chunk.String()) // should bump immediately watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"4001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"5001\"],\"Inclusive\":false}}", watermark) chunker.Feedback(chunk, time.Second, 1) watermark, err = chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"5001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"6001\"],\"Inclusive\":false}}", watermark) // Test that we have applied all stored chunks and the map is empty, @@ -195,21 +196,21 @@ func TestOptimisticDynamicChunking(t *testing.T) { t1.columnsMySQLTps["id"] = "bigint" chunker, err := NewChunker(t1, 
ChunkerConfig{TargetChunkTime: 100 * time.Millisecond}) - assert.NoError(t, err) + require.NoError(t, err) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) chunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) chunker.Feedback(chunk, time.Second, 1) // way too long. chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(100), chunk.ChunkSize) // immediate change from before chunker.Feedback(chunk, time.Second, 1) // way too long again, it will reduce to 10 newChunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(10), newChunk.ChunkSize) // immediate change from before // Feedback is only taken if the chunk.ChunkSize matches the current size. // so lets give bad feedback and see no change. @@ -217,7 +218,7 @@ func TestOptimisticDynamicChunking(t *testing.T) { chunker.Feedback(newChunk, 10*time.Second, 1) // way too long. chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(10), chunk.ChunkSize) // no change chunker.Feedback(chunk, 50*time.Microsecond, 1) // must give feedback to advance watermark. @@ -225,26 +226,26 @@ func TestOptimisticDynamicChunking(t *testing.T) { for range 10 { // no change chunk, err = chunker.Next() chunker.Feedback(chunk, 50*time.Microsecond, 1) // very short. - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(10), chunk.ChunkSize) // no change. } // On the 11th piece of feedback *with this chunk size* // it finally changes. But no greater than 50% increase at a time. chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(15), chunk.ChunkSize) chunker.Feedback(chunk, 50*time.Microsecond, 1) // Advance the watermark a little bit. 
for range 20 { chunk, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) chunker.Feedback(chunk, time.Millisecond, 1) } // Fetch the watermark. watermark, err := chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":22,\"LowerBound\":{\"Value\": [\"584\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"606\"],\"Inclusive\":false}}", watermark) @@ -262,20 +263,20 @@ func TestOptimisticDynamicChunking(t *testing.T) { t2.columnsMySQLTps["id"] = "bigint" chunker2, err := NewChunker(t2, ChunkerConfig{NewTable: t2, TargetChunkTime: 100}) - assert.NoError(t, err) - assert.NoError(t, chunker2.OpenAtWatermark(watermark)) + require.NoError(t, err) + require.NoError(t, chunker2.OpenAtWatermark(watermark)) // The pointer goes to the lowerbound.value. // It could equally go to the upperbound.value but then // we would have to worry about off-by-1 errors. chunk, err = chunker2.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "584", chunk.LowerBound.Value[0].String()) } func TestOptimisticPrefetchChunking(t *testing.T) { db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -312,19 +313,19 @@ func TestOptimisticPrefetchChunking(t *testing.T) { t1 := newTableInfo4Test("test", "tprefetch") t1.db = db - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) chunker := &chunkerOptimistic{ Ti: t1, ChunkerTarget: time.Second, logger: slog.Default(), } chunker.SetDynamicChunking(true) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) assert.False(t, chunker.chunkPrefetchingEnabled) for !chunker.finalChunkSent { chunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) chunker.Feedback(chunk, 100*time.Millisecond, 1) // way too short. 
} assert.True(t, chunker.chunkPrefetchingEnabled) @@ -362,7 +363,7 @@ func TestOptimisticChunkerReset(t *testing.T) { assert.ErrorIs(t, err, ErrChunkerNotOpen) // Open the chunker - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) // Capture initial state after opening initialChunkPtr := chunker.chunkPtr @@ -372,15 +373,15 @@ func TestOptimisticChunkerReset(t *testing.T) { // Process some chunks to change the state chunk1, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` < 1", chunk1.String()) // first chunk chunk2, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` >= 1 AND `id` < 1001", chunk2.String()) chunk3, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "`id` >= 1001 AND `id` < 2001", chunk3.String()) // Give feedback to advance watermark and change state @@ -396,12 +397,12 @@ func TestOptimisticChunkerReset(t *testing.T) { // Verify watermark exists watermark, err := chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) assert.NotEmpty(t, watermark) // Now reset the chunker err = chunker.Reset() - assert.NoError(t, err) + require.NoError(t, err) // Verify state is reset to initial values assert.Equal(t, initialChunkPtr.String(), chunker.chunkPtr.String(), "chunkPtr should be reset to initial value") @@ -426,11 +427,11 @@ func TestOptimisticChunkerReset(t *testing.T) { // Verify that after reset, the chunker produces the same sequence as a fresh chunker resetChunk1, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, chunk1.String(), resetChunk1.String(), "First chunk after reset should match original first chunk") resetChunk2, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, chunk2.String(), resetChunk2.String(), "Second chunk after reset should match original second chunk") // Verify KeyAboveHighWatermark 
behavior is reset @@ -440,7 +441,7 @@ func TestOptimisticChunkerReset(t *testing.T) { assert.False(t, chunker.KeyAboveHighWatermark(900), "KeyAboveHighWatermark not reset correctly") resetChunk3, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, chunk3.String(), resetChunk3.String(), "Third chunk after reset should match original third chunk") // Test that reset works even with more complex state changes @@ -448,18 +449,18 @@ func TestOptimisticChunkerReset(t *testing.T) { // The chunk size should change due to panic factor _, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) // The chunk size might be reduced due to the slow feedback // Reset again err = chunker.Reset() - assert.NoError(t, err) + require.NoError(t, err) // Verify chunk size is back to initial value assert.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value even after dynamic changes") // Verify we can still get the same first chunk finalResetChunk, err := chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, chunk1.String(), finalResetChunk.String(), "First chunk after second reset should still match original") } diff --git a/pkg/table/chunker_test.go b/pkg/table/chunker_test.go index d3e231fa..4a66d59e 100644 --- a/pkg/table/chunker_test.go +++ b/pkg/table/chunker_test.go @@ -6,6 +6,7 @@ import ( "github.com/block/spirit/pkg/testutils" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCompositeChunker(t *testing.T) { @@ -18,7 +19,7 @@ func TestCompositeChunker(t *testing.T) { testutils.RunSQL(t, table) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -26,10 +27,10 @@ func TestCompositeChunker(t *testing.T) { }() t1 := NewTableInfo(db, "test", "composite") - assert.NoError(t, 
t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) chunker, err := NewChunker(t1, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) assert.IsType(t, &chunkerComposite{}, chunker) } @@ -42,7 +43,7 @@ func TestOptimisticChunker(t *testing.T) { testutils.RunSQL(t, table) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -50,10 +51,10 @@ func TestOptimisticChunker(t *testing.T) { }() t1 := NewTableInfo(db, "test", "optimistic") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) chunker, err := NewChunker(t1, ChunkerConfig{}) - assert.NoError(t, err) + require.NoError(t, err) assert.IsType(t, &chunkerOptimistic{}, chunker) } @@ -68,7 +69,7 @@ func TestNewCompositeChunkerWithKeyAndWhere(t *testing.T) { testutils.RunSQL(t, table) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -76,7 +77,7 @@ func TestNewCompositeChunkerWithKeyAndWhere(t *testing.T) { }() t1 := NewTableInfo(db, "test", "composite") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) // When Key and Where are specified, NewChunker should always return a // composite chunker even though this table has a single-column auto-inc PK. 
@@ -84,7 +85,7 @@ func TestNewCompositeChunkerWithKeyAndWhere(t *testing.T) { Key: "age_idx", Where: "age > 50", }) - assert.NoError(t, err) + require.NoError(t, err) assert.IsType(t, &chunkerComposite{}, chunker) assert.Equal(t, "age_idx", chunker.(*chunkerComposite).keyName) assert.Equal(t, "age > 50", chunker.(*chunkerComposite).where) diff --git a/pkg/table/datum_test.go b/pkg/table/datum_test.go index faec0e70..e57270a4 100644 --- a/pkg/table/datum_test.go +++ b/pkg/table/datum_test.go @@ -8,13 +8,14 @@ import ( "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestDatum(t *testing.T) { signed, err := NewDatum(1, signedType) - assert.NoError(t, err) + require.NoError(t, err) unsigned, err := NewDatum(uint(1), unsignedType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "1", signed.String()) assert.Equal(t, "1", unsigned.String()) @@ -42,33 +43,33 @@ func TestDatum(t *testing.T) { // We initialize the values to max-10 of the range, but then add 100 to each. // The add operation truncates: so both should equal the maxValue exactly. overflowSigned, err := NewDatum(uint64(math.MaxInt64)-10, signedType) // wrong type, converts. - assert.NoError(t, err) + require.NoError(t, err) overflowUnsigned, err := NewDatum(uint64(math.MaxUint64)-10, unsignedType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, strconv.Itoa(math.MaxInt64), overflowSigned.Add(100).String()) assert.Equal(t, "18446744073709551615", overflowUnsigned.Add(100).String()) // Test unsigned with signed input unsigned, err = NewDatum(int(1), unsignedType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "1", unsigned.String()) // Test binary type. 
binary, err := NewDatum("0", binaryType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, `"0"`, binary.String()) // Test string comparisons (VARCHAR/TEXT) str1, err := NewDatumFromValue("apple", "VARCHAR(255)") - assert.NoError(t, err) + require.NoError(t, err) str2, err := NewDatumFromValue("banana", "VARCHAR(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.True(t, str2.GreaterThan(str1)) // "banana" > "apple" assert.True(t, str2.GreaterThanOrEqual(str1)) // "banana" >= "apple" assert.True(t, str1.LessThan(str2)) // "apple" < "banana" assert.True(t, str1.LessThanOrEqual(str2)) // "apple" <= "banana" str3, err := NewDatumFromValue("apple", "VARCHAR(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.True(t, str1.GreaterThanOrEqual(str3)) // equal values assert.True(t, str1.LessThanOrEqual(str3)) // equal values assert.False(t, str1.GreaterThan(str3)) // equal values @@ -76,9 +77,9 @@ func TestDatum(t *testing.T) { // Test temporal comparisons (DATETIME) datetime1, err := NewDatumFromValue("2024-01-01 10:00:00", "DATETIME") - assert.NoError(t, err) + require.NoError(t, err) datetime2, err := NewDatumFromValue("2024-01-02 10:00:00", "DATETIME") - assert.NoError(t, err) + require.NoError(t, err) assert.True(t, datetime2.GreaterThan(datetime1)) assert.True(t, datetime1.LessThan(datetime2)) @@ -98,7 +99,7 @@ func TestDatumInt32ToUnsigned(t *testing.T) { // Positive int32 value positiveInt32 := int32(123456) d1, err := NewDatum(positiveInt32, unsignedType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(123456), d1.Val) assert.Equal(t, "123456", d1.String()) @@ -106,7 +107,7 @@ func TestDatumInt32ToUnsigned(t *testing.T) { // -840443956 as int32 = 3454523340 as uint32 negativeInt32 := int32(-840443956) d2, err := NewDatum(negativeInt32, unsignedType) - assert.NoError(t, err) + require.NoError(t, err) expectedUint32 := uint32(negativeInt32) // Reinterpret bits as unsigned assert.Equal(t, 
uint64(expectedUint32), d2.Val) assert.Equal(t, uint64(3454523340), d2.Val) @@ -115,14 +116,14 @@ func TestDatumInt32ToUnsigned(t *testing.T) { // Edge case: int32 max value maxInt32 := int32(math.MaxInt32) d3, err := NewDatum(maxInt32, unsignedType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(math.MaxInt32), d3.Val) assert.Equal(t, "2147483647", d3.String()) // Edge case: int32 min value (becomes max uint32 range) minInt32 := int32(math.MinInt32) d4, err := NewDatum(minInt32, unsignedType) - assert.NoError(t, err) + require.NoError(t, err) expectedUint32Min := uint32(minInt32) assert.Equal(t, uint64(expectedUint32Min), d4.Val) assert.Equal(t, uint64(2147483648), d4.Val) @@ -131,7 +132,7 @@ func TestDatumInt32ToUnsigned(t *testing.T) { // Test uint32 values pass through correctly positiveUint32 := uint32(3454523340) d5, err := NewDatum(positiveUint32, unsignedType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(3454523340), d5.Val) assert.Equal(t, "3454523340", d5.String()) } @@ -143,7 +144,7 @@ func TestDatumInt64ToUnsigned(t *testing.T) { // Positive int64 value positiveInt64 := int64(123456789012345) d1, err := NewDatum(positiveInt64, unsignedType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(123456789012345), d1.Val) assert.Equal(t, "123456789012345", d1.String()) @@ -151,14 +152,14 @@ func TestDatumInt64ToUnsigned(t *testing.T) { // -1 as int64 = max uint64 negativeInt64 := int64(-1) d2, err := NewDatum(negativeInt64, unsignedType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(negativeInt64), d2.Val) assert.Equal(t, uint64(math.MaxUint64), d2.Val) // Edge case: int64 max value maxInt64 := int64(math.MaxInt64) d3, err := NewDatum(maxInt64, unsignedType) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, uint64(math.MaxInt64), d3.Val) } @@ -206,120 +207,120 @@ func TestKeyBelowLowWatermarkWithNegativeInt32(t *testing.T) { func 
TestNewDatumFromValue(t *testing.T) { // Test NULL values d, err := NewDatumFromValue(nil, "INT") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "NULL", d.String()) d, err = NewDatumFromValue(nil, "VARCHAR(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "NULL", d.String()) d, err = NewDatumFromValue(nil, "JSON") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "NULL", d.String()) // Test integer types intBytes := []byte("123") d, err = NewDatumFromValue(intBytes, "INT") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "123", d.String()) d, err = NewDatumFromValue(intBytes, "BIGINT") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "123", d.String()) d, err = NewDatumFromValue(456, "INT") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "456", d.String()) d, err = NewDatumFromValue(intBytes, "INT(11)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "123", d.String()) // Test VARCHAR/TEXT types textBytes := []byte("hello world") d, err = NewDatumFromValue(textBytes, "VARCHAR(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"hello world\"", d.String()) d, err = NewDatumFromValue("hello world", "TEXT") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"hello world\"", d.String()) d, err = NewDatumFromValue(textBytes, "CHAR(50)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"hello world\"", d.String()) // Test with quotes that need escaping textWithQuotes := []byte("hello 'world'") d, err = NewDatumFromValue(textWithQuotes, "VARCHAR(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"hello \\'world\\'\"", d.String()) // Test DATETIME/TIMESTAMP types timeBytes := []byte("2023-01-01 12:00:00") d, err = NewDatumFromValue(timeBytes, "DATETIME") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"2023-01-01 12:00:00\"", 
d.String()) d, err = NewDatumFromValue(timeBytes, "TIMESTAMP") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"2023-01-01 12:00:00\"", d.String()) // Test float types floatBytes := []byte("123.45") d, err = NewDatumFromValue(floatBytes, "FLOAT") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"123.45\"", d.String()) d, err = NewDatumFromValue(floatBytes, "DOUBLE") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"123.45\"", d.String()) // Test VARBINARY/BLOB types - should use hex encoding binaryData := []byte{0x01, 0x02, 0x03} d, err = NewDatumFromValue(binaryData, "VARBINARY(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "0x010203", d.String()) // Test BLOB types blobData := []byte{0xFF, 0xFE, 0xFD} d, err = NewDatumFromValue(blobData, "BLOB") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "0xfffefd", d.String()) // Test empty binary values - must NOT serialize as "0x" because // MySQL parses that as an identifier, not a hex literal. 
d, err = NewDatumFromValue([]byte{0x00}, "MEDIUMBLOB") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "0x00", d.String()) d, err = NewDatumFromValue([]byte{}, "VARBINARY(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "0x00", d.String()) d, err = NewDatumFromValue("", "BLOB") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "0x00", d.String()) d, err = NewDatumFromValue(nil, "BLOB") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "NULL", d.String()) // Test JSON types - should be quoted like text jsonBytes := []byte(`[1, 2, 3]`) d, err = NewDatumFromValue(jsonBytes, "JSON") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"[1, 2, 3]\"", d.String()) d, err = NewDatumFromValue("test", "json") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"test\"", d.String()) // Test case insensitivity d, err = NewDatumFromValue(intBytes, "int") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "123", d.String()) d, err = NewDatumFromValue(intBytes, "Int") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "123", d.String()) d, err = NewDatumFromValue([]byte("hello"), "varchar(100)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"hello\"", d.String()) // Test unknown/default types unknownBytes := []byte("unknown data") d, err = NewDatumFromValue(unknownBytes, "UNKNOWN_TYPE") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"unknown data\"", d.String()) d, err = NewDatumFromValue("test", "CUSTOM_TYPE") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"test\"", d.String()) } @@ -328,7 +329,7 @@ func TestNewDatumFromValueBinaryString(t *testing.T) { // Test binary data that's not valid UTF-8 binaryData := []byte{0x00, 0x01, 0x02, 0xFF} d, err := NewDatumFromValue(binaryData, "VARBINARY(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, 
"0x000102ff", d.String()) // Test string that starts with 0x - for VARCHAR (unknownType), this is just a normal string. @@ -336,12 +337,12 @@ func TestNewDatumFromValueBinaryString(t *testing.T) { // so the JSON checkpoint round-trip is safe without encoding. jsonLikeString := "0x123" d, err = NewDatumFromValue(jsonLikeString, "VARCHAR(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"0x123\"", d.String()) // Test normal UTF-8 string normalString := "hello" d, err = NewDatumFromValue(normalString, "VARCHAR(255)") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "\"hello\"", d.String()) } diff --git a/pkg/table/tableinfo_test.go b/pkg/table/tableinfo_test.go index ac292e65..7484169c 100644 --- a/pkg/table/tableinfo_test.go +++ b/pkg/table/tableinfo_test.go @@ -9,6 +9,7 @@ import ( "github.com/block/spirit/pkg/testutils" _ "github.com/go-sql-driver/mysql" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "go.uber.org/goleak" ) @@ -25,8 +26,8 @@ func TestOpenOnBinaryType(t *testing.T) { t1.KeyIsAutoInc = true t1.Columns = []string{"id", "name"} chunker, err := NewChunker(t1, ChunkerConfig{}) - assert.NoError(t, err) - assert.NoError(t, chunker.Open()) + require.NoError(t, err) + require.NoError(t, chunker.Open()) } func TestOpenOnNoMinMax(t *testing.T) { @@ -38,8 +39,8 @@ func TestOpenOnNoMinMax(t *testing.T) { t1.KeyIsAutoInc = true t1.Columns = []string{"id", "name"} chunker, err := NewChunker(t1, ChunkerConfig{TargetChunkTime: 100}) - assert.NoError(t, err) - assert.NoError(t, chunker.Open()) + require.NoError(t, err) + require.NoError(t, chunker.Open()) } func TestCallingNextChunkWithoutOpen(t *testing.T) { @@ -51,14 +52,14 @@ func TestCallingNextChunkWithoutOpen(t *testing.T) { t1.KeyIsAutoInc = true t1.Columns = []string{"id", "name"} chunker, err := NewChunker(t1, ChunkerConfig{TargetChunkTime: 100}) - assert.NoError(t, err) + require.NoError(t, err) _, err = chunker.Next() assert.Error(t, 
err) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) _, err = chunker.Next() - assert.NoError(t, err) + require.NoError(t, err) } func newTableInfo4Test(schema, table string) *TableInfo { //nolint: unparam @@ -77,12 +78,12 @@ func TestDiscovery(t *testing.T) { testutils.RunSQL(t, `insert into discoveryt1 values (1, 'a'), (2, 'b'), (3, 'c')`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { _ = db.Close() }() t1 := NewTableInfo(db, "test", "discoveryt1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) assert.Equal(t, "discoveryt1", t1.TableName) assert.Equal(t, "test", t1.SchemaName) @@ -91,10 +92,10 @@ func TestDiscovery(t *testing.T) { // normalize for mysql 5.7 and 8.0 assert.Equal(t, "int", removeWidth(t1.columnsMySQLTps["id"])) castID, err := t1.wrapCastType("id") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "CAST(`id` AS signed)", castID) castName, err := t1.wrapCastType("name") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, "CAST(`name` AS char CHARACTER SET utf8mb4)", castName) assert.Equal(t, "1", t1.minValue.String()) @@ -117,7 +118,7 @@ func TestDiscoveryUInt(t *testing.T) { testutils.RunSQL(t, `insert into discoveryuintt1 values (1, 'a'), (2, 'b'), (3, 'c')`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -125,7 +126,7 @@ func TestDiscoveryUInt(t *testing.T) { }() t1 := NewTableInfo(db, "test", "discoveryuintt1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) assert.Equal(t, "discoveryuintt1", t1.TableName) assert.Equal(t, "test", t1.SchemaName) @@ -150,7 +151,7 @@ func TestDiscoveryNoKeyColumnsOrNoTable(t *testing.T) { testutils.RunSQL(t, `insert into discoverynokeyst1 values (1, 'a'), (2, 'b'), (3, 
'c')`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -188,7 +189,7 @@ func TestDiscoveryBalancesTable(t *testing.T) { testutils.RunSQL(t, table) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -196,7 +197,7 @@ func TestDiscoveryBalancesTable(t *testing.T) { }() t1 := NewTableInfo(db, "test", "balances") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) assert.True(t, t1.KeyIsAutoInc) assert.Equal(t, []string{"bigint"}, t1.keyColumnsMySQLTp) @@ -205,9 +206,9 @@ func TestDiscoveryBalancesTable(t *testing.T) { assert.Equal(t, "0", t1.maxValue.String()) chunker, err := NewChunker(t1, ChunkerConfig{TargetChunkTime: 100}) - assert.NoError(t, err) + require.NoError(t, err) - assert.NoError(t, chunker.Open()) + require.NoError(t, chunker.Open()) assert.Equal(t, "0", t1.minValue.String()) assert.Equal(t, "0", t1.maxValue.String()) } @@ -223,7 +224,7 @@ func TestDiscoveryCompositeNonComparable(t *testing.T) { testutils.RunSQL(t, `insert into compnoncomparable values (1, 'a'), (2, 'b'), (3, 'c')`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -231,7 +232,7 @@ func TestDiscoveryCompositeNonComparable(t *testing.T) { }() t1 := NewTableInfo(db, "test", "compnoncomparable") - assert.NoError(t, t1.SetInfo(t.Context())) // still discovers the primary key + require.NoError(t, t1.SetInfo(t.Context())) // still discovers the primary key assert.Error(t, t1.PrimaryKeyIsMemoryComparable()) // but its non comparable } @@ -246,7 +247,7 @@ func TestDiscoveryCompositeComparable(t *testing.T) { testutils.RunSQL(t, `insert into 
compcomparable values (1, 1), (2, 2), (3, 3)`) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -254,7 +255,7 @@ func TestDiscoveryCompositeComparable(t *testing.T) { }() t1 := NewTableInfo(db, "test", "compcomparable") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) assert.True(t, t1.KeyIsAutoInc) assert.Equal(t, []string{"int unsigned", "int"}, t1.keyColumnsMySQLTp) @@ -263,7 +264,7 @@ func TestDiscoveryCompositeComparable(t *testing.T) { func TestStatisticsUpdate(t *testing.T) { db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -298,12 +299,12 @@ func TestStatisticsUpdate(t *testing.T) { go t1.AutoUpdateStatistics(t.Context(), time.Millisecond*10, slog.Default()) time.Sleep(time.Millisecond * 100) - assert.NoError(t, t1.Close()) + require.NoError(t, t1.Close()) } func TestKeyColumnsValuesExtraction(t *testing.T) { db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -321,19 +322,19 @@ func TestKeyColumnsValuesExtraction(t *testing.T) { testutils.RunSQL(t, `insert into colvaluest1 values (1, 'a', 15), (2, 'b', 20), (3, 'c', 25)`) t1 := NewTableInfo(db, "test", "colvaluest1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) var id, age int var name string err = db.QueryRowContext(t.Context(), "SELECT * FROM `test`.`colvaluest1` ORDER BY id DESC LIMIT 1").Scan(&id, &name, &age) - assert.NoError(t, err) + require.NoError(t, err) row := []any{id, name, age} pkVals, err := t1.PrimaryKeyValues(row) assert.Equal(t, id, pkVals[0]) assert.Equal(t, age, pkVals[1]) - assert.NoError(t, 
err) + require.NoError(t, err) } func TestDiscoveryGeneratedCols(t *testing.T) { @@ -351,7 +352,7 @@ func TestDiscoveryGeneratedCols(t *testing.T) { testutils.RunSQL(t, table) db, err := sql.Open("mysql", testutils.DSN()) - assert.NoError(t, err) + require.NoError(t, err) defer func() { if err := db.Close(); err != nil { t.Logf("failed to close db: %v", err) @@ -359,7 +360,7 @@ func TestDiscoveryGeneratedCols(t *testing.T) { }() t1 := NewTableInfo(db, "test", "generatedcolst1") - assert.NoError(t, t1.SetInfo(t.Context())) + require.NoError(t, t1.SetInfo(t.Context())) // Can't check estimated rows (depends on MySQL version etc) assert.Equal(t, []string{"id", "name", "b", "c1", "c2", "c3", "d"}, t1.Columns) @@ -379,7 +380,7 @@ func TestDiscoveryGeneratedCols(t *testing.T) { );` testutils.RunSQL(t, table) t2 := NewTableInfo(db, "test", "generatedcolst2") - assert.NoError(t, t2.SetInfo(t.Context())) + require.NoError(t, t2.SetInfo(t.Context())) // Can't check estimated rows (depends on MySQL version etc) assert.Equal(t, []string{"id", "pa", "p1", "p2", "s1", "s2", "s3", "s4"}, t2.Columns) @@ -393,19 +394,19 @@ func TestGetColumnOrdinal(t *testing.T) { // Test finding existing columns ordinal, err := t1.GetColumnOrdinal("id") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, 0, ordinal) ordinal, err = t1.GetColumnOrdinal("name") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, 1, ordinal) ordinal, err = t1.GetColumnOrdinal("age") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, 2, ordinal) ordinal, err = t1.GetColumnOrdinal("email") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, 3, ordinal) // Test finding non-existent column @@ -426,19 +427,19 @@ func TestGetNonGeneratedColumnOrdinal(t *testing.T) { // Test finding existing non-generated columns // Note: ordinals are relative to NonGeneratedColumns, not Columns ordinal, err := t1.GetNonGeneratedColumnOrdinal("id") - assert.NoError(t, err) + 
require.NoError(t, err) assert.Equal(t, 0, ordinal, "id should be at position 0 in NonGeneratedColumns") ordinal, err = t1.GetNonGeneratedColumnOrdinal("name") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, 1, ordinal, "name should be at position 1 in NonGeneratedColumns") ordinal, err = t1.GetNonGeneratedColumnOrdinal("age") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, 2, ordinal, "age should be at position 2 in NonGeneratedColumns (skipping name_reversed)") ordinal, err = t1.GetNonGeneratedColumnOrdinal("email") - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, 3, ordinal, "email should be at position 3 in NonGeneratedColumns") // Test finding a generated column (should fail) From 11b388916d0ae178b7f3e654ce28da769750a043 Mon Sep 17 00:00:00 2001 From: Morgan Tocker Date: Fri, 1 May 2026 21:09:44 -0600 Subject: [PATCH 2/3] =?UTF-8?q?test(#781):=20tighten=20assert=E2=86=92requ?= =?UTF-8?q?ire=20=E2=80=94=20only=20goroutines=20and=20table-driven=20loop?= =?UTF-8?q?s=20keep=20assert?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Per #779 follow-up: assert.* is now allowed ONLY inside goroutine bodies (where require.FailNow is unsafe) and inside table-driven test loops (where seeing all case failures together helps). Everything else uses require.*. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- pkg/table/chunk_test.go | 39 ++-- pkg/table/chunker_composite_test.go | 264 +++++++++++++-------------- pkg/table/chunker_multi_test.go | 243 ++++++++++++------------ pkg/table/chunker_optimistic_test.go | 149 ++++++++------- pkg/table/chunker_test.go | 11 +- pkg/table/column_mapping_test.go | 56 +++--- pkg/table/datum_test.go | 167 +++++++++-------- pkg/table/sharding_test.go | 35 ++-- pkg/table/table_schema_test.go | 35 ++-- pkg/table/tableinfo_test.go | 121 ++++++------ pkg/table/utils_test.go | 21 ++- 11 files changed, 567 insertions(+), 574 deletions(-) diff --git a/pkg/table/chunk_test.go b/pkg/table/chunk_test.go index 6551a49c..1c704630 100644 --- a/pkg/table/chunk_test.go +++ b/pkg/table/chunk_test.go @@ -3,7 +3,6 @@ package table import ( "testing" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -19,7 +18,7 @@ func TestChunk2String(t *testing.T) { Inclusive: false, }, } - assert.Equal(t, "`id` >= 100 AND `id` < 200", chunk.String()) + require.Equal(t, "`id` >= 100 AND `id` < 200", chunk.String()) chunk = &Chunk{ Key: []string{"id"}, LowerBound: &Boundary{ @@ -27,7 +26,7 @@ func TestChunk2String(t *testing.T) { Inclusive: false, }, } - assert.Equal(t, "`id` > 100", chunk.String()) + require.Equal(t, "`id` > 100", chunk.String()) chunk = &Chunk{ Key: []string{"id"}, UpperBound: &Boundary{ @@ -35,13 +34,13 @@ func TestChunk2String(t *testing.T) { Inclusive: true, }, } - assert.Equal(t, "`id` <= 200", chunk.String()) + require.Equal(t, "`id` <= 200", chunk.String()) // Empty chunks are possible with the composite chunker chunk = &Chunk{ Key: []string{"id"}, } - assert.Equal(t, "1=1", chunk.String()) + require.Equal(t, "1=1", chunk.String()) } func TestBoundary_ValueString(t *testing.T) { @@ -49,20 +48,20 @@ func TestBoundary_ValueString(t *testing.T) { Value: []Datum{{Val: 100, Tp: signedType}, {Val: 200, Tp: signedType}}, Inclusive: false, } - assert.Equal(t, 
"\"100\",\"200\"", boundary1.valuesString()) + require.Equal(t, "\"100\",\"200\"", boundary1.valuesString()) boundary2 := &Boundary{ Value: []Datum{{Val: 100, Tp: signedType}, {Val: 200, Tp: signedType}}, Inclusive: true, } // Tests that Inclusive doesn't matter between Boundaries for valuesString - assert.Equal(t, boundary2.valuesString(), boundary1.valuesString()) + require.Equal(t, boundary2.valuesString(), boundary1.valuesString()) // Tests composite key boundary with mixed types boundary3 := &Boundary{ Value: []Datum{{Val: "PENDING", Tp: binaryType}, {Val: 2, Tp: signedType}}, } - assert.Equal(t, "\"PENDING\",\"2\"", boundary3.valuesString()) + require.Equal(t, "\"PENDING\",\"2\"", boundary3.valuesString()) } func TestCompositeChunks(t *testing.T) { @@ -77,7 +76,7 @@ func TestCompositeChunks(t *testing.T) { Inclusive: false, }, } - assert.Equal(t, "((`id1` > 100)\n OR (`id1` = 100 AND `id2` > 200)) AND ((`id1` < 100)\n OR (`id1` = 100 AND `id2` < 300))", chunk.String()) + require.Equal(t, "((`id1` > 100)\n OR (`id1` = 100 AND `id2` > 200)) AND ((`id1` < 100)\n OR (`id1` = 100 AND `id2` < 300))", chunk.String()) // 4 parts to the key - pretty unlikely. 
chunk = &Chunk{ Key: []string{"id1", "id2", "id3", "id4"}, @@ -90,7 +89,7 @@ func TestCompositeChunks(t *testing.T) { Inclusive: false, }, } - assert.Equal(t, "((`id1` > 100)\n OR (`id1` = 100 AND `id2` > 200)\n OR (`id1` = 100 AND `id2` = 200 AND `id3` > 200)\n OR (`id1` = 100 AND `id2` = 200 AND `id3` = 200 AND `id4` >= 200)) AND ((`id1` < 101)\n OR (`id1` = 101 AND `id2` < 12)\n OR (`id1` = 101 AND `id2` = 12 AND `id3` < 123)\n OR (`id1` = 101 AND `id2` = 12 AND `id3` = 123 AND `id4` < 1))", chunk.String()) + require.Equal(t, "((`id1` > 100)\n OR (`id1` = 100 AND `id2` > 200)\n OR (`id1` = 100 AND `id2` = 200 AND `id3` > 200)\n OR (`id1` = 100 AND `id2` = 200 AND `id3` = 200 AND `id4` >= 200)) AND ((`id1` < 101)\n OR (`id1` = 101 AND `id2` < 12)\n OR (`id1` = 101 AND `id2` = 12 AND `id3` < 123)\n OR (`id1` = 101 AND `id2` = 12 AND `id3` = 123 AND `id4` < 1))", chunk.String()) // A possible scenario when chunking on a non primary key is possible: chunk = &Chunk{ Key: []string{"status", "id"}, @@ -103,7 +102,7 @@ func TestCompositeChunks(t *testing.T) { Inclusive: false, }, } - assert.Equal(t, "((`status` > \"ARCHIVED\")\n OR (`status` = \"ARCHIVED\" AND `id` >= 1234)) AND ((`status` < \"ARCHIVED\")\n OR (`status` = \"ARCHIVED\" AND `id` < 5412))", chunk.String()) + require.Equal(t, "((`status` > \"ARCHIVED\")\n OR (`status` = \"ARCHIVED\" AND `id` >= 1234)) AND ((`status` < \"ARCHIVED\")\n OR (`status` = \"ARCHIVED\" AND `id` < 5412))", chunk.String()) } func TestComparesTo(t *testing.T) { @@ -115,11 +114,11 @@ func TestComparesTo(t *testing.T) { Value: []Datum{{Val: 200, Tp: signedType}}, Inclusive: true, } - assert.True(t, b1.comparesTo(b2)) + require.True(t, b1.comparesTo(b2)) b2.Inclusive = false // change operator - assert.True(t, b1.comparesTo(b2)) // still compares + require.True(t, b1.comparesTo(b2)) // still compares b2.Value = []Datum{{Val: 300, Tp: signedType}} - assert.False(t, b1.comparesTo(b2)) + require.False(t, b1.comparesTo(b2)) // Compound 
values. b1 = &Boundary{ @@ -130,9 +129,9 @@ func TestComparesTo(t *testing.T) { Value: []Datum{{Val: 200, Tp: signedType}, {Val: 300, Tp: signedType}}, Inclusive: true, } - assert.True(t, b1.comparesTo(b2)) + require.True(t, b1.comparesTo(b2)) b2.Value = []Datum{{Val: 200, Tp: signedType}, {Val: 400, Tp: signedType}} - assert.False(t, b1.comparesTo(b2)) + require.False(t, b1.comparesTo(b2)) } func TestWatermarkAboveClause(t *testing.T) { @@ -145,7 +144,7 @@ func TestWatermarkAboveClause(t *testing.T) { watermark := `{"Key":["id"],"ChunkSize":1000,"LowerBound":{"Value":["50"],"Inclusive":true},"UpperBound":{"Value":["100"],"Inclusive":false}}` clause, err := WatermarkAboveClause(ti, watermark) require.NoError(t, err) - assert.Equal(t, "`id` > 100", clause) + require.Equal(t, "`id` > 100", clause) // Composite key ti2 := NewTableInfo(nil, "test", "t2") @@ -155,10 +154,10 @@ func TestWatermarkAboveClause(t *testing.T) { watermark2 := `{"Key":["tenant_id","item_id"],"ChunkSize":1000,"LowerBound":{"Value":["1","50"],"Inclusive":true},"UpperBound":{"Value":["2","100"],"Inclusive":false}}` clause2, err := WatermarkAboveClause(ti2, watermark2) require.NoError(t, err) - assert.Contains(t, clause2, "`tenant_id`") - assert.Contains(t, clause2, "`item_id`") + require.Contains(t, clause2, "`tenant_id`") + require.Contains(t, clause2, "`item_id`") // Should be a row constructor comparison: ((tenant_id > 2) OR (tenant_id = 2 AND item_id > 100)) - assert.Equal(t, "((`tenant_id` > 2)\n OR (`tenant_id` = 2 AND `item_id` > 100))", clause2) + require.Equal(t, "((`tenant_id` > 2)\n OR (`tenant_id` = 2 AND `item_id` > 100))", clause2) // Invalid JSON _, err = WatermarkAboveClause(ti, "not-json") diff --git a/pkg/table/chunker_composite_test.go b/pkg/table/chunker_composite_test.go index 51a369e5..369419c2 100644 --- a/pkg/table/chunker_composite_test.go +++ b/pkg/table/chunker_composite_test.go @@ -41,19 +41,19 @@ func TestCompositeChunkerCompositeBinary(t *testing.T) { 
require.NoError(t, t1.SetInfo(t.Context())) // Assert that the types are correct. - assert.Equal(t, []string{"varbinary", "varbinary"}, t1.keyColumnsMySQLTp) - assert.Equal(t, binaryType, t1.keyDatums[0]) - assert.Equal(t, binaryType, t1.keyDatums[1]) + require.Equal(t, []string{"varbinary", "varbinary"}, t1.keyColumnsMySQLTp) + require.Equal(t, binaryType, t1.keyDatums[0]) + require.Equal(t, binaryType, t1.keyDatums[1]) chunker, err := NewChunker(t1, ChunkerConfig{}) require.NoError(t, err) - assert.IsType(t, &chunkerComposite{}, chunker) + require.IsType(t, &chunkerComposite{}, chunker) require.NoError(t, chunker.Open()) chunk, err := chunker.Next() require.NoError(t, err) - assert.NotContains(t, "`a` >= ", chunk.String()) // first chunk is special + require.NotContains(t, "`a` >= ", chunk.String()) // first chunk is special upperBound := chunk.UpperBound.Value chunk, err = chunker.Next() @@ -61,7 +61,7 @@ func TestCompositeChunkerCompositeBinary(t *testing.T) { previousUpperBound := upperBound upperBound = chunk.UpperBound.Value require.NotEqual(t, previousUpperBound, upperBound) - assert.Equal(t, fmt.Sprintf("((`a` > %s)\n OR (`a` = %s AND `b` >= %s)) AND ((`a` < %s)\n OR (`a` = %s AND `b` < %s))", + require.Equal(t, fmt.Sprintf("((`a` > %s)\n OR (`a` = %s AND `b` >= %s)) AND ((`a` < %s)\n OR (`a` = %s AND `b` < %s))", previousUpperBound[0].String(), previousUpperBound[0].String(), previousUpperBound[1].String(), @@ -76,7 +76,7 @@ func TestCompositeChunkerCompositeBinary(t *testing.T) { previousUpperBound = upperBound upperBound = chunk.UpperBound.Value require.NotEqual(t, previousUpperBound, upperBound) - assert.Equal(t, fmt.Sprintf("((`a` > %s)\n OR (`a` = %s AND `b` >= %s)) AND ((`a` < %s)\n OR (`a` = %s AND `b` < %s))", + require.Equal(t, fmt.Sprintf("((`a` > %s)\n OR (`a` = %s AND `b` >= %s)) AND ((`a` < %s)\n OR (`a` = %s AND `b` < %s))", previousUpperBound[0].String(), previousUpperBound[0].String(), previousUpperBound[1].String(), @@ -92,7 +92,7 @@ func 
TestCompositeChunkerCompositeBinary(t *testing.T) { previousUpperBound = upperBound upperBound = chunk.UpperBound.Value require.NotEqual(t, previousUpperBound, upperBound) - assert.Equal(t, fmt.Sprintf("((`a` > %s)\n OR (`a` = %s AND `b` >= %s)) AND ((`a` < %s)\n OR (`a` = %s AND `b` < %s))", + require.Equal(t, fmt.Sprintf("((`a` > %s)\n OR (`a` = %s AND `b` >= %s)) AND ((`a` < %s)\n OR (`a` = %s AND `b` < %s))", previousUpperBound[0].String(), previousUpperBound[0].String(), previousUpperBound[1].String(), @@ -112,12 +112,12 @@ func TestCompositeChunkerCompositeBinary(t *testing.T) { break } totalChunks++ - assert.NotNil(t, chunk) + require.NotNil(t, chunk) } // there are 1001010 rows. It should be about 1002 chunks. // we don't care that it's exact, since we don't want a flaky // test if we make small changes. - assert.True(t, totalChunks < 1005 && totalChunks > 995) + require.True(t, totalChunks < 1005 && totalChunks > 995) } func TestCompositeChunkerBinary(t *testing.T) { @@ -146,18 +146,18 @@ func TestCompositeChunkerBinary(t *testing.T) { require.NoError(t, t1.SetInfo(t.Context())) // Assert that the types are correct. 
- assert.Equal(t, []string{"varbinary"}, t1.keyColumnsMySQLTp) - assert.Equal(t, binaryType, t1.keyDatums[0]) + require.Equal(t, []string{"varbinary"}, t1.keyColumnsMySQLTp) + require.Equal(t, binaryType, t1.keyDatums[0]) chunker, err := NewChunker(t1, ChunkerConfig{}) require.NoError(t, err) - assert.IsType(t, &chunkerComposite{}, chunker) + require.IsType(t, &chunkerComposite{}, chunker) require.NoError(t, chunker.Open()) chunk, err := chunker.Next() require.NoError(t, err) - assert.NotContains(t, "`pk` >= ", chunk.String()) // first chunk is special + require.NotContains(t, "`pk` >= ", chunk.String()) // first chunk is special upperBound := chunk.UpperBound.Value[0].String() chunk, err = chunker.Next() @@ -165,14 +165,14 @@ func TestCompositeChunkerBinary(t *testing.T) { previousUpperBound := upperBound upperBound = chunk.UpperBound.Value[0].String() require.NotEqual(t, previousUpperBound, upperBound) - assert.Equal(t, fmt.Sprintf("`pk` >= %s AND `pk` < %s", previousUpperBound, upperBound), chunk.String()) + require.Equal(t, fmt.Sprintf("`pk` >= %s AND `pk` < %s", previousUpperBound, upperBound), chunk.String()) chunk, err = chunker.Next() require.NoError(t, err) previousUpperBound = upperBound upperBound = chunk.UpperBound.Value[0].String() require.NotEqual(t, previousUpperBound, upperBound) - assert.Equal(t, fmt.Sprintf("`pk` >= %s AND `pk` < %s", previousUpperBound, upperBound), chunk.String()) + require.Equal(t, fmt.Sprintf("`pk` >= %s AND `pk` < %s", previousUpperBound, upperBound), chunk.String()) // Test it advances again chunk, err = chunker.Next() @@ -180,7 +180,7 @@ func TestCompositeChunkerBinary(t *testing.T) { previousUpperBound = upperBound upperBound = chunk.UpperBound.Value[0].String() require.NotEqual(t, previousUpperBound, upperBound) - assert.Equal(t, fmt.Sprintf("`pk` >= %s AND `pk` < %s", previousUpperBound, upperBound), chunk.String()) + require.Equal(t, fmt.Sprintf("`pk` >= %s AND `pk` < %s", previousUpperBound, upperBound), 
chunk.String()) // Repeat until done (final chunk is sent.) // Add to the total chunks @@ -192,12 +192,12 @@ func TestCompositeChunkerBinary(t *testing.T) { break } totalChunks++ - assert.NotNil(t, chunk) + require.NotNil(t, chunk) } // there are 1001010 rows. It should be about 1002 chunks. // we don't care that it's exact, since we don't want a flaky // test if we make small changes. - assert.True(t, totalChunks < 1005 && totalChunks > 995) + require.True(t, totalChunks < 1005 && totalChunks > 995) } func TestCompositeChunkerInt(t *testing.T) { testutils.RunSQL(t, "DROP TABLE IF EXISTS compositeint_t1") @@ -226,12 +226,12 @@ func TestCompositeChunkerInt(t *testing.T) { require.NoError(t, t1.SetInfo(t.Context())) // Assert that the types are correct. - assert.Equal(t, []string{"int"}, t1.keyColumnsMySQLTp) - assert.Equal(t, signedType, t1.keyDatums[0]) + require.Equal(t, []string{"int"}, t1.keyColumnsMySQLTp) + require.Equal(t, signedType, t1.keyDatums[0]) chunker, err := NewChunker(t1, ChunkerConfig{}) require.NoError(t, err) - assert.IsType(t, &chunkerComposite{}, chunker) + require.IsType(t, &chunkerComposite{}, chunker) require.NoError(t, chunker.Open()) @@ -240,15 +240,15 @@ func TestCompositeChunkerInt(t *testing.T) { chunk, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` < 1008", chunk.String()) // first chunk is special + require.Equal(t, "`pk` < 1008", chunk.String()) // first chunk is special chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` >= 1008 AND `pk` < 2032", chunk.String()) + require.Equal(t, "`pk` >= 1008 AND `pk` < 2032", chunk.String()) chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` >= 2032 AND `pk` < 3033", chunk.String()) + require.Equal(t, "`pk` >= 2032 AND `pk` < 3033", chunk.String()) totalChunks := 3 // 3 so far for range 5000 { @@ -257,12 +257,12 @@ func TestCompositeChunkerInt(t *testing.T) { break } totalChunks++ - assert.NotNil(t, chunk) + require.NotNil(t, 
chunk) } // there are 1001010 rows. It should be about 1002 chunks. // we don't care that it's exact, since we don't want a flaky // test if we make small changes. - assert.True(t, totalChunks < 1005 && totalChunks > 995) + require.True(t, totalChunks < 1005 && totalChunks > 995) } func TestCompositeLowWatermark(t *testing.T) { @@ -295,92 +295,92 @@ func TestCompositeLowWatermark(t *testing.T) { logger: slog.Default(), } _, err = chunker.Next() - assert.Error(t, err) // not open yet + require.Error(t, err) // not open yet require.NoError(t, chunker.Open()) - assert.Error(t, chunker.Open()) // double open should fail + require.Error(t, chunker.Open()) // double open should fail _, err = chunker.GetLowWatermark() - assert.Error(t, err) + require.Error(t, err) - assert.Equal(t, StartingChunkSize, int(chunker.chunkSize)) + require.Equal(t, StartingChunkSize, int(chunker.chunkSize)) chunk, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` < 1008", chunk.String()) // first chunk + require.Equal(t, "`pk` < 1008", chunk.String()) // first chunk _, err = chunker.GetLowWatermark() - assert.Error(t, err) // no feedback yet. + require.Error(t, err) // no feedback yet. chunker.Feedback(chunk, time.Millisecond*500, 1) - assert.Equal(t, StartingChunkSize, int(chunker.chunkSize)) // should not have changed yet (requires 10 feedbacks) + require.Equal(t, StartingChunkSize, int(chunker.chunkSize)) // should not have changed yet (requires 10 feedbacks) _, err = chunker.GetLowWatermark() - assert.Error(t, err) // there has been feedback, but watermark is not ready after first chunk. + require.Error(t, err) // there has been feedback, but watermark is not ready after first chunk. 
chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` >= 1008 AND `pk` < 2032", chunk.String()) + require.Equal(t, "`pk` >= 1008 AND `pk` < 2032", chunk.String()) chunker.Feedback(chunk, time.Second, 1) - assert.Equal(t, 100, int(chunker.chunkSize)) // usually requires 10 feedbacks, but changed because >5x target + require.Equal(t, 100, int(chunker.chunkSize)) // usually requires 10 feedbacks, but changed because >5x target watermark, err := chunker.GetLowWatermark() require.NoError(t, err) // The watermark can be divided into the chunkJSON and the rows. var compositeWM compositeWatermark require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) - assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1008\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2032\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) + require.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1008\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2032\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` >= 2032 AND `pk` < 2133", chunk.String()) + require.Equal(t, "`pk` >= 2032 AND `pk` < 2133", chunk.String()) chunker.Feedback(chunk, time.Second, 1) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) - assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":100,\"LowerBound\":{\"Value\": [\"2032\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2133\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) + require.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":100,\"LowerBound\":{\"Value\": [\"2032\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2133\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunkAsync1, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` >= 2133 AND `pk` < 2144", chunkAsync1.String()) + require.Equal(t, 
"`pk` >= 2133 AND `pk` < 2144", chunkAsync1.String()) chunkAsync2, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` >= 2144 AND `pk` < 2155", chunkAsync2.String()) + require.Equal(t, "`pk` >= 2144 AND `pk` < 2155", chunkAsync2.String()) chunkAsync3, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` >= 2155 AND `pk` < 2166", chunkAsync3.String()) + require.Equal(t, "`pk` >= 2155 AND `pk` < 2166", chunkAsync3.String()) chunker.Feedback(chunkAsync2, time.Second, 1) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) - assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":100,\"LowerBound\":{\"Value\": [\"2032\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2133\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) + require.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":100,\"LowerBound\":{\"Value\": [\"2032\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2133\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunker.Feedback(chunkAsync3, time.Second, 1) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) - assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":100,\"LowerBound\":{\"Value\": [\"2032\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2133\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) + require.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":100,\"LowerBound\":{\"Value\": [\"2032\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2133\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunker.Feedback(chunkAsync1, time.Second, 1) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) - assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":10,\"LowerBound\":{\"Value\": [\"2155\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2166\"],\"Inclusive\":false}}", 
compositeWM.ChunkJSON) + require.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":10,\"LowerBound\":{\"Value\": [\"2155\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2166\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "`pk` >= 2166 AND `pk` < 2177", chunk.String()) + require.Equal(t, "`pk` >= 2166 AND `pk` < 2177", chunk.String()) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) - assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":10,\"LowerBound\":{\"Value\": [\"2155\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2166\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) + require.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":10,\"LowerBound\":{\"Value\": [\"2155\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2166\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) chunker.Feedback(chunk, time.Second, 1) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) require.NoError(t, json.Unmarshal([]byte(watermark), &compositeWM)) - assert.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":10,\"LowerBound\":{\"Value\": [\"2166\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2177\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) + require.JSONEq(t, "{\"Key\":[\"pk\"],\"ChunkSize\":10,\"LowerBound\":{\"Value\": [\"2166\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2177\"],\"Inclusive\":false}}", compositeWM.ChunkJSON) // Give enough feedback that the chunk size recalculation runs. 
- assert.Equal(t, 10, int(chunker.chunkSize)) + require.Equal(t, 10, int(chunker.chunkSize)) for range 50 { chunk, err = chunker.Next() require.NoError(t, err) @@ -389,12 +389,12 @@ func TestCompositeLowWatermark(t *testing.T) { } chunker.Feedback(chunk, time.Millisecond*5, 1) // say that it took 5ms to process 10 rows } - assert.Empty(t, chunker.chunkTimingInfo) - assert.Equal(t, 15, int(chunker.chunkSize)) // scales up a maximum of 50% at a time. + require.Empty(t, chunker.chunkTimingInfo) + require.Equal(t, 15, int(chunker.chunkSize)) // scales up a maximum of 50% at a time. // Test that we have applied all stored chunks and the map is empty, // as we gave Feedback for all chunks. - assert.Empty(t, chunker.lowerBoundWatermarkMap) + require.Empty(t, chunker.lowerBoundWatermarkMap) } func TestCompositeSmallTable(t *testing.T) { @@ -422,13 +422,13 @@ func TestCompositeSmallTable(t *testing.T) { chunker, err := NewChunker(t1, ChunkerConfig{}) require.NoError(t, err) - assert.IsType(t, &chunkerComposite{}, chunker) + require.IsType(t, &chunkerComposite{}, chunker) require.NoError(t, chunker.Open()) chunk, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "1=1", chunk.String()) // small chunk + require.Equal(t, "1=1", chunk.String()) // small chunk require.NoError(t, chunker.Close()) } @@ -490,7 +490,7 @@ func TestSetKey(t *testing.T) { require.NoError(t, err) // Because there are zero rows with status archived or updated_at that old, // it returns 1 chunk with 1=1 and the original condition. - assert.Equal(t, "1=1 AND (status = 'ARCHIVED' AND updated_at < NOW() - INTERVAL 1 DAY)", chunk.String()) + require.Equal(t, "1=1 AND (status = 'ARCHIVED' AND updated_at < NOW() - INTERVAL 1 DAY)", chunk.String()) require.NoError(t, chunker.Close()) // If I reset again with a different condition it should range as chunks. 
@@ -504,12 +504,12 @@ func TestSetKey(t *testing.T) { require.NoError(t, chunker.Open()) chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "((`status` < \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` < 1008)) AND (status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY)", chunk.String()) + require.Equal(t, "((`status` < \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` < 1008)) AND (status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY)", chunk.String()) // Check a chunk with both a lowerbound and upper bound. chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "((`status` > \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` >= 1008)) AND ((`status` < \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` < 2032)) AND (status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY)", chunk.String()) + require.Equal(t, "((`status` > \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` >= 1008)) AND ((`status` < \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` < 2032)) AND (status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY)", chunk.String()) // repeat ~10 more times without calling Feedback() for range 8 { @@ -518,10 +518,10 @@ func TestSetKey(t *testing.T) { } chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "((`status` > \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` >= 10040)) AND (status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY)", chunk.String()) + require.Equal(t, "((`status` > \"PENDING\")\n OR (`status` = \"PENDING\" AND `id` >= 10040)) AND (status = 'PENDING' AND updated_at > NOW() - INTERVAL 1 DAY)", chunk.String()) _, err = chunker.Next() - assert.ErrorIs(t, err, ErrTableIsRead) + require.ErrorIs(t, err, ErrTableIsRead) require.NoError(t, chunker.Close()) @@ -606,7 +606,7 @@ func TestSetKeyCompositeKeyMerge(t *testing.T) { } err = chunker.SetKey("dnc", "") require.NoError(t, err) - assert.Equal(t, []string{"dob", "name", "city", "ssn"}, chunker.chunkKeys) + 
require.Equal(t, []string{"dob", "name", "city", "ssn"}, chunker.chunkKeys) } func TestCompositeChunkerReset(t *testing.T) { @@ -645,8 +645,8 @@ func TestCompositeChunkerReset(t *testing.T) { // Test that Reset() fails when chunker is not open err = chunker.Reset() - assert.Error(t, err) - assert.ErrorIs(t, err, ErrChunkerNotOpen) + require.Error(t, err) + require.ErrorIs(t, err, ErrChunkerNotOpen) // Open the chunker require.NoError(t, chunker.Open()) @@ -660,15 +660,15 @@ func TestCompositeChunkerReset(t *testing.T) { // Process some chunks to change the state chunk1, err := chunker.Next() require.NoError(t, err) - assert.Contains(t, chunk1.String(), "`pk` <") // first chunk + require.Contains(t, chunk1.String(), "`pk` <") // first chunk chunk2, err := chunker.Next() require.NoError(t, err) - assert.Contains(t, chunk2.String(), "`pk` >=") // second chunk has bounds + require.Contains(t, chunk2.String(), "`pk` >=") // second chunk has bounds chunk3, err := chunker.Next() require.NoError(t, err) - assert.Contains(t, chunk3.String(), "`pk` >=") // third chunk has bounds + require.Contains(t, chunk3.String(), "`pk` >=") // third chunk has bounds // Give feedback to advance watermark and change state chunker.Feedback(chunk1, time.Second, 100) @@ -677,51 +677,51 @@ func TestCompositeChunkerReset(t *testing.T) { // Verify state has changed currentRowsCopied, currentChunksCopied, _ := chunker.Progress() - assert.Greater(t, currentRowsCopied, initialRowsCopied) - assert.Greater(t, currentChunksCopied, initialChunksCopied) - assert.Greater(t, len(chunker.chunkPtrs), initialChunkPtrs) // Should have chunk pointers now + require.Greater(t, currentRowsCopied, initialRowsCopied) + require.Greater(t, currentChunksCopied, initialChunksCopied) + require.Greater(t, len(chunker.chunkPtrs), initialChunkPtrs) // Should have chunk pointers now // Verify watermark exists watermark, err := chunker.GetLowWatermark() require.NoError(t, err) - assert.NotEmpty(t, watermark) + 
require.NotEmpty(t, watermark) // Now reset the chunker err = chunker.Reset() require.NoError(t, err) // Verify state is reset to initial values - assert.Len(t, chunker.chunkPtrs, initialChunkPtrs, "chunkPtrs should be reset to initial value (empty slice)") - assert.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value") - assert.Equal(t, initialFinalChunkSent, chunker.finalChunkSent, "finalChunkSent should be reset to initial value") + require.Len(t, chunker.chunkPtrs, initialChunkPtrs, "chunkPtrs should be reset to initial value (empty slice)") + require.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value") + require.Equal(t, initialFinalChunkSent, chunker.finalChunkSent, "finalChunkSent should be reset to initial value") // Verify progress is reset resetRowsCopied, resetChunksCopied, _ := chunker.Progress() - assert.Equal(t, initialRowsCopied, resetRowsCopied, "rowsCopied should be reset to initial value") - assert.Equal(t, initialChunksCopied, resetChunksCopied, "chunksCopied should be reset to initial value") + require.Equal(t, initialRowsCopied, resetRowsCopied, "rowsCopied should be reset to initial value") + require.Equal(t, initialChunksCopied, resetChunksCopied, "chunksCopied should be reset to initial value") // Verify watermark is cleared - assert.Nil(t, chunker.watermark, "watermark should be nil after reset") - assert.Empty(t, chunker.lowerBoundWatermarkMap, "lowerBoundWatermarkMap should be empty after reset") - assert.Empty(t, chunker.chunkTimingInfo, "chunkTimingInfo should be empty after reset") + require.Nil(t, chunker.watermark, "watermark should be nil after reset") + require.Empty(t, chunker.lowerBoundWatermarkMap, "lowerBoundWatermarkMap should be empty after reset") + require.Empty(t, chunker.chunkTimingInfo, "chunkTimingInfo should be empty after reset") // Verify watermark is not ready after reset _, err = chunker.GetLowWatermark() - assert.Error(t, err) - 
assert.ErrorIs(t, err, ErrWatermarkNotReady) + require.Error(t, err) + require.ErrorIs(t, err, ErrWatermarkNotReady) // Verify that after reset, the chunker produces the same sequence as a fresh chunker resetChunk1, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, chunk1.String(), resetChunk1.String(), "First chunk after reset should match original first chunk") + require.Equal(t, chunk1.String(), resetChunk1.String(), "First chunk after reset should match original first chunk") resetChunk2, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, chunk2.String(), resetChunk2.String(), "Second chunk after reset should match original second chunk") + require.Equal(t, chunk2.String(), resetChunk2.String(), "Second chunk after reset should match original second chunk") resetChunk3, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, chunk3.String(), resetChunk3.String(), "Third chunk after reset should match original third chunk") + require.Equal(t, chunk3.String(), resetChunk3.String(), "Third chunk after reset should match original third chunk") // Test that reset works even with more complex state changes chunker.Feedback(resetChunk1, 5*time.Second, 50) // Very slow feedback to trigger panic reduction @@ -736,12 +736,12 @@ func TestCompositeChunkerReset(t *testing.T) { require.NoError(t, err) // Verify chunk size is back to initial value - assert.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value even after dynamic changes") + require.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value even after dynamic changes") // Verify we can still get the same first chunk finalResetChunk, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, chunk1.String(), finalResetChunk.String(), "First chunk after second reset should still match original") + require.Equal(t, chunk1.String(), finalResetChunk.String(), "First chunk after second reset should still match 
original") // Test with custom key and where condition chunker2 := &chunkerComposite{ @@ -759,7 +759,7 @@ func TestCompositeChunkerReset(t *testing.T) { // Get a chunk with the custom condition customChunk, err := chunker2.Next() require.NoError(t, err) - assert.Contains(t, customChunk.String(), "a = 1") // Should contain the where condition + require.Contains(t, customChunk.String(), "a = 1") // Should contain the where condition // Reset and verify the custom condition is preserved err = chunker2.Reset() @@ -767,7 +767,7 @@ func TestCompositeChunkerReset(t *testing.T) { resetCustomChunk, err := chunker2.Next() require.NoError(t, err) - assert.Equal(t, customChunk.String(), resetCustomChunk.String(), "Custom chunk should match after reset") + require.Equal(t, customChunk.String(), resetCustomChunk.String(), "Custom chunk should match after reset") require.NoError(t, chunker2.Close()) require.NoError(t, chunker.Close()) @@ -834,15 +834,15 @@ func TestCompositeChunkerWatermarkOptimizations(t *testing.T) { comp := chunker.(*chunkerComposite) // Before opening, everything is above high watermark - assert.True(t, comp.KeyAboveHighWatermark(1)) - assert.True(t, comp.KeyAboveHighWatermark(100)) - assert.False(t, comp.KeyBelowLowWatermark(1)) // watermark not ready + require.True(t, comp.KeyAboveHighWatermark(1)) + require.True(t, comp.KeyAboveHighWatermark(100)) + require.False(t, comp.KeyBelowLowWatermark(1)) // watermark not ready require.NoError(t, comp.Open()) // After opening but before first chunk, key=1 should still be above - assert.True(t, comp.KeyAboveHighWatermark(1)) - assert.False(t, comp.KeyBelowLowWatermark(1)) + require.True(t, comp.KeyAboveHighWatermark(1)) + require.False(t, comp.KeyBelowLowWatermark(1)) // Get first chunk for tenant_id=1 chunk1, err := comp.Next() @@ -864,14 +864,14 @@ func TestCompositeChunkerWatermarkOptimizations(t *testing.T) { require.True(t, val1 >= 1 && val1 <= 3, "First chunk upper bound should be within data range") // Key 
below the lowest 'a' value should not be above watermark - assert.False(t, comp.KeyAboveHighWatermark(0)) + require.False(t, comp.KeyAboveHighWatermark(0)) // Key at or above chunkPtr should be above high watermark - assert.True(t, comp.KeyAboveHighWatermark(val1)) - assert.True(t, comp.KeyAboveHighWatermark(val1+1)) + require.True(t, comp.KeyAboveHighWatermark(val1)) + require.True(t, comp.KeyAboveHighWatermark(val1+1)) // Nothing is below low watermark yet (no feedback given) - assert.False(t, comp.KeyBelowLowWatermark(1)) + require.False(t, comp.KeyBelowLowWatermark(1)) // Provide feedback to bump watermark comp.Feedback(chunk1, 100*time.Millisecond, 1000) @@ -880,10 +880,10 @@ func TestCompositeChunkerWatermarkOptimizations(t *testing.T) { // If val1=1, then nothing is below yet (chunk hasn't been fully processed) // If val1=2, then a=1 should be below if val1 > 1 { - assert.True(t, comp.KeyBelowLowWatermark(1)) + require.True(t, comp.KeyBelowLowWatermark(1)) } - assert.False(t, comp.KeyBelowLowWatermark(val1)) // upper bound itself not below - assert.False(t, comp.KeyBelowLowWatermark(val1+1)) // above upper bound not below + require.False(t, comp.KeyBelowLowWatermark(val1)) // upper bound itself not below + require.False(t, comp.KeyBelowLowWatermark(val1+1)) // above upper bound not below // Get second chunk chunk2, err := comp.Next() @@ -896,15 +896,15 @@ func TestCompositeChunkerWatermarkOptimizations(t *testing.T) { // But the upper bound itself is NOT below (watermark > key, not >=) if chunk2.UpperBound != nil { val2 := int(chunk2.UpperBound.Value[0].Val.(int64)) - assert.False(t, comp.KeyBelowLowWatermark(val2), "Upper bound itself should not be below watermark") + require.False(t, comp.KeyBelowLowWatermark(val2), "Upper bound itself should not be below watermark") if val2 > 1 { - assert.True(t, comp.KeyBelowLowWatermark(val2-1), "Keys below upper bound should be below watermark") + require.True(t, comp.KeyBelowLowWatermark(val2-1), "Keys below upper 
bound should be below watermark") } } } // After first chunk feedback, a=1 should definitely be below if val1 > 1 { - assert.True(t, comp.KeyBelowLowWatermark(1)) + require.True(t, comp.KeyBelowLowWatermark(1)) } // Exhaust remaining chunks @@ -921,10 +921,10 @@ func TestCompositeChunkerWatermarkOptimizations(t *testing.T) { } // After final chunk is sent, everything should be below, nothing above - assert.False(t, comp.KeyAboveHighWatermark(1)) - assert.False(t, comp.KeyAboveHighWatermark(100)) - assert.True(t, comp.KeyBelowLowWatermark(1)) - assert.True(t, comp.KeyBelowLowWatermark(100)) + require.False(t, comp.KeyAboveHighWatermark(1)) + require.False(t, comp.KeyAboveHighWatermark(100)) + require.True(t, comp.KeyBelowLowWatermark(1)) + require.True(t, comp.KeyBelowLowWatermark(100)) require.NoError(t, comp.Close()) } @@ -993,17 +993,17 @@ func TestCompositeChunkerWatermarkNonNumeric(t *testing.T) { // For VARCHAR keys, the optimization should work (not fall back to conservative) // Test with a key that's clearly above the first chunk's upper bound upperVal := chunk1.UpperBound.Value[0].Val.(string) - assert.False(t, comp.KeyAboveHighWatermark("key00001")) // Below or equal to upper bound - assert.True(t, comp.KeyAboveHighWatermark("zzzzzzzzz")) // Above upper bound + require.False(t, comp.KeyAboveHighWatermark("key00001")) // Below or equal to upper bound + require.True(t, comp.KeyAboveHighWatermark("zzzzzzzzz")) // Above upper bound // KeyBelowLowWatermark should work with VARCHAR comparison comp.Feedback(chunk1, 100*time.Millisecond, 100) watermarkUpper := comp.watermark.UpperBound.Value[0].Val.(string) - assert.True(t, comp.KeyBelowLowWatermark("key00001")) // Below watermark - assert.False(t, comp.KeyBelowLowWatermark("zzzzzzzzz")) // Above watermark + require.True(t, comp.KeyBelowLowWatermark("key00001")) // Below watermark + require.False(t, comp.KeyBelowLowWatermark("zzzzzzzzz")) // Above watermark // Verify the watermark value is what we expect - 
assert.Equal(t, upperVal, watermarkUpper) + require.Equal(t, upperVal, watermarkUpper) require.NoError(t, comp.Close()) } @@ -1072,17 +1072,17 @@ func TestCompositeChunkerWatermarkDateTime(t *testing.T) { // For DATETIME keys, the optimization should work // Test with timestamps that are clearly above/below the first chunk's upper bound upperVal := chunk1.UpperBound.Value[0].Val.(string) - assert.False(t, comp.KeyAboveHighWatermark("2024-01-01 00:00:00")) // Below upper bound - assert.True(t, comp.KeyAboveHighWatermark("2025-12-31 23:59:59")) // Above upper bound + require.False(t, comp.KeyAboveHighWatermark("2024-01-01 00:00:00")) // Below upper bound + require.True(t, comp.KeyAboveHighWatermark("2025-12-31 23:59:59")) // Above upper bound // KeyBelowLowWatermark should work with DATETIME comparison comp.Feedback(chunk1, 100*time.Millisecond, 100) watermarkUpper := comp.watermark.UpperBound.Value[0].Val.(string) - assert.True(t, comp.KeyBelowLowWatermark("2024-01-01 00:00:00")) // Below watermark - assert.False(t, comp.KeyBelowLowWatermark("2025-12-31 23:59:59")) // Above watermark + require.True(t, comp.KeyBelowLowWatermark("2024-01-01 00:00:00")) // Below watermark + require.False(t, comp.KeyBelowLowWatermark("2025-12-31 23:59:59")) // Above watermark // Verify the watermark value is what we expect - assert.Equal(t, upperVal, watermarkUpper) + require.Equal(t, upperVal, watermarkUpper) require.NoError(t, comp.Close()) } @@ -1152,10 +1152,10 @@ func TestCompositeChunkerCollationDifference(t *testing.T) { require.NoError(t, err) for rows.Next() { var name string - assert.NoError(t, rows.Scan(&name)) + require.NoError(t, rows.Scan(&name)) mysqlOrder = append(mysqlOrder, name) } - assert.NoError(t, rows.Close()) + require.NoError(t, rows.Close()) // MySQL collation order (case-insensitive): KEY/Key/key variants grouped together t.Logf("MySQL collation order (first 20): %v", mysqlOrder) @@ -1220,7 +1220,7 @@ func TestCompositeChunkerCollationDifference(t *testing.T) 
{ // MySQL would consider this row already processed (case-insensitive: key0050 < Key0100) // But Go thinks it's above the watermark (byte order: key > Key) - assert.False(t, isBelowWatermark, + require.False(t, isBelowWatermark, "Go incorrectly classifies '%s' as above watermark '%s' (would buffer the change)", testKey, watermarkVal) @@ -1307,13 +1307,13 @@ func TestCompositeChunkerWatermarkWithOutOfOrderCompletion(t *testing.T) { comp.Feedback(chunk2, 100*time.Millisecond, 1000) // Watermark should still be nil because chunk1 hasn't completed - assert.Nil(t, comp.watermark) + require.Nil(t, comp.watermark) // Keys in chunk2 range should not be below watermark yet (chunk1 hasn't completed) // chunk1 is the first chunk (no LowerBound), chunk2 is second (has LowerBound) if chunk2.LowerBound != nil && len(chunk2.LowerBound.Value) > 0 { chunk2Lower := int(chunk2.LowerBound.Value[0].Val.(int64)) - assert.False(t, comp.KeyBelowLowWatermark(chunk2Lower), "Keys in chunk2 should not be below watermark until chunk1 completes") + require.False(t, comp.KeyBelowLowWatermark(chunk2Lower), "Keys in chunk2 should not be below watermark until chunk1 completes") } // Complete chunk1 (the first chunk, which aligns with nil watermark) @@ -1330,20 +1330,20 @@ func TestCompositeChunkerWatermarkWithOutOfOrderCompletion(t *testing.T) { chunk1Lower := 1 // Keys up to chunk2's upper bound should now be below watermark - assert.True(t, comp.KeyBelowLowWatermark(chunk1Lower)) - assert.True(t, comp.KeyBelowLowWatermark(chunk2Upper-1)) + require.True(t, comp.KeyBelowLowWatermark(chunk1Lower)) + require.True(t, comp.KeyBelowLowWatermark(chunk2Upper-1)) // chunk3 range should not be below yet require.NotNil(t, chunk3.LowerBound, "chunk3.LowerBound should not be nil") require.NotEmpty(t, chunk3.LowerBound.Value, "chunk3.LowerBound.Value should not be empty") chunk3Lower := int(chunk3.LowerBound.Value[0].Val.(int64)) - assert.False(t, comp.KeyBelowLowWatermark(chunk3Lower)) + require.False(t, 
comp.KeyBelowLowWatermark(chunk3Lower)) // Complete chunk3 comp.Feedback(chunk3, 100*time.Millisecond, 1000) // Now chunk3 range should be below watermark - assert.True(t, comp.KeyBelowLowWatermark(chunk3Lower)) + require.True(t, comp.KeyBelowLowWatermark(chunk3Lower)) require.NoError(t, comp.Close()) } @@ -1403,7 +1403,7 @@ func TestCompositeChunkerCheckpointHighPtr(t *testing.T) { comp := chunker.(*chunkerComposite) // Before OpenAtWatermark: everything should be "above" (no chunks dispatched) - assert.True(t, comp.KeyAboveHighWatermark(1)) + require.True(t, comp.KeyAboveHighWatermark(1)) // Simulate a watermark at a=200 — the copier had reached this point before interruption. watermark := `{"ChunkJSON":"{\"Key\":[\"a\",\"b\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\":[\"100\",\"1\"],\"Inclusive\":true},\"UpperBound\":{\"Value\":[\"200\",\"1\"],\"Inclusive\":false}}","RowsCopied":200}` @@ -1411,24 +1411,24 @@ func TestCompositeChunkerCheckpointHighPtr(t *testing.T) { // checkpointHighPtr should now be set to the max value of the destination // table's first PK column (a=500, since dstTable has rows up to a=499). - assert.False(t, comp.checkpointHighPtr.IsNil(), "checkpointHighPtr should be set after OpenAtWatermark") + require.False(t, comp.checkpointHighPtr.IsNil(), "checkpointHighPtr should be set after OpenAtWatermark") // Key a=150 is below the watermark — should NOT be above high watermark. - assert.False(t, comp.KeyAboveHighWatermark(150)) + require.False(t, comp.KeyAboveHighWatermark(150)) // Key a=300 is above the watermark but below checkpointHighPtr (~500). // This key may have been copied before the interruption. // It should NOT be considered "above high watermark" — we must not discard events for it. - assert.False(t, comp.KeyAboveHighWatermark(300)) + require.False(t, comp.KeyAboveHighWatermark(300)) // Key a=499 is at the max of the destination — should NOT be above. 
- assert.False(t, comp.KeyAboveHighWatermark(499)) + require.False(t, comp.KeyAboveHighWatermark(499)) // Key a=501 is above checkpointHighPtr — safe to discard. - assert.True(t, comp.KeyAboveHighWatermark(501)) + require.True(t, comp.KeyAboveHighWatermark(501)) // Key a=999 is well above — safe to discard. - assert.True(t, comp.KeyAboveHighWatermark(999)) + require.True(t, comp.KeyAboveHighWatermark(999)) require.NoError(t, comp.Close()) } diff --git a/pkg/table/chunker_multi_test.go b/pkg/table/chunker_multi_test.go index 420f2bc8..2275ae79 100644 --- a/pkg/table/chunker_multi_test.go +++ b/pkg/table/chunker_multi_test.go @@ -6,32 +6,31 @@ import ( "testing" "time" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestNewMultiChunker(t *testing.T) { t.Run("EmptyChunkers", func(t *testing.T) { chunker := NewMultiChunker() - assert.Nil(t, chunker) + require.Nil(t, chunker) }) t.Run("SingleChunker", func(t *testing.T) { mock := NewMockChunker("table1", 1000) chunker := NewMultiChunker(mock) - assert.Equal(t, mock, chunker) + require.Equal(t, mock, chunker) }) t.Run("MultipleChunkers", func(t *testing.T) { mock1 := NewMockChunker("table1", 1000) mock2 := NewMockChunker("table2", 2000) chunker := NewMultiChunker(mock1, mock2) - assert.IsType(t, &multiChunker{}, chunker) + require.IsType(t, &multiChunker{}, chunker) multiChunker := chunker.(*multiChunker) - assert.Len(t, multiChunker.chunkers, 2) - assert.Contains(t, multiChunker.chunkers, "test.table1") - assert.Contains(t, multiChunker.chunkers, "test.table2") + require.Len(t, multiChunker.chunkers, 2) + require.Contains(t, multiChunker.chunkers, "test.table1") + require.Contains(t, multiChunker.chunkers, "test.table2") }) } @@ -42,13 +41,13 @@ func TestMultiChunkerLifecycle(t *testing.T) { t.Run("Open", func(t *testing.T) { err := chunker.Open() - assert.NoError(t, err) - assert.True(t, chunker.isOpen) + require.NoError(t, err) + require.True(t, chunker.isOpen) // Double open should 
fail err = chunker.Open() - assert.Error(t, err) - assert.ErrorIs(t, err, ErrChunkerAlreadyOpen) + require.Error(t, err) + require.ErrorIs(t, err, ErrChunkerAlreadyOpen) }) t.Run("OpenError", func(t *testing.T) { @@ -58,14 +57,14 @@ func TestMultiChunkerLifecycle(t *testing.T) { chunker2 := NewMultiChunker(mock3, mock4).(*multiChunker) err := chunker2.Open() - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to open child chunker") + require.Error(t, err) + require.Contains(t, err.Error(), "failed to open child chunker") }) t.Run("Close", func(t *testing.T) { err := chunker.Close() - assert.NoError(t, err) - assert.False(t, chunker.isOpen) + require.NoError(t, err) + require.False(t, chunker.isOpen) }) t.Run("CloseError", func(t *testing.T) { @@ -77,8 +76,8 @@ func TestMultiChunkerLifecycle(t *testing.T) { require.NoError(t, err) err = chunker2.Close() - assert.Error(t, err) - assert.Contains(t, err.Error(), "failed to close chunker") + require.Error(t, err) + require.Contains(t, err.Error(), "failed to close chunker") }) } @@ -102,8 +101,8 @@ func TestMultiChunkerProgressBasedSelection(t *testing.T) { mock3.SimulateProgress(0.9) // 90% chunk, err := chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table2", chunk.Table.TableName) + require.NoError(t, err) + require.Equal(t, "table2", chunk.Table.TableName) }) t.Run("SelectLargestWhenEqualProgress", func(t *testing.T) { @@ -113,8 +112,8 @@ func TestMultiChunkerProgressBasedSelection(t *testing.T) { mock3.SimulateProgress(0.0) // 0%, 500 rows chunk, err := chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table2", chunk.Table.TableName) + require.NoError(t, err) + require.Equal(t, "table2", chunk.Table.TableName) }) t.Run("SkipCompletedChunkers", func(t *testing.T) { @@ -124,8 +123,8 @@ func TestMultiChunkerProgressBasedSelection(t *testing.T) { mock2.SimulateProgress(0.5) // Only this one is active chunk, err := chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table2", 
chunk.Table.TableName) + require.NoError(t, err) + require.Equal(t, "table2", chunk.Table.TableName) }) t.Run("AllChunkersComplete", func(t *testing.T) { @@ -134,7 +133,7 @@ func TestMultiChunkerProgressBasedSelection(t *testing.T) { mock3.MarkAsComplete() _, err := chunker.Next() - assert.ErrorIs(t, err, ErrTableIsRead) + require.ErrorIs(t, err, ErrTableIsRead) }) } @@ -147,14 +146,14 @@ func TestMultiChunkerIsRead(t *testing.T) { mock1.MarkAsComplete() mock2.SimulateProgress(0.5) // Still active - assert.False(t, chunker.IsRead()) + require.False(t, chunker.IsRead()) }) t.Run("ReadWhenAllComplete", func(t *testing.T) { mock1.MarkAsComplete() mock2.MarkAsComplete() - assert.True(t, chunker.IsRead()) + require.True(t, chunker.IsRead()) }) } @@ -173,9 +172,9 @@ func TestMultiChunkerProgress(t *testing.T) { rowsCopied, chunksCopied, totalRows := chunker.Progress() - assert.Equal(t, uint64(1100), rowsCopied) // 500 + 600 - assert.Equal(t, uint64(8), chunksCopied) // 5 + 3 - assert.Equal(t, uint64(3000), totalRows) // 1000 + 2000 + require.Equal(t, uint64(1100), rowsCopied) // 500 + 600 + require.Equal(t, uint64(8), chunksCopied) // 5 + 3 + require.Equal(t, uint64(3000), totalRows) // 1000 + 2000 } func TestMultiChunkerFeedbackRouting(t *testing.T) { @@ -195,7 +194,7 @@ func TestMultiChunkerFeedbackRouting(t *testing.T) { chunk1, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "table1", chunk1.Table.TableName) + require.Equal(t, "table1", chunk1.Table.TableName) // Provide feedback duration := 100 * time.Millisecond @@ -206,12 +205,12 @@ func TestMultiChunkerFeedbackRouting(t *testing.T) { feedback1 := mock1.GetFeedbackCalls() feedback2 := mock2.GetFeedbackCalls() - assert.Len(t, feedback1, 1) - assert.Empty(t, feedback2) + require.Len(t, feedback1, 1) + require.Empty(t, feedback2) - assert.Equal(t, chunk1, feedback1[0].Chunk) - assert.Equal(t, duration, feedback1[0].Duration) - assert.Equal(t, actualRows, feedback1[0].ActualRows) + require.Equal(t, 
chunk1, feedback1[0].Chunk) + require.Equal(t, duration, feedback1[0].Duration) + require.Equal(t, actualRows, feedback1[0].ActualRows) } func TestMultiChunkerWatermarkHandling(t *testing.T) { @@ -225,19 +224,19 @@ func TestMultiChunkerWatermarkHandling(t *testing.T) { mock2.SimulateProgress(0.5) // position 1000 watermark, err := chunker.GetLowWatermark() - assert.NoError(t, err) + require.NoError(t, err) // Parse the watermark var watermarks map[string]string err = json.Unmarshal([]byte(watermark), &watermarks) - assert.NoError(t, err) + require.NoError(t, err) - assert.Contains(t, watermarks, "test.table1") - assert.Contains(t, watermarks, "test.table2") + require.Contains(t, watermarks, "test.table1") + require.Contains(t, watermarks, "test.table2") // Check individual watermarks - assert.Equal(t, "300", watermarks["test.table1"]) - assert.Equal(t, "1000", watermarks["test.table2"]) + require.Equal(t, "300", watermarks["test.table1"]) + require.Equal(t, "1000", watermarks["test.table2"]) }) t.Run("OpenAtWatermark", func(t *testing.T) { @@ -250,21 +249,21 @@ func TestMultiChunkerWatermarkHandling(t *testing.T) { require.NoError(t, err) err = chunker.OpenAtWatermark(string(watermarkJSON)) - assert.NoError(t, err) - assert.True(t, chunker.isOpen) + require.NoError(t, err) + require.True(t, chunker.isOpen) // Verify chunkers were opened at correct positions progress1, _, _ := mock1.Progress() progress2, _, _ := mock2.Progress() - assert.Equal(t, uint64(300), progress1) - assert.Equal(t, uint64(1000), progress2) + require.Equal(t, uint64(300), progress1) + require.Equal(t, uint64(1000), progress2) }) t.Run("OpenAtWatermarkError", func(t *testing.T) { // Invalid JSON err := chunker.OpenAtWatermark("invalid json") - assert.Error(t, err) - assert.Contains(t, err.Error(), "could not parse multi-chunker watermark") + require.Error(t, err) + require.Contains(t, err.Error(), "could not parse multi-chunker watermark") }) t.Run("OpenAtWatermarkMissingTable", func(t 
*testing.T) { @@ -282,15 +281,15 @@ func TestMultiChunkerWatermarkHandling(t *testing.T) { chunker2 := NewMultiChunker(mock3, mock4).(*multiChunker) err = chunker2.OpenAtWatermark(string(watermarkJSON)) - assert.NoError(t, err, "Should handle missing table watermarks gracefully") + require.NoError(t, err, "Should handle missing table watermarks gracefully") // Verify table1 was opened at watermark position progress1, _, _ := mock3.Progress() - assert.Equal(t, uint64(300), progress1, "table1 should resume from watermark position") + require.Equal(t, uint64(300), progress1, "table1 should resume from watermark position") // Verify table2 was opened from scratch (position 0) progress2, _, _ := mock4.Progress() - assert.Equal(t, uint64(0), progress2, "table2 should start from scratch when no watermark") + require.Equal(t, uint64(0), progress2, "table2 should start from scratch when no watermark") }) } @@ -309,9 +308,9 @@ func TestMultiChunkerTables(t *testing.T) { tableNames[table.TableName] = true } - assert.Len(t, tableNames, 2) // Only unique table names - assert.Contains(t, tableNames, "table1") - assert.Contains(t, tableNames, "table2") + require.Len(t, tableNames, 2) // Only unique table names + require.Contains(t, tableNames, "table1") + require.Contains(t, tableNames, "table2") } func TestMultiChunkerErrorHandling(t *testing.T) { @@ -321,7 +320,7 @@ func TestMultiChunkerErrorHandling(t *testing.T) { chunker := NewMultiChunker(mock1, mock2).(*multiChunker) _, err := chunker.Next() - assert.ErrorIs(t, err, ErrTableNotOpen) + require.ErrorIs(t, err, ErrTableNotOpen) }) t.Run("NextError", func(t *testing.T) { @@ -337,8 +336,8 @@ func TestMultiChunkerErrorHandling(t *testing.T) { }() _, err := chunker.Next() - assert.Error(t, err) - assert.Contains(t, err.Error(), "next failed") + require.Error(t, err) + require.Contains(t, err.Error(), "next failed") }) t.Run("WatermarkError", func(t *testing.T) { @@ -349,15 +348,15 @@ func TestMultiChunkerErrorHandling(t 
*testing.T) { // With our fix, this should succeed and skip the errored chunker watermark, err := chunker.GetLowWatermark() - assert.NoError(t, err, "Should skip chunker with watermark error") + require.NoError(t, err, "Should skip chunker with watermark error") // Parse the watermark to verify only table2 is included var watermarks map[string]string err = json.Unmarshal([]byte(watermark), &watermarks) - assert.NoError(t, err) + require.NoError(t, err) - assert.NotContains(t, watermarks, "test.table1", "Should skip table1 due to watermark error") - assert.Contains(t, watermarks, "test.table2", "Should include table2 watermark") + require.NotContains(t, watermarks, "test.table1", "Should skip table1 due to watermark error") + require.Contains(t, watermarks, "test.table2", "Should include table2 watermark") }) } @@ -417,7 +416,7 @@ func TestMultiChunkerDeterministicBehavior(t *testing.T) { // All runs should produce the same selection order for i := 1; i < len(results); i++ { - assert.Equal(t, results[0], results[i], "Selection order should be deterministic") + require.Equal(t, results[0], results[i], "Selection order should be deterministic") } }) } @@ -432,8 +431,8 @@ func TestMultiChunkerReset(t *testing.T) { // Test that Reset() fails when chunker is not open err := chunker.Reset() - assert.Error(t, err) - assert.ErrorIs(t, err, ErrChunkerNotOpen) + require.Error(t, err) + require.ErrorIs(t, err, ErrChunkerNotOpen) // Open the chunker require.NoError(t, chunker.Open()) @@ -450,14 +449,14 @@ func TestMultiChunkerReset(t *testing.T) { // Get chunks from different tables chunk1, err := chunker.Next() // Should select table1 (lowest progress) - assert.NoError(t, err) - assert.Equal(t, "table1", chunk1.Table.TableName) + require.NoError(t, err) + require.Equal(t, "table1", chunk1.Table.TableName) // Advance table1's progress so table3 becomes lowest mock1.SimulateProgress(0.4) chunk2, err := chunker.Next() // Should select table3 (now lowest progress) - 
assert.NoError(t, err) - assert.Equal(t, "table3", chunk2.Table.TableName) + require.NoError(t, err) + require.Equal(t, "table3", chunk2.Table.TableName) // Give feedback to change state chunker.Feedback(chunk1, time.Second, 100) @@ -468,47 +467,47 @@ func TestMultiChunkerReset(t *testing.T) { currentRowsCopied2, currentChunksCopied2, _ := mock2.Progress() currentRowsCopied3, currentChunksCopied3, _ := mock3.Progress() - assert.Greater(t, currentRowsCopied1, initialRowsCopied1) - assert.Greater(t, currentChunksCopied1, initialChunksCopied1) - assert.Greater(t, currentRowsCopied3, initialRowsCopied3) - assert.Greater(t, currentChunksCopied3, initialChunksCopied3) - assert.Greater(t, currentRowsCopied2, initialRowsCopied2) // in the mock: rows it based on progress. - assert.Equal(t, initialChunksCopied2, currentChunksCopied2) // chunks is based on actual + require.Greater(t, currentRowsCopied1, initialRowsCopied1) + require.Greater(t, currentChunksCopied1, initialChunksCopied1) + require.Greater(t, currentRowsCopied3, initialRowsCopied3) + require.Greater(t, currentChunksCopied3, initialChunksCopied3) + require.Greater(t, currentRowsCopied2, initialRowsCopied2) // in the mock: rows is based on progress. 
+ require.Equal(t, initialChunksCopied2, currentChunksCopied2) // chunks is based on actual // Verify feedback was recorded feedback1 := mock1.GetFeedbackCalls() feedback3 := mock3.GetFeedbackCalls() - assert.Len(t, feedback1, 1) - assert.Len(t, feedback3, 1) + require.Len(t, feedback1, 1) + require.Len(t, feedback3, 1) // Verify watermark exists watermark, err := chunker.GetLowWatermark() - assert.NoError(t, err) - assert.NotEmpty(t, watermark) + require.NoError(t, err) + require.NotEmpty(t, watermark) // Now reset the chunker err = chunker.Reset() - assert.NoError(t, err) + require.NoError(t, err) // Verify all child chunkers are reset to initial values resetRowsCopied1, resetChunksCopied1, _ := mock1.Progress() resetRowsCopied2, resetChunksCopied2, _ := mock2.Progress() resetRowsCopied3, resetChunksCopied3, _ := mock3.Progress() - assert.Equal(t, initialRowsCopied1, resetRowsCopied1, "mock1 rowsCopied should be reset to initial value") - assert.Equal(t, initialChunksCopied1, resetChunksCopied1, "mock1 chunksCopied should be reset to initial value") - assert.Equal(t, initialRowsCopied2, resetRowsCopied2, "mock2 rowsCopied should be reset to initial value") - assert.Equal(t, initialChunksCopied2, resetChunksCopied2, "mock2 chunksCopied should be reset to initial value") - assert.Equal(t, initialRowsCopied3, resetRowsCopied3, "mock3 rowsCopied should be reset to initial value") - assert.Equal(t, initialChunksCopied3, resetChunksCopied3, "mock3 chunksCopied should be reset to initial value") + require.Equal(t, initialRowsCopied1, resetRowsCopied1, "mock1 rowsCopied should be reset to initial value") + require.Equal(t, initialChunksCopied1, resetChunksCopied1, "mock1 chunksCopied should be reset to initial value") + require.Equal(t, initialRowsCopied2, resetRowsCopied2, "mock2 rowsCopied should be reset to initial value") + require.Equal(t, initialChunksCopied2, resetChunksCopied2, "mock2 chunksCopied should be reset to initial value") + require.Equal(t, 
initialRowsCopied3, resetRowsCopied3, "mock3 rowsCopied should be reset to initial value") + require.Equal(t, initialChunksCopied3, resetChunksCopied3, "mock3 chunksCopied should be reset to initial value") // Verify feedback history is cleared resetFeedback1 := mock1.GetFeedbackCalls() resetFeedback2 := mock2.GetFeedbackCalls() resetFeedback3 := mock3.GetFeedbackCalls() - assert.Empty(t, resetFeedback1, "mock1 feedback should be cleared after reset") - assert.Empty(t, resetFeedback2, "mock2 feedback should be cleared after reset") - assert.Empty(t, resetFeedback3, "mock3 feedback should be cleared after reset") + require.Empty(t, resetFeedback1, "mock1 feedback should be cleared after reset") + require.Empty(t, resetFeedback2, "mock2 feedback should be cleared after reset") + require.Empty(t, resetFeedback3, "mock3 feedback should be cleared after reset") // Verify that after reset, the chunker produces the same selection behavior // Reset progress to same initial state @@ -517,27 +516,27 @@ func TestMultiChunkerReset(t *testing.T) { mock3.SimulateProgress(0.3) // 30% resetChunk1, err := chunker.Next() // Should select table1 (lowest progress) again - assert.NoError(t, err) - assert.Equal(t, chunk1.Table.TableName, resetChunk1.Table.TableName, "First chunk after reset should be from same table as original") + require.NoError(t, err) + require.Equal(t, chunk1.Table.TableName, resetChunk1.Table.TableName, "First chunk after reset should be from same table as original") // Advance table1's progress so table3 becomes lowest (same as before) mock1.SimulateProgress(0.4) resetChunk2, err := chunker.Next() // Should select table3 again - assert.NoError(t, err) - assert.Equal(t, chunk2.Table.TableName, resetChunk2.Table.TableName, "Second chunk after reset should be from same table as original") + require.NoError(t, err) + require.Equal(t, chunk2.Table.TableName, resetChunk2.Table.TableName, "Second chunk after reset should be from same table as original") // Test 
aggregate progress after reset totalRowsCopied, totalChunksCopied, totalRowsExpected := chunker.Progress() - assert.Equal(t, uint64(1900), totalRowsCopied) - assert.Equal(t, uint64(2), totalChunksCopied) // 1 + 0 + 1 = 2 chunks processed - assert.Equal(t, uint64(3500), totalRowsExpected) // 1000 + 2000 + 500 = 3500 + require.Equal(t, uint64(1900), totalRowsCopied) + require.Equal(t, uint64(2), totalChunksCopied) // 1 + 0 + 1 = 2 chunks processed + require.Equal(t, uint64(3500), totalRowsExpected) // 1000 + 2000 + 500 = 3500 // Test that reset works with child chunker errors mock1.SetNextError(errors.New("mock error")) // Reset should still work even if child chunkers have errors configured err = chunker.Reset() - assert.NoError(t, err) + require.NoError(t, err) // Clear the error and verify normal operation resumes mock1.SetNextError(nil) @@ -546,8 +545,8 @@ func TestMultiChunkerReset(t *testing.T) { mock3.SimulateProgress(0.3) finalChunk, err := chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table1", finalChunk.Table.TableName) + require.NoError(t, err) + require.Equal(t, "table1", finalChunk.Table.TableName) // Test reset with one child chunker reset error mock2.SetNextError(errors.New("reset would fail but we don't call Next")) @@ -557,11 +556,11 @@ func TestMultiChunkerReset(t *testing.T) { mock2.MarkAsComplete() // This changes state err = chunker.Reset() - assert.NoError(t, err) // Should still succeed + require.NoError(t, err) // Should still succeed // Verify the completed chunker was reset (no longer complete) - assert.False(t, mock2.isRead(), "mock2 should not be read after reset") - assert.NoError(t, chunker.Close()) + require.False(t, mock2.isRead(), "mock2 should not be read after reset") + require.NoError(t, chunker.Close()) } // TestMultiChunkerSelectionRegressions tests for specific bugs that were fixed @@ -588,16 +587,16 @@ func TestMultiChunkerSelectionRegressions(t *testing.T) { // The chunker should select table2 (90%) because it has 
lower progress // This is correct behavior - we want to prioritize the table that's furthest behind chunk, err := chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table2", chunk.Table.TableName, "Should select table with lowest progress (90%)") + require.NoError(t, err) + require.Equal(t, "table2", chunk.Table.TableName, "Should select table with lowest progress (90%)") // Advance table2's progress past table1 mock2.SimulateProgress(0.96) // Now table2 is at 96% // Now table1 (95.34%) should be selected as it has lower progress chunk, err = chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table1", chunk.Table.TableName, "Should now select table1 as it has lower progress") + require.NoError(t, err) + require.Equal(t, "table1", chunk.Table.TableName, "Should now select table1 as it has lower progress") }) t.Run("RegressionFirstChunkerBias", func(t *testing.T) { @@ -620,16 +619,16 @@ func TestMultiChunkerSelectionRegressions(t *testing.T) { mock3.SimulateProgress(0.20) // 20% chunk, err := chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table2", chunk.Table.TableName, "Should select table with lowest progress, not first table") + require.NoError(t, err) + require.Equal(t, "table2", chunk.Table.TableName, "Should select table with lowest progress, not first table") // Advance table2 past table3 mock2.SimulateProgress(0.25) // Now 25% // table3 should now be selected (20% < 25%) chunk, err = chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table3", chunk.Table.TableName, "Should select table3 as it now has lowest progress") + require.NoError(t, err) + require.Equal(t, "table3", chunk.Table.TableName, "Should select table3 as it now has lowest progress") }) t.Run("RegressionNilSelectedChunker", func(t *testing.T) { @@ -653,9 +652,9 @@ func TestMultiChunkerSelectionRegressions(t *testing.T) { // Should successfully select the only active chunker chunk, err := chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table3", 
chunk.Table.TableName, "Should select the only active chunker") - assert.NotNil(t, chunk, "Chunk should not be nil") + require.NoError(t, err) + require.Equal(t, "table3", chunk.Table.TableName, "Should select the only active chunker") + require.NotNil(t, chunk, "Chunk should not be nil") }) t.Run("RegressionAllChunkersComplete", func(t *testing.T) { @@ -677,7 +676,7 @@ func TestMultiChunkerSelectionRegressions(t *testing.T) { // Should return ErrTableIsRead when all chunkers are complete _, err := chunker.Next() - assert.ErrorIs(t, err, ErrTableIsRead, "Should return ErrTableIsRead when all chunkers are complete") + require.ErrorIs(t, err, ErrTableIsRead, "Should return ErrTableIsRead when all chunkers are complete") }) t.Run("RegressionEqualProgressLargestTable", func(t *testing.T) { @@ -700,8 +699,8 @@ func TestMultiChunkerSelectionRegressions(t *testing.T) { mock3.SimulateProgress(0.0) chunk, err := chunker.Next() - assert.NoError(t, err) - assert.Equal(t, "table2", chunk.Table.TableName, "Should select largest table when progress is equal") + require.NoError(t, err) + require.Equal(t, "table2", chunk.Table.TableName, "Should select largest table when progress is equal") }) t.Run("RegressionWatermarkErrorHandling", func(t *testing.T) { @@ -722,17 +721,17 @@ func TestMultiChunkerSelectionRegressions(t *testing.T) { // GetLowWatermark should succeed and include table1 and table3, skip table2 watermark, err := chunker.GetLowWatermark() - assert.NoError(t, err, "Should not fail when one table has watermark error") + require.NoError(t, err, "Should not fail when one table has watermark error") // Parse the watermark to verify contents var watermarks map[string]string err = json.Unmarshal([]byte(watermark), &watermarks) - assert.NoError(t, err) + require.NoError(t, err) // Should contain table1 and table3, but not table2 (error) - assert.Contains(t, watermarks, "test.table1", "Should include table1 watermark") - assert.Contains(t, watermarks, "test.table3", "Should 
include table3 watermark") - assert.NotContains(t, watermarks, "test.table2", "Should skip table2 due to watermark error") + require.Contains(t, watermarks, "test.table1", "Should include table1 watermark") + require.Contains(t, watermarks, "test.table3", "Should include table3 watermark") + require.NotContains(t, watermarks, "test.table2", "Should skip table2 due to watermark error") }) t.Run("RegressionOpenAtWatermarkMissingTable", func(t *testing.T) { @@ -751,14 +750,14 @@ func TestMultiChunkerSelectionRegressions(t *testing.T) { // Should succeed - table1 resumes from watermark, table2 starts from scratch err = chunker.OpenAtWatermark(string(watermarkJSON)) - assert.NoError(t, err, "Should handle missing table watermarks gracefully") + require.NoError(t, err, "Should handle missing table watermarks gracefully") // Verify table1 was opened at watermark position progress1, _, _ := mock1.Progress() - assert.Equal(t, uint64(500), progress1, "table1 should resume from watermark position") + require.Equal(t, uint64(500), progress1, "table1 should resume from watermark position") // Verify table2 was opened from scratch (position 0) progress2, _, _ := mock2.Progress() - assert.Equal(t, uint64(0), progress2, "table2 should start from scratch when no watermark") + require.Equal(t, uint64(0), progress2, "table2 should start from scratch when no watermark") }) } diff --git a/pkg/table/chunker_optimistic_test.go b/pkg/table/chunker_optimistic_test.go index 7f2a1847..bfa7cdf6 100644 --- a/pkg/table/chunker_optimistic_test.go +++ b/pkg/table/chunker_optimistic_test.go @@ -7,7 +7,6 @@ import ( "time" "github.com/block/spirit/pkg/testutils" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -36,26 +35,26 @@ func TestOptimisticChunkerBasic(t *testing.T) { require.NoError(t, t1.PrimaryKeyIsMemoryComparable()) t1.keyColumnsMySQLTp[0] = "varchar" t1.keyDatums[0] = unknownType - assert.Error(t, t1.PrimaryKeyIsMemoryComparable()) + require.Error(t, 
t1.PrimaryKeyIsMemoryComparable()) t1.keyColumnsMySQLTp[0] = "bigint" t1.keyDatums[0] = signedType require.NoError(t, t1.PrimaryKeyIsMemoryComparable()) - assert.Equal(t, "`t1`", t1.QuotedTableName) + require.Equal(t, "`t1`", t1.QuotedTableName) require.NoError(t, chunker.Open()) - assert.Error(t, chunker.Open()) // can't open twice. - assert.True(t, chunker.KeyAboveHighWatermark(1)) // we haven't started copying. + require.Error(t, chunker.Open()) // can't open twice. + require.True(t, chunker.KeyAboveHighWatermark(1)) // we haven't started copying. _, err := chunker.Next() require.NoError(t, err) - assert.True(t, chunker.KeyAboveHighWatermark(100)) // we are at 1 + require.True(t, chunker.KeyAboveHighWatermark(100)) // we are at 1 _, err = chunker.Next() require.NoError(t, err) - assert.False(t, chunker.KeyAboveHighWatermark(100)) // we are at 1001 + require.False(t, chunker.KeyAboveHighWatermark(100)) // we are at 1001 for range 999 { _, err = chunker.Next() @@ -67,8 +66,8 @@ func TestOptimisticChunkerBasic(t *testing.T) { require.NoError(t, err) _, err = chunker.Next() - assert.Error(t, err) // err: table is read. - assert.Equal(t, "table is read", err.Error()) + require.Error(t, err) // err: table is read. + require.Equal(t, "table is read", err.Error()) require.NoError(t, chunker.Close()) } @@ -96,90 +95,90 @@ func TestLowWatermark(t *testing.T) { require.NoError(t, chunker.Open()) _, err := chunker.GetLowWatermark() - assert.Error(t, err) + require.Error(t, err) chunk, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` < 1", chunk.String()) // first chunk + require.Equal(t, "`id` < 1", chunk.String()) // first chunk _, err = chunker.GetLowWatermark() - assert.Error(t, err) // no feedback yet. + require.Error(t, err) // no feedback yet. chunker.Feedback(chunk, time.Second, 1) _, err = chunker.GetLowWatermark() - assert.Error(t, err) // there has been feedback, but watermark is not ready after first chunk. 
+ require.Error(t, err) // there has been feedback, but watermark is not ready after first chunk. chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` >= 1 AND `id` < 1001", chunk.String()) // first chunk + require.Equal(t, "`id` >= 1 AND `id` < 1001", chunk.String()) // first chunk chunker.Feedback(chunk, time.Second, 1) watermark, err := chunker.GetLowWatermark() require.NoError(t, err) - assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"1001\"],\"Inclusive\":false}}", watermark) + require.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"1001\"],\"Inclusive\":false}}", watermark) // Check key w.r.t. watermark - assert.False(t, chunker.KeyAboveHighWatermark(1000)) - assert.True(t, chunker.KeyAboveHighWatermark(1001)) - assert.True(t, chunker.KeyBelowLowWatermark(1000)) // 1000 is done, so this is below. - assert.False(t, chunker.KeyBelowLowWatermark(1001)) + require.False(t, chunker.KeyAboveHighWatermark(1000)) + require.True(t, chunker.KeyAboveHighWatermark(1001)) + require.True(t, chunker.KeyBelowLowWatermark(1000)) // 1000 is done, so this is below. + require.False(t, chunker.KeyBelowLowWatermark(1001)) chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` >= 1001 AND `id` < 2001", chunk.String()) // first chunk + require.Equal(t, "`id` >= 1001 AND `id` < 2001", chunk.String()) // first chunk // Check KeyBelowLowWatermark before and after feedback. 
- assert.False(t, chunker.KeyBelowLowWatermark(1001)) + require.False(t, chunker.KeyBelowLowWatermark(1001)) chunker.Feedback(chunk, time.Second, 1) - assert.True(t, chunker.KeyBelowLowWatermark(1001)) + require.True(t, chunker.KeyBelowLowWatermark(1001)) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) - assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2001\"],\"Inclusive\":false}}", watermark) + require.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2001\"],\"Inclusive\":false}}", watermark) chunkAsync1, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` >= 2001 AND `id` < 3001", chunkAsync1.String()) - assert.False(t, chunker.KeyBelowLowWatermark(2001)) + require.Equal(t, "`id` >= 2001 AND `id` < 3001", chunkAsync1.String()) + require.False(t, chunker.KeyBelowLowWatermark(2001)) chunkAsync2, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` >= 3001 AND `id` < 4001", chunkAsync2.String()) - assert.False(t, chunker.KeyBelowLowWatermark(2001)) + require.Equal(t, "`id` >= 3001 AND `id` < 4001", chunkAsync2.String()) + require.False(t, chunker.KeyBelowLowWatermark(2001)) chunkAsync3, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` >= 4001 AND `id` < 5001", chunkAsync3.String()) - assert.False(t, chunker.KeyBelowLowWatermark(2001)) + require.Equal(t, "`id` >= 4001 AND `id` < 5001", chunkAsync3.String()) + require.False(t, chunker.KeyBelowLowWatermark(2001)) chunker.Feedback(chunkAsync2, time.Second, 1) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) - assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2001\"],\"Inclusive\":false}}", watermark) + require.JSONEq(t, 
"{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2001\"],\"Inclusive\":false}}", watermark) chunker.Feedback(chunkAsync3, time.Second, 1) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) - assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2001\"],\"Inclusive\":false}}", watermark) - assert.False(t, chunker.KeyBelowLowWatermark(2001)) + require.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"1001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"2001\"],\"Inclusive\":false}}", watermark) + require.False(t, chunker.KeyBelowLowWatermark(2001)) chunker.Feedback(chunkAsync1, time.Second, 1) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) - assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"4001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"5001\"],\"Inclusive\":false}}", watermark) - assert.True(t, chunker.KeyBelowLowWatermark(2001)) - assert.True(t, chunker.KeyBelowLowWatermark(5000)) + require.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"4001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"5001\"],\"Inclusive\":false}}", watermark) + require.True(t, chunker.KeyBelowLowWatermark(2001)) + require.True(t, chunker.KeyBelowLowWatermark(5000)) chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` >= 5001 AND `id` < 6001", chunk.String()) // should bump immediately + require.Equal(t, "`id` >= 5001 AND `id` < 6001", chunk.String()) // should bump immediately watermark, err = chunker.GetLowWatermark() require.NoError(t, err) - assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"4001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"5001\"],\"Inclusive\":false}}", watermark) + require.JSONEq(t, 
"{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"4001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"5001\"],\"Inclusive\":false}}", watermark) chunker.Feedback(chunk, time.Second, 1) watermark, err = chunker.GetLowWatermark() require.NoError(t, err) - assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"5001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"6001\"],\"Inclusive\":false}}", watermark) + require.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":1000,\"LowerBound\":{\"Value\": [\"5001\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"6001\"],\"Inclusive\":false}}", watermark) // Test that we have applied all stored chunks and the map is empty, // as we gave Feedback for all chunks. - assert.Empty(t, chunker.lowerBoundWatermarkMap) + require.Empty(t, chunker.lowerBoundWatermarkMap) } func TestOptimisticDynamicChunking(t *testing.T) { @@ -206,12 +205,12 @@ func TestOptimisticDynamicChunking(t *testing.T) { chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, uint64(100), chunk.ChunkSize) // immediate change from before + require.Equal(t, uint64(100), chunk.ChunkSize) // immediate change from before chunker.Feedback(chunk, time.Second, 1) // way too long again, it will reduce to 10 newChunk, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, uint64(10), newChunk.ChunkSize) // immediate change from before + require.Equal(t, uint64(10), newChunk.ChunkSize) // immediate change from before // Feedback is only taken if the chunk.ChunkSize matches the current size. // so lets give bad feedback and see no change. newChunk.ChunkSize = 1234 @@ -219,7 +218,7 @@ func TestOptimisticDynamicChunking(t *testing.T) { chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, uint64(10), chunk.ChunkSize) // no change + require.Equal(t, uint64(10), chunk.ChunkSize) // no change chunker.Feedback(chunk, 50*time.Microsecond, 1) // must give feedback to advance watermark. 
// Feedback to increase the chunk size is more gradual. @@ -227,13 +226,13 @@ func TestOptimisticDynamicChunking(t *testing.T) { chunk, err = chunker.Next() chunker.Feedback(chunk, 50*time.Microsecond, 1) // very short. require.NoError(t, err) - assert.Equal(t, uint64(10), chunk.ChunkSize) // no change. + require.Equal(t, uint64(10), chunk.ChunkSize) // no change. } // On the 11th piece of feedback *with this chunk size* // it finally changes. But no greater than 50% increase at a time. chunk, err = chunker.Next() require.NoError(t, err) - assert.Equal(t, uint64(15), chunk.ChunkSize) + require.Equal(t, uint64(15), chunk.ChunkSize) chunker.Feedback(chunk, 50*time.Microsecond, 1) // Advance the watermark a little bit. @@ -247,7 +246,7 @@ func TestOptimisticDynamicChunking(t *testing.T) { watermark, err := chunker.GetLowWatermark() require.NoError(t, err) - assert.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":22,\"LowerBound\":{\"Value\": [\"584\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"606\"],\"Inclusive\":false}}", watermark) + require.JSONEq(t, "{\"Key\":[\"id\"],\"ChunkSize\":22,\"LowerBound\":{\"Value\": [\"584\"],\"Inclusive\":true},\"UpperBound\":{\"Value\": [\"606\"],\"Inclusive\":false}}", watermark) // Start everything over again as t2. t2 := newTableInfo4Test("test", "t1") @@ -271,7 +270,7 @@ func TestOptimisticDynamicChunking(t *testing.T) { // we would have to worry about off-by-1 errors. 
chunk, err = chunker2.Next() require.NoError(t, err) - assert.Equal(t, "584", chunk.LowerBound.Value[0].String()) + require.Equal(t, "584", chunk.LowerBound.Value[0].String()) } func TestOptimisticPrefetchChunking(t *testing.T) { @@ -321,14 +320,14 @@ func TestOptimisticPrefetchChunking(t *testing.T) { } chunker.SetDynamicChunking(true) require.NoError(t, chunker.Open()) - assert.False(t, chunker.chunkPrefetchingEnabled) + require.False(t, chunker.chunkPrefetchingEnabled) for !chunker.finalChunkSent { chunk, err := chunker.Next() require.NoError(t, err) chunker.Feedback(chunk, 100*time.Millisecond, 1) // way too short. } - assert.True(t, chunker.chunkPrefetchingEnabled) + require.True(t, chunker.chunkPrefetchingEnabled) } func TestOptimisticChunkerReset(t *testing.T) { @@ -359,8 +358,8 @@ func TestOptimisticChunkerReset(t *testing.T) { // Test that Reset() fails when chunker is not open err := chunker.Reset() - assert.Error(t, err) - assert.ErrorIs(t, err, ErrChunkerNotOpen) + require.Error(t, err) + require.ErrorIs(t, err, ErrChunkerNotOpen) // Open the chunker require.NoError(t, chunker.Open()) @@ -374,15 +373,15 @@ func TestOptimisticChunkerReset(t *testing.T) { // Process some chunks to change the state chunk1, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` < 1", chunk1.String()) // first chunk + require.Equal(t, "`id` < 1", chunk1.String()) // first chunk chunk2, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` >= 1 AND `id` < 1001", chunk2.String()) + require.Equal(t, "`id` >= 1 AND `id` < 1001", chunk2.String()) chunk3, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, "`id` >= 1001 AND `id` < 2001", chunk3.String()) + require.Equal(t, "`id` >= 1001 AND `id` < 2001", chunk3.String()) // Give feedback to advance watermark and change state chunker.Feedback(chunk1, time.Second, 100) @@ -391,58 +390,58 @@ func TestOptimisticChunkerReset(t *testing.T) { // Verify state has changed currentRowsCopied, 
currentChunksCopied, _ := chunker.Progress() - assert.Greater(t, currentRowsCopied, initialRowsCopied) - assert.Greater(t, currentChunksCopied, initialChunksCopied) - assert.NotEqual(t, initialChunkPtr.String(), chunker.chunkPtr.String()) + require.Greater(t, currentRowsCopied, initialRowsCopied) + require.Greater(t, currentChunksCopied, initialChunksCopied) + require.NotEqual(t, initialChunkPtr.String(), chunker.chunkPtr.String()) // Verify watermark exists watermark, err := chunker.GetLowWatermark() require.NoError(t, err) - assert.NotEmpty(t, watermark) + require.NotEmpty(t, watermark) // Now reset the chunker err = chunker.Reset() require.NoError(t, err) // Verify state is reset to initial values - assert.Equal(t, initialChunkPtr.String(), chunker.chunkPtr.String(), "chunkPtr should be reset to initial value") - assert.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value") - assert.Equal(t, initialFinalChunkSent, chunker.finalChunkSent, "finalChunkSent should be reset to initial value") + require.Equal(t, initialChunkPtr.String(), chunker.chunkPtr.String(), "chunkPtr should be reset to initial value") + require.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value") + require.Equal(t, initialFinalChunkSent, chunker.finalChunkSent, "finalChunkSent should be reset to initial value") // Verify progress is reset resetRowsCopied, resetChunksCopied, _ := chunker.Progress() - assert.Equal(t, initialRowsCopied, resetRowsCopied, "rowsCopied should be reset to initial value") - assert.Equal(t, initialChunksCopied, resetChunksCopied, "chunksCopied should be reset to initial value") + require.Equal(t, initialRowsCopied, resetRowsCopied, "rowsCopied should be reset to initial value") + require.Equal(t, initialChunksCopied, resetChunksCopied, "chunksCopied should be reset to initial value") // Verify watermark is cleared - assert.Nil(t, chunker.watermark, "watermark should be nil after reset") - 
assert.Empty(t, chunker.lowerBoundWatermarkMap, "lowerBoundWatermarkMap should be empty after reset") - assert.Empty(t, chunker.chunkTimingInfo, "chunkTimingInfo should be empty after reset") - assert.False(t, chunker.chunkPrefetchingEnabled, "chunkPrefetchingEnabled should be false after reset") + require.Nil(t, chunker.watermark, "watermark should be nil after reset") + require.Empty(t, chunker.lowerBoundWatermarkMap, "lowerBoundWatermarkMap should be empty after reset") + require.Empty(t, chunker.chunkTimingInfo, "chunkTimingInfo should be empty after reset") + require.False(t, chunker.chunkPrefetchingEnabled, "chunkPrefetchingEnabled should be false after reset") // Verify watermark is not ready after reset _, err = chunker.GetLowWatermark() - assert.Error(t, err) - assert.ErrorIs(t, err, ErrWatermarkNotReady) + require.Error(t, err) + require.ErrorIs(t, err, ErrWatermarkNotReady) // Verify that after reset, the chunker produces the same sequence as a fresh chunker resetChunk1, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, chunk1.String(), resetChunk1.String(), "First chunk after reset should match original first chunk") + require.Equal(t, chunk1.String(), resetChunk1.String(), "First chunk after reset should match original first chunk") resetChunk2, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, chunk2.String(), resetChunk2.String(), "Second chunk after reset should match original second chunk") + require.Equal(t, chunk2.String(), resetChunk2.String(), "Second chunk after reset should match original second chunk") // Verify KeyAboveHighWatermark behavior is reset // In the previous copy we had Next()'ed up to id=2000 // Here we have only up to 1001. 
- assert.True(t, chunker.KeyAboveHighWatermark(1500), "KeyAboveHighWatermark not reset correctly") - assert.False(t, chunker.KeyAboveHighWatermark(900), "KeyAboveHighWatermark not reset correctly") + require.True(t, chunker.KeyAboveHighWatermark(1500), "KeyAboveHighWatermark not reset correctly") + require.False(t, chunker.KeyAboveHighWatermark(900), "KeyAboveHighWatermark not reset correctly") resetChunk3, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, chunk3.String(), resetChunk3.String(), "Third chunk after reset should match original third chunk") + require.Equal(t, chunk3.String(), resetChunk3.String(), "Third chunk after reset should match original third chunk") // Test that reset works even with more complex state changes chunker.Feedback(resetChunk1, 5*time.Second, 50) // Very slow feedback to trigger panic reduction @@ -457,10 +456,10 @@ func TestOptimisticChunkerReset(t *testing.T) { require.NoError(t, err) // Verify chunk size is back to initial value - assert.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value even after dynamic changes") + require.Equal(t, initialChunkSize, chunker.chunkSize, "chunkSize should be reset to initial value even after dynamic changes") // Verify we can still get the same first chunk finalResetChunk, err := chunker.Next() require.NoError(t, err) - assert.Equal(t, chunk1.String(), finalResetChunk.String(), "First chunk after second reset should still match original") + require.Equal(t, chunk1.String(), finalResetChunk.String(), "First chunk after second reset should still match original") } diff --git a/pkg/table/chunker_test.go b/pkg/table/chunker_test.go index 4a66d59e..b851ca5a 100644 --- a/pkg/table/chunker_test.go +++ b/pkg/table/chunker_test.go @@ -5,7 +5,6 @@ import ( "testing" "github.com/block/spirit/pkg/testutils" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -31,7 +30,7 @@ func TestCompositeChunker(t *testing.T) { chunker, 
err := NewChunker(t1, ChunkerConfig{}) require.NoError(t, err) - assert.IsType(t, &chunkerComposite{}, chunker) + require.IsType(t, &chunkerComposite{}, chunker) } func TestOptimisticChunker(t *testing.T) { @@ -55,7 +54,7 @@ func TestOptimisticChunker(t *testing.T) { chunker, err := NewChunker(t1, ChunkerConfig{}) require.NoError(t, err) - assert.IsType(t, &chunkerOptimistic{}, chunker) + require.IsType(t, &chunkerOptimistic{}, chunker) } func TestNewCompositeChunkerWithKeyAndWhere(t *testing.T) { @@ -86,7 +85,7 @@ func TestNewCompositeChunkerWithKeyAndWhere(t *testing.T) { Where: "age > 50", }) require.NoError(t, err) - assert.IsType(t, &chunkerComposite{}, chunker) - assert.Equal(t, "age_idx", chunker.(*chunkerComposite).keyName) - assert.Equal(t, "age > 50", chunker.(*chunkerComposite).where) + require.IsType(t, &chunkerComposite{}, chunker) + require.Equal(t, "age_idx", chunker.(*chunkerComposite).keyName) + require.Equal(t, "age > 50", chunker.(*chunkerComposite).where) } diff --git a/pkg/table/column_mapping_test.go b/pkg/table/column_mapping_test.go index 5604ab21..b8119eb4 100644 --- a/pkg/table/column_mapping_test.go +++ b/pkg/table/column_mapping_test.go @@ -3,7 +3,7 @@ package table import ( "testing" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestColumnMappingColumns(t *testing.T) { @@ -13,17 +13,17 @@ func TestColumnMappingColumns(t *testing.T) { t1new.NonGeneratedColumns = []string{"a", "b", "c"} m := NewColumnMapping(t1, t1new, nil) src, _ := m.Columns() - assert.Equal(t, "`a`, `b`, `c`", src) + require.Equal(t, "`a`, `b`, `c`", src) t1new.NonGeneratedColumns = []string{"a", "c"} m = NewColumnMapping(t1, t1new, nil) src, _ = m.Columns() - assert.Equal(t, "`a`, `c`", src) + require.Equal(t, "`a`, `c`", src) t1new.NonGeneratedColumns = []string{"a", "c", "d"} m = NewColumnMapping(t1, t1new, nil) src, _ = m.Columns() - assert.Equal(t, "`a`, `c`", src) + require.Equal(t, "`a`, `c`", src) } func 
TestColumnMappingColumnsSlice(t *testing.T) { @@ -33,17 +33,17 @@ func TestColumnMappingColumnsSlice(t *testing.T) { t1new.NonGeneratedColumns = []string{"a", "b", "c"} m := NewColumnMapping(t1, t1new, nil) cols, _ := m.ColumnsSlice() - assert.Equal(t, []string{"a", "b", "c"}, cols) + require.Equal(t, []string{"a", "b", "c"}, cols) t1new.NonGeneratedColumns = []string{"a", "c"} m = NewColumnMapping(t1, t1new, nil) cols, _ = m.ColumnsSlice() - assert.Equal(t, []string{"a", "c"}, cols) + require.Equal(t, []string{"a", "c"}, cols) t1new.NonGeneratedColumns = []string{"a", "c", "d"} m = NewColumnMapping(t1, t1new, nil) cols, _ = m.ColumnsSlice() - assert.Equal(t, []string{"a", "c"}, cols) + require.Equal(t, []string{"a", "c"}, cols) } func TestColumnMappingWithRenames(t *testing.T) { @@ -57,12 +57,12 @@ func TestColumnMappingWithRenames(t *testing.T) { m := NewColumnMapping(t1, t1new, renames) srcStr, tgtStr := m.Columns() - assert.Equal(t, "`a`, `b`, `c`", srcStr) - assert.Equal(t, "`x`, `b`, `c`", tgtStr) + require.Equal(t, "`a`, `b`, `c`", srcStr) + require.Equal(t, "`x`, `b`, `c`", tgtStr) srcSlice, tgtSlice := m.ColumnsSlice() - assert.Equal(t, []string{"a", "b", "c"}, srcSlice) - assert.Equal(t, []string{"x", "b", "c"}, tgtSlice) + require.Equal(t, []string{"a", "b", "c"}, srcSlice) + require.Equal(t, []string{"x", "b", "c"}, tgtSlice) // Multiple renames: a→x, c→z t1.NonGeneratedColumns = []string{"a", "b", "c"} @@ -71,8 +71,8 @@ func TestColumnMappingWithRenames(t *testing.T) { m = NewColumnMapping(t1, t1new, renames) srcStr, tgtStr = m.Columns() - assert.Equal(t, "`a`, `b`, `c`", srcStr) - assert.Equal(t, "`x`, `b`, `z`", tgtStr) + require.Equal(t, "`a`, `b`, `c`", srcStr) + require.Equal(t, "`x`, `b`, `z`", tgtStr) // No renames (nil map) - should behave like original t1.NonGeneratedColumns = []string{"a", "b", "c"} @@ -80,14 +80,14 @@ func TestColumnMappingWithRenames(t *testing.T) { m = NewColumnMapping(t1, t1new, nil) srcStr, tgtStr = m.Columns() - 
assert.Equal(t, "`a`, `b`, `c`", srcStr) - assert.Equal(t, "`a`, `b`, `c`", tgtStr) + require.Equal(t, "`a`, `b`, `c`", srcStr) + require.Equal(t, "`a`, `b`, `c`", tgtStr) // Empty renames map - should behave like original m = NewColumnMapping(t1, t1new, map[string]string{}) srcStr, tgtStr = m.Columns() - assert.Equal(t, "`a`, `b`, `c`", srcStr) - assert.Equal(t, "`a`, `b`, `c`", tgtStr) + require.Equal(t, "`a`, `b`, `c`", srcStr) + require.Equal(t, "`a`, `b`, `c`", tgtStr) // Rename with column added in new table (d is new, not in source) t1.NonGeneratedColumns = []string{"a", "b", "c"} @@ -96,8 +96,8 @@ func TestColumnMappingWithRenames(t *testing.T) { m = NewColumnMapping(t1, t1new, renames) srcSlice, tgtSlice = m.ColumnsSlice() - assert.Equal(t, []string{"a", "b", "c"}, srcSlice) - assert.Equal(t, []string{"x", "b", "c"}, tgtSlice) + require.Equal(t, []string{"a", "b", "c"}, srcSlice) + require.Equal(t, []string{"x", "b", "c"}, tgtSlice) // Rename with column dropped from new table (c dropped) t1.NonGeneratedColumns = []string{"a", "b", "c"} @@ -106,8 +106,8 @@ func TestColumnMappingWithRenames(t *testing.T) { m = NewColumnMapping(t1, t1new, renames) srcSlice, tgtSlice = m.ColumnsSlice() - assert.Equal(t, []string{"a", "b"}, srcSlice) - assert.Equal(t, []string{"x", "b"}, tgtSlice) + require.Equal(t, []string{"a", "b"}, srcSlice) + require.Equal(t, []string{"x", "b"}, tgtSlice) // Dangerous pattern: RENAME COLUMN c1 TO n1, ADD COLUMN c1 varchar(100) // The old name "c1" now exists in BOTH the source table AND the target table @@ -121,8 +121,8 @@ func TestColumnMappingWithRenames(t *testing.T) { srcSlice, tgtSlice = m.ColumnsSlice() // Source c1 must map to target n1 (via rename), NOT to target c1 (identity match). // The new target c1 has no source counterpart — it should get its DEFAULT value. 
- assert.Equal(t, []string{"id", "c1"}, srcSlice) - assert.Equal(t, []string{"id", "n1"}, tgtSlice) + require.Equal(t, []string{"id", "c1"}, srcSlice) + require.Equal(t, []string{"id", "n1"}, tgtSlice) // Reverse dangerous pattern: RENAME COLUMN a TO c (where c already existed) // Source: [id, a, c], Target: [id, c] where a→c is the rename @@ -135,8 +135,8 @@ func TestColumnMappingWithRenames(t *testing.T) { m = NewColumnMapping(t1, t1new, renames) srcSlice, tgtSlice = m.ColumnsSlice() // source.a → target.c (rename), source.c is excluded (target.c is claimed) - assert.Equal(t, []string{"id", "a"}, srcSlice) - assert.Equal(t, []string{"id", "c"}, tgtSlice) + require.Equal(t, []string{"id", "a"}, srcSlice) + require.Equal(t, []string{"id", "c"}, tgtSlice) } func TestColumnMappingTargetNil(t *testing.T) { @@ -146,7 +146,7 @@ func TestColumnMappingTargetNil(t *testing.T) { m := NewColumnMapping(t1, nil, nil) src, tgt := m.Columns() - assert.Equal(t, "`a`, `b`, `c`", src) - assert.Equal(t, "`a`, `b`, `c`", tgt) - assert.Equal(t, t1, m.TargetTable()) + require.Equal(t, "`a`, `b`, `c`", src) + require.Equal(t, "`a`, `b`, `c`", tgt) + require.Equal(t, t1, m.TargetTable()) } diff --git a/pkg/table/datum_test.go b/pkg/table/datum_test.go index e57270a4..640ba8ab 100644 --- a/pkg/table/datum_test.go +++ b/pkg/table/datum_test.go @@ -7,7 +7,6 @@ import ( "testing" "time" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -17,27 +16,27 @@ func TestDatum(t *testing.T) { unsigned, err := NewDatum(uint(1), unsignedType) require.NoError(t, err) - assert.Equal(t, "1", signed.String()) - assert.Equal(t, "1", unsigned.String()) + require.Equal(t, "1", signed.String()) + require.Equal(t, "1", unsigned.String()) - assert.Equal(t, strconv.Itoa(math.MinInt64), signed.MinValue().String()) - assert.Equal(t, strconv.Itoa(math.MaxInt64), signed.MaxValue().String()) - assert.Equal(t, "0", unsigned.MinValue().String()) - assert.Equal(t, "18446744073709551615", 
unsigned.MaxValue().String()) + require.Equal(t, strconv.Itoa(math.MinInt64), signed.MinValue().String()) + require.Equal(t, strconv.Itoa(math.MaxInt64), signed.MaxValue().String()) + require.Equal(t, "0", unsigned.MinValue().String()) + require.Equal(t, "18446744073709551615", unsigned.MaxValue().String()) newsigned := signed.Add(10) newunsigned := unsigned.Add(10) - assert.Equal(t, "11", newsigned.String()) - assert.Equal(t, "11", newunsigned.String()) - - assert.True(t, newsigned.GreaterThanOrEqual(signed)) - assert.True(t, newunsigned.GreaterThanOrEqual(unsigned)) - assert.True(t, newsigned.GreaterThan(signed)) - assert.True(t, newunsigned.GreaterThan(unsigned)) - assert.True(t, signed.LessThan(newsigned)) - assert.True(t, unsigned.LessThan(newunsigned)) - assert.True(t, signed.LessThanOrEqual(newsigned)) - assert.True(t, unsigned.LessThanOrEqual(newunsigned)) + require.Equal(t, "11", newsigned.String()) + require.Equal(t, "11", newunsigned.String()) + + require.True(t, newsigned.GreaterThanOrEqual(signed)) + require.True(t, newunsigned.GreaterThanOrEqual(unsigned)) + require.True(t, newsigned.GreaterThan(signed)) + require.True(t, newunsigned.GreaterThan(unsigned)) + require.True(t, signed.LessThan(newsigned)) + require.True(t, unsigned.LessThan(newunsigned)) + require.True(t, signed.LessThanOrEqual(newsigned)) + require.True(t, unsigned.LessThanOrEqual(newunsigned)) // Test that add operations do not overflow. i.e. // We initialize the values to max-10 of the range, but then add 100 to each. 
@@ -46,48 +45,48 @@ func TestDatum(t *testing.T) { require.NoError(t, err) overflowUnsigned, err := NewDatum(uint64(math.MaxUint64)-10, unsignedType) require.NoError(t, err) - assert.Equal(t, strconv.Itoa(math.MaxInt64), overflowSigned.Add(100).String()) - assert.Equal(t, "18446744073709551615", overflowUnsigned.Add(100).String()) + require.Equal(t, strconv.Itoa(math.MaxInt64), overflowSigned.Add(100).String()) + require.Equal(t, "18446744073709551615", overflowUnsigned.Add(100).String()) // Test unsigned with signed input unsigned, err = NewDatum(int(1), unsignedType) require.NoError(t, err) - assert.Equal(t, "1", unsigned.String()) + require.Equal(t, "1", unsigned.String()) // Test binary type. binary, err := NewDatum("0", binaryType) require.NoError(t, err) - assert.Equal(t, `"0"`, binary.String()) + require.Equal(t, `"0"`, binary.String()) // Test string comparisons (VARCHAR/TEXT) str1, err := NewDatumFromValue("apple", "VARCHAR(255)") require.NoError(t, err) str2, err := NewDatumFromValue("banana", "VARCHAR(255)") require.NoError(t, err) - assert.True(t, str2.GreaterThan(str1)) // "banana" > "apple" - assert.True(t, str2.GreaterThanOrEqual(str1)) // "banana" >= "apple" - assert.True(t, str1.LessThan(str2)) // "apple" < "banana" - assert.True(t, str1.LessThanOrEqual(str2)) // "apple" <= "banana" + require.True(t, str2.GreaterThan(str1)) // "banana" > "apple" + require.True(t, str2.GreaterThanOrEqual(str1)) // "banana" >= "apple" + require.True(t, str1.LessThan(str2)) // "apple" < "banana" + require.True(t, str1.LessThanOrEqual(str2)) // "apple" <= "banana" str3, err := NewDatumFromValue("apple", "VARCHAR(255)") require.NoError(t, err) - assert.True(t, str1.GreaterThanOrEqual(str3)) // equal values - assert.True(t, str1.LessThanOrEqual(str3)) // equal values - assert.False(t, str1.GreaterThan(str3)) // equal values - assert.False(t, str1.LessThan(str3)) // equal values + require.True(t, str1.GreaterThanOrEqual(str3)) // equal values + require.True(t, 
str1.LessThanOrEqual(str3)) // equal values + require.False(t, str1.GreaterThan(str3)) // equal values + require.False(t, str1.LessThan(str3)) // equal values // Test temporal comparisons (DATETIME) datetime1, err := NewDatumFromValue("2024-01-01 10:00:00", "DATETIME") require.NoError(t, err) datetime2, err := NewDatumFromValue("2024-01-02 10:00:00", "DATETIME") require.NoError(t, err) - assert.True(t, datetime2.GreaterThan(datetime1)) - assert.True(t, datetime1.LessThan(datetime2)) + require.True(t, datetime2.GreaterThan(datetime1)) + require.True(t, datetime1.LessThan(datetime2)) // Test that comparing different types panics - assert.Panics(t, func() { + require.Panics(t, func() { signed.GreaterThan(unsigned) }) - assert.Panics(t, func() { + require.Panics(t, func() { signed.LessThan(str1) }) } @@ -100,8 +99,8 @@ func TestDatumInt32ToUnsigned(t *testing.T) { positiveInt32 := int32(123456) d1, err := NewDatum(positiveInt32, unsignedType) require.NoError(t, err) - assert.Equal(t, uint64(123456), d1.Val) - assert.Equal(t, "123456", d1.String()) + require.Equal(t, uint64(123456), d1.Val) + require.Equal(t, "123456", d1.String()) // Negative int32 value (large unsigned value) // -840443956 as int32 = 3454523340 as uint32 @@ -109,32 +108,32 @@ func TestDatumInt32ToUnsigned(t *testing.T) { d2, err := NewDatum(negativeInt32, unsignedType) require.NoError(t, err) expectedUint32 := uint32(negativeInt32) // Reinterpret bits as unsigned - assert.Equal(t, uint64(expectedUint32), d2.Val) - assert.Equal(t, uint64(3454523340), d2.Val) - assert.Equal(t, "3454523340", d2.String()) + require.Equal(t, uint64(expectedUint32), d2.Val) + require.Equal(t, uint64(3454523340), d2.Val) + require.Equal(t, "3454523340", d2.String()) // Edge case: int32 max value maxInt32 := int32(math.MaxInt32) d3, err := NewDatum(maxInt32, unsignedType) require.NoError(t, err) - assert.Equal(t, uint64(math.MaxInt32), d3.Val) - assert.Equal(t, "2147483647", d3.String()) + require.Equal(t, 
uint64(math.MaxInt32), d3.Val) + require.Equal(t, "2147483647", d3.String()) // Edge case: int32 min value (becomes max uint32 range) minInt32 := int32(math.MinInt32) d4, err := NewDatum(minInt32, unsignedType) require.NoError(t, err) expectedUint32Min := uint32(minInt32) - assert.Equal(t, uint64(expectedUint32Min), d4.Val) - assert.Equal(t, uint64(2147483648), d4.Val) - assert.Equal(t, "2147483648", d4.String()) + require.Equal(t, uint64(expectedUint32Min), d4.Val) + require.Equal(t, uint64(2147483648), d4.Val) + require.Equal(t, "2147483648", d4.String()) // Test uint32 values pass through correctly positiveUint32 := uint32(3454523340) d5, err := NewDatum(positiveUint32, unsignedType) require.NoError(t, err) - assert.Equal(t, uint64(3454523340), d5.Val) - assert.Equal(t, "3454523340", d5.String()) + require.Equal(t, uint64(3454523340), d5.Val) + require.Equal(t, "3454523340", d5.String()) } func TestDatumInt64ToUnsigned(t *testing.T) { @@ -145,22 +144,22 @@ func TestDatumInt64ToUnsigned(t *testing.T) { positiveInt64 := int64(123456789012345) d1, err := NewDatum(positiveInt64, unsignedType) require.NoError(t, err) - assert.Equal(t, uint64(123456789012345), d1.Val) - assert.Equal(t, "123456789012345", d1.String()) + require.Equal(t, uint64(123456789012345), d1.Val) + require.Equal(t, "123456789012345", d1.String()) // Negative int64 value (large unsigned value) // -1 as int64 = max uint64 negativeInt64 := int64(-1) d2, err := NewDatum(negativeInt64, unsignedType) require.NoError(t, err) - assert.Equal(t, uint64(negativeInt64), d2.Val) - assert.Equal(t, uint64(math.MaxUint64), d2.Val) + require.Equal(t, uint64(negativeInt64), d2.Val) + require.Equal(t, uint64(math.MaxUint64), d2.Val) // Edge case: int64 max value maxInt64 := int64(math.MaxInt64) d3, err := NewDatum(maxInt64, unsignedType) require.NoError(t, err) - assert.Equal(t, uint64(math.MaxInt64), d3.Val) + require.Equal(t, uint64(math.MaxInt64), d3.Val) } func TestKeyBelowLowWatermarkWithNegativeInt32(t 
*testing.T) { @@ -196,9 +195,9 @@ func TestKeyBelowLowWatermarkWithNegativeInt32(t *testing.T) { // KeyBelowLowWatermark should correctly handle int32 values for unsigned columns // The int32 will be reinterpreted as uint32, giving us 4294954951 // Since 4294954951 > 100 (watermark), it should return false - assert.NotPanics(t, func() { + require.NotPanics(t, func() { result := chk.KeyBelowLowWatermark(originalVal) - assert.False(t, result, "4294954951 should not be below watermark of 100") + require.False(t, result, "4294954951 should not be below watermark of 100") }, "KeyBelowLowWatermark should handle int32 values for unsigned columns") } @@ -208,120 +207,120 @@ func TestNewDatumFromValue(t *testing.T) { // Test NULL values d, err := NewDatumFromValue(nil, "INT") require.NoError(t, err) - assert.Equal(t, "NULL", d.String()) + require.Equal(t, "NULL", d.String()) d, err = NewDatumFromValue(nil, "VARCHAR(255)") require.NoError(t, err) - assert.Equal(t, "NULL", d.String()) + require.Equal(t, "NULL", d.String()) d, err = NewDatumFromValue(nil, "JSON") require.NoError(t, err) - assert.Equal(t, "NULL", d.String()) + require.Equal(t, "NULL", d.String()) // Test integer types intBytes := []byte("123") d, err = NewDatumFromValue(intBytes, "INT") require.NoError(t, err) - assert.Equal(t, "123", d.String()) + require.Equal(t, "123", d.String()) d, err = NewDatumFromValue(intBytes, "BIGINT") require.NoError(t, err) - assert.Equal(t, "123", d.String()) + require.Equal(t, "123", d.String()) d, err = NewDatumFromValue(456, "INT") require.NoError(t, err) - assert.Equal(t, "456", d.String()) + require.Equal(t, "456", d.String()) d, err = NewDatumFromValue(intBytes, "INT(11)") require.NoError(t, err) - assert.Equal(t, "123", d.String()) + require.Equal(t, "123", d.String()) // Test VARCHAR/TEXT types textBytes := []byte("hello world") d, err = NewDatumFromValue(textBytes, "VARCHAR(255)") require.NoError(t, err) - assert.Equal(t, "\"hello world\"", d.String()) + require.Equal(t, 
"\"hello world\"", d.String()) d, err = NewDatumFromValue("hello world", "TEXT") require.NoError(t, err) - assert.Equal(t, "\"hello world\"", d.String()) + require.Equal(t, "\"hello world\"", d.String()) d, err = NewDatumFromValue(textBytes, "CHAR(50)") require.NoError(t, err) - assert.Equal(t, "\"hello world\"", d.String()) + require.Equal(t, "\"hello world\"", d.String()) // Test with quotes that need escaping textWithQuotes := []byte("hello 'world'") d, err = NewDatumFromValue(textWithQuotes, "VARCHAR(255)") require.NoError(t, err) - assert.Equal(t, "\"hello \\'world\\'\"", d.String()) + require.Equal(t, "\"hello \\'world\\'\"", d.String()) // Test DATETIME/TIMESTAMP types timeBytes := []byte("2023-01-01 12:00:00") d, err = NewDatumFromValue(timeBytes, "DATETIME") require.NoError(t, err) - assert.Equal(t, "\"2023-01-01 12:00:00\"", d.String()) + require.Equal(t, "\"2023-01-01 12:00:00\"", d.String()) d, err = NewDatumFromValue(timeBytes, "TIMESTAMP") require.NoError(t, err) - assert.Equal(t, "\"2023-01-01 12:00:00\"", d.String()) + require.Equal(t, "\"2023-01-01 12:00:00\"", d.String()) // Test float types floatBytes := []byte("123.45") d, err = NewDatumFromValue(floatBytes, "FLOAT") require.NoError(t, err) - assert.Equal(t, "\"123.45\"", d.String()) + require.Equal(t, "\"123.45\"", d.String()) d, err = NewDatumFromValue(floatBytes, "DOUBLE") require.NoError(t, err) - assert.Equal(t, "\"123.45\"", d.String()) + require.Equal(t, "\"123.45\"", d.String()) // Test VARBINARY/BLOB types - should use hex encoding binaryData := []byte{0x01, 0x02, 0x03} d, err = NewDatumFromValue(binaryData, "VARBINARY(255)") require.NoError(t, err) - assert.Equal(t, "0x010203", d.String()) + require.Equal(t, "0x010203", d.String()) // Test BLOB types blobData := []byte{0xFF, 0xFE, 0xFD} d, err = NewDatumFromValue(blobData, "BLOB") require.NoError(t, err) - assert.Equal(t, "0xfffefd", d.String()) + require.Equal(t, "0xfffefd", d.String()) // Test empty binary values - must NOT serialize 
as "0x" because // MySQL parses that as an identifier, not a hex literal. d, err = NewDatumFromValue([]byte{0x00}, "MEDIUMBLOB") require.NoError(t, err) - assert.Equal(t, "0x00", d.String()) + require.Equal(t, "0x00", d.String()) d, err = NewDatumFromValue([]byte{}, "VARBINARY(255)") require.NoError(t, err) - assert.Equal(t, "0x00", d.String()) + require.Equal(t, "0x00", d.String()) d, err = NewDatumFromValue("", "BLOB") require.NoError(t, err) - assert.Equal(t, "0x00", d.String()) + require.Equal(t, "0x00", d.String()) d, err = NewDatumFromValue(nil, "BLOB") require.NoError(t, err) - assert.Equal(t, "NULL", d.String()) + require.Equal(t, "NULL", d.String()) // Test JSON types - should be quoted like text jsonBytes := []byte(`[1, 2, 3]`) d, err = NewDatumFromValue(jsonBytes, "JSON") require.NoError(t, err) - assert.Equal(t, "\"[1, 2, 3]\"", d.String()) + require.Equal(t, "\"[1, 2, 3]\"", d.String()) d, err = NewDatumFromValue("test", "json") require.NoError(t, err) - assert.Equal(t, "\"test\"", d.String()) + require.Equal(t, "\"test\"", d.String()) // Test case insensitivity d, err = NewDatumFromValue(intBytes, "int") require.NoError(t, err) - assert.Equal(t, "123", d.String()) + require.Equal(t, "123", d.String()) d, err = NewDatumFromValue(intBytes, "Int") require.NoError(t, err) - assert.Equal(t, "123", d.String()) + require.Equal(t, "123", d.String()) d, err = NewDatumFromValue([]byte("hello"), "varchar(100)") require.NoError(t, err) - assert.Equal(t, "\"hello\"", d.String()) + require.Equal(t, "\"hello\"", d.String()) // Test unknown/default types unknownBytes := []byte("unknown data") d, err = NewDatumFromValue(unknownBytes, "UNKNOWN_TYPE") require.NoError(t, err) - assert.Equal(t, "\"unknown data\"", d.String()) + require.Equal(t, "\"unknown data\"", d.String()) d, err = NewDatumFromValue("test", "CUSTOM_TYPE") require.NoError(t, err) - assert.Equal(t, "\"test\"", d.String()) + require.Equal(t, "\"test\"", d.String()) } // TestNewDatumFromValueBinaryString 
tests that binary strings are hex-encoded @@ -330,7 +329,7 @@ func TestNewDatumFromValueBinaryString(t *testing.T) { binaryData := []byte{0x00, 0x01, 0x02, 0xFF} d, err := NewDatumFromValue(binaryData, "VARBINARY(255)") require.NoError(t, err) - assert.Equal(t, "0x000102ff", d.String()) + require.Equal(t, "0x000102ff", d.String()) // Test string that starts with 0x - for VARCHAR (unknownType), this is just a normal string. // It should NOT be hex-encoded because datumValFromString only hex-decodes for binaryType, @@ -338,11 +337,11 @@ func TestNewDatumFromValueBinaryString(t *testing.T) { jsonLikeString := "0x123" d, err = NewDatumFromValue(jsonLikeString, "VARCHAR(255)") require.NoError(t, err) - assert.Equal(t, "\"0x123\"", d.String()) + require.Equal(t, "\"0x123\"", d.String()) // Test normal UTF-8 string normalString := "hello" d, err = NewDatumFromValue(normalString, "VARCHAR(255)") require.NoError(t, err) - assert.Equal(t, "\"hello\"", d.String()) + require.Equal(t, "\"hello\"", d.String()) } diff --git a/pkg/table/sharding_test.go b/pkg/table/sharding_test.go index 393b0b67..37bca98b 100644 --- a/pkg/table/sharding_test.go +++ b/pkg/table/sharding_test.go @@ -4,7 +4,6 @@ import ( "fmt" "testing" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -56,13 +55,13 @@ func TestShardingProviderInterface(t *testing.T) { column, hashFunc, err := provider.GetShardingMetadata("testdb", "users") require.NoError(t, err) - assert.Equal(t, "user_id", column) - assert.NotNil(t, hashFunc) + require.Equal(t, "user_id", column) + require.NotNil(t, hashFunc) // Test the hash function hash, err := hashFunc(123) require.NoError(t, err) - assert.Equal(t, uint64(123), hash) + require.Equal(t, uint64(123), hash) }) t.Run("returns empty for no vindex", func(t *testing.T) { @@ -70,8 +69,8 @@ func TestShardingProviderInterface(t *testing.T) { column, hashFunc, err := provider.GetShardingMetadata("testdb", "config") require.NoError(t, err) - 
assert.Empty(t, column) - assert.Nil(t, hashFunc) + require.Empty(t, column) + require.Nil(t, hashFunc) }) t.Run("returns error", func(t *testing.T) { @@ -81,7 +80,7 @@ func TestShardingProviderInterface(t *testing.T) { _, _, err := provider.GetShardingMetadata("testdb", "users") require.Error(t, err) - assert.Contains(t, err.Error(), "test error") + require.Contains(t, err.Error(), "test error") }) } @@ -102,17 +101,17 @@ func TestHashFunc(t *testing.T) { // Test with int hash, err := hashFunc(42) require.NoError(t, err) - assert.Equal(t, uint64(84), hash) + require.Equal(t, uint64(84), hash) // Test with string hash, err = hashFunc("hello") require.NoError(t, err) - assert.Equal(t, uint64(5), hash) + require.Equal(t, uint64(5), hash) // Test with unsupported type _, err = hashFunc(3.14) require.Error(t, err) - assert.Contains(t, err.Error(), "unsupported type") + require.Contains(t, err.Error(), "unsupported type") }) } @@ -167,29 +166,29 @@ func TestMultiTableShardingProvider(t *testing.T) { t.Run("returns correct config for users table", func(t *testing.T) { column, hashFunc, err := provider.GetShardingMetadata("testdb", "users") require.NoError(t, err) - assert.Equal(t, "user_id", column) - assert.NotNil(t, hashFunc) + require.Equal(t, "user_id", column) + require.NotNil(t, hashFunc) hash, err := hashFunc(123) require.NoError(t, err) - assert.Equal(t, uint64(1), hash) + require.Equal(t, uint64(1), hash) }) t.Run("returns correct config for orders table", func(t *testing.T) { column, hashFunc, err := provider.GetShardingMetadata("testdb", "orders") require.NoError(t, err) - assert.Equal(t, "order_id", column) - assert.NotNil(t, hashFunc) + require.Equal(t, "order_id", column) + require.NotNil(t, hashFunc) hash, err := hashFunc(456) require.NoError(t, err) - assert.Equal(t, uint64(2), hash) + require.Equal(t, uint64(2), hash) }) t.Run("returns empty for unconfigured table", func(t *testing.T) { column, hashFunc, err := provider.GetShardingMetadata("testdb", 
"config") require.NoError(t, err) - assert.Empty(t, column) - assert.Nil(t, hashFunc) + require.Empty(t, column) + require.Nil(t, hashFunc) }) } diff --git a/pkg/table/table_schema_test.go b/pkg/table/table_schema_test.go index c6b280c9..6bb03da2 100644 --- a/pkg/table/table_schema_test.go +++ b/pkg/table/table_schema_test.go @@ -6,7 +6,6 @@ import ( "github.com/block/spirit/pkg/testutils" _ "github.com/go-sql-driver/mysql" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -31,7 +30,7 @@ func TestLoadSchemaFromDB(t *testing.T) { tables, err := LoadSchemaFromDB(t.Context(), db) require.NoError(t, err) - assert.Len(t, tables, 2) + require.Len(t, tables, 2) // Build a map for easier assertions byName := make(map[string]TableSchema) @@ -39,11 +38,11 @@ func TestLoadSchemaFromDB(t *testing.T) { byName[ts.Name] = ts } - assert.Contains(t, byName, "users") - assert.Contains(t, byName, "orders") - assert.Contains(t, byName["users"].Schema, "CREATE TABLE") - assert.Contains(t, byName["users"].Schema, "`name` varchar(100)") - assert.Contains(t, byName["orders"].Schema, "`amount` decimal(10,2)") + require.Contains(t, byName, "users") + require.Contains(t, byName, "orders") + require.Contains(t, byName["users"].Schema, "CREATE TABLE") + require.Contains(t, byName["users"].Schema, "`name` varchar(100)") + require.Contains(t, byName["orders"].Schema, "`amount` decimal(10,2)") } func TestLoadSchemaFromDB_EmptyDatabase(t *testing.T) { @@ -55,7 +54,7 @@ func TestLoadSchemaFromDB_EmptyDatabase(t *testing.T) { tables, err := LoadSchemaFromDB(t.Context(), db) require.NoError(t, err) - assert.Empty(t, tables) + require.Empty(t, tables) } func TestLoadSchemaFromDB_PreservesAutoIncrement(t *testing.T) { @@ -74,8 +73,8 @@ func TestLoadSchemaFromDB_PreservesAutoIncrement(t *testing.T) { tables, err := LoadSchemaFromDB(t.Context(), db) require.NoError(t, err) require.Len(t, tables, 1) - assert.Equal(t, "counters", tables[0].Name) - assert.Contains(t, 
tables[0].Schema, "AUTO_INCREMENT=") + require.Equal(t, "counters", tables[0].Name) + require.Contains(t, tables[0].Schema, "AUTO_INCREMENT=") } func TestLoadSchemaFromDB_FilterUnderscoreTables(t *testing.T) { @@ -91,13 +90,13 @@ func TestLoadSchemaFromDB_FilterUnderscoreTables(t *testing.T) { // Without filter: all 3 tables returned. all, err := LoadSchemaFromDB(t.Context(), db) require.NoError(t, err) - assert.Len(t, all, 3) + require.Len(t, all, 3) // With underscore filter: only "users" returned. filtered, err := LoadSchemaFromDB(t.Context(), db, WithoutUnderscoreTables) require.NoError(t, err) require.Len(t, filtered, 1) - assert.Equal(t, "users", filtered[0].Name) + require.Equal(t, "users", filtered[0].Name) } func TestLoadSchemaFromDB_FilterArchiveTables(t *testing.T) { @@ -114,13 +113,13 @@ func TestLoadSchemaFromDB_FilterArchiveTables(t *testing.T) { // Without filter: all 4 tables returned. all, err := LoadSchemaFromDB(t.Context(), db) require.NoError(t, err) - assert.Len(t, all, 4) + require.Len(t, all, 4) // With archive filter: only "users" returned. filtered, err := LoadSchemaFromDB(t.Context(), db, WithoutArchiveTables) require.NoError(t, err) require.Len(t, filtered, 1) - assert.Equal(t, "users", filtered[0].Name) + require.Equal(t, "users", filtered[0].Name) } func TestLoadSchemaFromDB_StripAutoIncrement(t *testing.T) { @@ -137,9 +136,9 @@ func TestLoadSchemaFromDB_StripAutoIncrement(t *testing.T) { tables, err := LoadSchemaFromDB(t.Context(), db, WithStrippedAutoIncrement) require.NoError(t, err) require.Len(t, tables, 1) - assert.NotContains(t, tables[0].Schema, "AUTO_INCREMENT=") + require.NotContains(t, tables[0].Schema, "AUTO_INCREMENT=") // The column-level AUTO_INCREMENT keyword should still be present. 
- assert.Contains(t, tables[0].Schema, "AUTO_INCREMENT") + require.Contains(t, tables[0].Schema, "AUTO_INCREMENT") } func TestLoadSchemaFromDB_CombinedFilters(t *testing.T) { @@ -162,6 +161,6 @@ func TestLoadSchemaFromDB_CombinedFilters(t *testing.T) { ) require.NoError(t, err) require.Len(t, filtered, 1) - assert.Equal(t, "users", filtered[0].Name) - assert.NotContains(t, filtered[0].Schema, "AUTO_INCREMENT=") + require.Equal(t, "users", filtered[0].Name) + require.NotContains(t, filtered[0].Schema, "AUTO_INCREMENT=") } diff --git a/pkg/table/tableinfo_test.go b/pkg/table/tableinfo_test.go index 7484169c..fdb1b7fb 100644 --- a/pkg/table/tableinfo_test.go +++ b/pkg/table/tableinfo_test.go @@ -8,7 +8,6 @@ import ( "github.com/block/spirit/pkg/testutils" _ "github.com/go-sql-driver/mysql" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.uber.org/goleak" ) @@ -55,7 +54,7 @@ func TestCallingNextChunkWithoutOpen(t *testing.T) { require.NoError(t, err) _, err = chunker.Next() - assert.Error(t, err) + require.Error(t, err) require.NoError(t, chunker.Open()) _, err = chunker.Next() @@ -85,26 +84,26 @@ func TestDiscovery(t *testing.T) { t1 := NewTableInfo(db, "test", "discoveryt1") require.NoError(t, t1.SetInfo(t.Context())) - assert.Equal(t, "discoveryt1", t1.TableName) - assert.Equal(t, "test", t1.SchemaName) - assert.Equal(t, "id", t1.KeyColumns[0]) + require.Equal(t, "discoveryt1", t1.TableName) + require.Equal(t, "test", t1.SchemaName) + require.Equal(t, "id", t1.KeyColumns[0]) // normalize for mysql 5.7 and 8.0 - assert.Equal(t, "int", removeWidth(t1.columnsMySQLTps["id"])) + require.Equal(t, "int", removeWidth(t1.columnsMySQLTps["id"])) castID, err := t1.wrapCastType("id") require.NoError(t, err) - assert.Equal(t, "CAST(`id` AS signed)", castID) + require.Equal(t, "CAST(`id` AS signed)", castID) castName, err := t1.wrapCastType("name") require.NoError(t, err) - assert.Equal(t, "CAST(`name` AS char CHARACTER SET utf8mb4)", castName) + 
require.Equal(t, "CAST(`name` AS char CHARACTER SET utf8mb4)", castName) - assert.Equal(t, "1", t1.minValue.String()) - assert.Equal(t, "3", t1.maxValue.String()) + require.Equal(t, "1", t1.minValue.String()) + require.Equal(t, "3", t1.maxValue.String()) // Can't check estimated rows (depends on MySQL version etc) - assert.Equal(t, []string{"int"}, t1.keyColumnsMySQLTp) - assert.True(t, t1.KeyIsAutoInc) - assert.Len(t, t1.Columns, 2) + require.Equal(t, []string{"int"}, t1.keyColumnsMySQLTp) + require.True(t, t1.KeyIsAutoInc) + require.Len(t, t1.Columns, 2) } func TestDiscoveryUInt(t *testing.T) { @@ -128,17 +127,17 @@ func TestDiscoveryUInt(t *testing.T) { t1 := NewTableInfo(db, "test", "discoveryuintt1") require.NoError(t, t1.SetInfo(t.Context())) - assert.Equal(t, "discoveryuintt1", t1.TableName) - assert.Equal(t, "test", t1.SchemaName) - assert.Equal(t, "id", t1.KeyColumns[0]) + require.Equal(t, "discoveryuintt1", t1.TableName) + require.Equal(t, "test", t1.SchemaName) + require.Equal(t, "id", t1.KeyColumns[0]) - assert.Equal(t, "1", t1.minValue.String()) - assert.Equal(t, "3", t1.maxValue.String()) + require.Equal(t, "1", t1.minValue.String()) + require.Equal(t, "3", t1.maxValue.String()) // Can't check estimated rows (depends on MySQL version etc) - assert.Equal(t, []string{"int unsigned"}, t1.keyColumnsMySQLTp) - assert.True(t, t1.KeyIsAutoInc) - assert.Len(t, t1.Columns, 2) + require.Equal(t, []string{"int unsigned"}, t1.keyColumnsMySQLTp) + require.True(t, t1.KeyIsAutoInc) + require.Len(t, t1.Columns, 2) } func TestDiscoveryNoKeyColumnsOrNoTable(t *testing.T) { @@ -159,10 +158,10 @@ func TestDiscoveryNoKeyColumnsOrNoTable(t *testing.T) { }() t1 := NewTableInfo(db, "test", "discoverynokeyst1") - assert.ErrorContains(t, t1.SetInfo(t.Context()), "no primary key found") + require.ErrorContains(t, t1.SetInfo(t.Context()), "no primary key found") t2 := NewTableInfo(db, "test", "t2fdsfds") - assert.ErrorContains(t, t2.SetInfo(t.Context()), "table test.t2fdsfds 
does not exist") + require.ErrorContains(t, t2.SetInfo(t.Context()), "table test.t2fdsfds does not exist") } func TestDiscoveryBalancesTable(t *testing.T) { @@ -199,18 +198,18 @@ func TestDiscoveryBalancesTable(t *testing.T) { t1 := NewTableInfo(db, "test", "balances") require.NoError(t, t1.SetInfo(t.Context())) - assert.True(t, t1.KeyIsAutoInc) - assert.Equal(t, []string{"bigint"}, t1.keyColumnsMySQLTp) - assert.Equal(t, []string{"id"}, t1.KeyColumns) - assert.Equal(t, "0", t1.minValue.String()) - assert.Equal(t, "0", t1.maxValue.String()) + require.True(t, t1.KeyIsAutoInc) + require.Equal(t, []string{"bigint"}, t1.keyColumnsMySQLTp) + require.Equal(t, []string{"id"}, t1.KeyColumns) + require.Equal(t, "0", t1.minValue.String()) + require.Equal(t, "0", t1.maxValue.String()) chunker, err := NewChunker(t1, ChunkerConfig{TargetChunkTime: 100}) require.NoError(t, err) require.NoError(t, chunker.Open()) - assert.Equal(t, "0", t1.minValue.String()) - assert.Equal(t, "0", t1.maxValue.String()) + require.Equal(t, "0", t1.minValue.String()) + require.Equal(t, "0", t1.maxValue.String()) } func TestDiscoveryCompositeNonComparable(t *testing.T) { @@ -233,7 +232,7 @@ func TestDiscoveryCompositeNonComparable(t *testing.T) { t1 := NewTableInfo(db, "test", "compnoncomparable") require.NoError(t, t1.SetInfo(t.Context())) // still discovers the primary key - assert.Error(t, t1.PrimaryKeyIsMemoryComparable()) // but its non comparable + require.Error(t, t1.PrimaryKeyIsMemoryComparable()) // but its non comparable } func TestDiscoveryCompositeComparable(t *testing.T) { @@ -257,9 +256,9 @@ func TestDiscoveryCompositeComparable(t *testing.T) { t1 := NewTableInfo(db, "test", "compcomparable") require.NoError(t, t1.SetInfo(t.Context())) - assert.True(t, t1.KeyIsAutoInc) - assert.Equal(t, []string{"int unsigned", "int"}, t1.keyColumnsMySQLTp) - assert.Equal(t, []string{"id", "age"}, t1.KeyColumns) + require.True(t, t1.KeyIsAutoInc) + require.Equal(t, []string{"int unsigned", "int"}, 
t1.keyColumnsMySQLTp) + require.Equal(t, []string{"id", "age"}, t1.KeyColumns) } func TestStatisticsUpdate(t *testing.T) { @@ -332,8 +331,8 @@ func TestKeyColumnsValuesExtraction(t *testing.T) { row := []any{id, name, age} pkVals, err := t1.PrimaryKeyValues(row) - assert.Equal(t, id, pkVals[0]) - assert.Equal(t, age, pkVals[1]) + require.Equal(t, id, pkVals[0]) + require.Equal(t, age, pkVals[1]) require.NoError(t, err) } @@ -363,8 +362,8 @@ func TestDiscoveryGeneratedCols(t *testing.T) { require.NoError(t, t1.SetInfo(t.Context())) // Can't check estimated rows (depends on MySQL version etc) - assert.Equal(t, []string{"id", "name", "b", "c1", "c2", "c3", "d"}, t1.Columns) - assert.Equal(t, []string{"id", "name", "b", "d"}, t1.NonGeneratedColumns) + require.Equal(t, []string{"id", "name", "b", "c1", "c2", "c3", "d"}, t1.Columns) + require.Equal(t, []string{"id", "name", "b", "d"}, t1.NonGeneratedColumns) testutils.RunSQL(t, `DROP TABLE IF EXISTS generatedcolst2`) table = `CREATE TABLE generatedcolst2 ( @@ -383,8 +382,8 @@ func TestDiscoveryGeneratedCols(t *testing.T) { require.NoError(t, t2.SetInfo(t.Context())) // Can't check estimated rows (depends on MySQL version etc) - assert.Equal(t, []string{"id", "pa", "p1", "p2", "s1", "s2", "s3", "s4"}, t2.Columns) - assert.Equal(t, []string{"id", "pa", "p1", "p2"}, t2.NonGeneratedColumns) + require.Equal(t, []string{"id", "pa", "p1", "p2", "s1", "s2", "s3", "s4"}, t2.Columns) + require.Equal(t, []string{"id", "pa", "p1", "p2"}, t2.NonGeneratedColumns) } func TestGetColumnOrdinal(t *testing.T) { @@ -395,25 +394,25 @@ func TestGetColumnOrdinal(t *testing.T) { // Test finding existing columns ordinal, err := t1.GetColumnOrdinal("id") require.NoError(t, err) - assert.Equal(t, 0, ordinal) + require.Equal(t, 0, ordinal) ordinal, err = t1.GetColumnOrdinal("name") require.NoError(t, err) - assert.Equal(t, 1, ordinal) + require.Equal(t, 1, ordinal) ordinal, err = t1.GetColumnOrdinal("age") require.NoError(t, err) - assert.Equal(t, 
2, ordinal) + require.Equal(t, 2, ordinal) ordinal, err = t1.GetColumnOrdinal("email") require.NoError(t, err) - assert.Equal(t, 3, ordinal) + require.Equal(t, 3, ordinal) // Test finding non-existent column ordinal, err = t1.GetColumnOrdinal("nonexistent") - assert.Error(t, err) - assert.Equal(t, -1, ordinal) - assert.ErrorContains(t, err, "column nonexistent not found in table testtable") + require.Error(t, err) + require.Equal(t, -1, ordinal) + require.ErrorContains(t, err, "column nonexistent not found in table testtable") } func TestGetNonGeneratedColumnOrdinal(t *testing.T) { @@ -428,48 +427,48 @@ func TestGetNonGeneratedColumnOrdinal(t *testing.T) { // Note: ordinals are relative to NonGeneratedColumns, not Columns ordinal, err := t1.GetNonGeneratedColumnOrdinal("id") require.NoError(t, err) - assert.Equal(t, 0, ordinal, "id should be at position 0 in NonGeneratedColumns") + require.Equal(t, 0, ordinal, "id should be at position 0 in NonGeneratedColumns") ordinal, err = t1.GetNonGeneratedColumnOrdinal("name") require.NoError(t, err) - assert.Equal(t, 1, ordinal, "name should be at position 1 in NonGeneratedColumns") + require.Equal(t, 1, ordinal, "name should be at position 1 in NonGeneratedColumns") ordinal, err = t1.GetNonGeneratedColumnOrdinal("age") require.NoError(t, err) - assert.Equal(t, 2, ordinal, "age should be at position 2 in NonGeneratedColumns (skipping name_reversed)") + require.Equal(t, 2, ordinal, "age should be at position 2 in NonGeneratedColumns (skipping name_reversed)") ordinal, err = t1.GetNonGeneratedColumnOrdinal("email") require.NoError(t, err) - assert.Equal(t, 3, ordinal, "email should be at position 3 in NonGeneratedColumns") + require.Equal(t, 3, ordinal, "email should be at position 3 in NonGeneratedColumns") // Test finding a generated column (should fail) ordinal, err = t1.GetNonGeneratedColumnOrdinal("name_reversed") - assert.Error(t, err) - assert.Equal(t, -1, ordinal) - assert.ErrorContains(t, err, "column name_reversed 
not found in non-generated columns of table testtable") + require.Error(t, err) + require.Equal(t, -1, ordinal) + require.ErrorContains(t, err, "column name_reversed not found in non-generated columns of table testtable") // Test finding non-existent column ordinal, err = t1.GetNonGeneratedColumnOrdinal("nonexistent") - assert.Error(t, err) - assert.Equal(t, -1, ordinal) - assert.ErrorContains(t, err, "column nonexistent not found in non-generated columns of table testtable") + require.Error(t, err) + require.Equal(t, -1, ordinal) + require.ErrorContains(t, err, "column nonexistent not found in non-generated columns of table testtable") } func TestQualifiedName(t *testing.T) { // Without Host, returns schema.table ti := &TableInfo{SchemaName: "mydb", TableName: "orders"} - assert.Equal(t, "mydb.orders", ti.QualifiedName()) + require.Equal(t, "mydb.orders", ti.QualifiedName()) // With Host, returns host.schema.table ti.Host = "server1:3306" - assert.Equal(t, "server1:3306.mydb.orders", ti.QualifiedName()) + require.Equal(t, "server1:3306.mydb.orders", ti.QualifiedName()) // Two tables with same schema.table but different hosts are distinct ti2 := &TableInfo{SchemaName: "mydb", TableName: "orders", Host: "server2:3306"} - assert.NotEqual(t, ti.QualifiedName(), ti2.QualifiedName()) + require.NotEqual(t, ti.QualifiedName(), ti2.QualifiedName()) // Two tables with same name, no host, different schemas are distinct ti3 := &TableInfo{SchemaName: "db1", TableName: "t1"} ti4 := &TableInfo{SchemaName: "db2", TableName: "t1"} - assert.NotEqual(t, ti3.QualifiedName(), ti4.QualifiedName()) + require.NotEqual(t, ti3.QualifiedName(), ti4.QualifiedName()) } diff --git a/pkg/table/utils_test.go b/pkg/table/utils_test.go index 252c129a..844a840a 100644 --- a/pkg/table/utils_test.go +++ b/pkg/table/utils_test.go @@ -5,6 +5,7 @@ import ( "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestFindP90(t *testing.T) { @@ -20,7 +21,7 @@ func 
TestFindP90(t *testing.T) { 1 * time.Second, 1 * time.Second, } - assert.Equal(t, 3*time.Second, LazyFindP90(times)) + require.Equal(t, 3*time.Second, LazyFindP90(times)) } type castableTpTest struct { @@ -74,44 +75,44 @@ func TestCastableTp(t *testing.T) { func TestQuoteCols(t *testing.T) { cols := []string{"a", "b", "c"} - assert.Equal(t, "`a`, `b`, `c`", QuoteColumns(cols)) + require.Equal(t, "`a`, `b`, `c`", QuoteColumns(cols)) cols = []string{"a"} - assert.Equal(t, "`a`", QuoteColumns(cols)) + require.Equal(t, "`a`", QuoteColumns(cols)) } func TestExpandRowConstructorComparison(t *testing.T) { - assert.Equal(t, "((`a` > 1)\n OR (`a` = 1 AND `b` >= 2))", + require.Equal(t, "((`a` > 1)\n OR (`a` = 1 AND `b` >= 2))", expandRowConstructorComparison([]string{"a", "b"}, OpGreaterEqual, []Datum{{Val: 1, Tp: signedType}, {Val: 2, Tp: signedType}})) - assert.Equal(t, "((`a` > 1)\n OR (`a` = 1 AND `b` > 2))", + require.Equal(t, "((`a` > 1)\n OR (`a` = 1 AND `b` > 2))", expandRowConstructorComparison([]string{"a", "b"}, OpGreaterThan, []Datum{{Val: 1, Tp: signedType}, {Val: 2, Tp: signedType}})) - assert.Equal(t, "((`a` > \"PENDING\")\n OR (`a` = \"PENDING\" AND `b` > 2))", + require.Equal(t, "((`a` > \"PENDING\")\n OR (`a` = \"PENDING\" AND `b` > 2))", expandRowConstructorComparison([]string{"a", "b"}, OpGreaterThan, []Datum{{Val: "PENDING", Tp: binaryType}, {Val: 2, Tp: signedType}})) - assert.Equal(t, "((`id1` > 2)\n OR (`id1` = 2 AND `id2` > 2)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` > 4)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` = 4 AND `id4` >= 5))", + require.Equal(t, "((`id1` > 2)\n OR (`id1` = 2 AND `id2` > 2)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` > 4)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` = 4 AND `id4` >= 5))", expandRowConstructorComparison([]string{"id1", "id2", "id3", "id4"}, OpGreaterEqual, []Datum{{Val: 2, Tp: signedType}, {Val: 2, Tp: signedType}, {Val: 4, Tp: signedType}, {Val: 5, Tp: signedType}})) - assert.Equal(t, "((`id1` < 2)\n OR (`id1` = 2 AND 
`id2` < 2)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` < 4)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` = 4 AND `id4` <= 5))", + require.Equal(t, "((`id1` < 2)\n OR (`id1` = 2 AND `id2` < 2)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` < 4)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` = 4 AND `id4` <= 5))", expandRowConstructorComparison([]string{"id1", "id2", "id3", "id4"}, OpLessEqual, []Datum{{Val: 2, Tp: signedType}, {Val: 2, Tp: signedType}, {Val: 4, Tp: signedType}, {Val: 5, Tp: signedType}})) - assert.Equal(t, "((`id1` < 2)\n OR (`id1` = 2 AND `id2` < 2)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` < 4)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` = 4 AND `id4` < 5))", + require.Equal(t, "((`id1` < 2)\n OR (`id1` = 2 AND `id2` < 2)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` < 4)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` = 4 AND `id4` < 5))", expandRowConstructorComparison([]string{"id1", "id2", "id3", "id4"}, OpLessThan, []Datum{{Val: 2, Tp: signedType}, {Val: 2, Tp: signedType}, {Val: 4, Tp: signedType}, {Val: 5, Tp: signedType}})) - assert.Equal(t, "((`id1` > 2)\n OR (`id1` = 2 AND `id2` > 2)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` > 4)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` = 4 AND `id4` > 5))", + require.Equal(t, "((`id1` > 2)\n OR (`id1` = 2 AND `id2` > 2)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` > 4)\n OR (`id1` = 2 AND `id2` = 2 AND `id3` = 4 AND `id4` > 5))", expandRowConstructorComparison([]string{"id1", "id2", "id3", "id4"}, OpGreaterThan, []Datum{{Val: 2, Tp: signedType}, {Val: 2, Tp: signedType}, {Val: 4, Tp: signedType}, {Val: 5, Tp: signedType}})) From 2d3d5d52be0b18024d5b6fc10a867090481e7d90 Mon Sep 17 00:00:00 2001 From: Morgan Tocker Date: Sat, 2 May 2026 05:34:51 -0600 Subject: [PATCH 3/3] style: gofmt Co-Authored-By: Claude Opus 4.7 (1M context) --- pkg/table/chunk_test.go | 2 +- pkg/table/chunker_optimistic_test.go | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/table/chunk_test.go b/pkg/table/chunk_test.go index 1c704630..f1defbd3 100644 --- 
a/pkg/table/chunk_test.go +++ b/pkg/table/chunk_test.go @@ -115,7 +115,7 @@ func TestComparesTo(t *testing.T) { Inclusive: true, } require.True(t, b1.comparesTo(b2)) - b2.Inclusive = false // change operator + b2.Inclusive = false // change operator require.True(t, b1.comparesTo(b2)) // still compares b2.Value = []Datum{{Val: 300, Tp: signedType}} require.False(t, b1.comparesTo(b2)) diff --git a/pkg/table/chunker_optimistic_test.go b/pkg/table/chunker_optimistic_test.go index bfa7cdf6..fcaeb10f 100644 --- a/pkg/table/chunker_optimistic_test.go +++ b/pkg/table/chunker_optimistic_test.go @@ -206,7 +206,7 @@ func TestOptimisticDynamicChunking(t *testing.T) { chunk, err = chunker.Next() require.NoError(t, err) require.Equal(t, uint64(100), chunk.ChunkSize) // immediate change from before - chunker.Feedback(chunk, time.Second, 1) // way too long again, it will reduce to 10 + chunker.Feedback(chunk, time.Second, 1) // way too long again, it will reduce to 10 newChunk, err := chunker.Next() require.NoError(t, err) @@ -218,7 +218,7 @@ func TestOptimisticDynamicChunking(t *testing.T) { chunk, err = chunker.Next() require.NoError(t, err) - require.Equal(t, uint64(10), chunk.ChunkSize) // no change + require.Equal(t, uint64(10), chunk.ChunkSize) // no change chunker.Feedback(chunk, 50*time.Microsecond, 1) // must give feedback to advance watermark. // Feedback to increase the chunk size is more gradual.