From 13070fa9b53cbce28df03d40bb29ae15c8eec3b7 Mon Sep 17 00:00:00 2001 From: Naohiro CHIKAMATSU Date: Sun, 31 Aug 2025 23:26:27 +0900 Subject: [PATCH] Introduce testify --- builder_test.go | 708 +++++++++----------------- column_inference_test.go | 43 +- dump_options_test.go | 79 +-- file_test.go | 140 ++--- filesql_test.go | 1038 +++++++++++++++----------------------- go.mod | 4 + go.sum | 6 +- stream_test.go | 58 +-- table_test.go | 42 +- types_test.go | 30 +- 10 files changed, 764 insertions(+), 1384 deletions(-) diff --git a/builder_test.go b/builder_test.go index 39810a1..e58c7c5 100644 --- a/builder_test.go +++ b/builder_test.go @@ -17,6 +17,8 @@ import ( "testing/fstest" "time" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "modernc.org/sqlite" ) @@ -27,15 +29,9 @@ func TestNewBuilder(t *testing.T) { t.Parallel() builder := NewBuilder() - if builder == nil { - t.Fatal("NewBuilder() returned nil") - } - if len(builder.paths) != 0 { - t.Errorf("NewBuilder() paths = %d, want 0", len(builder.paths)) - } - if len(builder.filesystems) != 0 { - t.Errorf("NewBuilder() filesystems = %d, want 0", len(builder.filesystems)) - } + require.NotNil(t, builder, "NewBuilder() should not return nil") + assert.Len(t, builder.paths, 0, "NewBuilder() should have empty paths slice") + assert.Len(t, builder.filesystems, 0, "NewBuilder() should have empty filesystems slice") } func TestDBBuilder_AddPath(t *testing.T) { @@ -44,12 +40,8 @@ func TestDBBuilder_AddPath(t *testing.T) { t.Run("single path", func(t *testing.T) { t.Parallel() builder := NewBuilder().AddPath("test.csv") - if len(builder.paths) != 1 { - t.Errorf("paths = %d, want 1", len(builder.paths)) - } - if builder.paths[0] != "test.csv" { - t.Errorf("paths[0] = %s, want test.csv", builder.paths[0]) - } + assert.Len(t, builder.paths, 1, "should have 1 path") + assert.Equal(t, "test.csv", builder.paths[0], "first path should be test.csv") }) t.Run("chain multiple paths", func(t *testing.T) { @@ -57,9 +49,7 @@ func TestDBBuilder_AddPath(t *testing.T) { builder := NewBuilder(). AddPath("test1.csv"). 
AddPath("test2.tsv") - if len(builder.paths) != 2 { - t.Errorf("paths = %d, want 2", len(builder.paths)) - } + assert.Len(t, builder.paths, 2, "should have 2 paths after chaining") }) } @@ -67,9 +57,7 @@ func TestDBBuilder_AddPaths(t *testing.T) { t.Parallel() builder := NewBuilder().AddPaths("test1.csv", "test2.tsv", "test3.ltsv") - if len(builder.paths) != 3 { - t.Errorf("paths = %d, want 3", len(builder.paths)) - } + assert.Len(t, builder.paths, 3, "should have 3 paths after AddPaths") } func TestDBBuilder_AddFS(t *testing.T) { @@ -82,9 +70,7 @@ func TestDBBuilder_AddFS(t *testing.T) { } builder := NewBuilder().AddFS(mockFS) - if len(builder.filesystems) != 1 { - t.Errorf("filesystems = %d, want 1", len(builder.filesystems)) - } + assert.Len(t, builder.filesystems, 1, "should have 1 filesystem") }) t.Run("add multiple filesystems", func(t *testing.T) { @@ -97,9 +83,7 @@ func TestDBBuilder_AddFS(t *testing.T) { } builder := NewBuilder().AddFS(mockFS1).AddFS(mockFS2) - if len(builder.filesystems) != 2 { - t.Errorf("filesystems = %d, want 2", len(builder.filesystems)) - } + assert.Len(t, builder.filesystems, 2, "should have 2 filesystems") }) } @@ -112,15 +96,9 @@ func TestDBBuilder_AddReader(t *testing.T) { reader := bytes.NewReader([]byte(data)) builder := NewBuilder().AddReader(reader, "users", FileTypeCSV) - if len(builder.readers) != 1 { - t.Errorf("readers = %d, want 1", len(builder.readers)) - } - if builder.readers[0].tableName != "users" { - t.Errorf("TableName = %s, want users", builder.readers[0].tableName) - } - if builder.readers[0].fileType != FileTypeCSV { - t.Errorf("FileType = %v, want FileTypeCSV", builder.readers[0].fileType) - } + assert.Len(t, builder.readers, 1, "should have 1 reader") + assert.Equal(t, "users", builder.readers[0].tableName, "table name should be users") + assert.Equal(t, FileTypeCSV, builder.readers[0].fileType, "file type should be CSV") // No compression fields to check since FileTypeCSV is uncompressed }) @@ -130,12 +108,8 @@ func TestDBBuilder_AddReader(t *testing.T) { reader := bytes.NewReader([]byte(data)) builder := NewBuilder().AddReader(reader, "data", FileTypeTSV) - if len(builder.readers) != 1 { - t.Errorf("readers = %d, want 1", len(builder.readers)) - } - if builder.readers[0].fileType != FileTypeTSV { - t.Errorf("FileType = %v, want FileTypeTSV", builder.readers[0].fileType) - } + assert.Len(t, builder.readers, 1, "should have 1 reader") + assert.Equal(t, FileTypeTSV, builder.readers[0].fileType, "file type should be TSV") }) t.Run("add compressed CSV reader", func(t *testing.T) { @@ -144,12 +118,8 @@ func TestDBBuilder_AddReader(t *testing.T) { reader := bytes.NewReader(data) builder := NewBuilder().AddReader(reader, "logs", FileTypeCSVGZ) - if len(builder.readers) != 1 { - t.Errorf("readers = %d, want 1", len(builder.readers)) - } - if builder.readers[0].fileType != FileTypeCSVGZ { - t.Errorf("FileType = %v, want FileTypeCSVGZ", builder.readers[0].fileType) - } + assert.Len(t, builder.readers, 1, "should have 1 reader") + assert.Equal(t, FileTypeCSVGZ, builder.readers[0].fileType, "file type should be CSV.GZ") // Regular CSV type for testing }) @@ -162,9 +132,7 @@ func TestDBBuilder_AddReader(t *testing.T) { AddReader(reader1, "table1", FileTypeCSV). 
AddReader(reader2, "table2", FileTypeTSV) - if len(builder.readers) != 2 { - t.Errorf("readers = %d, want 2", len(builder.readers)) - } + assert.Len(t, builder.readers, 2, "should have 2 readers") }) } @@ -176,9 +144,7 @@ func TestDBBuilder_SetDefaultChunkSize(t *testing.T) { customSize := 20 * 1024 * 1024 // 20MB builder := NewBuilder().SetDefaultChunkSize(customSize) - if builder.defaultChunkSize != customSize { - t.Errorf("defaultChunkSize = %d, want %d", builder.defaultChunkSize, customSize) - } + assert.Equal(t, customSize, builder.defaultChunkSize, "default chunk size should be set to custom size") }) t.Run("zero or negative size ignored", func(t *testing.T) { @@ -188,15 +154,11 @@ func TestDBBuilder_SetDefaultChunkSize(t *testing.T) { // Zero should be ignored builder.SetDefaultChunkSize(0) - if builder.defaultChunkSize != defaultSize { - t.Errorf("defaultChunkSize = %d, want %d (should not change)", builder.defaultChunkSize, defaultSize) - } + assert.Equal(t, defaultSize, builder.defaultChunkSize, "chunk size should not change when set to zero") // Negative should be ignored builder.SetDefaultChunkSize(-1) - if builder.defaultChunkSize != defaultSize { - t.Errorf("defaultChunkSize = %d, want %d (should not change)", builder.defaultChunkSize, defaultSize) - } + assert.Equal(t, defaultSize, builder.defaultChunkSize, "chunk size should not change when set to negative") }) } @@ -209,9 +171,7 @@ func TestDBBuilder_Build(t *testing.T) { t.Parallel() builder := NewBuilder() _, err := builder.Build(ctx) - if err == nil { - t.Error("Build() should return error for no inputs") - } + assert.Error(t, err, "Build() should return error for no inputs") }) t.Run("reader with nil reader error", func(t *testing.T) { @@ -224,12 +184,8 @@ func TestDBBuilder_Build(t *testing.T) { }) _, err := builder.Build(ctx) - if err == nil { - t.Error("Build() should return error for nil reader") - } - if !strings.Contains(err.Error(), "reader cannot be nil") { - t.Errorf("Expected 'reader cannot be nil' error, got: %v", err) - } + assert.Error(t, err, "Build() should return error for nil reader") + assert.Contains(t, err.Error(), "reader cannot be nil", "error message should mention nil reader") }) t.Run("reader with empty table name error", func(t *testing.T) { @@ -243,12 +199,8 @@ func TestDBBuilder_Build(t *testing.T) { }) _, err := builder.Build(ctx) - if err == nil { - t.Error("Build() should return error for empty table name") - } - if !strings.Contains(err.Error(), "table name must be specified") { - t.Errorf("Expected 'table name must be specified' error, got: %v", err) - } + assert.Error(t, err, "Build() should return error for empty table name") + assert.Contains(t, err.Error(), "table name must be specified", "error message should mention table name requirement") }) t.Run("reader with unsupported file type error", func(t *testing.T) { @@ -262,12 +214,8 @@ func TestDBBuilder_Build(t *testing.T) { }) _, err := builder.Build(ctx) - if err == nil { - t.Error("Build() should return error for unsupported file type") - } - if !strings.Contains(err.Error(), "file type must be specified") { - t.Errorf("Expected 'file type must be specified' error, got: %v", err) - } + assert.Error(t, err, "Build() should return error for unsupported file type") + assert.Contains(t, err.Error(), "file type must be specified", "error message should mention file type requirement") }) t.Run("reader with valid CSV data", func(t *testing.T) { @@ -277,17 +225,10 @@ func TestDBBuilder_Build(t *testing.T) { builder := 
NewBuilder().AddReader(reader, "users", FileTypeCSV) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Errorf("Build() error = %v", err) - } - if validatedBuilder == nil { - t.Error("Build() returned nil builder") - return - } + assert.NoError(t, err, "Build() should succeed with valid CSV data") + require.NotNil(t, validatedBuilder, "Build() should not return nil builder") // Readers don't create temp files anymore - they use direct streaming - if len(validatedBuilder.readers) != 1 { - t.Errorf("Build() should have 1 reader input, got %d", len(validatedBuilder.readers)) - } + assert.Len(t, validatedBuilder.readers, 1, "Build() should have 1 reader input") // Clean up temp files }) @@ -300,12 +241,8 @@ func TestDBBuilder_Build(t *testing.T) { builder := NewBuilder().AddReader(reader, "logs", FileTypeCSV) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Errorf("Build() error = %v", err) - } - if validatedBuilder == nil { - t.Error("Build() returned nil builder") - } + assert.NoError(t, err, "Build() should succeed with compressed type") + assert.NotNil(t, validatedBuilder, "Build() should not return nil builder") // Clean up temp files }) @@ -320,17 +257,10 @@ func TestDBBuilder_Build(t *testing.T) { AddReader(reader2, "table2", FileTypeTSV) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Errorf("Build() error = %v", err) - } - if validatedBuilder == nil { - t.Error("Build() returned nil builder") - return - } + assert.NoError(t, err, "Build() should succeed with multiple readers") + require.NotNil(t, validatedBuilder, "Build() should not return nil builder") // Readers don't create temp files anymore - they use direct streaming - if len(validatedBuilder.readers) != 2 { - t.Errorf("Build() should have 2 reader inputs, got %d", len(validatedBuilder.readers)) - } + assert.Len(t, validatedBuilder.readers, 2, "Build() should have 2 reader inputs") // Clean up temp files }) @@ -339,9 +269,7 @@ func TestDBBuilder_Build(t *testing.T) { t.Parallel() builder := NewBuilder().AddPath(filepath.Join("nonexistent", "file.csv")) _, err := builder.Build(ctx) - if err == nil { - t.Error("Build() should return error for nonexistent path") - } + assert.Error(t, err, "Build() should return error for nonexistent path") }) t.Run("unsupported file type error", func(t *testing.T) { @@ -349,15 +277,12 @@ func TestDBBuilder_Build(t *testing.T) { // Create a temporary unsupported file tempDir := t.TempDir() unsupportedFile := filepath.Join(tempDir, "test.txt") - if err := os.WriteFile(unsupportedFile, []byte("test"), 0600); err != nil { - t.Fatal(err) - } + err := os.WriteFile(unsupportedFile, []byte("test"), 0600) + require.NoError(t, err, "should create test file") builder := NewBuilder().AddPath(unsupportedFile) - _, err := builder.Build(ctx) - if err == nil { - t.Error("Build() should return error for unsupported file type") - } + _, err = builder.Build(ctx) + assert.Error(t, err, "Build() should return error for unsupported file type") }) t.Run("valid CSV file", func(t *testing.T) { @@ -366,18 +291,13 @@ func TestDBBuilder_Build(t *testing.T) { tempDir := t.TempDir() csvFile := filepath.Join(tempDir, "test.csv") content := "col1,col2\nval1,val2\n" - if err := os.WriteFile(csvFile, []byte(content), 0600); err != nil { - t.Fatal(err) - } + err := os.WriteFile(csvFile, []byte(content), 0600) + require.NoError(t, err, "should create CSV file") builder := NewBuilder().AddPath(csvFile) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Errorf("Build() 
error = %v", err) - } - if validatedBuilder == nil { - t.Error("Build() returned nil builder") - } + assert.NoError(t, err, "Build() should succeed with valid CSV file") + assert.NotNil(t, validatedBuilder, "Build() should not return nil builder") }) t.Run("valid directory", func(t *testing.T) { @@ -387,18 +307,13 @@ func TestDBBuilder_Build(t *testing.T) { // Create a valid CSV file in the temp directory csvFile := filepath.Join(tempDir, "test.csv") csvContent := "id,name,age\n1,John,30\n2,Jane,25\n" - if err := os.WriteFile(csvFile, []byte(csvContent), 0600); err != nil { - t.Fatalf("Failed to create test CSV file: %v", err) - } + err := os.WriteFile(csvFile, []byte(csvContent), 0600) + require.NoError(t, err, "Failed to create test CSV file") builder := NewBuilder().AddPath(tempDir) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Errorf("Build() error = %v", err) - } - if validatedBuilder == nil { - t.Error("Build() returned nil builder") - } + assert.NoError(t, err, "Build() should succeed with valid directory") + assert.NotNil(t, validatedBuilder, "Build() should not return nil builder") }) t.Run("FS with valid files", func(t *testing.T) { @@ -412,17 +327,11 @@ func TestDBBuilder_Build(t *testing.T) { builder := NewBuilder().AddFS(mockFS) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Errorf("Build() error = %v", err) - } - if validatedBuilder == nil { - t.Error("Build() returned nil builder") - } + assert.NoError(t, err, "Build() should succeed with FS containing valid files") + require.NotNil(t, validatedBuilder, "Build() should not return nil builder") // Should have found 3 files (csv, tsv, ltsv) and ignored txt // fs.FS files are now stored as readers instead of collectedPaths - if validatedBuilder != nil && len(validatedBuilder.readers) != 3 { - t.Errorf("Build() should have 3 readers from fs.FS, got %d", len(validatedBuilder.readers)) - } + assert.Len(t, validatedBuilder.readers, 3, "Build() should have 3 readers from fs.FS") }) t.Run("FS with nil filesystem error", func(t *testing.T) { @@ -431,9 +340,7 @@ func TestDBBuilder_Build(t *testing.T) { builder.filesystems = append(builder.filesystems, nil) _, err := builder.Build(ctx) - if err == nil { - t.Error("Build() should return error for nil FS") - } + assert.Error(t, err, "Build() should return error for nil FS") }) t.Run("FS with no supported files error", func(t *testing.T) { @@ -445,9 +352,7 @@ func TestDBBuilder_Build(t *testing.T) { builder := NewBuilder().AddFS(mockFS) _, err := builder.Build(ctx) - if err == nil { - t.Error("Build() should return error for FS with no supported files") - } + assert.Error(t, err, "Build() should return error for FS with no supported files") }) } @@ -477,28 +382,17 @@ func TestDBBuilder_ChunkedReading(t *testing.T) { ctx := context.Background() validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Build() should succeed") db, err := validatedBuilder.Open(ctx) - if err != nil { - t.Errorf("Open() error = %v", err) - } - if db == nil { - t.Error("Open() returned nil database") - } else { - // Verify the data was loaded correctly - var count int - err := db.QueryRowContext(ctx, "SELECT COUNT(*) FROM large_table").Scan(&count) - if err != nil { - t.Errorf("Count query failed: %v", err) - } - if count != 10000 { - t.Errorf("Expected 10000 rows, got %d", count) - } - _ = db.Close() - } + assert.NoError(t, err, "Open() should succeed") + require.NotNil(t, db, "Open() should not return nil database") + // 
Verify the data was loaded correctly + var count int + err = db.QueryRowContext(ctx, "SELECT COUNT(*) FROM large_table").Scan(&count) + assert.NoError(t, err, "Count query should succeed") + assert.Equal(t, 10000, count, "Should have 10000 rows") + _ = db.Close() // Clean up temp files }) @@ -513,29 +407,17 @@ func TestDBBuilder_Open_WithReader(t *testing.T) { builder := NewBuilder().AddReader(reader, "users", FileTypeCSV) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Build() should succeed") db, err := validatedBuilder.Open(ctx) - if err != nil { - t.Errorf("Open() error = %v", err) - } - if db == nil { - t.Error("Open() returned nil database") - } else { - // Verify we can query the data - rows, err := db.QueryContext(ctx, "SELECT * FROM users") - if err != nil { - t.Errorf("Query failed: %v", err) - } else { - defer rows.Close() - if err := rows.Err(); err != nil { - t.Errorf("Rows error: %v", err) - } - } - _ = db.Close() - } + assert.NoError(t, err, "Open() should succeed") + require.NotNil(t, db, "Open() should not return nil database") + // Verify we can query the data + rows, err := db.QueryContext(ctx, "SELECT * FROM users") + assert.NoError(t, err, "Query should succeed") + defer rows.Close() + assert.NoError(t, rows.Err(), "Rows should not have errors") + _ = db.Close() // Clean up temp files }) @@ -545,9 +427,8 @@ func TestDBBuilder_Open_WithReader(t *testing.T) { tempDir := t.TempDir() csvFile := filepath.Join(tempDir, "orders.csv") fileContent := "order_id,amount\n1,100\n2,200\n" - if err := os.WriteFile(csvFile, []byte(fileContent), 0600); err != nil { - t.Fatal(err) - } + err := os.WriteFile(csvFile, []byte(fileContent), 0600) + require.NoError(t, err, "should create orders CSV file") // Create a reader with different data readerData := "product_id,name\n1,Laptop\n2,Mouse\n" @@ -558,31 +439,19 @@ func TestDBBuilder_Open_WithReader(t *testing.T) { AddReader(reader, "products", FileTypeCSV) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Build() should succeed with mixed inputs") db, err := validatedBuilder.Open(ctx) - if err != nil { - t.Errorf("Open() error = %v", err) - } - if db == nil { - t.Error("Open() returned nil database") - } else { - // Verify both tables exist - for _, table := range []string{"orders", "products"} { - rows, err := db.QueryContext(ctx, "SELECT * FROM "+table) // #nosec G202 -- table name is safe - if err != nil { - t.Errorf("Query %s failed: %v", table, err) - } else { - if err := rows.Err(); err != nil { - t.Errorf("Rows error for %s: %v", table, err) - } - _ = rows.Close() // Close immediately in the loop - } - } - _ = db.Close() + assert.NoError(t, err, "Open() should succeed") + require.NotNil(t, db, "Open() should not return nil database") + // Verify both tables exist + for _, table := range []string{"orders", "products"} { + rows, err := db.QueryContext(ctx, "SELECT * FROM "+table) // #nosec G202 -- table name is safe + assert.NoError(t, err, "Query %s should succeed", table) + assert.NoError(t, rows.Err(), "Rows should not have errors for %s", table) + _ = rows.Close() // Close immediately in the loop } + _ = db.Close() // Clean up temp files }) @@ -595,16 +464,12 @@ func TestDBBuilder_Open(t *testing.T) { builder := NewBuilder().AddPath("test.csv") // Call Open without calling Build first db, err := builder.Open(ctx) - if err == nil { - if db != nil { - _ = db.Close() - } - t.Error("Open() without Build() should return 
error") + if db != nil { + _ = db.Close() } + assert.Error(t, err, "Open() without Build() should return error") expectedErrMsg := "no valid input files found, did you call Build()?" - if !strings.Contains(err.Error(), expectedErrMsg) { - t.Errorf("Expected error message containing '%s', got: %s", expectedErrMsg, err.Error()) - } + assert.Contains(t, err.Error(), expectedErrMsg, "error message should mention Build() requirement") }) t.Run("successful open with CSV file", func(t *testing.T) { @@ -612,23 +477,17 @@ func TestDBBuilder_Open(t *testing.T) { tempDir := t.TempDir() csvFile := filepath.Join(tempDir, "test.csv") content := "col1,col2\nval1,val2\n" - if err := os.WriteFile(csvFile, []byte(content), 0600); err != nil { - t.Fatal(err) - } + err := os.WriteFile(csvFile, []byte(content), 0600) + require.NoError(t, err, "should create CSV file") builder := NewBuilder().AddPath(csvFile) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Build() should succeed") db, err := validatedBuilder.Open(ctx) - if err != nil { - t.Errorf("Open() error = %v", err) - } - if db == nil { - t.Error("Open() returned nil database") - } else { + assert.NoError(t, err, "Open() should succeed") + assert.NotNil(t, db, "Open() should not return nil database") + if db != nil { _ = db.Close() } }) @@ -640,17 +499,12 @@ func TestDBBuilder_Open(t *testing.T) { builder := NewBuilder().AddFS(mockFS) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Build() should succeed") db, err := validatedBuilder.Open(ctx) - if err != nil { - t.Errorf("Open() error = %v", err) - } - if db == nil { - t.Error("Open() returned nil database") - } else { + assert.NoError(t, err, "Open() should succeed") + assert.NotNil(t, db, "Open() should not return nil database") + if db != nil { _ = db.Close() // Clean up temp files } @@ -664,17 +518,12 @@ func TestDBBuilder_Open(t *testing.T) { builder := NewBuilder().AddFS(mockFS) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Build() should succeed") db, err := validatedBuilder.Open(ctx) - if err != nil { - t.Errorf("Open() error = %v", err) - } - if db == nil { - t.Error("Open() returned nil database") - } else { + assert.NoError(t, err, "Open() should succeed") + assert.NotNil(t, db, "Open() should not return nil database") + if db != nil { _ = db.Close() // Clean up temp files } @@ -698,12 +547,8 @@ func TestDBBuilder_processFSInput(t *testing.T) { builder := NewBuilder() readers, err := builder.processFSToReaders(ctx, mockFS) - if err != nil { - t.Errorf("processFSToReaders() error = %v", err) - } - if len(readers) != 3 { - t.Errorf("processFSToReaders() returned %d readers, want 3", len(readers)) - } + assert.NoError(t, err, "processFSToReaders() should succeed") + assert.Len(t, readers, 3, "should return 3 readers") // Close all readers for _, reader := range readers { @@ -723,12 +568,8 @@ func TestDBBuilder_processFSInput(t *testing.T) { builder := NewBuilder() readers, err := builder.processFSToReaders(ctx, mockFS) - if err != nil { - t.Errorf("processFSToReaders() error = %v", err) - } - if len(readers) != 2 { - t.Errorf("processFSToReaders() returned %d readers, want 2", len(readers)) - } + assert.NoError(t, err, "processFSToReaders() should succeed with compressed files") + assert.Len(t, readers, 2, "should return 2 readers for compressed files") // Close all readers for _, reader := range readers { @@ -744,39 
+585,25 @@ func TestIntegrationWithEmbedFS(t *testing.T) { // Use embedded test data from embed_test subdirectory subFS, err := fs.Sub(testFS, "testdata/embed_test") - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "should create sub filesystem") // Test loading all supported files from embedded FS builder := NewBuilder().AddFS(subFS) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Build() should succeed with embedded FS") db, err := validatedBuilder.Open(ctx) - if err != nil { - t.Errorf("Open() with embed.FS error = %v", err) - } - if db == nil { - t.Error("Open() with embed.FS returned nil database") - } else { - // Verify we can query the database - rows, err := db.QueryContext(ctx, "SELECT name FROM sqlite_master WHERE type='table'") - if err != nil { - t.Errorf("Failed to query database: %v", err) - } else { - defer rows.Close() - if err := rows.Err(); err != nil { - t.Errorf("Rows error: %v", err) - } - } - - _ = db.Close() - // Clean up temp files - } + assert.NoError(t, err, "Open() with embed.FS should succeed") + require.NotNil(t, db, "Open() with embed.FS should not return nil database") + // Verify we can query the database + rows, err := db.QueryContext(ctx, "SELECT name FROM sqlite_master WHERE type='table'") + assert.NoError(t, err, "should be able to query database") + defer rows.Close() + assert.NoError(t, rows.Err(), "rows should not have errors") + + _ = db.Close() + // Clean up temp files } func TestAutoSave_OnClose(t *testing.T) { @@ -786,15 +613,13 @@ func TestAutoSave_OnClose(t *testing.T) { // Create test CSV file csvPath := filepath.Join(tmpDir, "test.csv") csvContent := "name,age\nAlice,25\nBob,30\n" - if err := os.WriteFile(csvPath, []byte(csvContent), 0600); err != nil { - t.Fatalf("Failed to write test CSV: %v", err) - } + err := os.WriteFile(csvPath, []byte(csvContent), 0600) + require.NoError(t, err, "Failed to write test CSV") // Create output directory outputDir := filepath.Join(tmpDir, "output") - if err := os.MkdirAll(outputDir, 0750); err != nil { - t.Fatalf("Failed to create output dir: %v", err) - } + err = os.MkdirAll(outputDir, 0750) + require.NoError(t, err, "Failed to create output dir") // Build database with auto-save on close ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) @@ -805,41 +630,29 @@ func TestAutoSave_OnClose(t *testing.T) { EnableAutoSave(outputDir) validatedBuilder, err := builder.Build(ctx) - if err != nil { - t.Fatalf("Build failed: %v", err) - } + require.NoError(t, err, "Build should succeed") db, err := validatedBuilder.Open(ctx) - if err != nil { - t.Fatalf("Open failed: %v", err) - } + require.NoError(t, err, "Open should succeed") // Modify data _, err = db.ExecContext(ctx, "INSERT INTO test (name, age) VALUES ('Charlie', 35)") - if err != nil { - t.Fatalf("Insert failed: %v", err) - } + require.NoError(t, err, "Insert should succeed") // Close database (should trigger auto-save) - if err := db.Close(); err != nil { - t.Fatalf("Close failed: %v", err) - } + err = db.Close() + require.NoError(t, err, "Close should succeed") // Check if file was saved outputFile := filepath.Join(outputDir, "test.csv") - if _, err := os.Stat(outputFile); os.IsNotExist(err) { - t.Fatalf("Auto-save file not created: %s", outputFile) - } + _, err = os.Stat(outputFile) + assert.False(t, os.IsNotExist(err), "Auto-save file should be created: %s", outputFile) // Verify content includes the new record content, err := os.ReadFile(outputFile) 
//nolint:gosec // Test file path is safe - if err != nil { - t.Fatalf("Failed to read output file: %v", err) - } + require.NoError(t, err, "should be able to read output file") - if !strings.Contains(string(content), "Charlie") { - t.Errorf("Auto-saved file should contain inserted data. Got: %s", string(content)) - } + assert.Contains(t, string(content), "Charlie", "Auto-saved file should contain inserted data") } func TestAutoSave_OnCommit(t *testing.T) { @@ -849,15 +662,13 @@ func TestAutoSave_OnCommit(t *testing.T) { // Create test CSV file csvPath := filepath.Join(tmpDir, "test.csv") csvContent := "name,age\nAlice,25\n" - if err := os.WriteFile(csvPath, []byte(csvContent), 0600); err != nil { - t.Fatalf("Failed to write test CSV: %v", err) - } + err := os.WriteFile(csvPath, []byte(csvContent), 0600) + require.NoError(t, err, "Failed to write test CSV") // Create output directory outputDir := filepath.Join(tmpDir, "output") - if err := os.MkdirAll(outputDir, 0750); err != nil { - t.Fatalf("Failed to create output dir: %v", err) - } + err = os.MkdirAll(outputDir, 0750) + require.NoError(t, err, "Failed to create output dir") // Build database with auto-save on commit ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) @@ -869,47 +680,44 @@ func TestAutoSave_OnCommit(t *testing.T) { validatedBuilder, err := builder.Build(ctx) if err != nil { - t.Fatalf("Build failed: %v", err) + require.NoError(t, err, "Build should succeed") } db, err := validatedBuilder.Open(ctx) if err != nil { - t.Fatalf("Open failed: %v", err) + require.NoError(t, err, "Open should succeed") } defer db.Close() // Start transaction tx, err := db.BeginTx(ctx, nil) if err != nil { - t.Fatalf("Begin transaction failed: %v", err) + require.NoError(t, err, "Begin transaction should succeed") } // Modify data within transaction _, err = tx.ExecContext(ctx, "INSERT INTO test (name, age) VALUES ('David', 40)") if err != nil { - t.Fatalf("Insert failed: %v", err) + require.NoError(t, err, "Insert should succeed") } // Commit transaction (should trigger auto-save) - if err := tx.Commit(); err != nil { - t.Fatalf("Commit failed: %v", err) - } + err = tx.Commit() + require.NoError(t, err, "Commit should succeed") // Check if file was saved outputFile := filepath.Join(outputDir, "test.csv") if _, err := os.Stat(outputFile); os.IsNotExist(err) { - t.Fatalf("Auto-save file not created: %s", outputFile) + assert.FileExists(t, outputFile, "Auto-save file should be created") } // Verify content includes the new record content, err := os.ReadFile(outputFile) //nolint:gosec // Test file path is safe if err != nil { - t.Fatalf("Failed to read output file: %v", err) + require.NoError(t, err, "should be able to read output file") } - if !strings.Contains(string(content), "David") { - t.Errorf("Auto-saved file should contain committed data. 
Got: %s", string(content)) - } + assert.Contains(t, string(content), "David", "Auto-saved file should contain committed data") } func TestAutoSave_DisableAutoSave(t *testing.T) { @@ -919,15 +727,13 @@ func TestAutoSave_DisableAutoSave(t *testing.T) { // Create test CSV file csvPath := filepath.Join(tmpDir, "test.csv") csvContent := "name,age\nAlice,25\n" - if err := os.WriteFile(csvPath, []byte(csvContent), 0600); err != nil { - t.Fatalf("Failed to write test CSV: %v", err) - } + err := os.WriteFile(csvPath, []byte(csvContent), 0600) + require.NoError(t, err, "Failed to write test CSV") // Create output directory outputDir := filepath.Join(tmpDir, "output") - if err := os.MkdirAll(outputDir, 0750); err != nil { - t.Fatalf("Failed to create output dir: %v", err) - } + err = os.MkdirAll(outputDir, 0750) + require.NoError(t, err, "Failed to create output dir") // Build database without auto-save (default behavior) ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) @@ -939,29 +745,29 @@ func TestAutoSave_DisableAutoSave(t *testing.T) { validatedBuilder, err := builder.Build(ctx) if err != nil { - t.Fatalf("Build failed: %v", err) + require.NoError(t, err, "Build should succeed") } db, err := validatedBuilder.Open(ctx) if err != nil { - t.Fatalf("Open failed: %v", err) + require.NoError(t, err, "Open should succeed") } // Modify data _, err = db.ExecContext(ctx, "INSERT INTO test (name, age) VALUES ('Echo', 45)") if err != nil { - t.Fatalf("Insert failed: %v", err) + require.NoError(t, err, "Insert should succeed") } // Close database (should NOT trigger auto-save) if err := db.Close(); err != nil { - t.Fatalf("Close failed: %v", err) + require.NoError(t, err, "Close should succeed") } // Check that no output file was created outputFile := filepath.Join(outputDir, "test.csv") if _, err := os.Stat(outputFile); !os.IsNotExist(err) { - t.Errorf("Auto-save file should not have been created when auto-save is disabled") + assert.NoFileExists(t, outputFile, "Auto-save file should not have been created when auto-save is disabled") } } @@ -974,15 +780,13 @@ func TestAutoSave_MultipleCommitsOverwrite(t *testing.T) { // Create test CSV file csvPath := filepath.Join(tmpDir, "test.csv") csvContent := "name,count\nInitial,1\n" - if err := os.WriteFile(csvPath, []byte(csvContent), 0600); err != nil { - t.Fatalf("Failed to write test CSV: %v", err) - } + err := os.WriteFile(csvPath, []byte(csvContent), 0600) + require.NoError(t, err, "Failed to write test CSV") // Create output directory outputDir := filepath.Join(tmpDir, "output") - if err := os.MkdirAll(outputDir, 0750); err != nil { - t.Fatalf("Failed to create output dir: %v", err) - } + err = os.MkdirAll(outputDir, 0750) + require.NoError(t, err, "Failed to create output dir") ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() @@ -994,12 +798,12 @@ func TestAutoSave_MultipleCommitsOverwrite(t *testing.T) { validatedBuilder, err := builder.Build(ctx) if err != nil { - t.Fatalf("Build failed: %v", err) + require.NoError(t, err, "Build should succeed") } db, err := validatedBuilder.Open(ctx) if err != nil { - t.Fatalf("Open failed: %v", err) + require.NoError(t, err, "Open should succeed") } defer db.Close() @@ -1008,62 +812,56 @@ func TestAutoSave_MultipleCommitsOverwrite(t *testing.T) { // First commit: Add first record tx1, err := db.BeginTx(ctx, nil) if err != nil { - t.Fatalf("Begin first transaction failed: %v", err) + require.NoError(t, err, "Begin first transaction should succeed") } _, err = 
tx1.ExecContext(ctx, "INSERT INTO test (name, count) VALUES ('First', 100)") if err != nil { - t.Fatalf("First insert failed: %v", err) + require.NoError(t, err, "First insert should succeed") } if err := tx1.Commit(); err != nil { - t.Fatalf("First commit failed: %v", err) + require.NoError(t, err, "First commit should succeed") } // Check first commit saved the file if _, err := os.Stat(outputFile); os.IsNotExist(err) { - t.Fatalf("Auto-save file not created after first commit: %s", outputFile) + assert.FileExists(t, outputFile, "Auto-save file should be created after first commit") } // Read content after first commit content1, err := os.ReadFile(outputFile) //nolint:gosec // Test file path is safe if err != nil { - t.Fatalf("Failed to read output file after first commit: %v", err) + require.NoError(t, err, "should be able to read output file after first commit") } - if !strings.Contains(string(content1), "First") { - t.Errorf("File should contain first commit data. Got: %s", string(content1)) - } + assert.Contains(t, string(content1), "First", "File should contain first commit data") // Second commit: Add second record (should overwrite) tx2, err := db.BeginTx(ctx, nil) if err != nil { - t.Fatalf("Begin second transaction failed: %v", err) + require.NoError(t, err, "Begin second transaction should succeed") } _, err = tx2.ExecContext(ctx, "INSERT INTO test (name, count) VALUES ('Second', 200)") if err != nil { - t.Fatalf("Second insert failed: %v", err) + require.NoError(t, err, "Second insert should succeed") } if err := tx2.Commit(); err != nil { - t.Fatalf("Second commit failed: %v", err) + require.NoError(t, err, "Second commit should succeed") } // Read content after second commit content2, err := os.ReadFile(outputFile) //nolint:gosec // Test file path is safe if err != nil { - t.Fatalf("Failed to read output file after second commit: %v", err) + require.NoError(t, err, "should be able to read output file after second commit") } // Verify the file was overwritten and contains both records - if !strings.Contains(string(content2), "First") { - t.Errorf("File should still contain first commit data after second commit. Got: %s", string(content2)) - } + assert.Contains(t, string(content2), "First", "File should still contain first commit data after second commit") - if !strings.Contains(string(content2), "Second") { - t.Errorf("File should contain second commit data. Got: %s", string(content2)) - } + assert.Contains(t, string(content2), "Second", "File should contain second commit data") // Verify the file was actually overwritten (not just appended) // Count lines to make sure we have header + original + two new records @@ -1076,40 +874,34 @@ func TestAutoSave_MultipleCommitsOverwrite(t *testing.T) { } // Should have: header + Initial + First + Second = 4 lines - if nonEmptyLines != 4 { - t.Errorf("Expected 4 lines in overwritten file, got %d. Content: %s", nonEmptyLines, string(content2)) - } + assert.Equal(t, 4, nonEmptyLines, "Expected 4 lines in overwritten file, got %d. 
Content: %s", nonEmptyLines, string(content2)) // Third commit: Update existing record tx3, err := db.BeginTx(ctx, nil) if err != nil { - t.Fatalf("Begin third transaction failed: %v", err) + require.NoError(t, err, "Begin third transaction should succeed") } _, err = tx3.ExecContext(ctx, "UPDATE test SET count = 999 WHERE name = 'Initial'") if err != nil { - t.Fatalf("Update failed: %v", err) + require.NoError(t, err, "Update should succeed") } if err := tx3.Commit(); err != nil { - t.Fatalf("Third commit failed: %v", err) + require.NoError(t, err, "Third commit should succeed") } // Read content after third commit content3, err := os.ReadFile(outputFile) //nolint:gosec // Test file path is safe if err != nil { - t.Fatalf("Failed to read output file after third commit: %v", err) + require.NoError(t, err, "should be able to read output file after third commit") } // Verify the update was saved - if !strings.Contains(string(content3), "999") { - t.Errorf("File should contain updated count (999). Got: %s", string(content3)) - } + assert.Contains(t, string(content3), "999", "File should contain updated count (999)") // Verify original count (1) was overwritten - if strings.Contains(string(content3), "Initial,1") { - t.Errorf("File should not contain old count (1) after update. Got: %s", string(content3)) - } + assert.NotContains(t, string(content3), "Initial,1", "File should not contain old count (1) after update") } func TestAutoSave_ExplicitDisable(t *testing.T) { @@ -1121,15 +913,13 @@ func TestAutoSave_ExplicitDisable(t *testing.T) { // Create test CSV file csvPath := filepath.Join(tmpDir, "test.csv") csvContent := "name,age\nAlice,25\n" - if err := os.WriteFile(csvPath, []byte(csvContent), 0600); err != nil { - t.Fatalf("Failed to write test CSV: %v", err) - } + err := os.WriteFile(csvPath, []byte(csvContent), 0600) + require.NoError(t, err, "Failed to write test CSV") // Create output directory outputDir := filepath.Join(tmpDir, "output") - if err := os.MkdirAll(outputDir, 0750); err != nil { - t.Fatalf("Failed to create output dir: %v", err) - } + err = os.MkdirAll(outputDir, 0750) + require.NoError(t, err, "Failed to create output dir") ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() @@ -1142,29 +932,29 @@ func TestAutoSave_ExplicitDisable(t *testing.T) { validatedBuilder, err := builder.Build(ctx) if err != nil { - t.Fatalf("Build failed: %v", err) + require.NoError(t, err, "Build should succeed") } db, err := validatedBuilder.Open(ctx) if err != nil { - t.Fatalf("Open failed: %v", err) + require.NoError(t, err, "Open should succeed") } // Modify data _, err = db.ExecContext(ctx, "INSERT INTO test (name, age) VALUES ('Disabled', 99)") if err != nil { - t.Fatalf("Insert failed: %v", err) + require.NoError(t, err, "Insert should succeed") } // Close database (should NOT trigger auto-save due to DisableAutoSave) if err := db.Close(); err != nil { - t.Fatalf("Close failed: %v", err) + require.NoError(t, err, "Close should succeed") } // Check that no output file was created outputFile := filepath.Join(outputDir, "test.csv") if _, err := os.Stat(outputFile); !os.IsNotExist(err) { - t.Errorf("Auto-save file should not have been created when explicitly disabled") + assert.NoFileExists(t, outputFile, "Auto-save file should not have been created when explicitly disabled") } } @@ -1178,7 +968,7 @@ func TestBuilder_ErrorCases(t *testing.T) { builder := NewBuilder() _, err := builder.Build(ctx) if err == nil { - t.Error("Build() with no inputs should 
return error") + assert.Error(t, err, "Build() with no inputs should return error") } }) @@ -1187,7 +977,7 @@ func TestBuilder_ErrorCases(t *testing.T) { builder := NewBuilder().AddPath("") _, err := builder.Build(ctx) if err == nil { - t.Error("Build() with empty path should return error") + assert.Error(t, err, "Build() with empty path should return error") } }) @@ -1196,7 +986,7 @@ func TestBuilder_ErrorCases(t *testing.T) { builder := NewBuilder().AddPath(filepath.Join("non", "existent", "file.csv")) _, err := builder.Build(ctx) if err == nil { - t.Error("Build() with non-existent path should return error") + assert.Error(t, err, "Build() with non-existent path should return error") } }) @@ -1205,7 +995,7 @@ func TestBuilder_ErrorCases(t *testing.T) { tmpDir := t.TempDir() csvPath := filepath.Join(tmpDir, "test.csv") if err := os.WriteFile(csvPath, []byte("col1\nval1\n"), 0600); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } // Test with empty string for output directory - should use overwrite mode @@ -1224,7 +1014,7 @@ func TestBuilder_ErrorCases(t *testing.T) { tmpDir := t.TempDir() csvPath := filepath.Join(tmpDir, "test.csv") if err := os.WriteFile(csvPath, []byte("col1\nval1\n"), 0600); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } // Test with empty string for output directory - should use overwrite mode @@ -1263,9 +1053,9 @@ func TestBuilder_ErrorCases(t *testing.T) { // Build should fail with empty CSV data if err == nil { - t.Error("Build should fail with empty reader") + assert.Error(t, err, "Build should fail with empty reader") } else if !strings.Contains(err.Error(), "empty CSV data") { - t.Errorf("Expected 'empty CSV data' error, got: %v", err) + assert.Contains(t, err.Error(), "empty CSV data", "Expected 'empty CSV data' error") } }) @@ -1280,7 +1070,7 @@ func TestBuilder_ErrorCases(t *testing.T) { _, err := builder.Build(ctx) if err != nil { - t.Errorf("Build should handle small chunk size, got error: %v", err) + assert.NoError(t, err, "Build should handle small chunk size") } }) } @@ -1292,7 +1082,7 @@ func TestBuilder_AddPaths_ErrorCases(t *testing.T) { t.Parallel() builder := NewBuilder().AddPaths("file1.csv", "file2.tsv", "file3.ltsv") if len(builder.paths) != 3 { - t.Errorf("AddPaths should add all paths, got %d", len(builder.paths)) + assert.Len(t, builder.paths, 3, "AddPaths should add all paths") } expectedPaths := []string{"file1.csv", "file2.tsv", "file3.ltsv"} for i, expectedPath := range expectedPaths { @@ -1306,7 +1096,7 @@ func TestBuilder_AddPaths_ErrorCases(t *testing.T) { t.Parallel() builder := NewBuilder().AddPaths("valid.csv", "", "another.csv") if len(builder.paths) != 3 { - t.Errorf("AddPaths should add all paths including empty ones, got %d", len(builder.paths)) + assert.Len(t, builder.paths, 3, "AddPaths should add all paths including empty ones") } if builder.paths[1] != "" { t.Errorf("AddPaths should preserve empty string, got %s", builder.paths[1]) @@ -1317,7 +1107,7 @@ func TestBuilder_AddPaths_ErrorCases(t *testing.T) { t.Parallel() builder := NewBuilder().AddPaths() if len(builder.paths) != 0 { - t.Errorf("AddPaths() with no arguments should not add any paths, got %d", len(builder.paths)) + assert.Len(t, builder.paths, 0, "AddPaths() with no arguments should not add any paths") } }) } @@ -1567,7 +1357,7 @@ func TestDriverMethods(t *testing.T) { connector := &directConnector{} driver := connector.Driver() if driver == nil { - t.Error("Expected non-nil driver") + 
assert.NotNil(t, driver, "Expected non-nil driver") } }) @@ -1577,7 +1367,7 @@ func TestDriverMethods(t *testing.T) { connector := &autoSaveConnector{} driver := connector.Driver() if driver == nil { - t.Error("Expected non-nil driver") + assert.NotNil(t, driver, "Expected non-nil driver") } }) } @@ -1590,7 +1380,7 @@ func TestTransactionMethods(t *testing.T) { csvFile := filepath.Join(tempDir, "test.csv") csvContent := "id,name\n1,Alice\n2,Bob\n" if err := os.WriteFile(csvFile, []byte(csvContent), 0600); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } t.Run("Begin and Rollback transaction", func(t *testing.T) { @@ -1601,29 +1391,29 @@ func TestTransactionMethods(t *testing.T) { EnableAutoSaveOnCommit(tempDir). Build(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db, err := validatedBuilder.Open(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer db.Close() ctx := context.Background() tx, err := db.BeginTx(ctx, nil) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } _, err = tx.ExecContext(ctx, "UPDATE test SET name = 'Charlie' WHERE id = 1") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } err = tx.Rollback() if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } }) @@ -1635,12 +1425,12 @@ func TestTransactionMethods(t *testing.T) { EnableAutoSaveOnCommit(tempDir). Build(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db, err := validatedBuilder.Open(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer db.Close() @@ -1648,7 +1438,7 @@ func TestTransactionMethods(t *testing.T) { ctx := context.Background() tx, err := db.BeginTx(ctx, nil) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer tx.Rollback() }) @@ -1661,31 +1451,31 @@ func TestTransactionMethods(t *testing.T) { EnableAutoSaveOnCommit(""). // Empty string triggers overwrite Build(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db, err := validatedBuilder.Open(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer db.Close() ctx := context.Background() tx, err := db.BeginTx(ctx, nil) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } _, err = tx.ExecContext(ctx, "UPDATE test SET name = 'Diana' WHERE id = 1") if err != nil { _ = tx.Rollback() //nolint:errcheck - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } // This should trigger overwriteOriginalFiles err = tx.Commit() if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } }) } @@ -1698,7 +1488,7 @@ func TestAutoSavePaths(t *testing.T) { csvFile := filepath.Join(tempDir, "test.csv") csvContent := "id,name\n1,Alice\n2,Bob\n" if err := os.WriteFile(csvFile, []byte(csvContent), 0600); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } t.Run("Close connection with auto-save", func(t *testing.T) { @@ -1709,24 +1499,24 @@ func TestAutoSavePaths(t *testing.T) { EnableAutoSave(tempDir). 
Build(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db, err := validatedBuilder.Open(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } ctx := context.Background() _, err = db.ExecContext(ctx, "UPDATE test SET name = 'Eve' WHERE id = 1") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } // Close should trigger auto-save err = db.Close() if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } }) @@ -1738,12 +1528,12 @@ func TestAutoSavePaths(t *testing.T) { AddReader(strings.NewReader("col1,col2\n"), "empty_test", FileTypeCSV). Build(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db, err := validatedBuilder.Open(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer db.Close() @@ -1751,7 +1541,7 @@ func TestAutoSavePaths(t *testing.T) { var count int err = db.QueryRowContext(context.Background(), "SELECT COUNT(*) FROM empty_test").Scan(&count) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } if count != 0 { t.Errorf("Expected empty table, got %d rows", count) @@ -1773,19 +1563,19 @@ func TestAutoSavePaths(t *testing.T) { SetDefaultChunkSize(1). // Very small chunk to simulate header-only parsing Build(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db, err := validatedBuilder.Open(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer db.Close() // Check table was created rows, err := db.QueryContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name='parsed_empty'") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer rows.Close() @@ -1794,7 +1584,7 @@ func TestAutoSavePaths(t *testing.T) { hasTable = true } if err := rows.Err(); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } if !hasTable { @@ -1811,7 +1601,7 @@ func TestAutoSavePaths(t *testing.T) { AddReader(strings.NewReader(duplicateCSV), "duplicate_cols", FileTypeCSV). Build(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } _, err = validatedBuilder.Open(context.Background()) @@ -1833,20 +1623,20 @@ func TestAutoSavePaths(t *testing.T) { AddReader(strings.NewReader(brokenCSV), "fallback_test", FileTypeCSV). 
Build(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } // This should not fail but use the createTableFromHeaders fallback db, err := validatedBuilder.Open(context.Background()) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer db.Close() // Check table exists rows, err := db.QueryContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name='fallback_test'") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer rows.Close() @@ -1855,7 +1645,7 @@ func TestAutoSavePaths(t *testing.T) { hasTable = true } if err := rows.Err(); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } if !hasTable { @@ -1874,7 +1664,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { sqliteDriver := &sqlite.Driver{} conn, err := sqliteDriver.Open(":memory:") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db := sql.OpenDB(&directConnector{conn: conn}) defer db.Close() @@ -1883,7 +1673,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { xlsxPath := filepath.Join("testdata", "excel", "sample.xlsx") file, err := os.Open(xlsxPath) //nolint:gosec // Test file path is safe if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer file.Close() @@ -1891,13 +1681,13 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { builder := &DBBuilder{} err = builder.streamXLSXFileToSQLite(ctx, db, file, xlsxPath) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } // Verify tables were created rows, err := db.QueryContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer rows.Close() @@ -1905,12 +1695,12 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { for rows.Next() { var tableName string if err := rows.Scan(&tableName); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } tables = append(tables, tableName) } if err := rows.Err(); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } expectedTables := []string{"sample_Sheet1", "sample_Sheet2"} @@ -1921,7 +1711,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { // Verify data in first sheet rows, err = db.QueryContext(context.Background(), "SELECT * FROM sample_Sheet1 ORDER BY id") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer rows.Close() @@ -1929,7 +1719,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { for rows.Next() { var id, name string if err := rows.Scan(&id, &name); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } count++ @@ -1949,7 +1739,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { } } if err := rows.Err(); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } if count != 3 { @@ -1959,7 +1749,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { // Verify data in second sheet rows, err = db.QueryContext(context.Background(), "SELECT * FROM sample_Sheet2 ORDER BY id") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer rows.Close() @@ -1967,7 +1757,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { for rows.Next() { var id, mail string if err := rows.Scan(&id, &mail); err != nil { - t.Fatal(err) + require.NoError(t, 
err, "operation should succeed") } count++ @@ -1987,7 +1777,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { } } if err := rows.Err(); err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } if count != 3 { @@ -2002,7 +1792,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { sqliteDriver := &sqlite.Driver{} conn, err := sqliteDriver.Open(":memory:") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db := sql.OpenDB(&directConnector{conn: conn}) defer db.Close() @@ -2029,7 +1819,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { sqliteDriver := &sqlite.Driver{} conn, err := sqliteDriver.Open(":memory:") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db := sql.OpenDB(&directConnector{conn: conn}) defer db.Close() @@ -2056,7 +1846,7 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { sqliteDriver := &sqlite.Driver{} conn, err := sqliteDriver.Open(":memory:") if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } db := sql.OpenDB(&directConnector{conn: conn}) defer db.Close() @@ -2064,14 +1854,14 @@ func TestStreamXLSXFileToSQLite(t *testing.T) { // Create a table first _, err = db.ExecContext(context.Background(), `CREATE TABLE "sample_Sheet1" (id TEXT, name TEXT)`) if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } // Read sample XLSX file xlsxPath := filepath.Join("testdata", "excel", "sample.xlsx") file, err := os.Open(xlsxPath) //nolint:gosec // Test file path is safe if err != nil { - t.Fatal(err) + require.NoError(t, err, "operation should succeed") } defer file.Close() diff --git a/column_inference_test.go b/column_inference_test.go index 3e4df59..8f613ed 100644 --- a/column_inference_test.go +++ b/column_inference_test.go @@ -2,6 +2,9 @@ package filesql import ( "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestInferColumnType(t *testing.T) { @@ -107,9 +110,7 @@ func TestInferColumnType(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { result := inferColumnType(tt.values) - if result != tt.expected { - t.Errorf("inferColumnType(%v) = %v, want %v", tt.values, result, tt.expected) - } + assert.Equal(t, tt.expected, result, "inferColumnType failed for values: %v", tt.values) }) } } @@ -135,17 +136,11 @@ func TestInferColumnsInfo(t *testing.T) { {Name: "hire_date", Type: columnTypeDatetime}, } - if len(result) != len(expected) { - t.Fatalf("Expected %d columns, got %d", len(expected), len(result)) - } + require.Len(t, result, len(expected), "Column count mismatch") for i, exp := range expected { - if result[i].Name != exp.Name { - t.Errorf("Column %d: expected name %s, got %s", i, exp.Name, result[i].Name) - } - if result[i].Type != exp.Type { - t.Errorf("Column %d: expected type %s, got %s", i, exp.Type, result[i].Type) - } + assert.Equal(t, exp.Name, result[i].Name, "Column %d name mismatch", i) + assert.Equal(t, exp.Type, result[i].Type, "Column %d type mismatch", i) } }) @@ -155,14 +150,10 @@ func TestInferColumnsInfo(t *testing.T) { result := inferColumnsInfo(header, records) - if len(result) != 2 { - t.Fatalf("Expected 2 columns, got %d", len(result)) - } + require.Len(t, result, 2, "Expected 2 columns for empty records") for i, col := range result { - if col.Type != columnTypeText { - t.Errorf("Column %d: expected TEXT type for empty records, got %s", i, col.Type) - } + assert.Equal(t, columnTypeText, col.Type, "Column %d 
should be TEXT type for empty records", i) } }) @@ -182,17 +173,11 @@ func TestInferColumnsInfo(t *testing.T) { {Name: "timestamp", Type: columnTypeDatetime}, } - if len(result) != len(expected) { - t.Fatalf("Expected %d columns, got %d", len(expected), len(result)) - } + require.Len(t, result, len(expected), "Datetime column count mismatch") for i, exp := range expected { - if result[i].Name != exp.Name { - t.Errorf("Column %d: expected name %s, got %s", i, exp.Name, result[i].Name) - } - if result[i].Type != exp.Type { - t.Errorf("Column %d: expected type %s, got %s", i, exp.Type, result[i].Type) - } + assert.Equal(t, exp.Name, result[i].Name, "Column %d name mismatch", i) + assert.Equal(t, exp.Type, result[i].Type, "Column %d type mismatch", i) } }) } @@ -239,9 +224,7 @@ func TestIsDatetime(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { result := isDatetime(tt.value) - if result != tt.expected { - t.Errorf("isDatetime(%q) = %v, want %v", tt.value, result, tt.expected) - } + assert.Equal(t, tt.expected, result, "isDatetime failed for value: %q", tt.value) }) } } diff --git a/dump_options_test.go b/dump_options_test.go index 459fd92..444fa3c 100644 --- a/dump_options_test.go +++ b/dump_options_test.go @@ -2,6 +2,8 @@ package filesql import ( "testing" + + "github.com/stretchr/testify/assert" ) func TestOutputFormat_String(t *testing.T) { @@ -37,9 +39,8 @@ func TestOutputFormat_String(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Parallel() - if got := tt.format.String(); got != tt.want { - t.Errorf("OutputFormat.String() = %v, want %v", got, tt.want) - } + got := tt.format.String() + assert.Equal(t, tt.want, got, "OutputFormat.String() returned unexpected value") }) } } @@ -77,9 +78,8 @@ func TestOutputFormat_Extension(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Parallel() - if got := tt.format.Extension(); got != tt.want { - t.Errorf("OutputFormat.Extension() = %v, want %v", got, tt.want) - } + got := tt.format.Extension() + assert.Equal(t, tt.want, got, "OutputFormat.Extension() returned unexpected value") }) } } @@ -127,9 +127,8 @@ func TestCompressionType_String(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Parallel() - if got := tt.compression.String(); got != tt.want { - t.Errorf("CompressionType.String() = %v, want %v", got, tt.want) - } + got := tt.compression.String() + assert.Equal(t, tt.want, got, "CompressionType.String() returned unexpected value") }) } } @@ -177,9 +176,8 @@ func TestCompressionType_Extension(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Parallel() - if got := tt.compression.Extension(); got != tt.want { - t.Errorf("CompressionType.Extension() = %v, want %v", got, tt.want) - } + got := tt.compression.Extension() + assert.Equal(t, tt.want, got, "CompressionType.Extension() returned unexpected value") }) } } @@ -189,13 +187,8 @@ func TestNewDumpOptions(t *testing.T) { options := NewDumpOptions() - if options.Format != OutputFormatCSV { - t.Errorf("NewDumpOptions().Format = %v, want %v", options.Format, OutputFormatCSV) - } - - if options.Compression != CompressionNone { - t.Errorf("NewDumpOptions().Compression = %v, want %v", options.Compression, CompressionNone) - } + assert.Equal(t, OutputFormatCSV, options.Format, "NewDumpOptions().Format should default to CSV") + assert.Equal(t, CompressionNone, options.Compression, "NewDumpOptions().Compression should default to None") } func 
TestDumpOptions_WithFormat(t *testing.T) { @@ -205,19 +198,13 @@ func TestDumpOptions_WithFormat(t *testing.T) { newOptions := options.WithFormat(OutputFormatTSV) // Original options should not be modified - if options.Format != OutputFormatCSV { - t.Errorf("Original options modified: Format = %v, want %v", options.Format, OutputFormatCSV) - } + assert.Equal(t, OutputFormatCSV, options.Format, "Original options should not be modified") // New options should have the updated format - if newOptions.Format != OutputFormatTSV { - t.Errorf("WithFormat().Format = %v, want %v", newOptions.Format, OutputFormatTSV) - } + assert.Equal(t, OutputFormatTSV, newOptions.Format, "WithFormat() should update format") // Other fields should remain unchanged - if newOptions.Compression != CompressionNone { - t.Errorf("WithFormat().Compression = %v, want %v", newOptions.Compression, CompressionNone) - } + assert.Equal(t, CompressionNone, newOptions.Compression, "WithFormat() should not change compression") } func TestDumpOptions_WithCompression(t *testing.T) { @@ -227,19 +214,13 @@ func TestDumpOptions_WithCompression(t *testing.T) { newOptions := options.WithCompression(CompressionGZ) // Original options should not be modified - if options.Compression != CompressionNone { - t.Errorf("Original options modified: Compression = %v, want %v", options.Compression, CompressionNone) - } + assert.Equal(t, CompressionNone, options.Compression, "Original options should not be modified") // New options should have the updated compression - if newOptions.Compression != CompressionGZ { - t.Errorf("WithCompression().Compression = %v, want %v", newOptions.Compression, CompressionGZ) - } + assert.Equal(t, CompressionGZ, newOptions.Compression, "WithCompression() should update compression") // Other fields should remain unchanged - if newOptions.Format != OutputFormatCSV { - t.Errorf("WithCompression().Format = %v, want %v", newOptions.Format, OutputFormatCSV) - } + assert.Equal(t, OutputFormatCSV, newOptions.Format, "WithCompression() should not change format") } func TestDumpOptions_FileExtension(t *testing.T) { @@ -290,9 +271,8 @@ func TestDumpOptions_FileExtension(t *testing.T) { Format: tt.format, Compression: tt.compression, } - if got := options.FileExtension(); got != tt.want { - t.Errorf("DumpOptions.FileExtension() = %v, want %v", got, tt.want) - } + got := options.FileExtension() + assert.Equal(t, tt.want, got, "DumpOptions.FileExtension() returned unexpected value") }) } } @@ -316,9 +296,8 @@ func TestOutputFormatStringEdgeCases(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Parallel() - if got := tt.format.String(); got != tt.want { - t.Errorf("OutputFormat.String() = %v, want %v", got, tt.want) - } + got := tt.format.String() + assert.Equal(t, tt.want, got, "OutputFormat.String() returned unexpected value") }) } } @@ -330,16 +309,10 @@ func TestDumpOptions_ChainedMethods(t *testing.T) { WithFormat(OutputFormatLTSV). 
WithCompression(CompressionZSTD) - if options.Format != OutputFormatLTSV { - t.Errorf("Chained WithFormat().Format = %v, want %v", options.Format, OutputFormatLTSV) - } - - if options.Compression != CompressionZSTD { - t.Errorf("Chained WithCompression().Compression = %v, want %v", options.Compression, CompressionZSTD) - } + assert.Equal(t, OutputFormatLTSV, options.Format, "Chained WithFormat() should work") + assert.Equal(t, CompressionZSTD, options.Compression, "Chained WithCompression() should work") expectedExt := ".ltsv.zst" - if got := options.FileExtension(); got != expectedExt { - t.Errorf("Chained options FileExtension() = %v, want %v", got, expectedExt) - } + got := options.FileExtension() + assert.Equal(t, expectedExt, got, "Chained options FileExtension() should work") } diff --git a/file_test.go b/file_test.go index edd8799..8149205 100644 --- a/file_test.go +++ b/file_test.go @@ -8,6 +8,8 @@ import ( "strings" "testing" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/xuri/excelize/v2" ) @@ -91,12 +93,8 @@ func TestNewFile(t *testing.T) { t.Parallel() file := newFile(tt.path) - if file.getFileType() != tt.expected { - t.Errorf("expected %v, got %v", tt.expected, file.getFileType()) - } - if file.getPath() != tt.path { - t.Errorf("expected %s, got %s", tt.path, file.getPath()) - } + assert.Equal(t, tt.expected, file.getFileType(), "File type mismatch") + assert.Equal(t, tt.path, file.getPath(), "File path mismatch") }) } } @@ -141,9 +139,7 @@ func TestFile_IsCompressed(t *testing.T) { t.Parallel() file := newFile(tt.path) - if file.isCompressed() != tt.expected { - t.Errorf("expected %v, got %v", tt.expected, file.isCompressed()) - } + assert.Equal(t, tt.expected, file.isCompressed(), "Compression check failed") }) } } @@ -206,18 +202,10 @@ func TestFile_CompressionTypes(t *testing.T) { t.Parallel() file := newFile(tt.path) - if file.isGZ() != tt.isGZ { - t.Errorf("IsGZ() expected %v, got %v", tt.isGZ, file.isGZ()) - } - if file.isBZ2() != tt.isBZ2 { - t.Errorf("IsBZ2() expected %v, got %v", tt.isBZ2, file.isBZ2()) - } - if file.isXZ() != tt.isXZ { - t.Errorf("IsXZ() expected %v, got %v", tt.isXZ, file.isXZ()) - } - if file.isZSTD() != tt.isZSTD { - t.Errorf("IsZSTD() expected %v, got %v", tt.isZSTD, file.isZSTD()) - } + assert.Equal(t, tt.isGZ, file.isGZ(), "IsGZ() check failed") + assert.Equal(t, tt.isBZ2, file.isBZ2(), "IsBZ2() check failed") + assert.Equal(t, tt.isXZ, file.isXZ(), "IsXZ() check failed") + assert.Equal(t, tt.isZSTD, file.isZSTD(), "IsZSTD() check failed") }) } } @@ -234,29 +222,19 @@ Alice,30,Osaka Bob,35,Kyoto` err := os.WriteFile(csvFile, []byte(csvContent), 0600) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to write CSV file") file := newFile(csvFile) table, err := file.toTable() - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to convert file to table") expectedHeader := header{"name", "age", "city"} - if !table.getHeader().equal(expectedHeader) { - t.Errorf("expected header %v, got %v", expectedHeader, table.getHeader()) - } + assert.True(t, table.getHeader().equal(expectedHeader), "Header mismatch") - if len(table.getRecords()) != 3 { - t.Errorf("expected 3 records, got %d", len(table.getRecords())) - } + assert.Len(t, table.getRecords(), 3, "Record count mismatch") expectedFirstRecord := record{"John", "25", "Tokyo"} - if !table.getRecords()[0].equal(expectedFirstRecord) { - t.Errorf("expected first record %v, got %v", expectedFirstRecord, 
table.getRecords()[0]) - } + assert.True(t, table.getRecords()[0].equal(expectedFirstRecord), "First record mismatch") } func TestFile_ToTable_TSV(t *testing.T) { @@ -271,29 +249,19 @@ Alice 30 Osaka Bob 35 Kyoto` err := os.WriteFile(tsvFile, []byte(tsvContent), 0600) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to write TSV file") file := newFile(tsvFile) table, err := file.toTable() - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to convert file to table") expectedHeader := header{"name", "age", "city"} - if !table.getHeader().equal(expectedHeader) { - t.Errorf("expected header %v, got %v", expectedHeader, table.getHeader()) - } + assert.True(t, table.getHeader().equal(expectedHeader), "Header mismatch") - if len(table.getRecords()) != 3 { - t.Errorf("expected 3 records, got %d", len(table.getRecords())) - } + assert.Len(t, table.getRecords(), 3, "Record count mismatch") expectedFirstRecord := record{"John", "25", "Tokyo"} - if !table.getRecords()[0].equal(expectedFirstRecord) { - t.Errorf("expected first record %v, got %v", expectedFirstRecord, table.getRecords()[0]) - } + assert.True(t, table.getRecords()[0].equal(expectedFirstRecord), "First record mismatch") } func TestFile_ToTable_LTSV(t *testing.T) { @@ -307,19 +275,13 @@ name:Alice age:30 city:Osaka name:Bob age:35 city:Kyoto` err := os.WriteFile(ltsvFile, []byte(ltsvContent), 0600) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to write LTSV file") file := newFile(ltsvFile) table, err := file.toTable() - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to convert file to table") - if len(table.getRecords()) != 3 { - t.Errorf("expected 3 records, got %d", len(table.getRecords())) - } + assert.Len(t, table.getRecords(), 3, "Record count mismatch") // LTSV header order may vary due to map iteration header := table.getHeader() @@ -366,18 +328,12 @@ Alice,30,Osaka` file := newFile(csvFile) table, err := file.toTable() - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to convert file to table") expectedHeader := header{"name", "age", "city"} - if !table.getHeader().equal(expectedHeader) { - t.Errorf("expected header %v, got %v", expectedHeader, table.getHeader()) - } + assert.True(t, table.getHeader().equal(expectedHeader), "Header mismatch") - if len(table.getRecords()) != 2 { - t.Errorf("expected 2 records, got %d", len(table.getRecords())) - } + assert.Len(t, table.getRecords(), 2, "Expected 2 records") } func TestFile_ToTable_UnsupportedFormat(t *testing.T) { @@ -393,9 +349,7 @@ func TestFile_ToTable_UnsupportedFormat(t *testing.T) { file := newFile(txtFile) _, err = file.toTable() - if err == nil { - t.Error("expected error for unsupported file format") - } + assert.Error(t, err, "Expected error for unsupported file format") } func TestFile_ToTable_EmptyFile(t *testing.T) { @@ -411,9 +365,7 @@ func TestFile_ToTable_EmptyFile(t *testing.T) { file := newFile(csvFile) _, err = file.toTable() - if err == nil { - t.Error("expected error for empty file") - } + assert.Error(t, err, "Expected error for empty file") } func TestTableFromFilePath(t *testing.T) { @@ -451,9 +403,7 @@ func TestTableFromFilePath(t *testing.T) { t.Parallel() result := tableFromFilePath(tt.filePath) - if result != tt.expected { - t.Errorf("expected %s, got %s", tt.expected, result) - } + assert.Equal(t, tt.expected, result, "tableFromFilePath failed") }) } } @@ -523,17 +473,9 @@ func Test_FileTypeDetectionMethods(t *testing.T) { t.Run(tc.name, func(t 
*testing.T) { file := newFile(tc.filePath) - if file.isCSV() != tc.expectedCSV { - t.Errorf("IsCSV() = %v, expected %v for %s", file.isCSV(), tc.expectedCSV, tc.filePath) - } - - if file.isTSV() != tc.expectedTSV { - t.Errorf("IsTSV() = %v, expected %v for %s", file.isTSV(), tc.expectedTSV, tc.filePath) - } - - if file.isLTSV() != tc.expectedLTSV { - t.Errorf("IsLTSV() = %v, expected %v for %s", file.isLTSV(), tc.expectedLTSV, tc.filePath) - } + assert.Equal(t, tc.expectedCSV, file.isCSV(), "IsCSV() check failed for %s", tc.filePath) + assert.Equal(t, tc.expectedTSV, file.isTSV(), "IsTSV() check failed for %s", tc.filePath) + assert.Equal(t, tc.expectedLTSV, file.isLTSV(), "IsLTSV() check failed for %s", tc.filePath) }) } } @@ -613,9 +555,7 @@ func Test_OpenReaderEdgeCases(t *testing.T) { t.Run("Invalid gzip file", func(t *testing.T) { // Create a file with .gz extension but invalid gzip content tmpFile, err := os.CreateTemp(t.TempDir(), "invalid_*.csv.gz") - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to create file or perform operation") defer os.Remove(tmpFile.Name()) // Write non-gzip content @@ -682,9 +622,7 @@ func TestFile_ToTable_DuplicateColumns(t *testing.T) { 2,Jane,20,jane@example.com` err := os.WriteFile(csvFile, []byte(csvContent), 0600) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to create file or perform operation") file := newFile(csvFile) _, err = file.toTable() @@ -714,9 +652,7 @@ func TestFile_ToTable_DuplicateColumns(t *testing.T) { 2 Jane 20 jane@example.com` err := os.WriteFile(tsvFile, []byte(tsvContent), 0600) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to create file or perform operation") file := newFile(tsvFile) _, err = file.toTable() @@ -745,9 +681,7 @@ func TestFile_ToTable_DuplicateColumns(t *testing.T) { John,25,Doe,john@example.com,26` err := os.WriteFile(csvFile, []byte(csvContent), 0600) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to create file or perform operation") file := newFile(csvFile) _, err = file.toTable() @@ -772,9 +706,7 @@ John,25,Doe,john@example.com,26` 2,Jane,30,jane@example.com` err := os.WriteFile(csvFile, []byte(csvContent), 0600) - if err != nil { - t.Fatal(err) - } + require.NoError(t, err, "Failed to create file or perform operation") file := newFile(csvFile) table, err := file.toTable() diff --git a/filesql_test.go b/filesql_test.go index 2f82aa7..10c51d7 100644 --- a/filesql_test.go +++ b/filesql_test.go @@ -11,7 +11,6 @@ import ( "io" "os" "path/filepath" - "reflect" "runtime" "strconv" "strings" @@ -19,6 +18,9 @@ import ( "testing" "time" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/xuri/excelize/v2" "github.com/apache/arrow/go/v18/arrow" @@ -69,10 +71,11 @@ func TestOpen(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { db, err := Open(tt.paths...) 
-			if (err != nil) != tt.wantErr {
-				t.Errorf("Open() error = %v, wantErr %v", err, tt.wantErr)
+			if tt.wantErr {
+				assert.Error(t, err, "Open() should have failed")
 				return
 			}
+			assert.NoError(t, err, "Open() should have succeeded")
 
 			if !tt.wantErr {
 				defer db.Close()
@@ -83,26 +86,26 @@ func TestOpen(t *testing.T) {
 				if strings.Contains(tt.paths[0], "sample.csv") || strings.Contains(tt.paths[0], "testdata") {
 					rows, err := db.QueryContext(context.Background(), "SELECT COUNT(*) FROM sample")
 					if err != nil {
-						t.Errorf("Query() error = %v", err)
+						assert.NoError(t, err, "Query() error")
 						return
 					}
 					defer rows.Close()
 
 					if err := rows.Err(); err != nil {
-						t.Errorf("Rows error: %v", err)
+						assert.NoError(t, err, "Rows error")
 						return
 					}
 
 					var count int
 					if rows.Next() {
 						if err := rows.Scan(&count); err != nil {
-							t.Errorf("Scan() error = %v", err)
+							assert.NoError(t, err, "Scan() error")
 							return
 						}
 					}
 
 					if count != 3 {
-						t.Errorf("Expected 3 rows, got %d", count)
+						assert.Equal(t, 3, count, "Expected 3 rows")
 					}
 				}
 			}
@@ -115,9 +118,7 @@ func TestSQLQueries(t *testing.T) {
 	t.Parallel()
 
 	db, err := Open(filepath.Join("testdata", "sample.csv"))
-	if err != nil {
-		t.Fatalf("Failed to open database: %v", err)
-	}
+	require.NoError(t, err, "Failed to open database")
 	defer db.Close()
 
 	tests := []struct {
@@ -145,40 +146,33 @@ func TestSQLQueries(t *testing.T) {
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			rows, err := db.QueryContext(context.Background(), tt.query)
+			assert.NoError(t, err, "Query() error")
 			if err != nil {
-				t.Errorf("Query() error = %v", err)
 				return
 			}
 			defer rows.Close()
 
-			if err := rows.Err(); err != nil {
-				t.Errorf("Rows error: %v", err)
-				return
-			}
+			assert.NoError(t, rows.Err(), "Rows error")
 
 			if rows.Next() {
 				var result interface{}
 				if err := rows.Scan(&result); err != nil {
-					t.Errorf("Scan() error = %v", err)
+					assert.NoError(t, err, "Scan() error")
 					return
 				}
 
 				switch expected := tt.expected.(type) {
 				case int:
 					if count, ok := result.(int64); ok {
-						if int(count) != expected {
-							t.Errorf("Expected %v, got %v", expected, count)
-						}
+						assert.Equal(t, expected, int(count), "Expected count to match")
 					} else {
-						t.Errorf("Expected int, got %T", result)
+						assert.Failf(t, "Type assertion failed", "Expected int, got %T", result)
 					}
 				case string:
 					if str, ok := result.(string); ok {
-						if str != expected {
-							t.Errorf("Expected %v, got %v", expected, str)
-						}
+						assert.Equal(t, expected, str, "Expected string to match")
 					} else {
-						t.Errorf("Expected string, got %T", result)
+						assert.Failf(t, "Type assertion failed", "Expected string, got %T", result)
 					}
 				}
 			}
@@ -191,9 +185,7 @@ func TestMultipleFiles(t *testing.T) {
 	// Test loading multiple files from directory
 	db, err := Open("testdata")
-	if err != nil {
-		t.Fatalf("Failed to open directory: %v", err)
-	}
+	require.NoError(t, err, "Failed to open directory")
 	defer db.Close()
 
 	tests := []struct {
@@ -226,27 +218,22 @@ func TestMultipleFiles(t *testing.T) {
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			rows, err := db.QueryContext(context.Background(), tt.query)
+			assert.NoError(t, err, "Query() error")
 			if err != nil {
-				t.Errorf("Query() error = %v", err)
 				return
 			}
			defer rows.Close()
 
-			if err := rows.Err(); err != nil {
-				t.Errorf("Rows error: %v", err)
-				return
-			}
+			assert.NoError(t, rows.Err(), "Rows error")
 
 			if rows.Next() {
 				var count int64
 				if err := rows.Scan(&count); err != nil {
-					t.Errorf("Scan() error = %v", err)
+					assert.NoError(t, err, "Scan() error")
 					return
 				}
-				if count == 0 {
-					t.Errorf("Expected non-zero count for
table %s", tt.table) - } + assert.NotEqual(t, int64(0), count, "Expected non-zero count for table %s", tt.table) } }) } @@ -257,9 +244,7 @@ func TestJoinMultipleTables(t *testing.T) { // Test joining tables from multiple files db, err := Open("testdata") - if err != nil { - t.Fatalf("Failed to open directory: %v", err) - } + require.NoError(t, err, "Failed to open directory") defer db.Close() // Test JOIN query across multiple tables @@ -273,13 +258,13 @@ func TestJoinMultipleTables(t *testing.T) { rows, err := db.QueryContext(context.Background(), query) if err != nil { - t.Errorf("JOIN Query() error = %v", err) + assert.Fail(t, "JOIN Query() error = %v", err) return } defer rows.Close() if err := rows.Err(); err != nil { - t.Errorf("Rows error: %v", err) + assert.NoError(t, err, "Rows error") return } @@ -287,12 +272,12 @@ func TestJoinMultipleTables(t *testing.T) { var name string var count int64 if err := rows.Scan(&name, &count); err != nil { - t.Errorf("Scan() error = %v", err) + assert.Fail(t, "Scan() error = %v", err) return } if name != "Alice" { - t.Errorf("Expected name 'Alice', got '%s'", name) + assert.Fail(t, "Expected name 'Alice', got '%s'", name) } } } @@ -328,14 +313,10 @@ id:3 product:Keyboard price:75` AddReader(strings.NewReader(ltsvData), "products", FileTypeLTSV) validatedBuilder, err := builder.Build(context.Background()) - if err != nil { - t.Fatalf("Build failed: %v", err) - } + require.NoError(t, err, "Build failed") db, err := validatedBuilder.Open(context.Background()) - if err != nil { - t.Fatalf("Open failed: %v", err) - } + require.NoError(t, err, "Open failed") defer db.Close() // Test complex JOIN query across all three tables @@ -349,9 +330,7 @@ id:3 product:Keyboard price:75` ` rows, err := db.QueryContext(context.Background(), query) - if err != nil { - t.Fatalf("Complex query failed: %v", err) - } + require.NoError(t, err, "Complex query failed") defer rows.Close() var results []struct { @@ -365,17 +344,15 @@ id:3 product:Keyboard price:75` salary, price float64 } if err := rows.Scan(&r.name, &r.dept, &r.product, &r.salary, &r.price); err != nil { - t.Fatalf("Scan failed: %v", err) + require.NoError(t, err, "Scan failed") } results = append(results, r) } - if err := rows.Err(); err != nil { - t.Fatalf("Rows iteration error: %v", err) - } + require.NoError(t, rows.Err(), "Rows iteration error") if len(results) != 2 { - t.Errorf("Expected 2 results, got %d", len(results)) + assert.Fail(t, "Expected 2 results, got %d", len(results)) } }) @@ -387,14 +364,10 @@ id:3 product:Keyboard price:75` builder := NewBuilder().AddFS(testFS) validatedBuilder, err := builder.Build(context.Background()) - if err != nil { - t.Fatalf("Build with FS failed: %v", err) - } + require.NoError(t, err, "Build with FS failed") db, err := validatedBuilder.Open(context.Background()) - if err != nil { - t.Fatalf("Open with FS failed: %v", err) - } + require.NoError(t, err, "Open with FS failed") defer db.Close() // Verify tables from embedded files @@ -404,10 +377,10 @@ id:3 product:Keyboard price:75` var count int err := db.QueryRowContext(context.Background(), query).Scan(&count) if err != nil { - t.Errorf("Failed to query table %s: %v", table, err) + assert.Fail(t, "Failed to query table %s: %v", table, err) } if count == 0 { - t.Errorf("Table %s is empty", table) + assert.Fail(t, "Table %s is empty", table) } } @@ -421,9 +394,7 @@ id:3 product:Keyboard price:75` ` rows, err := db.QueryContext(context.Background(), query) - if err != nil { - t.Fatalf("Cross-table query failed: 
%v", err) - } + require.NoError(t, err, "Cross-table query failed") defer rows.Close() rowCount := 0 @@ -431,14 +402,12 @@ id:3 product:Keyboard price:75` var name string var orderCount int if err := rows.Scan(&name, &orderCount); err != nil { - t.Fatalf("Scan failed: %v", err) + require.NoError(t, err, "Scan failed") } rowCount++ } - if err := rows.Err(); err != nil { - t.Fatalf("Rows iteration error: %v", err) - } + require.NoError(t, rows.Err(), "Rows iteration error") if rowCount == 0 { t.Error("Expected at least one result from cross-table query") @@ -458,14 +427,10 @@ id:3 product:Keyboard price:75` SetDefaultChunkSize(1024 * 50) // 50KB chunks for testing validatedBuilder, err := builder.Build(context.Background()) - if err != nil { - t.Fatalf("Build with large file failed: %v", err) - } + require.NoError(t, err, "Build with large file failed") db, err := validatedBuilder.Open(context.Background()) - if err != nil { - t.Fatalf("Open with large file failed: %v", err) - } + require.NoError(t, err, "Open with large file failed") defer db.Close() // Test aggregation queries on large dataset @@ -492,7 +457,7 @@ id:3 product:Keyboard price:75` start := time.Now() rows, err := db.QueryContext(context.Background(), q.query) if err != nil { - t.Fatalf("Query '%s' failed: %v", q.name, err) + require.NoError(t, err, "Query '%s' failed", q.name) } defer rows.Close() @@ -502,7 +467,7 @@ id:3 product:Keyboard price:75` // Just scan to verify data is accessible cols, err := rows.Columns() if err != nil { - t.Fatalf("Failed to get columns: %v", err) + require.NoError(t, err, "Failed to get columns") } values := make([]interface{}, len(cols)) @@ -512,13 +477,11 @@ id:3 product:Keyboard price:75` } if err := rows.Scan(scanArgs...); err != nil { - t.Fatalf("Scan failed: %v", err) + require.NoError(t, err, "Scan failed") } } - if err := rows.Err(); err != nil { - t.Fatalf("Rows iteration error: %v", err) - } + require.NoError(t, rows.Err(), "Rows iteration error") if !hasResults { t.Error("Query returned no results") @@ -542,14 +505,10 @@ id:3 product:Keyboard price:75` builder := NewBuilder().AddPaths(compressedFiles...) 
validatedBuilder, err := builder.Build(context.Background()) - if err != nil { - t.Fatalf("Build with compressed files failed: %v", err) - } + require.NoError(t, err, "Build with compressed files failed") db, err := validatedBuilder.Open(context.Background()) - if err != nil { - t.Fatalf("Open with compressed files failed: %v", err) - } + require.NoError(t, err, "Open with compressed files failed") defer db.Close() // Verify all compressed files were loaded correctly @@ -559,10 +518,10 @@ id:3 product:Keyboard price:75` query := "SELECT COUNT(*) FROM " + table // Table name from trusted list err := db.QueryRowContext(context.Background(), query).Scan(&count) if err != nil { - t.Errorf("Failed to query compressed table %s: %v", table, err) + assert.Fail(t, "Failed to query compressed table %s: %v", table, err) } if count == 0 { - t.Errorf("Compressed table %s is empty", table) + assert.Fail(t, "Compressed table %s is empty", table) } } @@ -579,9 +538,7 @@ id:3 product:Keyboard price:75` ` rows, err := db.QueryContext(context.Background(), query) - if err != nil { - t.Fatalf("Union query on compressed files failed: %v", err) - } + require.NoError(t, err, "Union query on compressed files failed") defer rows.Close() results := make(map[string]int) @@ -589,22 +546,20 @@ id:3 product:Keyboard price:75` var source string var count int if err := rows.Scan(&source, &count); err != nil { - t.Fatalf("Scan failed: %v", err) + require.NoError(t, err, "Scan failed") } results[source] = count } - if err := rows.Err(); err != nil { - t.Fatalf("Rows iteration error: %v", err) - } + require.NoError(t, rows.Err(), "Rows iteration error") if len(results) != 4 { - t.Errorf("Expected 4 tables, got %d", len(results)) + assert.Fail(t, "Expected 4 tables, got %d", len(results)) } for table, count := range results { if count == 0 { - t.Errorf("Table %s has zero rows", table) + assert.Fail(t, "Table %s has zero rows", table) } } }) @@ -622,29 +577,21 @@ id:3 product:Keyboard price:75` EnableAutoSave(tempDir, NewDumpOptions().WithFormat(OutputFormatCSV)) validatedBuilder, err := builder.Build(context.Background()) - if err != nil { - t.Fatalf("Build with auto-save failed: %v", err) - } + require.NoError(t, err, "Build with auto-save failed") db, err := validatedBuilder.Open(context.Background()) - if err != nil { - t.Fatalf("Open with auto-save failed: %v", err) - } + require.NoError(t, err, "Open with auto-save failed") // Modify the data _, err = db.ExecContext(context.Background(), "INSERT INTO sample (id, name, age, email) VALUES (99, 'Test User', 42, 'test@example.com')") - if err != nil { - t.Fatalf("INSERT failed: %v", err) - } + require.NoError(t, err, "INSERT failed") _, err = db.ExecContext(context.Background(), "UPDATE users SET role = 'super_admin' WHERE name = 'Alice'") - if err != nil { - t.Fatalf("UPDATE failed: %v", err) - } + require.NoError(t, err, "UPDATE failed") // Close to trigger auto-save if err := db.Close(); err != nil { - t.Errorf("Failed to close database: %v", err) + assert.NoError(t, err, "Failed to close database") } // Verify auto-saved files exist @@ -652,33 +599,25 @@ id:3 product:Keyboard price:75` for _, filename := range expectedFiles { filepath := filepath.Join(tempDir, filename) if _, err := os.Stat(filepath); os.IsNotExist(err) { - t.Errorf("Auto-saved file %s does not exist", filename) + assert.Fail(t, "Auto-saved file %s does not exist", filename) } } // Verify the modifications were saved by opening the auto-saved files newDB, err := Open(tempDir) - if err != nil { - 
t.Fatalf("Failed to open auto-saved files: %v", err) - } + require.NoError(t, err, "Failed to open auto-saved files") defer newDB.Close() // Check if our modifications are present var testUser string err = newDB.QueryRowContext(context.Background(), "SELECT name FROM sample WHERE id = 99").Scan(&testUser) - if err != nil { - t.Errorf("Failed to find inserted test user: %v", err) - } else if testUser != "Test User" { - t.Errorf("Expected 'Test User', got '%s'", testUser) - } + require.NoError(t, err, "Failed to find inserted test user") + assert.Equal(t, "Test User", testUser, "Expected 'Test User', got '%s'", testUser) var aliceRole string err = newDB.QueryRowContext(context.Background(), "SELECT role FROM users WHERE name = 'Alice'").Scan(&aliceRole) - if err != nil { - t.Errorf("Failed to find updated Alice role: %v", err) - } else if aliceRole != "super_admin" { - t.Errorf("Expected 'super_admin', got '%s'", aliceRole) - } + require.NoError(t, err, "Failed to find updated Alice role") + assert.Equal(t, "super_admin", aliceRole, "Expected 'super_admin', got '%s'", aliceRole) }) t.Run("mixed input sources combination", func(t *testing.T) { @@ -698,14 +637,10 @@ id:3 product:Keyboard price:75` AddPath(filepath.Join("testdata", "sample2.csv")) // Different file to avoid table name conflict validatedBuilder, err := builder.Build(context.Background()) - if err != nil { - t.Fatalf("Build with mixed sources failed: %v", err) - } + require.NoError(t, err, "Build with mixed sources failed") db, err := validatedBuilder.Open(context.Background()) - if err != nil { - t.Fatalf("Open with mixed sources failed: %v", err) - } + require.NoError(t, err, "Open with mixed sources failed") defer db.Close() // Verify all sources are accessible @@ -713,23 +648,19 @@ id:3 product:Keyboard price:75` // Get all table names rows, err := db.QueryContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table'") - if err != nil { - t.Fatalf("Failed to get table names: %v", err) - } + require.NoError(t, err, "Failed to get table names") defer rows.Close() var tableNames []string for rows.Next() { var name string if err := rows.Scan(&name); err != nil { - t.Fatalf("Scan table name failed: %v", err) + require.NoError(t, err, "Scan table name failed") } tableNames = append(tableNames, name) } - if err := rows.Err(); err != nil { - t.Fatalf("Rows iteration error: %v", err) - } + require.NoError(t, rows.Err(), "Rows iteration error") // Count rows in each table for _, tableName := range tableNames { @@ -737,7 +668,7 @@ id:3 product:Keyboard price:75` query := fmt.Sprintf("SELECT COUNT(*) FROM `%s`", tableName) //nolint:gosec // Table name from database metadata err := db.QueryRowContext(context.Background(), query).Scan(&count) if err != nil { - t.Errorf("Failed to count rows in table %s: %v", tableName, err) + assert.Fail(t, "Failed to count rows in table %s: %v", tableName, err) } tableCounts[tableName] = count } @@ -746,9 +677,9 @@ id:3 product:Keyboard price:75` expectedTables := []string{"sample", "custom_orders", "sample2"} for _, expected := range expectedTables { if count, exists := tableCounts[expected]; !exists { - t.Errorf("Expected table %s not found", expected) + assert.Fail(t, "Expected table %s not found", expected) } else if count == 0 { - t.Errorf("Table %s is empty", expected) + assert.Fail(t, "Table %s is empty", expected) } } @@ -766,9 +697,7 @@ id:3 product:Keyboard price:75` ` rows, err = db.QueryContext(context.Background(), query) - if err != nil { - t.Fatalf("Complex mixed-source 
query failed: %v", err) - } + require.NoError(t, err, "Complex mixed-source query failed") defer rows.Close() hasResults := false @@ -777,15 +706,13 @@ id:3 product:Keyboard price:75` var sampleName, orderCustomer, userName string var matchCount int if err := rows.Scan(&sampleName, &orderCustomer, &userName, &matchCount); err != nil { - t.Fatalf("Scan complex query failed: %v", err) + require.NoError(t, err, "Scan complex query failed") } // Just verify we can read the data } // Note: This query might not return results due to data mismatch, but it should execute without error - if err := rows.Err(); err != nil { - t.Fatalf("Query execution error: %v", err) - } + require.NoError(t, rows.Err(), "Query execution error") // Use hasResults to avoid unused variable error _ = hasResults @@ -797,9 +724,7 @@ id:3 product:Keyboard price:75` benchmarkFile := filepath.Join("testdata", "benchmark", "customers100000.csv") db, err := Open(benchmarkFile) - if err != nil { - t.Fatalf("Failed to open benchmark file: %v", err) - } + require.NoError(t, err, "Failed to open benchmark file") defer db.Close() // Test basic queries @@ -818,7 +743,7 @@ id:3 product:Keyboard price:75` rows, err := db.QueryContext(ctx, tc.query) if err != nil { - t.Fatalf("Query failed: %v", err) + require.NoError(t, err, "Query failed") } defer rows.Close() @@ -826,7 +751,7 @@ id:3 product:Keyboard price:75` for rows.Next() { cols, err := rows.Columns() if err != nil { - t.Fatalf("Get columns failed: %v", err) + require.NoError(t, err, "Get columns failed") } values := make([]any, len(cols)) @@ -836,13 +761,11 @@ id:3 product:Keyboard price:75` } if err := rows.Scan(scanArgs...); err != nil { - t.Fatalf("Scan failed: %v", err) + require.NoError(t, err, "Scan failed") } } - if err := rows.Err(); err != nil { - t.Fatalf("Rows error: %v", err) - } + require.NoError(t, rows.Err(), "Rows error") }) } }) @@ -863,9 +786,7 @@ func TestDumpDatabase(t *testing.T) { setupFunc: func(t *testing.T) *sql.DB { t.Helper() db, err := Open(filepath.Join("testdata", "sample.csv")) - if err != nil { - t.Fatalf("Failed to open database: %v", err) - } + require.NoError(t, err, "Failed to open database") return db }, expectError: false, @@ -876,9 +797,7 @@ func TestDumpDatabase(t *testing.T) { setupFunc: func(t *testing.T) *sql.DB { t.Helper() db, err := Open(filepath.Join("testdata", "sample.csv"), filepath.Join("testdata", "users.csv")) - if err != nil { - t.Fatalf("Failed to open database: %v", err) - } + require.NoError(t, err, "Failed to open database") return db }, expectError: false, @@ -889,9 +808,7 @@ func TestDumpDatabase(t *testing.T) { setupFunc: func(t *testing.T) *sql.DB { t.Helper() db, err := Open("testdata") - if err != nil { - t.Fatalf("Failed to open database: %v", err) - } + require.NoError(t, err, "Failed to open database") return db }, expectError: false, @@ -902,14 +819,12 @@ func TestDumpDatabase(t *testing.T) { setupFunc: func(t *testing.T) *sql.DB { t.Helper() db, err := Open(filepath.Join("testdata", "sample.csv")) - if err != nil { - t.Fatalf("Failed to open database: %v", err) - } + require.NoError(t, err, "Failed to open database") // Modify data to test persistence _, err = db.ExecContext(context.Background(), "INSERT INTO sample (id, name, age, email) VALUES (4, 'Test User', 40, 'test@example.com')") if err != nil { - t.Fatalf("Failed to insert test data: %v", err) + require.NoError(t, err, "Failed to insert test data") } return db }, @@ -934,7 +849,7 @@ func TestDumpDatabase(t *testing.T) { // Check error expectation if 
(err != nil) != tc.expectError {
-				t.Errorf("DumpDatabase() error = %v, expectError %v", err, tc.expectError)
+				assert.Failf(t, "DumpDatabase() unexpected result", "error = %v, expectError %v", err, tc.expectError)
 				return
 			}
 
@@ -943,31 +858,31 @@ func TestDumpDatabase(t *testing.T) {
 			for _, fileName := range tc.checkFiles {
 				filePath := filepath.Join(tempDir, fileName)
 				if _, err := os.Stat(filePath); os.IsNotExist(err) {
-					t.Errorf("Expected file %s was not created", fileName)
+					assert.Fail(t, "Expected file %s was not created", fileName)
 					continue
 				}
 
 				// Read and verify file content
 				content, err := os.ReadFile(filePath) //nolint:gosec // Safe: filePath is from controlled test data
 				if err != nil {
-					t.Errorf("Failed to read dumped file %s: %v", fileName, err)
+					assert.Fail(t, "Failed to read dumped file %s: %v", fileName, err)
 					continue
 				}
 
 				// Basic validation: file should have content and CSV header
 				if len(content) == 0 {
-					t.Errorf("Dumped file %s is empty", fileName)
+					assert.Fail(t, "Dumped file %s is empty", fileName)
 				}
 
 				contentStr := string(content)
 				if !strings.Contains(contentStr, "\n") {
-					t.Errorf("Dumped file %s should contain newlines (header + data)", fileName)
+					assert.Fail(t, "Dumped file %s should contain newlines (header + data)", fileName)
 				}
 
 				// For the modified data test, check if new data is present
 				if tc.name == "Modified data dump" && fileName == "sample.csv" {
 					if !strings.Contains(contentStr, "Test User") {
-						t.Errorf("Modified data not found in dumped file")
+						assert.Fail(t, "Modified data not found in dumped file")
 					}
 				}
 			}
@@ -1002,7 +917,7 @@ func TestDumpDatabaseErrors(t *testing.T) {
 		// Should get "no tables found" error since it's an empty database
 		expectedErrorMsg := "no tables found in database"
 		if err.Error() != expectedErrorMsg {
-			t.Errorf("expected error message '%s', got: %v", expectedErrorMsg, err)
+			assert.Fail(t, "expected error message '%s', got: %v", expectedErrorMsg, err)
 		}
 	})
 
@@ -1011,7 +926,7 @@ func TestDumpDatabaseErrors(t *testing.T) {
 		db, err := Open(filepath.Join("testdata", "sample.csv"))
 		if err != nil {
-			t.Fatalf("Failed to open database: %v", err)
+			require.NoError(t, err, "Failed to open database")
 		}
 		defer db.Close()
 
@@ -1042,7 +957,7 @@ func TestDumpDatabaseErrors(t *testing.T) {
 			strings.Contains(errorMsg, "cannot create")
 
 		if !hasExpectedError {
-			t.Errorf("expected permission or directory creation error, got: %v", err)
+			assert.Failf(t, "expected permission or directory creation error", "got: %v", err)
 		}
 	})
 }
@@ -1053,7 +968,7 @@ func TestDumpDatabaseCSVFormat(t *testing.T) {
 	db, err := Open(filepath.Join("testdata", "sample.csv"))
 	if err != nil {
-		t.Fatalf("Failed to open database: %v", err)
+		require.NoError(t, err, "Failed to open database")
 	}
 	defer db.Close()
 
@@ -1062,14 +977,14 @@ func TestDumpDatabaseCSVFormat(t *testing.T) {
 	// Dump the database
 	err = DumpDatabase(db, tempDir)
 	if err != nil {
-		t.Fatalf("DumpDatabase() failed: %v", err)
+		require.NoError(t, err, "DumpDatabase() failed")
 	}
 
 	// Read the dumped file
 	dumpedFile := filepath.Join(tempDir, "sample.csv")
 	content, err := os.ReadFile(dumpedFile) //nolint:gosec // Safe: dumpedFile is from controlled test output
 	if err != nil {
-		t.Fatalf("Failed to read dumped file: %v", err)
+		require.NoError(t, err, "Failed to read dumped file")
 	}
 
 	contentStr := string(content)
@@ -1077,20 +992,20 @@ func TestDumpDatabaseCSVFormat(t *testing.T) {
 	// Should have header + 3 data rows
 	if len(lines) != 4 {
-		t.Errorf("Expected 4 lines (header + 3 data), got %d", len(lines))
+		assert.Fail(t, "Expected 4 lines (header + 3 data),
got %d", len(lines)) } // Check header expectedHeader := "id,name,age,email" if lines[0] != expectedHeader { - t.Errorf("Expected header %q, got %q", expectedHeader, lines[0]) + assert.Fail(t, "Expected header %q, got %q", expectedHeader, lines[0]) } // Check that data rows have the correct number of columns for i, line := range lines[1:] { columns := strings.Split(line, ",") if len(columns) != 4 { - t.Errorf("Data row %d has %d columns, expected 4: %q", i+1, len(columns), line) + assert.Fail(t, "Data row %d has %d columns, expected 4: %q", i+1, len(columns), line) } } } @@ -1101,7 +1016,7 @@ func TestDumpDatabaseSpecialCharacters(t *testing.T) { db, err := Open(filepath.Join("testdata", "sample.csv")) if err != nil { - t.Fatalf("Failed to open database: %v", err) + require.NoError(t, err, "Failed to open database") } defer db.Close() @@ -1111,7 +1026,7 @@ func TestDumpDatabaseSpecialCharacters(t *testing.T) { (11, 'Name "with quotes"', 26, 'test2@example.com'), (12, 'Name' || char(10) || 'with newline', 27, 'test3@example.com')`) if err != nil { - t.Fatalf("Failed to insert test data: %v", err) + require.NoError(t, err, "Failed to insert test data") } tempDir := t.TempDir() @@ -1119,14 +1034,14 @@ func TestDumpDatabaseSpecialCharacters(t *testing.T) { // Dump the database err = DumpDatabase(db, tempDir) if err != nil { - t.Fatalf("DumpDatabase() failed: %v", err) + require.NoError(t, err, "DumpDatabase() failed") } // Read the dumped file dumpedFile := filepath.Join(tempDir, "sample.csv") content, err := os.ReadFile(dumpedFile) //nolint:gosec // Safe: dumpedFile is from controlled test output if err != nil { - t.Fatalf("Failed to read dumped file: %v", err) + require.NoError(t, err, "Failed to read dumped file") } contentStr := string(content) @@ -1152,8 +1067,7 @@ func TestDumpDatabaseSpecialCharacters(t *testing.T) { for _, tc := range testCases { if !strings.Contains(contentStr, tc.shouldFind) { - t.Errorf("CSV escaping test failed: %s - expected to find %q in content", - tc.description, tc.shouldFind) + assert.Contains(t, contentStr, tc.shouldFind, "CSV escaping test failed: %s - expected to find %q in content", tc.description, tc.shouldFind) } } } @@ -1202,20 +1116,21 @@ func TestOpenErrorCases(t *testing.T) { if tt.name == "Empty directory" { emptyDir := filepath.Join("testdata", "empty_dir") if err := os.MkdirAll(emptyDir, 0750); err != nil { - t.Fatalf("Failed to create empty directory: %v", err) + require.NoError(t, err, "Failed to create") } defer os.RemoveAll(emptyDir) } db, err := Open(tt.paths...) - if (err != nil) != tt.wantErr { - t.Errorf("Open() error = %v, wantErr %v", err, tt.wantErr) + if tt.wantErr { + assert.Error(t, err, "Open() should have failed") return } + assert.NoError(t, err, "Open() should have succeeded") if tt.wantErr && err != nil { if !strings.Contains(err.Error(), tt.errorString) { - t.Errorf("Open() error = %v, expected to contain %q", err, tt.errorString) + assert.Fail(t, "Open() error = %v, expected to contain %q", err, tt.errorString) } } @@ -1298,15 +1213,14 @@ func TestOpenContext(t *testing.T) { } db, err := OpenContext(ctx, tt.paths...) 
- if (err != nil) != tt.wantErr { - t.Errorf("OpenContext() error = %v, wantErr %v", err, tt.wantErr) - return + if tt.wantErr { + assert.Error(t, err, "OpenContext() should have failed") + } else { + assert.NoError(t, err, "OpenContext() should have succeeded") } if tt.wantErr && err != nil && tt.errContains != "" { - if !strings.Contains(err.Error(), tt.errContains) { - t.Errorf("OpenContext() error = %v, expected to contain %q", err, tt.errContains) - } + assert.Contains(t, err.Error(), tt.errContains, "OpenContext() error should contain expected string") } if !tt.wantErr && db != nil { @@ -1314,7 +1228,7 @@ func TestOpenContext(t *testing.T) { // Verify the database is functional if err := db.PingContext(t.Context()); err != nil { - t.Errorf("Failed to ping database after OpenContext: %v", err) + assert.NoError(t, err, "Failed to ping database after OpenContext") } } }) @@ -1359,7 +1273,7 @@ func TestOpenContextConcurrent(t *testing.T) { // Check for any errors for err := range errors { - t.Errorf("Concurrent OpenContext error: %v", err) + assert.NoError(t, err, "Concurrent OpenContext error") } } @@ -1426,37 +1340,21 @@ func Test_FileFormatDetection(t *testing.T) { t.Run(tc.name, func(t *testing.T) { file := newFile(tc.fileName) - if file.getFileType() != tc.expectedType { - t.Errorf("Expected file type %v, got %v", tc.expectedType, file.getFileType()) - } + assert.Equal(t, tc.expectedType, file.getFileType(), "Expected file type %v, got %v", tc.expectedType, file.getFileType()) - if isSupportedFile(tc.fileName) != tc.isSupported { - t.Errorf("Expected supported=%v, got %v", tc.isSupported, isSupportedFile(tc.fileName)) - } + assert.Equal(t, tc.isSupported, isSupportedFile(tc.fileName), "Expected supported=%v, got %v", tc.isSupported, isSupportedFile(tc.fileName)) // Test type-specific methods switch tc.expectedType.baseType() { case FileTypeCSV: - if !file.isCSV() { - t.Errorf("isCSV() should return true for CSV file") - } - if file.isTSV() || file.isLTSV() { - t.Errorf("Type methods should be exclusive") - } + assert.True(t, file.isCSV(), "isCSV() should return true for CSV file") + assert.False(t, file.isTSV() || file.isLTSV(), "Type methods should be exclusive") case FileTypeTSV: - if !file.isTSV() { - t.Errorf("isTSV() should return true for TSV file") - } - if file.isCSV() || file.isLTSV() { - t.Errorf("Type methods should be exclusive") - } + assert.True(t, file.isTSV(), "isTSV() should return true for TSV file") + assert.False(t, file.isCSV() || file.isLTSV(), "Type methods should be exclusive") case FileTypeLTSV: - if !file.isLTSV() { - t.Errorf("isLTSV() should return true for LTSV file") - } - if file.isCSV() || file.isTSV() { - t.Errorf("Type methods should be exclusive") - } + assert.True(t, file.isLTSV(), "isLTSV() should return true for LTSV file") + assert.False(t, file.isCSV() || file.isTSV(), "Type methods should be exclusive") } }) } @@ -1519,7 +1417,7 @@ func Test_TableNameSecurity(t *testing.T) { t.Run(tc.name, func(t *testing.T) { tableName := tableFromFilePath(tc.filePath) if tableName != tc.expectedName { - t.Errorf("Expected table name %q, got %q", tc.expectedName, tableName) + assert.Fail(t, "Expected table name %q, got %q", tc.expectedName, tableName) } }) } @@ -1596,7 +1494,7 @@ func Test_MalformedCSVHandling(t *testing.T) { return } if !tc.expectError && err != nil { - t.Errorf("Unexpected error: %v", err) + assert.NoError(t, err, "Unexpected error") return } @@ -1610,7 +1508,7 @@ func Test_MalformedCSVHandling(t *testing.T) { var count int err = 
db.QueryRowContext(context.Background(), query).Scan(&count) if err != nil && !tc.expectError { - t.Errorf("Query failed: %v", err) + assert.NoError(t, err, "Query failed") } } }) @@ -1709,17 +1607,13 @@ func Test_ResourceExhaustion(t *testing.T) { _ = tmpFile.Close() // Ignore close error in test cleanup db, err := Open(tmpFile.Name()) - if err != nil { - t.Fatalf("Failed to open file with many columns: %v", err) - } + require.NoError(t, err, "Failed to open file with many columns") defer db.Close() tableName := tableFromFilePath(tmpFile.Name()) var count int err = db.QueryRowContext(context.Background(), fmt.Sprintf("SELECT COUNT(*) FROM [%s]", tableName)).Scan(&count) - if err != nil { - t.Errorf("Failed to query table with many columns: %v", err) - } + assert.NoError(t, err, "Failed to query table with many columns") }) // Test 2: Large number of rows (controlled for test speed) @@ -1757,19 +1651,15 @@ func Test_ResourceExhaustion(t *testing.T) { _ = tmpFile.Close() // Ignore close error in test cleanup db, err := Open(tmpFile.Name()) - if err != nil { - t.Fatalf("Failed to open file with many rows: %v", err) - } + require.NoError(t, err, "Failed to open file with many rows") defer db.Close() tableName := tableFromFilePath(tmpFile.Name()) var count int err = db.QueryRowContext(context.Background(), fmt.Sprintf("SELECT COUNT(*) FROM [%s]", tableName)).Scan(&count) - if err != nil { - t.Errorf("Failed to query table with many rows: %v", err) - } + assert.NoError(t, err, "Failed to query table with many rows") if count != 10000 { - t.Errorf("Expected 10000 rows, got %d", count) + assert.Fail(t, "Expected 10000 rows, got %d", count) } }) } @@ -1888,9 +1778,7 @@ func Test_UnicodeAndEncoding(t *testing.T) { _ = tmpFile.Close() // Ignore close error in test cleanup db, err := Open(tmpFile.Name()) - if err != nil { - t.Fatalf("Failed to open Unicode file: %v", err) - } + require.NoError(t, err, "Failed to open Unicode file") defer db.Close() tableName := tableFromFilePath(tmpFile.Name()) @@ -1898,29 +1786,21 @@ func Test_UnicodeAndEncoding(t *testing.T) { // Test basic query var count int err = db.QueryRowContext(context.Background(), fmt.Sprintf("SELECT COUNT(*) FROM [%s]", tableName)).Scan(&count) - if err != nil { - t.Errorf("Failed to query Unicode table: %v", err) - } + assert.NoError(t, err, "Failed to query Unicode table") // Test data retrieval rows, err := db.QueryContext(context.Background(), fmt.Sprintf("SELECT * FROM [%s] LIMIT 1", tableName)) - if err != nil { - t.Errorf("Failed to select from Unicode table: %v", err) - return - } + assert.NoError(t, err, "Failed to select from Unicode table") defer rows.Close() if err := rows.Err(); err != nil { - t.Errorf("Rows error: %v", err) + assert.NoError(t, err, "Rows error") return } if rows.Next() { columns, err := rows.Columns() - if err != nil { - t.Errorf("Failed to get columns: %v", err) - return - } + assert.NoError(t, err, "Failed to get columns") values := make([]interface{}, len(columns)) valuePtrs := make([]interface{}, len(columns)) @@ -1929,7 +1809,7 @@ func Test_UnicodeAndEncoding(t *testing.T) { } if err := rows.Scan(valuePtrs...); err != nil { - t.Errorf("Failed to scan Unicode data: %v", err) + assert.NoError(t, err, "Failed to scan Unicode data") } } }) @@ -1956,7 +1836,7 @@ func Test_ConnectionLifecycle(t *testing.T) { for i := range 100 { db, err := Open(tmpFile.Name()) if err != nil { - t.Fatalf("Failed to open database on iteration %d: %v", i, err) + require.NoError(t, err, "Failed to open database on iteration %d", 
i) } tableName := tableFromFilePath(tmpFile.Name()) @@ -1964,11 +1844,11 @@ func Test_ConnectionLifecycle(t *testing.T) { err = db.QueryRowContext(context.Background(), fmt.Sprintf("SELECT COUNT(*) FROM [%s]", tableName)).Scan(&count) if err != nil { _ = db.Close() // Ignore close error in test cleanup - t.Fatalf("Query failed on iteration %d: %v", i, err) + require.NoError(t, err, "Query failed on iteration %d", i) } if err := db.Close(); err != nil { - t.Fatalf("Close failed on iteration %d: %v", i, err) + require.NoError(t, err, "Close failed on iteration %d", i) } } }) @@ -1988,9 +1868,7 @@ func Test_ConnectionLifecycle(t *testing.T) { query := "SELECT COUNT(*) FROM [" + tableName + "]" var count int err = db.QueryRowContext(ctx, query).Scan(&count) - if err != nil { - t.Errorf("Query with context failed: %v", err) - } + assert.NoError(t, err, "Query with context failed") }) t.Run("Double close safety", func(t *testing.T) { @@ -2001,12 +1879,12 @@ func Test_ConnectionLifecycle(t *testing.T) { // First close if err := db.Close(); err != nil { - t.Errorf("First close failed: %v", err) + assert.NoError(t, err, "First close failed") } // Second close should not panic or error if err := db.Close(); err != nil { - t.Errorf("Second close failed: %v", err) + assert.NoError(t, err, "Second close failed") } }) } @@ -2097,13 +1975,13 @@ func Test_SQLReservedWordsAsFilenames(t *testing.T) { // Create CSV file with reserved word as filename csvContent := "id,name,value\n1,test1,100\n2,test2,200\n3,test3,300" if err := os.WriteFile(filePath, []byte(csvContent), 0600); err != nil { - t.Fatalf("Failed to create test file %s: %v", rw.filename, err) + require.NoError(t, err, "Failed to create test file %s", rw.filename) } // Test 1: Open file and verify table creation db, err := Open(filePath) if err != nil { - t.Fatalf("Failed to open file with reserved word filename %s: %v", rw.filename, err) + require.NoError(t, err, "Failed to open file with reserved word filename %s", rw.filename) } defer db.Close() @@ -2112,11 +1990,11 @@ func Test_SQLReservedWordsAsFilenames(t *testing.T) { var actualTableName string err = db.QueryRowContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name = ?", expectedTableName).Scan(&actualTableName) if err != nil { - t.Fatalf("Table for reserved word filename %s not found: %v", rw.filename, err) + require.NoError(t, err, "Table for reserved word filename %s not found", rw.filename) } if actualTableName != expectedTableName { - t.Errorf("Expected table name %q, got %q for file %s", expectedTableName, actualTableName, rw.filename) + assert.Fail(t, "Expected table name %q, got %q for file %s", expectedTableName, actualTableName, rw.filename) } // Test 3: Query the table using bracket notation (safe for reserved words) @@ -2125,11 +2003,11 @@ func Test_SQLReservedWordsAsFilenames(t *testing.T) { var count int err = db.QueryRowContext(context.Background(), query).Scan(&count) if err != nil { - t.Errorf("Failed to query table with reserved word name [%s]: %v", expectedTableName, err) + assert.Fail(t, "Failed to query table with reserved word name [%s]: %v", expectedTableName, err) } if count != 3 { - t.Errorf("Expected 3 rows in table [%s], got %d", expectedTableName, count) + assert.Fail(t, "Expected 3 rows in table [%s], got %d", expectedTableName, count) } // Test 4: Verify we can select specific data @@ -2137,11 +2015,11 @@ func Test_SQLReservedWordsAsFilenames(t *testing.T) { var name string err = db.QueryRowContext(context.Background(), 
query).Scan(&name) if err != nil { - t.Errorf("Failed to select specific data from table [%s]: %v", expectedTableName, err) + assert.Fail(t, "Failed to select specific data from table [%s]: %v", expectedTableName, err) } if name != "test1" { - t.Errorf("Expected 'test1', got %q from table [%s]", name, expectedTableName) + assert.Fail(t, "Expected 'test1', got %q from table [%s]", name, expectedTableName) } // Test 5: Verify we can perform complex queries @@ -2149,12 +2027,12 @@ func Test_SQLReservedWordsAsFilenames(t *testing.T) { var avgValue float64 err = db.QueryRowContext(context.Background(), query).Scan(&avgValue) if err != nil { - t.Errorf("Failed to perform aggregate query on table [%s]: %v", expectedTableName, err) + assert.Fail(t, "Failed to perform aggregate query on table [%s]: %v", expectedTableName, err) } expectedAvg := 250.0 // (200 + 300) / 2 = 500 / 2 = 250 if avgValue != expectedAvg { - t.Errorf("Expected average %.1f, got %.1f for table [%s]", expectedAvg, avgValue, expectedTableName) + assert.Fail(t, "Expected average %.1f, got %.1f for table [%s]", expectedAvg, avgValue, expectedTableName) } }) } @@ -2193,15 +2071,13 @@ func Test_SQLReservedWordsMultipleFiles(t *testing.T) { for _, file := range files { filePath := filepath.Join(tmpDir, file.name) if err := os.WriteFile(filePath, []byte(file.content), 0600); err != nil { - t.Fatalf("Failed to create file %s: %v", file.name, err) + require.NoError(t, err, "Failed to create file %s", file.name) } } // Test 1: Load all files from directory db, err := Open(tmpDir) - if err != nil { - t.Fatalf("Failed to open directory with reserved word files: %v", err) - } + require.NoError(t, err, "Failed to open directory with reserved word files") defer db.Close() // Test 2: Verify all tables exist @@ -2210,7 +2086,7 @@ func Test_SQLReservedWordsMultipleFiles(t *testing.T) { var name string err := db.QueryRowContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name = ?", tableName).Scan(&name) if err != nil { - t.Errorf("Table for reserved word file %s not found: %v", file.name, err) + assert.Fail(t, "Table for reserved word file %s not found: %v", file.name, err) continue } @@ -2220,11 +2096,11 @@ func Test_SQLReservedWordsMultipleFiles(t *testing.T) { query := "SELECT COUNT(*) FROM [" + tableName + "]" err = db.QueryRowContext(context.Background(), query).Scan(&count) if err != nil { - t.Errorf("Failed to query reserved word table [%s]: %v", tableName, err) + assert.Fail(t, "Failed to query reserved word table [%s]: %v", tableName, err) } if count != 2 { - t.Errorf("Expected 2 rows in table [%s], got %d", tableName, count) + assert.Fail(t, "Expected 2 rows in table [%s], got %d", tableName, count) } } @@ -2240,9 +2116,7 @@ func Test_SQLReservedWordsMultipleFiles(t *testing.T) { var queryType, tableName, condition, joinType string err = db.QueryRowContext(context.Background(), query).Scan(&queryType, &tableName, &condition, &joinType) - if err != nil { - t.Errorf("Failed to perform cross-table query with reserved word tables: %v", err) - } + assert.NoError(t, err, "Failed to perform cross-table query with reserved word tables") // Verify results expectedValues := map[string]string{ @@ -2261,7 +2135,7 @@ func Test_SQLReservedWordsMultipleFiles(t *testing.T) { for field, expected := range expectedValues { if actual := actualValues[field]; actual != expected { - t.Errorf("Expected %s=%q, got %q", field, expected, actual) + assert.Fail(t, "Expected %s=%q, got %q", field, expected, actual) } } } @@ -2330,13 
+2204,13 @@ func Test_SQLReservedWordsEdgeCases(t *testing.T) { // Create test file csvContent := "id,data\n1,value1\n2,value2" if err := os.WriteFile(filePath, []byte(csvContent), 0600); err != nil { - t.Fatalf("Failed to create test file %s: %v", tc.filename, err) + require.NoError(t, err, "Failed to create test file %s", tc.filename) } // Test opening the file db, err := Open(filePath) if tc.expectError && err == nil { - t.Errorf("Expected error for %s but got none", tc.description) + assert.Fail(t, "Expected error for %s but got none", tc.description) if db != nil { _ = db.Close() // Ignore close error in test cleanup } @@ -2344,7 +2218,7 @@ func Test_SQLReservedWordsEdgeCases(t *testing.T) { } if !tc.expectError && err != nil { - t.Errorf("Unexpected error for %s: %v", tc.description, err) + assert.NoError(t, err, "Unexpected error for %s", tc.description) return } @@ -2358,7 +2232,7 @@ func Test_SQLReservedWordsEdgeCases(t *testing.T) { var name string err := db.QueryRowContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name = ?", tableName).Scan(&name) if err != nil { - t.Errorf("Table not found for %s: %v", tc.description, err) + assert.Fail(t, "Table not found for %s: %v", tc.description, err) return } @@ -2368,12 +2242,12 @@ func Test_SQLReservedWordsEdgeCases(t *testing.T) { var count int err = db.QueryRowContext(context.Background(), query).Scan(&count) if err != nil { - t.Errorf("Failed to query table for %s: %v", tc.description, err) + assert.Fail(t, "Failed to query table for %s: %v", tc.description, err) return } if count != 2 { - t.Errorf("Expected 2 rows for %s, got %d", tc.description, count) + assert.Fail(t, "Expected 2 rows for %s, got %d", tc.description, count) } // Test more complex operations @@ -2381,17 +2255,17 @@ func Test_SQLReservedWordsEdgeCases(t *testing.T) { insertQuery := "INSERT INTO [" + tableName + "] (id, data) VALUES (3, 'value3')" //nolint:gosec // Safe: tableName is from controlled test data _, err = db.ExecContext(context.Background(), insertQuery) if err != nil { - t.Errorf("Failed to insert into table for %s: %v", tc.description, err) + assert.Fail(t, "Failed to insert into table for %s: %v", tc.description, err) } // Verify insert worked err = db.QueryRowContext(context.Background(), query).Scan(&count) if err != nil { - t.Errorf("Failed to verify insert for %s: %v", tc.description, err) + assert.Fail(t, "Failed to verify insert for %s: %v", tc.description, err) } if count != 3 { - t.Errorf("Expected 3 rows after insert for %s, got %d", tc.description, count) + assert.Fail(t, "Expected 3 rows after insert for %s, got %d", tc.description, count) } } }) @@ -2484,7 +2358,7 @@ func Test_ErrorMessageQuality(t *testing.T) { _, err := Open(filePath) if err == nil { - t.Errorf("Expected error but got none for %s", tc.description) + assert.Fail(t, "Expected error but got none for %s", tc.description) return } @@ -2498,8 +2372,7 @@ func Test_ErrorMessageQuality(t *testing.T) { } if !foundExpected { - t.Errorf("Error message %q should contain one of %v for %s", - errorMsg, tc.expectedErrors, tc.description) + assert.Fail(t, "Error message %q should contain one of %v for %s", errorMsg, tc.expectedErrors, tc.description) } }) } @@ -2522,9 +2395,7 @@ func Test_TableCreationEdgeCases(t *testing.T) { _ = tmpFile.Close() // Ignore close error in test cleanup db, err := Open(tmpFile.Name()) - if err != nil { - t.Fatalf("Failed to open file with reserved keywords: %v", err) - } + require.NoError(t, err, "Failed to open file with 
reserved keywords") defer db.Close() tableName := tableFromFilePath(tmpFile.Name()) @@ -2533,21 +2404,18 @@ func Test_TableCreationEdgeCases(t *testing.T) { // Use bracket notation for table name (safe in controlled test environment) query := "SELECT [select], [from], [where] FROM [" + tableName + "]" //nolint:gosec // Safe: tableName is from controlled test data rows, err := db.QueryContext(context.Background(), query) - if err != nil { - t.Errorf("Failed to query table with reserved keyword columns: %v", err) - return - } + assert.NoError(t, err, "Failed to query table with reserved keyword columns") defer rows.Close() if err := rows.Err(); err != nil { - t.Errorf("Rows error: %v", err) + assert.NoError(t, err, "Rows error") return } if rows.Next() { var col1, col2, col3 string if err := rows.Scan(&col1, &col2, &col3); err != nil { - t.Errorf("Failed to scan reserved keyword columns: %v", err) + assert.NoError(t, err, "Failed to scan reserved keyword columns") } } }) @@ -2574,7 +2442,7 @@ func Test_TableCreationEdgeCases(t *testing.T) { db, err := Open(tmpFile.Name()) if err != nil { - t.Errorf("Failed to open file %s: %v", pattern, err) + assert.Fail(t, "Failed to open file %s: %v", pattern, err) return } defer db.Close() @@ -2584,7 +2452,7 @@ func Test_TableCreationEdgeCases(t *testing.T) { query := "SELECT COUNT(*) FROM [" + tableName + "]" var count int if err := db.QueryRowContext(context.Background(), query).Scan(&count); err != nil { - t.Errorf("Failed to query table from file %s: %v", pattern, err) + assert.Fail(t, "Failed to query table from file %s: %v", pattern, err) } }) } @@ -2613,53 +2481,41 @@ func Test_TableCreationEdgeCases(t *testing.T) { // Test transaction rollback tx, err := db.BeginTx(context.Background(), nil) - if err != nil { - t.Fatalf("Failed to begin transaction: %v", err) - } + require.NoError(t, err, "Failed to begin transaction") // Insert data in transaction _, err = tx.ExecContext(context.Background(), fmt.Sprintf("INSERT INTO [%s] (id, name) VALUES (2, 'transaction')", tableName)) - if err != nil { - t.Errorf("Failed to insert in transaction: %v", err) - } + assert.NoError(t, err, "Failed to insert in transaction") // Rollback if err := tx.Rollback(); err != nil { - t.Errorf("Failed to rollback transaction: %v", err) + assert.NoError(t, err, "Failed to rollback transaction") } // Verify data was rolled back var count int err = db.QueryRowContext(context.Background(), fmt.Sprintf("SELECT COUNT(*) FROM [%s]", tableName)).Scan(&count) - if err != nil { - t.Errorf("Failed to count after rollback: %v", err) - } + assert.NoError(t, err, "Failed to count after rollback") if count != 1 { - t.Errorf("Expected 1 row after rollback, got %d", count) + assert.Fail(t, "Expected 1 row after rollback, got %d", count) } // Test transaction commit tx, err = db.BeginTx(context.Background(), nil) - if err != nil { - t.Fatalf("Failed to begin second transaction: %v", err) - } + require.NoError(t, err, "Failed to begin second transaction") _, err = tx.ExecContext(context.Background(), fmt.Sprintf("INSERT INTO [%s] (id, name) VALUES (2, 'committed')", tableName)) - if err != nil { - t.Errorf("Failed to insert in second transaction: %v", err) - } + assert.NoError(t, err, "Failed to insert in second transaction") if err := tx.Commit(); err != nil { - t.Errorf("Failed to commit transaction: %v", err) + assert.NoError(t, err, "Failed to commit transaction") } // Verify data was committed err = db.QueryRowContext(context.Background(), fmt.Sprintf("SELECT COUNT(*) FROM [%s]", 
tableName)).Scan(&count) - if err != nil { - t.Errorf("Failed to count after commit: %v", err) - } + assert.NoError(t, err, "Failed to count after commit") if count != 2 { - t.Errorf("Expected 2 rows after commit, got %d", count) + assert.Fail(t, "Expected 2 rows after commit, got %d", count) } }) } @@ -2732,7 +2588,7 @@ func TestComprehensiveFileFormats(t *testing.T) { // Open database with single file db, err := Open(filePath) if err != nil { - t.Fatalf("Open(%s) failed: %v", filePath, err) + require.NoError(t, err, "Open(%s) failed", filePath) } defer db.Close() @@ -2740,31 +2596,29 @@ func TestComprehensiveFileFormats(t *testing.T) { var tableName string err = db.QueryRowContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name = ?", tc.expectTable).Scan(&tableName) if err != nil { - t.Fatalf("Table %s not found: %v", tc.expectTable, err) + require.NoError(t, err, "Table %s not found", tc.expectTable) } // Count rows var count int err = db.QueryRowContext(context.Background(), "SELECT COUNT(*) FROM ["+tc.expectTable+"]").Scan(&count) if err != nil { - t.Fatalf("Failed to count rows in %s: %v", tc.expectTable, err) + require.NoError(t, err, "Failed to count rows in %s", tc.expectTable) } if count != tc.expectRows { - t.Errorf("Expected %d rows in %s, got %d", tc.expectRows, tc.expectTable, count) + assert.Fail(t, "Expected %d rows in %s, got %d", tc.expectRows, tc.expectTable, count) } // Test basic SELECT // Use bracket notation for table name (safe in controlled test environment) query := "SELECT * FROM [" + tc.expectTable + "] LIMIT 1" //nolint:gosec // Safe: tc.expectTable is from controlled test data rows, err := db.QueryContext(context.Background(), query) - if err != nil { - t.Fatalf("SELECT query failed: %v", err) - } + require.NoError(t, err, "SELECT query failed") defer rows.Close() if err := rows.Err(); err != nil { - t.Fatalf("Rows error: %v", err) + require.NoError(t, err, "Rows error") } if !rows.Next() { @@ -2780,27 +2634,23 @@ func TestDirectoryLoading(t *testing.T) { // Open database with directory path db, err := Open("testdata") - if err != nil { - t.Fatalf("Open(testdata) failed: %v", err) - } + require.NoError(t, err, "Open(testdata) failed") defer db.Close() // Get all table names rows, err := db.QueryContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name") - if err != nil { - t.Fatalf("Failed to get table names: %v", err) - } + require.NoError(t, err, "Failed to get table names") defer rows.Close() if err := rows.Err(); err != nil { - t.Fatalf("Rows error: %v", err) + require.NoError(t, err, "Rows error") } var tables []string for rows.Next() { var tableName string if err := rows.Scan(&tableName); err != nil { - t.Fatalf("Failed to scan table name: %v", err) + require.NoError(t, err, "Failed to scan table name") } tables = append(tables, tableName) } @@ -2816,16 +2666,14 @@ func TestDirectoryLoading(t *testing.T) { } } if !found { - t.Errorf("Expected table %s not found in tables: %v", expected, tables) + assert.Fail(t, "Expected table %s not found in tables: %v", expected, tables) } } // Test cross-table query var count int err = db.QueryRowContext(context.Background(), "SELECT COUNT(*) FROM sample s JOIN products p ON s.id = p.id").Scan(&count) - if err != nil { - t.Fatalf("Cross-table JOIN query failed: %v", err) - } + require.NoError(t, err, "Cross-table JOIN query failed") if count == 0 { t.Error("Expected at least one matching row in JOIN query") @@ -2838,9 
+2686,7 @@ func TestMultipleFilePaths(t *testing.T) { // Open database with multiple files db, err := Open(filepath.Join("testdata", "sample.csv"), filepath.Join("testdata", "products.tsv"), filepath.Join("testdata", "logs.ltsv")) - if err != nil { - t.Fatalf("Open with multiple files failed: %v", err) - } + require.NoError(t, err, "Open with multiple files failed") defer db.Close() // Verify all expected tables exist @@ -2849,7 +2695,7 @@ func TestMultipleFilePaths(t *testing.T) { var name string err := db.QueryRowContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name = ?", tableName).Scan(&name) if err != nil { - t.Errorf("Table %s not found: %v", tableName, err) + assert.Fail(t, "Table %s not found: %v", tableName, err) } } @@ -2863,20 +2709,18 @@ func TestMultipleFilePaths(t *testing.T) { ` rows, err := db.QueryContext(context.Background(), query) - if err != nil { - t.Fatalf("Multi-table query failed: %v", err) - } + require.NoError(t, err, "Multi-table query failed") defer rows.Close() if err := rows.Err(); err != nil { - t.Fatalf("Rows error: %v", err) + require.NoError(t, err, "Rows error") } // Just verify we can execute the query without error for rows.Next() { var name, productName, level string if err := rows.Scan(&name, &productName, &level); err != nil { - t.Fatalf("Failed to scan multi-table query result: %v", err) + require.NoError(t, err, "Failed to scan multi-table query result") } } } @@ -2886,9 +2730,7 @@ func TestCTEQueries(t *testing.T) { t.Parallel() db, err := Open(filepath.Join("testdata", "sample.csv"), filepath.Join("testdata", "products.tsv")) - if err != nil { - t.Fatalf("Open failed: %v", err) - } + require.NoError(t, err, "Open failed") defer db.Close() testCases := []struct { @@ -2953,12 +2795,12 @@ func TestCTEQueries(t *testing.T) { rows, err := db.QueryContext(context.Background(), tc.query) if err != nil { - t.Fatalf("CTE query failed: %v\nQuery: %s", err, tc.query) + require.Fail(t, "CTE query failed: %v\nQuery: %s", err, tc.query) } defer rows.Close() if err := rows.Err(); err != nil { - t.Fatalf("Rows error: %v", err) + require.NoError(t, err, "Rows error") } // Verify we can read results @@ -2967,16 +2809,14 @@ func TestCTEQueries(t *testing.T) { hasRows = true // Get column count to scan appropriately cols, err := rows.Columns() - if err != nil { - t.Fatalf("Failed to get columns: %v", err) - } + require.NoError(t, err, "Failed to get columns") values := make([]interface{}, len(cols)) for i := range values { values[i] = new(interface{}) } if err := rows.Scan(values...); err != nil { - t.Fatalf("Failed to scan CTE query result: %v", err) + require.NoError(t, err, "Failed to scan CTE query result") } } @@ -2996,39 +2836,31 @@ func TestMixedDirectoryAndFiles(t *testing.T) { content := "id,category,value\n1,A,100\n2,B,200\n" if err := os.WriteFile(tempFile, []byte(content), 0600); err != nil { - t.Fatalf("Failed to create temp test file: %v", err) + require.NoError(t, err, "Failed to create") } defer os.Remove(tempFile) // Open with mixed paths: directory + specific file db, err := Open("testdata", tempFile) - if err != nil { - t.Fatalf("Open with mixed paths failed: %v", err) - } + require.NoError(t, err, "Open with mixed paths failed") defer db.Close() // Verify the temp file table exists var tableName string err = db.QueryRowContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name = ?", "mixed_test").Scan(&tableName) - if err != nil { - t.Fatalf("Table mixed_test not found: %v", err) 
- } + require.NoError(t, err, "Table mixed_test not found") // Verify original directory tables also exist err = db.QueryRowContext(context.Background(), "SELECT name FROM sqlite_master WHERE type='table' AND name = ?", "sample").Scan(&tableName) - if err != nil { - t.Fatalf("Table sample from directory not found: %v", err) - } + require.NoError(t, err, "Table sample from directory not found") // Test query across mixed sources var count int err = db.QueryRowContext(context.Background(), "SELECT COUNT(*) FROM mixed_test").Scan(&count) - if err != nil { - t.Fatalf("Query on mixed_test table failed: %v", err) - } + require.NoError(t, err, "Query on mixed_test table failed") if count != 2 { - t.Errorf("Expected 2 rows in mixed_test, got %d", count) + assert.Fail(t, "Expected 2 rows in mixed_test, got %d", count) } } @@ -3061,7 +2893,7 @@ func TestErrorCases(t *testing.T) { // Create unsupported file for test unsupportedFile := filepath.Join("testdata", "unsupported.txt") if err := os.WriteFile(unsupportedFile, []byte("test content"), 0600); err != nil { - t.Fatalf("Failed to create unsupported test file: %v", err) + require.NoError(t, err, "Failed to create") } defer os.Remove(unsupportedFile) @@ -3074,11 +2906,11 @@ func TestErrorCases(t *testing.T) { if db != nil { _ = db.Close() // Ignore close error in test cleanup } - t.Fatalf("Expected error containing '%s', but got nil", tc.expectError) + require.Fail(t, "Expected error containing '%s', but got nil", tc.expectError) } if !strings.Contains(err.Error(), tc.expectError) { - t.Errorf("Expected error containing '%s', got: %s", tc.expectError, err.Error()) + assert.Fail(t, "Expected error containing '%s', got: %s", tc.expectError, err.Error()) } }) } @@ -3093,30 +2925,28 @@ func TestSQLiteDumpFunctions(t *testing.T) { // Create a direct SQLite connection db, err := sql.Open("sqlite", ":memory:") if err != nil { - t.Fatalf("Failed to create SQLite connection: %v", err) + require.NoError(t, err, "Failed to create") } defer db.Close() // Create test tables _, err = db.ExecContext(context.Background(), "CREATE TABLE test1 (id INTEGER, name TEXT)") if err != nil { - t.Fatalf("Failed to create test table 1: %v", err) + require.NoError(t, err, "Failed to create") } _, err = db.ExecContext(context.Background(), "CREATE TABLE test2 (id INTEGER, value TEXT)") if err != nil { - t.Fatalf("Failed to create test table 2: %v", err) + require.NoError(t, err, "Failed to create") } // Test getSQLiteTableNames tableNames, err := getSQLiteTableNames(db) - if err != nil { - t.Fatalf("getSQLiteTableNames failed: %v", err) - } + require.NoError(t, err, "getSQLiteTableNames failed") expectedTables := []string{"test1", "test2"} if len(tableNames) != len(expectedTables) { - t.Errorf("Expected %d tables, got %d: %v", len(expectedTables), len(tableNames), tableNames) + assert.Fail(t, "Expected %d tables, got %d: %v", len(expectedTables), len(tableNames), tableNames) } // Verify table names @@ -3129,7 +2959,7 @@ func TestSQLiteDumpFunctions(t *testing.T) { } } if !found { - t.Errorf("Expected table %s not found in %v", expected, tableNames) + assert.Fail(t, "Expected table %s not found in %v", expected, tableNames) } } }) @@ -3140,31 +2970,29 @@ func TestSQLiteDumpFunctions(t *testing.T) { // Create a direct SQLite connection db, err := sql.Open("sqlite", ":memory:") if err != nil { - t.Fatalf("Failed to create SQLite connection: %v", err) + require.NoError(t, err, "Failed to create") } defer db.Close() // Create test table with known columns _, err = 
db.ExecContext(context.Background(), "CREATE TABLE test_table (id INTEGER PRIMARY KEY, name TEXT NOT NULL, age INTEGER, salary REAL)") if err != nil { - t.Fatalf("Failed to create test table: %v", err) + require.NoError(t, err, "Failed to create") } // Test getSQLiteTableColumns columns, err := getSQLiteTableColumns(db, "test_table") - if err != nil { - t.Fatalf("getSQLiteTableColumns failed: %v", err) - } + require.NoError(t, err, "getSQLiteTableColumns failed") expectedColumns := []string{"id", "name", "age", "salary"} if len(columns) != len(expectedColumns) { - t.Errorf("Expected %d columns, got %d: %v", len(expectedColumns), len(columns), columns) + assert.Fail(t, "Expected %d columns, got %d: %v", len(expectedColumns), len(columns), columns) } // Verify column names for i, expected := range expectedColumns { if i >= len(columns) || columns[i] != expected { - t.Errorf("Expected column %s at index %d, got %s", expected, i, columns[i]) + assert.Fail(t, "Expected column %s at index %d, got %s", expected, i, columns[i]) } } }) @@ -3175,19 +3003,19 @@ func TestSQLiteDumpFunctions(t *testing.T) { // Create a direct SQLite connection db, err := sql.Open("sqlite", ":memory:") if err != nil { - t.Fatalf("Failed to create SQLite connection: %v", err) + require.NoError(t, err, "Failed to create") } defer db.Close() // Create test table and insert data _, err = db.ExecContext(context.Background(), "CREATE TABLE employees (id INTEGER, name TEXT, department TEXT)") if err != nil { - t.Fatalf("Failed to create test table: %v", err) + require.NoError(t, err, "Failed to create") } _, err = db.ExecContext(context.Background(), "INSERT INTO employees VALUES (1, 'Alice', 'Engineering'), (2, 'Bob', 'Marketing'), (3, 'Charlie', 'Sales')") if err != nil { - t.Fatalf("Failed to insert test data: %v", err) + require.NoError(t, err, "Failed to insert test data") } // Test dump to directory @@ -3195,15 +3023,13 @@ func TestSQLiteDumpFunctions(t *testing.T) { options := NewDumpOptions() err = dumpSQLiteDatabase(db, tempDir, options) - if err != nil { - t.Fatalf("dumpSQLiteDatabase failed: %v", err) - } + require.NoError(t, err, "dumpSQLiteDatabase failed") // Verify file was created dumpedFile := filepath.Join(tempDir, "employees.csv") content, err := os.ReadFile(dumpedFile) //nolint:gosec // dumpedFile is created in test with controlled path if err != nil { - t.Fatalf("Failed to read dumped file: %v", err) + require.NoError(t, err, "Failed to read dumped file") } contentStr := string(content) @@ -3211,19 +3037,19 @@ func TestSQLiteDumpFunctions(t *testing.T) { // Should have header + 3 data rows if len(lines) != 4 { - t.Errorf("Expected 4 lines (header + 3 data), got %d", len(lines)) + assert.Fail(t, "Expected 4 lines (header + 3 data), got %d", len(lines)) } // Check header if lines[0] != "id,name,department" { - t.Errorf("Expected header 'id,name,department', got '%s'", lines[0]) + assert.Fail(t, "Expected header 'id,name,department', got '%s'", lines[0]) } // Check data rows contain expected values expectedDataPatterns := []string{"1,Alice,Engineering", "2,Bob,Marketing", "3,Charlie,Sales"} for i, expected := range expectedDataPatterns { if lines[i+1] != expected { - t.Errorf("Expected line %d to be '%s', got '%s'", i+1, expected, lines[i+1]) + assert.Fail(t, "Expected line %d to be '%s', got '%s'", i+1, expected, lines[i+1]) } } }) @@ -3236,35 +3062,31 @@ func TestSQLiteDumpFunctions(t *testing.T) { t.Run("no compression", func(t *testing.T) { file, err := os.Create(filepath.Join(tempDir, "test.txt")) 
//nolint:gosec // tempDir is created in test if err != nil { - t.Fatalf("Failed to create test file: %v", err) + require.NoError(t, err, "Failed to create") } defer file.Close() writer, closeWriter, err := createCompressedWriter(file, CompressionNone) - if err != nil { - t.Fatalf("createCompressedWriter failed: %v", err) - } + require.NoError(t, err, "createCompressedWriter failed") if writer != file { t.Error("Expected writer to be the same as file for no compression") } if err := closeWriter(); err != nil { - t.Errorf("closeWriter failed: %v", err) + assert.NoError(t, err, "closeWriter failed") } }) t.Run("gzip compression", func(t *testing.T) { file, err := os.Create(filepath.Join(tempDir, "test.gz")) //nolint:gosec // tempDir is created in test if err != nil { - t.Fatalf("Failed to create test file: %v", err) + require.NoError(t, err, "Failed to create") } defer file.Close() writer, closeWriter, err := createCompressedWriter(file, CompressionGZ) - if err != nil { - t.Fatalf("createCompressedWriter failed for gzip: %v", err) - } + require.NoError(t, err, "createCompressedWriter failed for gzip") if writer == file { t.Error("Expected writer to be different from file for gzip compression") @@ -3273,22 +3095,20 @@ func TestSQLiteDumpFunctions(t *testing.T) { // Write some test data testData := "test,data\n1,hello\n2,world\n" n, err := writer.Write([]byte(testData)) - if err != nil { - t.Fatalf("Failed to write to compressed writer: %v", err) - } + require.NoError(t, err, "Failed to write to compressed writer") if n != len(testData) { - t.Errorf("Expected to write %d bytes, wrote %d", len(testData), n) + assert.Fail(t, "Expected to write %d bytes, wrote %d", len(testData), n) } if err := closeWriter(); err != nil { - t.Errorf("closeWriter failed: %v", err) + assert.NoError(t, err, "closeWriter failed") } }) t.Run("bzip2 compression should error", func(t *testing.T) { file, err := os.Create(filepath.Join(tempDir, "test.bz2")) //nolint:gosec // tempDir is created in test if err != nil { - t.Fatalf("Failed to create test file: %v", err) + require.NoError(t, err, "Failed to create") } defer file.Close() @@ -3299,7 +3119,7 @@ func TestSQLiteDumpFunctions(t *testing.T) { expectedError := "bzip2 compression is not supported for writing" if err.Error() != expectedError { - t.Errorf("Expected error '%s', got '%s'", expectedError, err.Error()) + assert.Fail(t, "Expected error '%s', got '%s'", expectedError, err.Error()) } }) }) @@ -3328,37 +3148,29 @@ func TestParquetReadWriteIntegration(t *testing.T) { // Open CSV file and load into database db, err := Open(csvFile) - if err != nil { - t.Fatalf("Failed to open CSV file: %v", err) - } + require.NoError(t, err, "Failed to open CSV file") defer db.Close() // Export to Parquet format parquetOutputDir := filepath.Join(tempDir, "parquet_output") options := NewDumpOptions().WithFormat(OutputFormatParquet) err = DumpDatabase(db, parquetOutputDir, options) - if err != nil { - t.Fatalf("Failed to dump to Parquet: %v", err) - } + require.NoError(t, err, "Failed to dump to Parquet") // Verify Parquet file was created parquetFile := filepath.Join(parquetOutputDir, "test.parquet") if _, err := os.Stat(parquetFile); os.IsNotExist(err) { - t.Fatalf("Parquet file was not created: %s", parquetFile) + require.Fail(t, "Parquet file was not created: %s", parquetFile) } // Read back the Parquet file db2, err := Open(parquetFile) - if err != nil { - t.Fatalf("Failed to open Parquet file: %v", err) - } + require.NoError(t, err, "Failed to open Parquet file") defer 
db2.Close() // Verify data is correct rows, err := db2.QueryContext(context.Background(), "SELECT id, name, age, email FROM test ORDER BY id") - if err != nil { - t.Fatalf("Failed to query Parquet data: %v", err) - } + require.NoError(t, err, "Failed to query Parquet data") defer rows.Close() expectedData := [][]string{ @@ -3371,27 +3183,25 @@ func TestParquetReadWriteIntegration(t *testing.T) { for rows.Next() { var id, name, age, email string if err := rows.Scan(&id, &name, &age, &email); err != nil { - t.Fatalf("Failed to scan row: %v", err) + require.NoError(t, err, "Failed to scan row") } actualData = append(actualData, []string{id, name, age, email}) } if err := rows.Err(); err != nil { - t.Fatalf("Error during row iteration: %v", err) + require.NoError(t, err, "Error during row iteration") } - if len(actualData) != len(expectedData) { - t.Fatalf("Expected %d rows, got %d", len(expectedData), len(actualData)) - } + require.Equal(t, len(expectedData), len(actualData), "Expected %d rows, got %d", len(expectedData), len(actualData)) for i, expected := range expectedData { if len(actualData[i]) != len(expected) { - t.Errorf("Row %d: expected %d columns, got %d", i, len(expected), len(actualData[i])) + assert.Fail(t, "Row %d: expected %d columns, got %d", i, len(expected), len(actualData[i])) continue } for j, expectedVal := range expected { if actualData[i][j] != expectedVal { - t.Errorf("Row %d, column %d: expected %s, got %s", i, j, expectedVal, actualData[i][j]) + assert.Fail(t, "Row %d, column %d: expected %s, got %s", i, j, expectedVal, actualData[i][j]) } } } @@ -3417,9 +3227,7 @@ Charlie,92.8,true` // Open CSV file db, err := Open(csvFile) - if err != nil { - t.Fatalf("Failed to open CSV file: %v", err) - } + require.NoError(t, err, "Failed to open CSV file") defer db.Close() // Export to Parquet format with GZ compression @@ -3435,7 +3243,7 @@ Charlie,92.8,true` // We expect an error for external compression with Parquet expectedErrMsg := "external compression not supported for Parquet format - use Parquet's built-in compression instead" if !strings.Contains(err.Error(), expectedErrMsg) { - t.Fatalf("Expected error message to contain '%s', got: %v", expectedErrMsg, err) + require.Contains(t, err.Error(), expectedErrMsg, "Expected error message to contain '%s', got: %v", expectedErrMsg, err) } return // Test passed - error was expected } @@ -3479,36 +3287,26 @@ Charlie,92.8,true` // Open CSV and export to Parquet db, err := Open(csvFile) - if err != nil { - t.Fatalf("Failed to open CSV: %v", err) - } + require.NoError(t, err, "Failed to open CSV") defer db.Close() parquetDir := filepath.Join(tempDir, td.name+"_parquet") err = DumpDatabase(db, parquetDir, NewDumpOptions().WithFormat(OutputFormatParquet)) - if err != nil { - t.Fatalf("Failed to export to Parquet: %v", err) - } + require.NoError(t, err, "Failed to export to Parquet") // Read back from Parquet parquetFile := filepath.Join(parquetDir, td.name+".parquet") db2, err := Open(parquetFile) - if err != nil { - t.Fatalf("Failed to open Parquet file: %v", err) - } + require.NoError(t, err, "Failed to open Parquet file") defer db2.Close() // Query all data rows, err := db2.QueryContext(context.Background(), "SELECT * FROM "+td.name+" ORDER BY id") //nolint:gosec - if err != nil { - t.Fatalf("Failed to query: %v", err) - } + require.NoError(t, err, "Failed to query") defer rows.Close() columns, err := rows.Columns() - if err != nil { - t.Fatalf("Failed to get columns: %v", err) - } + require.NoError(t, err, "Failed to get 
columns") var actualRows []map[string]string for rows.Next() { @@ -3519,7 +3317,7 @@ Charlie,92.8,true` } if err := rows.Scan(valuePtrs...); err != nil { - t.Fatalf("Failed to scan row: %v", err) + require.NoError(t, err, "Failed to scan row") } row := make(map[string]string) @@ -3534,21 +3332,19 @@ Charlie,92.8,true` } if err := rows.Err(); err != nil { - t.Fatalf("Error during row iteration: %v", err) + require.NoError(t, err, "Error during row iteration") } // Compare results - if len(actualRows) != len(td.expected) { - t.Fatalf("Expected %d rows, got %d", len(td.expected), len(actualRows)) - } + require.Equal(t, len(td.expected), len(actualRows), "Expected %d rows, got %d", len(td.expected), len(actualRows)) for i, expectedRow := range td.expected { actualRow := actualRows[i] for col, expectedVal := range expectedRow { if actualVal, ok := actualRow[col]; !ok { - t.Errorf("Row %d: missing column %s", i, col) + assert.Fail(t, "Row %d: missing column %s", i, col) } else if actualVal != expectedVal { - t.Errorf("Row %d, column %s: expected %s, got %s", i, col, expectedVal, actualVal) + assert.Fail(t, "Row %d, column %s: expected %s, got %s", i, col, expectedVal, actualVal) } } } @@ -3580,32 +3376,24 @@ func TestParquetPerformance(t *testing.T) { // Test CSV to Parquet export performance start := time.Now() db, err := Open(csvFile) - if err != nil { - t.Fatalf("Failed to open CSV: %v", err) - } + require.NoError(t, err, "Failed to open CSV") defer db.Close() parquetDir := filepath.Join(tempDir, "perf_parquet") err = DumpDatabase(db, parquetDir, NewDumpOptions().WithFormat(OutputFormatParquet)) - if err != nil { - t.Fatalf("Failed to export to Parquet: %v", err) - } + require.NoError(t, err, "Failed to export to Parquet") exportTime := time.Since(start) // Test Parquet read performance parquetFile := filepath.Join(parquetDir, "large_test.parquet") start = time.Now() db2, err := Open(parquetFile) - if err != nil { - t.Fatalf("Failed to open Parquet: %v", err) - } + require.NoError(t, err, "Failed to open Parquet") defer db2.Close() var count int err = db2.QueryRowContext(context.Background(), "SELECT COUNT(*) FROM large_test").Scan(&count) - if err != nil { - t.Fatalf("Failed to query count: %v", err) - } + require.NoError(t, err, "Failed to query count") readTime := time.Since(start) t.Logf("Performance results:") @@ -3614,7 +3402,7 @@ func TestParquetPerformance(t *testing.T) { t.Logf("Records processed: %d", count) if count != 10000 { - t.Errorf("Expected 10000 records, got %d", count) + assert.Fail(t, "Expected 10000 records, got %d", count) } } @@ -3655,16 +3443,14 @@ func TestParquetDirectParsing(t *testing.T) { // Test using file.toTable() directly to trigger parseParquet f := newFile(parquetFile) table, err := f.toTable() - if err != nil { - t.Fatalf("Failed to parse Parquet file: %v", err) - } + require.NoError(t, err, "Failed to parse Parquet file") if table == nil { t.Fatal("Expected non-nil table from Parquet file") } if len(table.getRecords()) != 3 { - t.Errorf("Expected 3 records, got %d", len(table.getRecords())) + assert.Fail(t, "Expected 3 records, got %d", len(table.getRecords())) } // Also test compressed Parquet to trigger parseCompressedParquet @@ -3693,16 +3479,14 @@ func TestParquetDirectParsing(t *testing.T) { // Test compressed Parquet parsing f2 := newFile(compressedParquetFile) table2, err := f2.toTable() - if err != nil { - t.Fatalf("Failed to parse compressed Parquet file: %v", err) - } + require.NoError(t, err, "Failed to parse compressed Parquet file") if table2 
== nil { t.Fatal("Expected non-nil table from compressed Parquet file") } if len(table2.getRecords()) != 3 { - t.Errorf("Expected 3 records from compressed Parquet, got %d", len(table2.getRecords())) + assert.Fail(t, "Expected 3 records from compressed Parquet, got %d", len(table2.getRecords())) } }) @@ -3742,9 +3526,9 @@ func TestParquetDirectParsing(t *testing.T) { _, err := Open(testFile) if tf.shouldWork && err != nil { - t.Errorf("File %s should be supported but got error: %v", tf.filename, err) + assert.Fail(t, "File %s should be supported but got error: %v", tf.filename, err) } else if !tf.shouldWork && err == nil { - t.Errorf("File %s should not be supported but no error occurred", tf.filename) + assert.Fail(t, "File %s should not be supported but no error occurred", tf.filename) } } }) @@ -3798,10 +3582,10 @@ func TestWriteXLSXTableData(t *testing.T) { // Check sheet exists sheets := xlsxFile.GetSheetList() if len(sheets) != 1 { - t.Errorf("Expected 1 sheet, got %d", len(sheets)) + assert.Fail(t, "Expected 1 sheet, got %d", len(sheets)) } if sheets[0] != "output" { - t.Errorf("Expected sheet 'output', got '%s'", sheets[0]) + assert.Fail(t, "Expected sheet 'output', got '%s'", sheets[0]) } // Check data @@ -3812,19 +3596,17 @@ func TestWriteXLSXTableData(t *testing.T) { // Should have header + 3 data rows = 4 total rows if len(sheetRows) != 4 { - t.Errorf("Expected 4 rows (1 header + 3 data), got %d", len(sheetRows)) + assert.Fail(t, "Expected 4 rows (1 header + 3 data), got %d", len(sheetRows)) } // Check header expectedHeaders := []string{"id", "name"} - if !reflect.DeepEqual(sheetRows[0], expectedHeaders) { - t.Errorf("Expected headers %v, got %v", expectedHeaders, sheetRows[0]) - } + assert.Equal(t, expectedHeaders, sheetRows[0], "Expected headers %v, got %v", expectedHeaders, sheetRows[0]) // Check first data row if len(sheetRows) > 1 { if sheetRows[1][0] != "1" || sheetRows[1][1] != "Gina" { - t.Errorf("Expected first row [1, Gina], got %v", sheetRows[1]) + assert.Fail(t, "Expected first row [1, Gina], got %v", sheetRows[1]) } } }) @@ -3893,7 +3675,7 @@ func TestWriteXLSXTableData(t *testing.T) { // Check data sheets := xlsxFile.GetSheetList() if len(sheets) != 1 { - t.Errorf("Expected 1 sheet, got %d", len(sheets)) + assert.Fail(t, "Expected 1 sheet, got %d", len(sheets)) } sheetRows, err := xlsxFile.GetRows(sheets[0]) @@ -3903,14 +3685,12 @@ func TestWriteXLSXTableData(t *testing.T) { // Should have header + 3 data rows = 4 total rows if len(sheetRows) != 4 { - t.Errorf("Expected 4 rows (1 header + 3 data), got %d", len(sheetRows)) + assert.Fail(t, "Expected 4 rows (1 header + 3 data), got %d", len(sheetRows)) } // Check header expectedHeaders := []string{"id", "mail"} - if !reflect.DeepEqual(sheetRows[0], expectedHeaders) { - t.Errorf("Expected headers %v, got %v", expectedHeaders, sheetRows[0]) - } + assert.Equal(t, expectedHeaders, sheetRows[0], "Expected headers %v, got %v", expectedHeaders, sheetRows[0]) }) t.Run("writeXLSXTableData with no columns error", func(t *testing.T) { @@ -3923,7 +3703,7 @@ func TestWriteXLSXTableData(t *testing.T) { t.Error("Expected error for no columns") } if !strings.Contains(err.Error(), "no columns defined") { - t.Errorf("Expected 'no columns defined' error, got: %v", err) + assert.NoError(t, err, "Expected 'no columns defined' error, got") } }) @@ -3957,7 +3737,7 @@ func TestWriteXLSXTableData(t *testing.T) { t.Error("Expected error for unsupported bz2 compression") } if !strings.Contains(err.Error(), "bzip2 compression is not 
supported") { - t.Errorf("Expected 'bzip2 compression is not supported' error, got: %v", err) + assert.NoError(t, err, "Expected 'bzip2 compression is not supported' error, got") } }) @@ -4005,7 +3785,7 @@ func TestWriteXLSXTableData(t *testing.T) { t.Error("Compressed output file is empty") } if fileInfo.Size() < 100 { - t.Errorf("Compressed file seems too small: %d bytes", fileInfo.Size()) + assert.Fail(t, "Compressed file seems too small: %d bytes", fileInfo.Size()) } }) } @@ -4021,7 +3801,7 @@ func TestBytesReaderAt(t *testing.T) { expectedSize := int64(len(testData)) if size != expectedSize { - t.Errorf("Expected size %d, got %d", expectedSize, size) + assert.Fail(t, "Expected size %d, got %d", expectedSize, size) } }) @@ -4032,7 +3812,7 @@ func TestBytesReaderAt(t *testing.T) { expectedSize := int64(0) if size != expectedSize { - t.Errorf("Expected size %d, got %d", expectedSize, size) + assert.Fail(t, "Expected size %d, got %d", expectedSize, size) } }) @@ -4045,15 +3825,15 @@ func TestBytesReaderAt(t *testing.T) { n, err := reader.Read(buffer) if !errors.Is(err, io.EOF) { - t.Errorf("Expected io.EOF, got %v", err) + assert.Equal(t, io.EOF, err, "Expected io.EOF, got %v", err) } if n != len(testData) { - t.Errorf("Expected to read %d bytes, got %d", len(testData), n) + assert.Equal(t, len(testData), n, "Expected to read %d bytes, got %d", len(testData), n) } // Check that data was read correctly if !bytes.Equal(buffer[:n], testData) { - t.Errorf("Expected data %q, got %q", testData, buffer[:n]) + assert.Equal(t, testData, buffer[:n], "Expected data %q, got %q", testData, buffer[:n]) } }) @@ -4065,17 +3845,15 @@ func TestBytesReaderAt(t *testing.T) { buffer := make([]byte, 5) n, err := reader.Read(buffer) - if err != nil { - t.Errorf("Expected no error, got %v", err) - } + assert.NoError(t, err, "Expected no error") if n != 5 { - t.Errorf("Expected to read 5 bytes, got %d", n) + assert.Fail(t, "Expected to read 5 bytes, got %d", n) } // Check that data was read correctly (first 5 bytes) expected := testData[:5] if !bytes.Equal(buffer, expected) { - t.Errorf("Expected data %q, got %q", expected, buffer) + assert.Equal(t, expected, buffer, "Expected data %q, got %q", expected, buffer) } }) @@ -4086,10 +3864,10 @@ func TestBytesReaderAt(t *testing.T) { n, err := reader.Read(buffer) if !errors.Is(err, io.EOF) { - t.Errorf("Expected io.EOF, got %v", err) + assert.Equal(t, io.EOF, err, "Expected io.EOF, got %v", err) } if n != 0 { - t.Errorf("Expected to read 0 bytes, got %d", n) + assert.Fail(t, "Expected to read 0 bytes, got %d", n) } }) @@ -4101,16 +3879,14 @@ func TestBytesReaderAt(t *testing.T) { buffer := make([]byte, 5) n, err := reader.ReadAt(buffer, 7) // Start at "W" - if err != nil { - t.Errorf("Expected no error, got %v", err) - } + assert.NoError(t, err, "Expected no error") if n != 5 { - t.Errorf("Expected to read 5 bytes, got %d", n) + assert.Fail(t, "Expected to read 5 bytes, got %d", n) } expected := []byte("World") if !bytes.Equal(buffer, expected) { - t.Errorf("Expected data %q, got %q", expected, buffer) + assert.Equal(t, expected, buffer, "Expected data %q, got %q", expected, buffer) } }) @@ -4122,10 +3898,10 @@ func TestBytesReaderAt(t *testing.T) { n, err := reader.ReadAt(buffer, 10) // Offset beyond data if !errors.Is(err, io.EOF) { - t.Errorf("Expected io.EOF, got %v", err) + assert.Equal(t, io.EOF, err, "Expected io.EOF, got %v", err) } if n != 0 { - t.Errorf("Expected to read 0 bytes, got %d", n) + assert.Fail(t, "Expected to read 0 bytes, got %d", n) } }) @@ 
-4137,10 +3913,10 @@ func TestBytesReaderAt(t *testing.T) { n, err := reader.ReadAt(buffer, -1) // Negative offset if !errors.Is(err, io.EOF) { - t.Errorf("Expected io.EOF, got %v", err) + assert.Equal(t, io.EOF, err, "Expected io.EOF, got %v", err) } if n != 0 { - t.Errorf("Expected to read 0 bytes, got %d", n) + assert.Fail(t, "Expected to read 0 bytes, got %d", n) } }) @@ -4150,30 +3926,24 @@ func TestBytesReaderAt(t *testing.T) { // Test SeekStart pos, err := reader.Seek(5, io.SeekStart) - if err != nil { - t.Errorf("Expected no error for SeekStart, got %v", err) - } + assert.NoError(t, err, "Expected no error for SeekStart") if pos != 5 { - t.Errorf("Expected position 5, got %d", pos) + assert.Fail(t, "Expected position 5, got %d", pos) } // Test SeekCurrent pos, err = reader.Seek(3, io.SeekCurrent) - if err != nil { - t.Errorf("Expected no error for SeekCurrent, got %v", err) - } + assert.NoError(t, err, "Expected no error for SeekCurrent") if pos != 0 { - t.Errorf("Expected position 0 (no tracking), got %d", pos) + assert.Fail(t, "Expected position 0 (no tracking), got %d", pos) } // Test SeekEnd pos, err = reader.Seek(-2, io.SeekEnd) - if err != nil { - t.Errorf("Expected no error for SeekEnd, got %v", err) - } + assert.NoError(t, err, "Expected no error for SeekEnd") expected := int64(len(testData)) - 2 if pos != expected { - t.Errorf("Expected position %d, got %d", expected, pos) + assert.Fail(t, "Expected position %d, got %d", expected, pos) } // Test invalid whence @@ -4182,7 +3952,7 @@ func TestBytesReaderAt(t *testing.T) { t.Error("Expected error for invalid whence") } if !strings.Contains(err.Error(), "invalid whence value") { - t.Errorf("Expected 'invalid whence value' error, got: %v", err) + assert.NoError(t, err, "Expected 'invalid whence value' error, got") } }) } @@ -4206,19 +3976,19 @@ func TestExtractValueFromArrowArray(t *testing.T) { // Test true value result := extractValueFromArrowArray(arr, 0) if result != "1" { - t.Errorf("Expected '1' for true, got '%s'", result) + assert.Fail(t, "Expected '1' for true, got '%s'", result) } // Test false value result = extractValueFromArrowArray(arr, 1) if result != "0" { - t.Errorf("Expected '0' for false, got '%s'", result) + assert.Fail(t, "Expected '0' for false, got '%s'", result) } // Test null value result = extractValueFromArrowArray(arr, 2) if result != "" { - t.Errorf("Expected empty string for null, got '%s'", result) + assert.Fail(t, "Expected empty string for null, got '%s'", result) } }) @@ -4233,11 +4003,11 @@ func TestExtractValueFromArrowArray(t *testing.T) { result := extractValueFromArrowArray(int8Arr, 0) if result != "42" { - t.Errorf("Expected '42' for int8, got '%s'", result) + assert.Fail(t, "Expected '42' for int8, got '%s'", result) } result = extractValueFromArrowArray(int8Arr, 1) if result != "" { - t.Errorf("Expected empty string for null int8, got '%s'", result) + assert.Fail(t, "Expected empty string for null int8, got '%s'", result) } // Test Int16 @@ -4249,7 +4019,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result = extractValueFromArrowArray(int16Arr, 0) if result != "1000" { - t.Errorf("Expected '1000' for int16, got '%s'", result) + assert.Fail(t, "Expected '1000' for int16, got '%s'", result) } // Test Int32 @@ -4261,7 +4031,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result = extractValueFromArrowArray(int32Arr, 0) if result != "100000" { - t.Errorf("Expected '100000' for int32, got '%s'", result) + assert.Fail(t, "Expected '100000' for int32, got '%s'", result) } 
// Test Int64 @@ -4273,7 +4043,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result = extractValueFromArrowArray(int64Arr, 0) if result != "9223372036854775807" { - t.Errorf("Expected '9223372036854775807' for int64, got '%s'", result) + assert.Fail(t, "Expected '9223372036854775807' for int64, got '%s'", result) } }) @@ -4287,7 +4057,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result := extractValueFromArrowArray(uint8Arr, 0) if result != "255" { - t.Errorf("Expected '255' for uint8, got '%s'", result) + assert.Fail(t, "Expected '255' for uint8, got '%s'", result) } // Test Uint16 @@ -4299,7 +4069,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result = extractValueFromArrowArray(uint16Arr, 0) if result != "65535" { - t.Errorf("Expected '65535' for uint16, got '%s'", result) + assert.Fail(t, "Expected '65535' for uint16, got '%s'", result) } // Test Uint32 @@ -4311,7 +4081,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result = extractValueFromArrowArray(uint32Arr, 0) if result != "4294967295" { - t.Errorf("Expected '4294967295' for uint32, got '%s'", result) + assert.Fail(t, "Expected '4294967295' for uint32, got '%s'", result) } // Test Uint64 @@ -4323,7 +4093,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result = extractValueFromArrowArray(uint64Arr, 0) if result != "18446744073709551615" { - t.Errorf("Expected '18446744073709551615' for uint64, got '%s'", result) + assert.Fail(t, "Expected '18446744073709551615' for uint64, got '%s'", result) } }) @@ -4338,11 +4108,11 @@ func TestExtractValueFromArrowArray(t *testing.T) { result := extractValueFromArrowArray(float32Arr, 0) if result != "3.14159" { - t.Errorf("Expected '3.14159' for float32, got '%s'", result) + assert.Fail(t, "Expected '3.14159' for float32, got '%s'", result) } result = extractValueFromArrowArray(float32Arr, 1) if result != "" { - t.Errorf("Expected empty string for null float32, got '%s'", result) + assert.Fail(t, "Expected empty string for null float32, got '%s'", result) } // Test Float64 @@ -4354,7 +4124,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result = extractValueFromArrowArray(float64Arr, 0) if result != "2.718281828459045" { - t.Errorf("Expected '2.718281828459045' for float64, got '%s'", result) + assert.Fail(t, "Expected '2.718281828459045' for float64, got '%s'", result) } }) @@ -4372,19 +4142,19 @@ func TestExtractValueFromArrowArray(t *testing.T) { // Test normal string result := extractValueFromArrowArray(stringArr, 0) if result != "Hello, World!" 
{ - t.Errorf("Expected 'Hello, World!', got '%s'", result) + assert.Fail(t, "Expected 'Hello, World!', got '%s'", result) } // Test empty string result = extractValueFromArrowArray(stringArr, 1) if result != "" { - t.Errorf("Expected empty string, got '%s'", result) + assert.Fail(t, "Expected empty string, got '%s'", result) } // Test null string result = extractValueFromArrowArray(stringArr, 2) if result != "" { - t.Errorf("Expected empty string for null, got '%s'", result) + assert.Fail(t, "Expected empty string for null, got '%s'", result) } }) @@ -4402,13 +4172,13 @@ func TestExtractValueFromArrowArray(t *testing.T) { // Test binary data result := extractValueFromArrowArray(binaryArr, 0) if result != "binary data" { - t.Errorf("Expected 'binary data', got '%s'", result) + assert.Fail(t, "Expected 'binary data', got '%s'", result) } // Test null binary result = extractValueFromArrowArray(binaryArr, 1) if result != "" { - t.Errorf("Expected empty string for null binary, got '%s'", result) + assert.Fail(t, "Expected empty string for null binary, got '%s'", result) } }) @@ -4422,7 +4192,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result := extractValueFromArrowArray(date32Arr, 0) if result != "18628" { - t.Errorf("Expected '18628' for date32, got '%s'", result) + assert.Fail(t, "Expected '18628' for date32, got '%s'", result) } // Test Date64 @@ -4434,7 +4204,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result = extractValueFromArrowArray(date64Arr, 0) if result != "1609459200000" { - t.Errorf("Expected '1609459200000' for date64, got '%s'", result) + assert.Fail(t, "Expected '1609459200000' for date64, got '%s'", result) } }) @@ -4447,7 +4217,7 @@ func TestExtractValueFromArrowArray(t *testing.T) { result := extractValueFromArrowArray(timestampArr, 0) if result != "1609459200000" { - t.Errorf("Expected '1609459200000' for timestamp, got '%s'", result) + assert.Fail(t, "Expected '1609459200000' for timestamp, got '%s'", result) } }) @@ -4570,13 +4340,13 @@ func TestEdgeCasesEmptyAndMalformedData(t *testing.T) { prefix := strings.TrimSuffix(tt.fileName, ext) tmpFile, err := os.CreateTemp(t.TempDir(), prefix+"*"+ext) if err != nil { - t.Fatalf("Failed to create temp file: %v", err) + require.NoError(t, err, "Failed to create") } defer tmpFile.Close() // Write test content if _, err := tmpFile.WriteString(tt.fileContent); err != nil { - t.Fatalf("Failed to write test content: %v", err) + require.NoError(t, err, "Failed to write test content") } // Test with timeout context @@ -4591,18 +4361,18 @@ func TestEdgeCasesEmptyAndMalformedData(t *testing.T) { if db != nil { _ = db.Close() // Ignore error in test cleanup } - t.Errorf("Expected error for %s, but got none", tt.description) + assert.Error(t, err, "Expected error for %s, but got none", tt.description) } return } if err != nil { - t.Errorf("Unexpected error for %s: %v", tt.description, err) + assert.NoError(t, err, "Unexpected error for %s", tt.description) return } if db == nil { - t.Errorf("Expected valid db for %s, but got nil", tt.description) + assert.Fail(t, "Expected valid db for %s, but got nil", tt.description) return } defer db.Close() @@ -4613,19 +4383,19 @@ func TestEdgeCasesEmptyAndMalformedData(t *testing.T) { query := fmt.Sprintf("SELECT COUNT(*) FROM \"%s\"", tableName) //nolint:gosec // Table name is from test data //nolint:gosec // Table name is from test data rows, err := db.QueryContext(ctx, query) if err != nil { - t.Errorf("Query failed for %s: %v", tt.description, err) + assert.Fail(t, "Query 
failed for %s: %v", tt.description, err) return } defer rows.Close() if err := rows.Err(); err != nil { - t.Errorf("Rows error for %s: %v", tt.description, err) + assert.Fail(t, "Rows error for %s: %v", tt.description, err) return } var count int if rows.Next() { if err := rows.Scan(&count); err != nil { - t.Errorf("Scan failed for %s: %v", tt.description, err) + assert.Fail(t, "Scan failed for %s: %v", tt.description, err) } } }) @@ -4674,19 +4444,19 @@ func TestEdgeCasesReaderInput(t *testing.T) { if tt.expectedErr { if err == nil { - t.Errorf("Expected error for %s, but got none", tt.name) + assert.Error(t, err, "Expected error for %s, but got none", tt.name) } return } if err != nil { - t.Errorf("Unexpected error for %s: %v", tt.name, err) + assert.NoError(t, err, "Unexpected error for %s", tt.name) return } db, err := validatedBuilder.Open(ctx) if err != nil { - t.Errorf("Failed to open database for %s: %v", tt.name, err) + assert.Fail(t, "Failed to open database for %s: %v", tt.name, err) return } defer db.Close() @@ -4723,12 +4493,12 @@ func TestEdgeCasesCompression(t *testing.T) { tmpFile, err := os.CreateTemp(t.TempDir(), "test_*.csv.gz") if err != nil { - t.Fatalf("Failed to create temp file: %v", err) + require.NoError(t, err, "Failed to create") } defer tmpFile.Close() if _, err := tmpFile.Write(tt.content); err != nil { - t.Fatalf("Failed to write test content: %v", err) + require.NoError(t, err, "Failed to write test content") } ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) @@ -4741,13 +4511,13 @@ func TestEdgeCasesCompression(t *testing.T) { if db != nil { _ = db.Close() // Ignore error in test cleanup } - t.Errorf("Expected error for %s, but got none", tt.description) + assert.Error(t, err, "Expected error for %s, but got none", tt.description) } return } if err != nil { - t.Errorf("Unexpected error for %s: %v", tt.description, err) + assert.NoError(t, err, "Unexpected error for %s", tt.description) return } @@ -4784,22 +4554,19 @@ func TestEdgeCasesMemoryLimits(t *testing.T) { tmpFile, err := os.CreateTemp(t.TempDir(), "wide_file_*.csv") if err != nil { - t.Fatalf("Failed to create temp file: %v", err) + require.NoError(t, err, "Failed to create") } defer tmpFile.Close() if _, err := tmpFile.WriteString(content); err != nil { - t.Fatalf("Failed to write test content: %v", err) + require.NoError(t, err, "Failed to write test content") } ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() db, err := OpenContext(ctx, tmpFile.Name()) - if err != nil { - t.Errorf("Failed to handle wide file: %v", err) - return - } + assert.NoError(t, err, "Failed to handle wide file") defer db.Close() // Verify we can query the wide table @@ -4809,13 +4576,10 @@ func TestEdgeCasesMemoryLimits(t *testing.T) { query := fmt.Sprintf("SELECT COUNT(*) FROM \"%s\"", tableName) //nolint:gosec // Table name is from test data rows, err := db.QueryContext(ctx, query) - if err != nil { - t.Errorf("Query failed on wide file: %v", err) - return - } + assert.NoError(t, err, "Query failed on wide file") defer rows.Close() if err := rows.Err(); err != nil { - t.Errorf("Rows error on wide file: %v", err) + assert.NoError(t, err, "Rows error on wide file") return } }) @@ -4835,22 +4599,19 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) { tmpFile, err := os.CreateTemp(t.TempDir(), "large_cell_*.csv") if err != nil { - t.Fatalf("Failed to create temp file: %v", err) + require.NoError(t, err, "Failed to create") } defer tmpFile.Close() if _, err := 
tmpFile.WriteString(content); err != nil { - t.Fatalf("Failed to write test content: %v", err) + require.NoError(t, err, "Failed to write test content") } ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() db, err := OpenContext(ctx, tmpFile.Name()) - if err != nil { - t.Errorf("Failed to handle large cell content: %v", err) - return - } + assert.NoError(t, err, "Failed to handle large cell content") defer db.Close() // Verify we can query the data @@ -4858,22 +4619,19 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) { tableName = strings.TrimSuffix(tableName, ".csv") // Handle double extensions rows, err := db.QueryContext(ctx, fmt.Sprintf("SELECT LENGTH(col1) FROM \"%s\"", tableName)) - if err != nil { - t.Errorf("Query failed on large cell: %v", err) - return - } + assert.NoError(t, err, "Query failed on large cell") defer rows.Close() if err := rows.Err(); err != nil { - t.Errorf("Rows error on large cell: %v", err) + assert.NoError(t, err, "Rows error on large cell") return } var length int if rows.Next() { if err := rows.Scan(&length); err != nil { - t.Errorf("Scan failed: %v", err) + assert.NoError(t, err, "Scan failed") } else if length != cellSize { - t.Errorf("Expected cell length %d, got %d", cellSize, length) + assert.Fail(t, "Expected cell length %d, got %d", cellSize, length) } } }) @@ -4890,20 +4648,20 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) { tmpFile, err := os.CreateTemp(t.TempDir(), "many_rows_*.csv") if err != nil { - t.Fatalf("Failed to create temp file: %v", err) + require.NoError(t, err, "Failed to create") } defer tmpFile.Close() // Write header if _, err := tmpFile.WriteString("id,name,value\n"); err != nil { - t.Fatalf("Failed to write header: %v", err) + require.NoError(t, err, "Failed to write header") } // Write many rows for i := range numRows { line := fmt.Sprintf("%d,name_%d,value_%d\n", i, i, i) if _, err := tmpFile.WriteString(line); err != nil { - t.Fatalf("Failed to write row %d: %v", i, err) + require.NoError(t, err, "Failed to write row %d", i) } } @@ -4911,10 +4669,7 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) { defer cancel() db, err := OpenContext(ctx, tmpFile.Name()) - if err != nil { - t.Errorf("Failed to handle many rows: %v", err) - return - } + assert.NoError(t, err, "Failed to handle many rows") defer db.Close() // Test aggregation query @@ -4922,22 +4677,19 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) { tableName = strings.TrimSuffix(tableName, ".csv") rows, err := db.QueryContext(ctx, fmt.Sprintf("SELECT COUNT(*) FROM \"%s\"", tableName)) - if err != nil { - t.Errorf("Count query failed: %v", err) - return - } + assert.NoError(t, err, "Count query failed") defer rows.Close() if err := rows.Err(); err != nil { - t.Errorf("Rows error on count query: %v", err) + assert.NoError(t, err, "Rows error on count query") return } var count int if rows.Next() { if err := rows.Scan(&count); err != nil { - t.Errorf("Count scan failed: %v", err) + assert.NoError(t, err, "Count scan failed") } else if count != numRows { - t.Errorf("Expected %d rows, got %d", numRows, count) + assert.Fail(t, "Expected %d rows, got %d", numRows, count) } } }) @@ -4956,12 +4708,12 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) { tmpFile, err := os.CreateTemp(t.TempDir(), fmt.Sprintf("memory_test_%d_*.csv", i)) if err != nil { - t.Fatalf("Failed to create temp file: %v", err) + require.NoError(t, err, "Failed to create") } if _, err := tmpFile.WriteString(content); err != nil { _ = tmpFile.Close() // Ignore 
error in test cleanup
-			t.Fatalf("Failed to write content: %v", err)
+			require.NoError(t, err, "Failed to write content")
 		}
 		_ = tmpFile.Close() // Ignore error in test cleanup
@@ -4970,7 +4722,7 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) {
 			db, err := OpenContext(ctx, tmpFile.Name())
 			if err != nil {
 				cancel()
-				t.Errorf("Failed to open database %d: %v", i, err)
+				assert.Fail(t, fmt.Sprintf("Failed to open database %d: %v", i, err))
 				continue
 			}
 
@@ -4982,14 +4734,14 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) {
 			if err != nil {
 				_ = db.Close() // Ignore error in test cleanup
 				cancel()
-				t.Errorf("Query failed for database %d: %v", i, err)
+				assert.Fail(t, fmt.Sprintf("Query failed for database %d: %v", i, err))
 				continue
 			}
 			if err := rows.Err(); err != nil {
 				_ = rows.Close() // Ignore error in test cleanup
 				_ = db.Close() // Ignore error in test cleanup
 				cancel()
-				t.Errorf("Rows error for database %d: %v", i, err)
+				assert.Fail(t, fmt.Sprintf("Rows error for database %d: %v", i, err))
 				continue
 			}
 
@@ -5019,7 +4771,7 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) {
 		}
 
 		if memoryIncrease > threshold {
-			t.Errorf("Memory usage increased by %d bytes (%.2f MB), may indicate memory leak (threshold: %.2f MB)",
+			assert.LessOrEqual(t, memoryIncrease, threshold, "Memory usage increased by %d bytes (%.2f MB), may indicate memory leak (threshold: %.2f MB)",
 				memoryIncrease, float64(memoryIncrease)/(1024*1024), float64(threshold)/(1024*1024))
 		}
 	})
@@ -5037,19 +4789,19 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) {
 
 		tmpFile, err := os.CreateTemp(t.TempDir(), "cancellation_test_*.csv")
 		if err != nil {
-			t.Fatalf("Failed to create temp file: %v", err)
+			require.NoError(t, err, "Failed to create temp file")
 		}
 		defer tmpFile.Close()
 
 		// Write header and many rows
 		if _, err := tmpFile.WriteString("id,data\n"); err != nil {
-			t.Fatalf("Failed to write header: %v", err)
+			require.NoError(t, err, "Failed to write header")
 		}
 
 		for i := range numRows {
 			line := fmt.Sprintf("%d,%s\n", i, strings.Repeat("data", 100))
 			if _, err := tmpFile.WriteString(line); err != nil {
-				t.Fatalf("Failed to write row %d: %v", i, err)
+				require.NoError(t, err, "Failed to write row %d", i)
 			}
 		}
 
@@ -5086,12 +4838,12 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) {
 
 		tmpFile, err := os.CreateTemp(t.TempDir(), "chunk_test_*.csv")
 		if err != nil {
-			t.Fatalf("Failed to create temp file: %v", err)
+			require.NoError(t, err, "Failed to create temp file")
 		}
 		defer tmpFile.Close()
 
 		if _, err := tmpFile.WriteString(content); err != nil {
-			t.Fatalf("Failed to write content: %v", err)
+			require.NoError(t, err, "Failed to write content")
 		}
 
 		ctx := context.Background()
@@ -5107,13 +4859,13 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) {
 
 			validatedBuilder, err := builder.Build(ctx)
 			if err != nil {
-				t.Errorf("Build failed with chunk size %d: %v", chunkSize, err)
+				assert.Fail(t, fmt.Sprintf("Build failed with chunk size %d: %v", chunkSize, err))
 				return
 			}
 
 			db, err := validatedBuilder.Open(ctx)
 			if err != nil {
-				t.Errorf("Open failed with chunk size %d: %v", chunkSize, err)
+				assert.Fail(t, fmt.Sprintf("Open failed with chunk size %d: %v", chunkSize, err))
 				return
 			}
 			defer db.Close()
@@ -5124,21 +4876,21 @@ func TestMemoryLimitsAndLargeFiles(t *testing.T) {
 			rows, err := db.QueryContext(ctx, fmt.Sprintf("SELECT COUNT(*) FROM \"%s\"", tableName))
 			if err != nil {
-				t.Errorf("Count query failed with chunk size %d: %v", chunkSize, err)
+				assert.Fail(t, fmt.Sprintf("Count query failed with chunk size %d: %v", chunkSize, err))
 				return
 			}
 			defer rows.Close()
 
 			if err := rows.Err(); err != nil {
-				t.Errorf("Rows error with chunk size %d: %v", chunkSize, err)
+				assert.Fail(t, fmt.Sprintf("Rows error with chunk size %d: %v", chunkSize, err))
 				return
 			}
 
 			var count int
 			if rows.Next() {
 				if err := rows.Scan(&count); err != nil {
-					t.Errorf("Count scan failed with chunk size %d: %v", chunkSize, err)
+					assert.Fail(t, fmt.Sprintf("Count scan failed with chunk size %d: %v", chunkSize, err))
 				} else if count != numRows {
-					t.Errorf("Expected %d rows with chunk size %d, got %d", numRows, chunkSize, count)
+					assert.Fail(t, fmt.Sprintf("Expected %d rows with chunk size %d, got %d", numRows, chunkSize, count))
 				}
 			}
 		})
diff --git a/go.mod b/go.mod
index de81e7e..3891441 100644
--- a/go.mod
+++ b/go.mod
@@ -5,6 +5,7 @@ go 1.24
 require (
 	github.com/apache/arrow/go/v18 v18.0.0-20241007013041-ab95a4d25142
 	github.com/klauspost/compress v1.18.0
+	github.com/stretchr/testify v1.11.1
 	github.com/ulikunitz/xz v0.5.15
 	github.com/xuri/excelize/v2 v2.9.1
 	modernc.org/sqlite v1.38.2
@@ -14,6 +15,7 @@ require (
 	github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c // indirect
 	github.com/andybalholm/brotli v1.1.0 // indirect
 	github.com/apache/thrift v0.20.0 // indirect
+	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/dustin/go-humanize v1.0.1 // indirect
 	github.com/goccy/go-json v0.10.3 // indirect
 	github.com/golang/snappy v0.0.4 // indirect
@@ -26,6 +28,7 @@ require (
 	github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect
 	github.com/ncruces/go-strftime v0.1.9 // indirect
 	github.com/pierrec/lz4/v4 v4.1.21 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
 	github.com/richardlehane/mscfb v1.0.4 // indirect
 	github.com/richardlehane/msoleps v1.0.4 // indirect
@@ -45,6 +48,7 @@ require (
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20240227224415-6ceb2ff114de // indirect
 	google.golang.org/grpc v1.63.2 // indirect
 	google.golang.org/protobuf v1.34.2 // indirect
+	gopkg.in/yaml.v3 v3.0.1 // indirect
 	modernc.org/libc v1.66.3 // indirect
 	modernc.org/mathutil v1.7.1 // indirect
 	modernc.org/memory v1.11.0 // indirect
diff --git a/go.sum b/go.sum
index 369bf9b..e53e80a 100644
--- a/go.sum
+++ b/go.sum
@@ -49,8 +49,8 @@ github.com/richardlehane/msoleps v1.0.4 h1:WuESlvhX3gH2IHcd8UqyCuFY5yiq/GR/yqaSM
 github.com/richardlehane/msoleps v1.0.4/go.mod h1:BWev5JBpU9Ko2WAgmZEuiz4/u3ZYTKbjLycmwiWUfWg=
 github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
 github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
-github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
-github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
+github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
 github.com/tiendc/go-deepcopy v1.6.0 h1:0UtfV/imoCwlLxVsyfUd4hNHnB3drXsfle+wzSCA5Wo=
 github.com/tiendc/go-deepcopy v1.6.0/go.mod h1:toXoeQoUqXOOS/X4sKuiAoSk6elIdqc0pN7MTgOOo2I=
 github.com/ulikunitz/xz v0.5.15 h1:9DNdB5s+SgV3bQ2ApL10xRc35ck0DuIX/isZvIk+ubY=
@@ -95,6 +95,8 @@ google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM=
 google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA=
 google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
 google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 modernc.org/cc/v4 v4.26.2 h1:991HMkLjJzYBIfha6ECZdjrIYz2/1ayr+FL8GN+CNzM=
diff --git a/stream_test.go b/stream_test.go
index d0ccbce..bb9bc32 100644
--- a/stream_test.go
+++ b/stream_test.go
@@ -7,6 +7,8 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	"github.com/xuri/excelize/v2"
 )
 
@@ -20,23 +22,15 @@ func TestStreamingParser_ParseFromReader_CSV(t *testing.T) {
 
 		parser := newStreamingParser(FileTypeCSV, "users", 1024)
 		table, err := parser.parseFromReader(reader)
-		if err != nil {
-			t.Fatalf("ParseFromReader() failed: %v", err)
-		}
+		require.NoError(t, err, "ParseFromReader() failed")
 
-		if table.getName() != "users" {
-			t.Errorf("Table name = %s, want users", table.getName())
-		}
+		assert.Equal(t, "users", table.getName(), "Table name mismatch")
 
 		header := table.getHeader()
-		if len(header) != 3 {
-			t.Errorf("Header length = %d, want 3", len(header))
-		}
+		assert.Len(t, header, 3, "Header length mismatch")
 
 		records := table.getRecords()
-		if len(records) != 2 {
-			t.Errorf("Records length = %d, want 2", len(records))
-		}
+		assert.Len(t, records, 2, "Records length mismatch")
 
 		if records[0][0] != "Alice" {
 			t.Errorf("First record first field = %s, want Alice", records[0][0])
@@ -65,18 +59,12 @@ func TestStreamingParser_ParseFromReader_TSV(t *testing.T) {
 		parser := newStreamingParser(FileTypeTSV, "users", 1024)
 		table, err := parser.parseFromReader(reader)
-		if err != nil {
-			t.Fatalf("ParseFromReader() failed: %v", err)
-		}
+		require.NoError(t, err, "ParseFromReader() failed")
 
-		if table.getName() != "users" {
-			t.Errorf("Table name = %s, want users", table.getName())
-		}
+		assert.Equal(t, "users", table.getName(), "Table name mismatch")
 
 		records := table.getRecords()
-		if len(records) != 2 {
-			t.Errorf("Records length = %d, want 2", len(records))
-		}
+		assert.Len(t, records, 2, "Records length mismatch")
 
 	})
 }
 
@@ -90,18 +78,12 @@ func TestStreamingParser_ParseFromReader_LTSV(t *testing.T) {
 		parser := newStreamingParser(FileTypeLTSV, "users", 1024)
 		table, err := parser.parseFromReader(reader)
-		if err != nil {
-			t.Fatalf("ParseFromReader() failed: %v", err)
-		}
+		require.NoError(t, err, "ParseFromReader() failed")
 
-		if table.getName() != "users" {
-			t.Errorf("Table name = %s, want users", table.getName())
-		}
+		assert.Equal(t, "users", table.getName(), "Table name mismatch")
 
 		records := table.getRecords()
-		if len(records) != 2 {
-			t.Errorf("Records length = %d, want 2", len(records))
-		}
+		assert.Len(t, records, 2, "Records length mismatch")
 
 	})
 }
 
@@ -122,14 +104,10 @@ func TestStreamingParser_ParseFromReader_Compressed(t *testing.T) {
 		// but the test demonstrates the compression handling logic
 		parser := newStreamingParser(FileTypeCSV, "users", 1024) // Use uncompressed for now
 		table, err := parser.parseFromReader(reader)
-		if err != nil {
-			t.Fatalf("ParseFromReader() failed: %v", err)
-		}
+		require.NoError(t, err, "ParseFromReader() failed")
 
 		records := table.getRecords()
-		if len(records) != 2 {
-			t.Errorf("Records length = %d, want 2", len(records))
-		}
+		assert.Len(t, records, 2, "Records length mismatch")
 
 		_ = buf // Prevent unused variable warning
 	})
@@ -515,9 +493,7 @@ func TestStreamingParser_ParseFromReader_XLSX(t *testing.T) {
 		// Parse using streaming parser - should process first sheet only
 		parser := newStreamingParser(FileTypeXLSX, "test_workbook", 1024)
 		table, err := parser.parseFromReader(&buf)
-		if err != nil {
-			t.Fatalf("ParseFromReader() failed: %v", err)
-		}
+		require.NoError(t, err, "ParseFromReader() failed")
 
 		if table.getName() != "test_workbook" {
 			t.Errorf("Table name = %s, want test_workbook", table.getName())
@@ -536,9 +512,7 @@ func TestStreamingParser_ParseFromReader_XLSX(t *testing.T) {
 
 		// Check records (should be from first sheet only)
 		records := table.getRecords()
-		if len(records) != 2 {
-			t.Errorf("Records length = %d, want 2", len(records))
-		}
+		assert.Len(t, records, 2, "Records length mismatch")
 
 		// First record should contain data from row 2 of first sheet
 		if len(records) > 0 && len(records[0]) >= 1 {
diff --git a/table_test.go b/table_test.go
index 12fbb8f..a1458ba 100644
--- a/table_test.go
+++ b/table_test.go
@@ -3,6 +3,8 @@ package filesql
 import (
 	"path/filepath"
 	"testing"
+
+	"github.com/stretchr/testify/assert"
 )
 
 func TestNewTable(t *testing.T) {
@@ -19,21 +21,13 @@ func TestNewTable(t *testing.T) {
 		table := newTable("test", header, records)
 
-		if table.getName() != "test" {
-			t.Errorf("expected name 'test', got %s", table.getName())
-		}
+		assert.Equal(t, "test", table.getName(), "Table name mismatch")
 
-		if !table.getHeader().equal(header) {
-			t.Errorf("expected header %v, got %v", header, table.getHeader())
-		}
+		assert.True(t, table.getHeader().equal(header), "Header mismatch")
 
-		if len(table.getRecords()) != 2 {
-			t.Errorf("expected 2 records, got %d", len(table.getRecords()))
-		}
+		assert.Len(t, table.getRecords(), 2, "Record count mismatch")
 
-		if !table.getRecords()[0].equal(records[0]) {
-			t.Errorf("expected first record %v, got %v", records[0], table.getRecords()[0])
-		}
+		assert.True(t, table.getRecords()[0].equal(records[0]), "First record mismatch")
 
 	})
 }
 
@@ -53,17 +47,13 @@ func TestTable_Equal(t *testing.T) {
 	t.Run("Equal tables", func(t *testing.T) {
 		t.Parallel()
 
-		if !table1.equal(table2) {
-			t.Error("expected tables to be equal")
-		}
+		assert.True(t, table1.equal(table2), "Tables should be equal")
 	})
 
 	t.Run("Different names", func(t *testing.T) {
 		t.Parallel()
 
-		if table1.equal(table3) {
-			t.Error("expected tables with different names to be not equal")
-		}
+		assert.False(t, table1.equal(table3), "Tables with different names should not be equal")
 	})
 
 	t.Run("Different header", func(t *testing.T) {
@@ -71,9 +61,7 @@ func TestTable_Equal(t *testing.T) {
 
 		differentHeader := newHeader([]string{"col1", "col3"})
 		table4 := newTable("test", differentHeader, records)
-		if table1.equal(table4) {
-			t.Error("expected tables with different headers to be not equal")
-		}
+		assert.False(t, table1.equal(table4), "Tables with different headers should not be equal")
 	})
 
 	t.Run("Different record count", func(t *testing.T) {
@@ -83,9 +71,7 @@ func TestTable_Equal(t *testing.T) {
 			newRecord([]string{"val1", "val2"}),
 		}
 		table5 := newTable("test", header, differentRecords)
-		if table1.equal(table5) {
-			t.Error("expected tables with different record count to be not equal")
-		}
+		assert.False(t, table1.equal(table5), "Tables with different record count should not be equal")
 	})
 
 	t.Run("Different record values", func(t *testing.T) {
@@ -96,9 +82,7 @@ func TestTable_Equal(t *testing.T) {
 			newRecord([]string{"val3", "different"}),
 		}
 		table6 := newTable("test", header, differentValueRecords)
-		if table1.equal(table6) {
-			t.Error("expected tables with different record values to be not equal")
-		}
+		assert.False(t, table1.equal(table6), "Tables with different record values should not be equal")
 	})
 }
 
@@ -157,9 +141,7 @@ func TestTableFromFilePath_Additional(t *testing.T) {
 			t.Parallel()
 
 			result := tableFromFilePath(tt.filePath)
-			if result != tt.expected {
-				t.Errorf("expected %s, got %s", tt.expected, result)
-			}
+			assert.Equal(t, tt.expected, result, "tableFromFilePath failed for %s", tt.filePath)
 		})
 	}
 }
diff --git a/types_test.go b/types_test.go
index 42bfdbc..ec08a6f 100644
--- a/types_test.go
+++ b/types_test.go
@@ -2,6 +2,8 @@ package filesql
 
 import (
 	"testing"
+
+	"github.com/stretchr/testify/assert"
 )
 
 func TestNewHeader(t *testing.T) {
@@ -13,14 +15,10 @@ func TestNewHeader(t *testing.T) {
 		headerSlice := []string{"col1", "col2", "col3"}
 		header := newHeader(headerSlice)
 
-		if len(header) != 3 {
-			t.Errorf("expected length 3, got %d", len(header))
-		}
+		assert.Len(t, header, 3, "Header length mismatch")
 
 		for i, expected := range headerSlice {
-			if header[i] != expected {
-				t.Errorf("expected %s at index %d, got %s", expected, i, header[i])
-			}
+			assert.Equal(t, expected, header[i], "Header element mismatch at index %d", i)
 		}
 	})
 }
@@ -71,9 +69,7 @@ func TestHeader_Equal(t *testing.T) {
 			t.Parallel()
 
 			result := tt.header1.equal(tt.header2)
-			if result != tt.expected {
-				t.Errorf("expected %v, got %v", tt.expected, result)
-			}
+			assert.Equal(t, tt.expected, result, "Header equality check failed")
 		})
 	}
 }
@@ -87,14 +83,10 @@ func TestNewRecord(t *testing.T) {
 		recordSlice := []string{"val1", "val2", "val3"}
 		record := newRecord(recordSlice)
 
-		if len(record) != 3 {
-			t.Errorf("expected length 3, got %d", len(record))
-		}
+		assert.Len(t, record, 3, "Record length mismatch")
 
 		for i, expected := range recordSlice {
-			if record[i] != expected {
-				t.Errorf("expected %s at index %d, got %s", expected, i, record[i])
-			}
+			assert.Equal(t, expected, record[i], "Record element mismatch at index %d", i)
 		}
 	})
 }
@@ -145,9 +137,7 @@ func TestRecord_Equal(t *testing.T) {
 			t.Parallel()
 
 			result := tt.record1.equal(tt.record2)
-			if result != tt.expected {
-				t.Errorf("expected %v, got %v", tt.expected, result)
-			}
+			assert.Equal(t, tt.expected, result, "Record equality check failed")
 		})
 	}
 }
@@ -168,9 +158,7 @@ func TestColumnType_String(t *testing.T) {
 	for _, tt := range tests {
 		t.Run(tt.expected, func(t *testing.T) {
 			result := tt.columnType.string()
-			if result != tt.expected {
-				t.Errorf("columnType.string() = %s, want %s", result, tt.expected)
-			}
+			assert.Equal(t, tt.expected, result, "columnType.string() returned unexpected value")
 		})
 	}
 }