diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e4120d4..6342d6a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -70,22 +70,24 @@ jobs:
set -e
mkdir -p build
- # Compute short commit hash
+ # Compute version info
GIT_COMMIT=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")
VERSION="dev-${GIT_COMMIT}"
+ BUILD_TIME=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
+ MODULE="github.com/fbz-tec/pgxport"
+
+ # ldflags for internal/version package
+ LDFLAGS="-X ${MODULE}/internal/version.AppVersion=${VERSION} \
+ -X ${MODULE}/internal/version.GitCommit=${GIT_COMMIT} \
+ -X ${MODULE}/internal/version.BuildTime=${BUILD_TIME}"
+
+ echo "Building with version: ${VERSION}"
# Build Linux binary
- CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags="-X main.Version=${VERSION} \
- -X main.GitCommit=${GIT_COMMIT} \
- -X main.BuildTime=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" \
- -o build/pgxport
-
+ CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags="${LDFLAGS}" -o build/pgxport
# Build Windows binary
- GOOS=windows GOARCH=amd64 go build -ldflags="-X main.Version=${VERSION} \
- -X main.GitCommit=${GIT_COMMIT} \
- -X main.BuildTime=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" \
- -o build/pgxport.exe
+ GOOS=windows GOARCH=amd64 go build -ldflags="${LDFLAGS}" -o build/pgxport.exe
- name: 📤 Upload Linux artifact
if: github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')
@@ -104,7 +106,7 @@ jobs:
retention-days: 3
- name: 🧹 Cleanup build directory
- if: always() # Ensures cleanup runs even if previous step fails
+ if: always()
run: |
echo "๐งน Cleaning up build artifacts..."
rm -rf build
@@ -133,7 +135,12 @@ jobs:
VERSION="${{ github.ref_name }}"
BUILD_TIME=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
GIT_COMMIT=$(git rev-parse --short HEAD)
- LDFLAGS="-s -w -X main.Version=${VERSION} -X main.BuildTime=${BUILD_TIME} -X main.GitCommit=${GIT_COMMIT}"
+ MODULE="github.com/fbz-tec/pgxport"
+
+ LDFLAGS="-s -w \
+ -X ${MODULE}/internal/version.AppVersion=${VERSION} \
+ -X ${MODULE}/internal/version.BuildTime=${BUILD_TIME} \
+ -X ${MODULE}/internal/version.GitCommit=${GIT_COMMIT}"
echo "Building binaries for version ${VERSION}..."
@@ -202,9 +209,10 @@ jobs:
- name: 🧮 Generate checksums
run: |
cd dist
- for file in *; do
- sha256sum "$file" > "$file.sha256"
- done
+ sha256sum *.tar.gz *.zip > checksums.txt
+ echo ""
+ echo "โ
Checksums generated:"
+ cat checksums.txt
- name: 🚀 Create GitHub Release
uses: softprops/action-gh-release@v2
@@ -240,4 +248,4 @@ jobs:
dist/*.zip
dist/checksums.txt
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 48b3b2a..a02b8ec 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -21,8 +21,6 @@ This is the first pre-release of pgxport.
- Custom date/time formats and timezone support
- SQL export with schema-qualified table names
- Batch INSERT statements for SQL exports (`--insert-batch`) for improved import performance
-- Comprehensive test coverage
-- CI/CD pipeline with automated builds
#### Installation
@@ -30,7 +28,8 @@ This is the first pre-release of pgxport.
go install github.com/fbz-tec/pgxport@v1.0.0
```
-Or download pre-built binaries from [GitHub Releases](https://github.com/fbz-tec/pgxport/releases/tag/untagged-3731b225ccbb85fa3000).
+Or download pre-built binaries from [GitHub Releases](https://github.com/fbz-tec/pgxport/releases/tag/v1.0.0-rc1).
+
---
diff --git a/README.md b/README.md
index f6070b7..2aacd41 100644
--- a/README.md
+++ b/README.md
@@ -153,8 +153,8 @@ pgxport [command] [flags]
| `--delimiter` | `-d` | CSV delimiter character | `,` | No |
| `--no-header` | - | Skip CSV header row in output | `false` | No |
| `--with-copy` | - | Use PostgreSQL native COPY for CSV export (faster for large datasets) | `false` | No |
-| `--xml-root-tag` | - | Sets the root XML element name | `results` | No |
-| `--xml-row-tag` | - | Sets the row XML element name | `row` | No |
+| `--xml-root-tag` | - | Sets the root element name for XML exports | `results` | No |
+| `--xml-row-tag` | - | Sets the row element name for XML exports | `row` | No |
| `--fail-on-empty` | - | Exit with error if query returns 0 rows | `false` | No |
| `--table` | `-t` | Table name for SQL INSERT exports (supports schema.table) | - | For SQL format |
| `--insert-batch` | - | Number of rows per INSERT statement for SQL exports | `1` | No |
@@ -626,69 +626,128 @@ INSERT INTO "users" ("id", "name", "email", "created_at") VALUES
```
pgxport/
-├── exporters/ # Modular export package
-│   ├── exporter.go # Interface and factory
-│   ├── compression.go # Compression writers (gzip,zip)
-│   ├── common.go # Shared utilities
-│   ├── csv_exporter.go # CSV export implementation
-│   ├── json_exporter.go# JSON export implementation
-│   ├── xml_exporter.go # XML export implementation
-│   └── sql_exporter.go # SQL export implementation
-├── logger/ # Logging package
-│   └── logger.go # Logger interface and implementation
-├── main.go # CLI entry point and orchestration
-├── config.go # Configuration management with validation
-├── store.go # Database operations (connection, queries)
-├── version.go # Version information
-├── go.mod # Go module definition
-├── go.sum # Go module checksums
-├── LICENSE # MIT license file
-└── README.md # Documentation
+├── cmd/ # CLI entry points
+│   ├── root.go # Main command + flags
+│   ├── root_test.go
+│   └── version.go # Version subcommand
+│
+├── core/ # Business logic
+│   ├── exporters/ # Export formats (pluggable)
+│   │   ├── registry.go # Format registration system
+│   │   ├── formatting.go # Shared formatting utilities
+│   │   ├── compression.go # Compression support (gzip/zip)
+│   │   ├── options.go # Export options struct
+│   │   ├── testing_helpers.go
+│   │   ├── csv_exporter.go # CSV export implementation
+│   │   ├── json_exporter.go # JSON export implementation
+│   │   ├── xml_exporter.go # XML export implementation
+│   │   └── sql_exporter.go # SQL export implementation
+│   │
+│   ├── db/ # Database operations
+│   │   ├── connection.go # PostgreSQL connection management
+│   │   └── connection_test.go
+│   │
+│   ├── config/ # Configuration management
+│   │   ├── config.go # Config loading with validation
+│   │   └── config_test.go
+│   │
+│   └── validation/ # Input validation
+│       ├── query_safety.go # Query and parameter validation
+│       └── query_safety_test.go
+│
+├── internal/ # Private packages
+│   ├── logger/ # Logging utilities
+│   │   └── logger.go # Structured logging with verbose mode
+│   └── version/ # Build information
+│       └── version.go # Version, BuildTime, GitCommit
+│
+├── main.go # Application entry point
+├── go.mod # Go module definition
+├── go.sum # Go module checksums
+├── Taskfile.yml # Build automation
+├── LICENSE # MIT license
+└── README.md # This file
```
## 🧩 Architecture
-The project follows a clean, modular architecture with separated concerns:
+The project follows a clean, layered architecture with clear separation of concerns:
```mermaid
flowchart TD
- A[CLI - Cobra] --> B[main.go
Orchestration]
- B --> C[config.go
Configuration]
- B --> D[store.go
DB Operations]
- B --> E[exporters/
Export Logic]
+ A[CLI - Cobra] --> B[cmd/root.go
Command Handler]
+ B --> C[core/config
Configuration]
+ B --> D[core/db
DB Connection]
+ B --> E[core/exporters
Export Logic]
- E --> E1[CSV Exporter]
- E --> E2[JSON Exporter]
- E --> E3[XML Exporter]
- E --> E4[SQL Exporter]
+ E --> E0[registry.go
Format Registry]
+ E0 --> E1[CSV Exporter]
+ E0 --> E2[JSON Exporter]
+ E0 --> E3[XML Exporter]
+ E0 --> E4[SQL Exporter]
- E --> F[compression.go
gzip/zip]
- E --> G[common.go
Shared Utils]
+ E --> F[formatting.go
Shared Utils]
+ E --> G[compression.go
gzip/zip]
- B --> H[logger/
Logging]
+ B --> H[internal/logger
Logging]
+ B --> I[internal/version
Build Info]
+
+ D --> J[core/validation
Query Safety]
style B fill:#e1f5ff
style E fill:#ffe1f5
style D fill:#f5ffe1
+ style C fill:#fff4e1
```
+**Architecture Principles:**
+
+- **Layered Structure**: Clear separation between CLI, business logic, and utilities
+- **Pluggable Exporters**: Registry pattern allows easy addition of new formats
+- **SOLID Principles**: Each package has a single, well-defined responsibility
+- **Testability**: Modular design facilitates comprehensive testing
+
**Component Descriptions:**
-- **`exporters/`**: Modular export package with Strategy pattern
- - **`exporter.go`**: Defines the `Exporter` interface and factory
- - **`compression.go`**: Handles output compression (gzip, zip)
- - **`common.go`**: Shared formatting utilities for all exporters
- - **`csv_exporter.go`**: CSV export implementation
- - **`json_exporter.go`**: JSON export implementation
- - **`xml_exporter.go`**: XML export implementation
- - **`sql_exporter.go`**: SQL INSERT export implementation
-- **`logger/`**: Logging package with structured output
- - **`logger.go`**: Logger interface and singleton implementation with debug/verbose support
-- **`store.go`**: Handles all database operations (connect, query, return results)
-- **`main.go`**: Orchestrates the flow between store and exporters
-- **`config.go`**: Manages configuration with validation, defaults, and `.env` file loading
-
-Each exporter is isolated in its own file, making the codebase easy to maintain, test, and extend with new formats.
+### CLI Layer (`cmd/`)
+- **`root.go`**: Main command orchestration with Cobra framework
+- **`version.go`**: Version information subcommand
+
+### Core Business Logic (`core/`)
+
+**`exporters/`** - Export format implementations
+- **`registry.go`**: Dynamic format registration using factory pattern
+- **`formatting.go`**: Shared formatting utilities (dates, escaping, etc.)
+- **`compression.go`**: Output compression (gzip, zip)
+- **`options.go`**: Export configuration options
+- **`csv_exporter.go`**: CSV format with COPY mode support
+- **`json_exporter.go`**: JSON array format
+- **`xml_exporter.go`**: XML format with customizable tags
+- **`sql_exporter.go`**: SQL INSERT statements with batch support
+
+**`db/`** - PostgreSQL operations
+- **`connection.go`**: Database connection management and query execution
+
+**`config/`** - Application configuration
+- **`config.go`**: Configuration loading with `.env` support and validation
+
+**`validation/`** - Input validation
+- **`query_safety.go`**: Query and parameter validation
+
+### Internal Utilities (`internal/`)
+
+**`logger/`** - Structured logging
+- **`logger.go`**: Logger implementation with verbose mode support
+
+**`version/`** - Build metadata
+- **`version.go`**: Version information set via ldflags during build
+
+### Key Design Patterns
+
+1. **Registry Pattern**: Exporters self-register at init time, enabling dynamic format support
+2. **Factory Pattern**: Each export creates a fresh instance, avoiding state sharing
+3. **Strategy Pattern**: Exporters implement a common interface for interchangeable behavior
+4. **Dependency Injection**: Components receive dependencies rather than creating them
## 🛠️ Development
@@ -713,9 +772,13 @@ The project uses the following main dependencies:
```bash
go mod download
-go mod tidy
```
+The project structure follows clean architecture principles:
+- `cmd/` - CLI commands and flags
+- `core/` - Business logic (exporters, db, config, validation)
+- `internal/` - Private utilities (logger, version)
+
**3. Configure your database**
Create a `.env` file:
@@ -744,7 +807,17 @@ go build -o pgxport
go build -o pgxport
# Build with version information
-go build -ldflags="-X main.Version=1.0.0" -o pgxport
+VERSION=$(git describe --tags --always --dirty 2>/dev/null || echo "dev")
+BUILD_TIME=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
+GIT_COMMIT=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")
+
+go build -ldflags="-X github.com/fbz-tec/pgxport/internal/version.AppVersion=${VERSION} \
+ -X github.com/fbz-tec/pgxport/internal/version.BuildTime=${BUILD_TIME} \
+ -X github.com/fbz-tec/pgxport/internal/version.GitCommit=${GIT_COMMIT}" \
+ -o pgxport
+
+# Using Taskfile (recommended)
+task build
# Cross-platform builds
GOOS=linux GOARCH=amd64 go build -o pgxport-linux
@@ -844,9 +917,13 @@ Contributions are welcome! Please feel free to submit a Pull Request.
- Follow Go conventions and use `gofmt`
- Add comments for exported functions
-- Keep functions small and focused
-- Separate concerns (database vs export logic)
-- Write tests for new features
+- Keep functions small and focused (single responsibility principle)
+- Follow the layered architecture:
+ - `cmd/` - CLI logic only
+ - `core/` - Business logic
+ - `internal/` - Reusable utilities
+- New export formats should implement the `Exporter` interface and register via `registry.go`
+- Write tests for new features (`*_test.go` files alongside source)
## 📄 License
diff --git a/Taskfile.yml b/Taskfile.yml
index 591d4d1..225acdb 100644
--- a/Taskfile.yml
+++ b/Taskfile.yml
@@ -8,7 +8,7 @@ vars:
sh: date -u '+%Y-%m-%d_%H:%M:%S'
GIT_COMMIT:
sh: git rev-parse --short HEAD 2>/dev/null || echo "unknown"
- LDFLAGS: -ldflags "-X main.Version={{.VERSION}} -X main.BuildTime={{.BUILD_TIME}} -X main.GitCommit={{.GIT_COMMIT}}"
+ LDFLAGS: -ldflags "-X github.com/fbz-tec/pgxport/internal/version.AppVersion={{.VERSION}} -X github.com/fbz-tec/pgxport/internal/version.BuildTime={{.BUILD_TIME}} -X github.com/fbz-tec/pgxport/internal/version.GitCommit={{.GIT_COMMIT}}"
tasks:
default:
diff --git a/cmd/root.go b/cmd/root.go
new file mode 100644
index 0000000..6c122b2
--- /dev/null
+++ b/cmd/root.go
@@ -0,0 +1,337 @@
+package cmd
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/fbz-tec/pgxport/core/config"
+ "github.com/fbz-tec/pgxport/core/db"
+ "github.com/fbz-tec/pgxport/core/exporters"
+ "github.com/fbz-tec/pgxport/core/validation"
+ "github.com/fbz-tec/pgxport/internal/logger"
+ "github.com/fbz-tec/pgxport/internal/version"
+ "github.com/jackc/pgx/v5"
+ "github.com/spf13/cobra"
+)
+
+var (
+ sqlQuery string
+ sqlFile string
+ outputPath string
+ format string
+ delimiter string
+ connString string
+ tableName string
+ compression string
+ timeFormat string
+ timeZone string
+ xmlRootElement string
+ xmlRowElement string
+ withCopy bool
+ failOnEmpty bool
+ noHeader bool
+ verbose bool
+ rowPerStatement int
+)
+
+var rootCmd = &cobra.Command{
+ Use: "pgxport",
+ Short: "Export PostgreSQL query results to CSV, JSON, XML, or SQL formats",
+ Long: `A powerful CLI tool to export PostgreSQL query results.
+It supports direct SQL queries or SQL files, with customizable output options.
+
+Supported output formats:
+ • CSV — standard text export with customizable delimiter
+ • JSON — structured export for API or data processing
+ • XML — hierarchical export for interoperability
+ • SQL — generate INSERT statements
+ Example: ` # Export with inline query
+ pgxport -s "SELECT * FROM users" -o users.csv
+
+ # Export from SQL file with custom delimiter
+ pgxport -F query.sql -o output.csv -d ";"
+
+ # Use the high-performance COPY mode for large CSV exports
+ pgxport -s "SELECT * FROM events" -o events.csv -f csv --with-copy
+
+ # Export to JSON
+ pgxport -s "SELECT * FROM products" -o products.json -f json
+
+ # Export to XML
+ pgxport -s "SELECT * FROM orders" -o orders.xml -f xml
+
+ # Export to SQL insert statements
+ pgxport -s "SELECT * FROM orders" -o orders.sql -f sql -t orders_table`,
+ RunE: runExport,
+ SilenceUsage: true,
+ SilenceErrors: true,
+}
+
+func init() {
+ rootCmd.Flags().StringVarP(&sqlQuery, "sql", "s", "", "SQL query to execute")
+ rootCmd.Flags().StringVarP(&sqlFile, "sqlfile", "F", "", "Path to SQL file containing the query")
+ rootCmd.Flags().StringVarP(&outputPath, "output", "o", "", "Output file path (required)")
+ rootCmd.Flags().StringVarP(&format, "format", "f", "csv", "Output format (csv, json, xml, sql)")
+ rootCmd.Flags().StringVarP(&timeFormat, "time-format", "T", "yyyy-MM-dd HH:mm:ss", "Custom time format (e.g. yyyy-MM-ddTHH:mm:ss.SSS)")
+ rootCmd.Flags().StringVarP(&timeZone, "time-zone", "Z", "", "Time zone for date/time formatting (e.g. UTC, Europe/Paris). Defaults to local time zone.")
+ rootCmd.Flags().StringVarP(&delimiter, "delimiter", "d", ",", "CSV delimiter character")
+ rootCmd.Flags().StringVarP(&connString, "dsn", "", "", "Database connection string (postgres://user:pass@host:port/dbname)")
+ rootCmd.Flags().StringVarP(&tableName, "table", "t", "", "Table name for SQL insert exports")
+ rootCmd.Flags().StringVarP(&compression, "compression", "z", "none", "Compression to apply to the output file (none, gzip, zip)")
+ rootCmd.Flags().BoolVar(&withCopy, "with-copy", false, "Use PostgreSQL native COPY for CSV export (faster for large datasets)")
+ rootCmd.Flags().BoolVar(&failOnEmpty, "fail-on-empty", false, "Exit with error if query returns 0 rows")
+ rootCmd.Flags().BoolVar(&noHeader, "no-header", false, "Skip header row in CSV output")
+ rootCmd.Flags().StringVarP(&xmlRootElement, "xml-root-tag", "", "results", "Sets the root element name for XML exports")
+ rootCmd.Flags().StringVarP(&xmlRowElement, "xml-row-tag", "", "row", "Sets the row element name for XML exports")
+ rootCmd.Flags().IntVarP(&rowPerStatement, "insert-batch", "", 1, "Number of rows per INSERT statement in SQL export")
+ rootCmd.Flags().BoolVarP(&verbose, "verbose", "v", false, "Enable verbose output with detailed information")
+
+ rootCmd.MarkFlagRequired("output")
+
+ rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
+ logger.SetVerbose(verbose)
+ if verbose {
+ logger.Debug("Verbose mode enabled")
+ }
+ }
+
+ rootCmd.AddCommand(versionCmd)
+
+}
+
+func Execute() {
+ if err := rootCmd.Execute(); err != nil {
+ fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+ os.Exit(1)
+ }
+}
+
+func runExport(cmd *cobra.Command, args []string) error {
+
+ logger.Debug("Initializing pgxport execution environment")
+ logger.Debug("Version: %s, Build: %s, Commit: %s", version.AppVersion, version.BuildTime, version.GitCommit)
+
+ logger.Debug("Validating export parameters")
+
+ if err := validateExportParams(); err != nil {
+ return err
+ }
+
+ logger.Debug("Export parameters validated successfully")
+
+ var dbUrl string
+ if connString != "" {
+ logger.Debug("Using connection string from --dsn flag")
+ dbUrl = connString
+ } else {
+ logger.Debug("Loading configuration from environment")
+ cfg := config.LoadConfig()
+ if err := cfg.Validate(); err != nil {
+ return fmt.Errorf("configuration error: %w", err)
+ }
+ dbUrl = cfg.GetConnectionString()
+ logger.Debug("Configuration loaded: host=%s port=%s database=%s user=%s",
+ cfg.DBHost, cfg.DBPort, cfg.DBName, cfg.DBUser)
+ }
+
+ var query string
+ var err error
+ var rowCount int
+ var rows pgx.Rows
+ var exporter exporters.Exporter
+
+ if sqlFile != "" {
+ logger.Debug("Reading SQL from file: %s", sqlFile)
+ query, err = readSQLFromFile(sqlFile)
+ if err != nil {
+ return fmt.Errorf("error reading SQL file: %w", err)
+ }
+ logger.Debug("SQL query loaded from file (%d characters)", len(query))
+ } else {
+ query = sqlQuery
+ logger.Debug("Using inline SQL query (%d characters)", len(query))
+ }
+
+ if err := validation.ValidateQuery(query); err != nil {
+ return err
+ }
+
+ format = strings.ToLower(strings.TrimSpace(format))
+
+ var delimRune rune = ','
+ if format == "csv" {
+ delimRune, err = parseDelimiter(delimiter)
+ if err != nil {
+ return fmt.Errorf("invalid delimiter: %w", err)
+ }
+ logger.Debug("CSV delimiter: %q", string(delimRune))
+ }
+
+ store := db.NewStore()
+
+ if err := store.Open(dbUrl); err != nil {
+ return fmt.Errorf("failed to connect to database: %w", err)
+ }
+
+ defer store.Close()
+
+ options := exporters.ExportOptions{
+ Format: format,
+ Delimiter: delimRune,
+ TableName: tableName,
+ Compression: compression,
+ TimeFormat: timeFormat,
+ TimeZone: timeZone,
+ NoHeader: noHeader,
+ XmlRootElement: xmlRootElement,
+ XmlRowElement: xmlRowElement,
+ RowPerStatement: rowPerStatement,
+ }
+
+ exporter, err = exporters.GetExporter(format)
+ if err != nil {
+ return err
+ }
+
+ if format == "csv" && withCopy {
+ logger.Debug("Using PostgreSQL COPY mode for fast CSV export")
+
+ if copyExp, ok := exporter.(exporters.CopyCapable); ok {
+ rowCount, err = copyExp.ExportCopy(store.GetConnection(), query, outputPath, options)
+ } else {
+ return fmt.Errorf("format %s does not support COPY mode", format)
+ }
+ } else {
+ logger.Debug("Using standard export mode for format: %s", format)
+ rows, err = store.ExecuteQuery(context.Background(), query)
+ if err != nil {
+ return err
+ }
+ defer rows.Close()
+
+ rowCount, err = exporter.Export(rows, outputPath, options)
+ }
+
+ if err != nil {
+ return fmt.Errorf("export failed: %w", err)
+ }
+
+ return handleExportResult(rowCount, outputPath)
+}
+
+func validateExportParams() error {
+ // Validate SQL query source
+ if sqlQuery == "" && sqlFile == "" {
+ return fmt.Errorf("error: Either --sql or --sqlfile must be provided")
+ }
+
+ if sqlQuery != "" && sqlFile != "" {
+ return fmt.Errorf("error: Cannot use both --sql and --sqlfile at the same time")
+ }
+
+ // Normalize and validate format
+ format = strings.ToLower(strings.TrimSpace(format))
+ validFormats := exporters.ListExporters()
+
+ isValid := false
+ for _, f := range validFormats {
+ if format == f {
+ isValid = true
+ break
+ }
+ }
+
+ if !isValid {
+ return fmt.Errorf("error: Invalid format '%s'. Valid formats are: %s",
+ format, strings.Join(validFormats, ", "))
+ }
+
+ compression = strings.ToLower(strings.TrimSpace(compression))
+ if compression == "" {
+ compression = "none"
+ }
+ validCompressions := []string{"none", "gzip", "zip"}
+ compressionValid := false
+ for _, c := range validCompressions {
+ if compression == c {
+ compressionValid = true
+ break
+ }
+ }
+
+ if !compressionValid {
+ return fmt.Errorf("error: Invalid compression '%s'. Valid options are: %s",
+ compression, strings.Join(validCompressions, ", "))
+ }
+
+ // Validate table name for SQL format
+ if format == "sql" && strings.TrimSpace(tableName) == "" {
+ return fmt.Errorf("error: --table (-t) is required when using SQL format")
+ }
+
+ if format == "sql" && rowPerStatement < 1 {
+ return fmt.Errorf("error: --insert-batch must be at least 1")
+ }
+
+ // Validate time format if provided
+ if timeFormat != "" {
+ if err := exporters.ValidateTimeFormat(timeFormat); err != nil {
+ return fmt.Errorf("error: Invalid time format '%s'. Use format like 'yyyy-MM-dd HH:mm:ss'", timeFormat)
+ }
+ }
+
+ // Validate timezone if provided
+ if timeZone != "" {
+ if err := exporters.ValidateTimeZone(timeZone); err != nil {
+ return fmt.Errorf("error: Invalid timezone '%s'. Use format like 'UTC' or 'Europe/Paris'", timeZone)
+ }
+ }
+
+ return nil
+}
+
+func readSQLFromFile(filepath string) (string, error) {
+ content, err := os.ReadFile(filepath)
+ if err != nil {
+ return "", fmt.Errorf("unable to read file: %w", err)
+ }
+ return string(content), nil
+}
+
+func parseDelimiter(delim string) (rune, error) {
+ delim = strings.TrimSpace(delim)
+
+ if delim == "" {
+ return 0, fmt.Errorf("delimiter cannot be empty")
+ }
+
+ if delim == `\t` {
+ return '\t', nil
+ }
+
+ runes := []rune(delim)
+
+ if len(runes) != 1 {
+ return 0, fmt.Errorf("delimiter must be a single character (use \\t for tab)")
+ }
+
+ return runes[0], nil
+}
+
+func handleExportResult(rowCount int, outputPath string) error {
+ if rowCount == 0 {
+
+ if failOnEmpty {
+ return fmt.Errorf("export failed: query returned 0 rows")
+ }
+
+ logger.Warn("Query returned 0 rows. File created at %s but contains no data rows", outputPath)
+
+ } else {
+ logger.Success("Export completed: %d rows -> %s", rowCount, outputPath)
+ }
+
+ return nil
+}
diff --git a/main_test.go b/cmd/root_test.go
similarity index 99%
rename from main_test.go
rename to cmd/root_test.go
index 320e908..5a0ca5e 100644
--- a/main_test.go
+++ b/cmd/root_test.go
@@ -1,4 +1,4 @@
-package main
+package cmd
import (
"os"
diff --git a/cmd/version.go b/cmd/version.go
new file mode 100644
index 0000000..b0d22c6
--- /dev/null
+++ b/cmd/version.go
@@ -0,0 +1,16 @@
+package cmd
+
+import (
+ "fmt"
+
+ "github.com/fbz-tec/pgxport/internal/version"
+ "github.com/spf13/cobra"
+)
+
+var versionCmd = &cobra.Command{
+ Use: "version",
+ Short: "Print version information",
+ Run: func(cmd *cobra.Command, args []string) {
+ fmt.Println(version.GetInfo())
+ },
+}
diff --git a/config.go b/core/config/config.go
similarity index 99%
rename from config.go
rename to core/config/config.go
index 0ef8f29..bc8a838 100644
--- a/config.go
+++ b/core/config/config.go
@@ -1,4 +1,4 @@
-package main
+package config
import (
"fmt"
diff --git a/config_test.go b/core/config/config_test.go
similarity index 99%
rename from config_test.go
rename to core/config/config_test.go
index 165c5a2..c8684c9 100644
--- a/config_test.go
+++ b/core/config/config_test.go
@@ -1,4 +1,4 @@
-package main
+package config
import (
"os"
diff --git a/store.go b/core/db/connection.go
similarity index 73%
rename from store.go
rename to core/db/connection.go
index 3e8aab6..0a9b863 100644
--- a/store.go
+++ b/core/db/connection.go
@@ -1,13 +1,12 @@
-package main
+package db
import (
"context"
"fmt"
"net/url"
- "strings"
"time"
- "github.com/fbz-tec/pgexport/logger"
+ "github.com/fbz-tec/pgxport/internal/logger"
"github.com/jackc/pgx/v5"
)
@@ -120,38 +119,3 @@ func sanitizeURL(dbUrl string) string {
return fmt.Sprintf("%s://%s%s%s", u.Scheme, userInfo, u.Host, path)
}
-
-// ValidateQuery checks if the query is safe for export (read-only)
-func validateQuery(query string) error {
- // Normalize query to uppercase for checking
- normalized := strings.ToUpper(strings.TrimSpace(query))
-
- // List of forbidden SQL commands
- forbiddenCommands := []string{
- "DELETE",
- "DROP",
- "TRUNCATE",
- "INSERT",
- "UPDATE",
- "ALTER",
- "CREATE",
- "GRANT",
- "REVOKE",
- }
-
- // Check if query starts with forbidden command
- for _, cmd := range forbiddenCommands {
- if strings.HasPrefix(normalized, cmd) {
- return fmt.Errorf("forbidden SQL command detected: %s (read-only mode)", cmd)
- }
- }
-
- // Additional check: detect forbidden keywords anywhere in query
- for _, cmd := range forbiddenCommands {
- if strings.Contains(normalized, cmd+" ") || strings.Contains(normalized, cmd+";") {
- return fmt.Errorf("forbidden SQL command detected in query: %s", cmd)
- }
- }
-
- return nil
-}
diff --git a/store_test.go b/core/db/connection_test.go
similarity index 91%
rename from store_test.go
rename to core/db/connection_test.go
index 84bd910..b75e6ab 100644
--- a/store_test.go
+++ b/core/db/connection_test.go
@@ -1,4 +1,4 @@
-package main
+package db
import (
"context"
@@ -367,49 +367,6 @@ func TestConnectionReuse(t *testing.T) {
}
}
-func TestValidateQuery(t *testing.T) {
- tests := []struct {
- name string
- query string
- wantErr bool
- }{
- {
- name: "valid SELECT",
- query: "SELECT * FROM users",
- wantErr: false,
- },
- {
- name: "forbidden DELETE",
- query: "DELETE FROM users",
- wantErr: true,
- },
- {
- name: "forbidden DROP",
- query: "DROP TABLE users",
- wantErr: true,
- },
- {
- name: "chained DELETE",
- query: "SELECT 1; DELETE FROM users",
- wantErr: true,
- },
- {
- name: "lowercase delete",
- query: "delete from users",
- wantErr: true,
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- err := validateQuery(tt.query)
- if (err != nil) != tt.wantErr {
- t.Errorf("validateQuery() error = %v, wantErr %v", err, tt.wantErr)
- }
- })
- }
-}
-
// Helper function to get test database URL from environment
// Set DB_TEST_URL environment variable to run integration tests
// Example: export DB_TEST_URL="postgres://user:pass@localhost:5432/testdb"
diff --git a/exporters/compression.go b/core/exporters/compression.go
similarity index 98%
rename from exporters/compression.go
rename to core/exporters/compression.go
index 0be1f23..f421d08 100644
--- a/exporters/compression.go
+++ b/core/exporters/compression.go
@@ -10,7 +10,7 @@ import (
"strings"
"time"
- "github.com/fbz-tec/pgexport/logger"
+ "github.com/fbz-tec/pgxport/internal/logger"
)
const (
diff --git a/exporters/compression_test.go b/core/exporters/compression_test.go
similarity index 100%
rename from exporters/compression_test.go
rename to core/exporters/compression_test.go
diff --git a/exporters/csv_exporter.go b/core/exporters/csv_exporter.go
similarity index 90%
rename from exporters/csv_exporter.go
rename to core/exporters/csv_exporter.go
index b61ab19..ce89221 100644
--- a/exporters/csv_exporter.go
+++ b/core/exporters/csv_exporter.go
@@ -8,12 +8,14 @@ import (
"strings"
"time"
- "github.com/fbz-tec/pgexport/logger"
+ "github.com/fbz-tec/pgxport/internal/logger"
"github.com/jackc/pgx/v5"
)
-// exportToCSV writes query results to a CSV file with buffered I/O
-func (e *dataExporter) writeCSV(rows pgx.Rows, csvPath string, options ExportOptions) (int, error) {
+type csvExporter struct{}
+
+// Export writes query results to a CSV file with buffered I/O.
+func (e *csvExporter) Export(rows pgx.Rows, csvPath string, options ExportOptions) (int, error) {
start := time.Now()
logger.Debug("Preparing CSV export (delimiter=%q, noHeader=%v, compression=%s)",
@@ -126,7 +128,7 @@ func (e *dataExporter) writeCSV(rows pgx.Rows, csvPath string, options ExportOpt
return rowCount, nil
}
-func (e *dataExporter) writeCopyCSV(conn *pgx.Conn, query string, csvPath string, options ExportOptions) (int, error) {
+func (e *csvExporter) ExportCopy(conn *pgx.Conn, query string, csvPath string, options ExportOptions) (int, error) {
start := time.Now()
logger.Debug("Starting PostgreSQL COPY export (noHeader=%v, compression=%s)", options.NoHeader, options.Compression)
@@ -151,3 +153,7 @@ func (e *dataExporter) writeCopyCSV(conn *pgx.Conn, query string, csvPath string
return rowCount, nil
}
+
+func init() {
+ MustRegisterExporter(FormatCSV, func() Exporter { return &csvExporter{} })
+}
diff --git a/exporters/csv_exporter_test.go b/core/exporters/csv_exporter_test.go
similarity index 91%
rename from exporters/csv_exporter_test.go
rename to core/exporters/csv_exporter_test.go
index 9e3f1e7..3672b6e 100644
--- a/exporters/csv_exporter_test.go
+++ b/core/exporters/csv_exporter_test.go
@@ -12,7 +12,7 @@ import (
"github.com/jackc/pgx/v5"
)
-func TestWriteCSV(t *testing.T) {
+func TestExportCSV(t *testing.T) {
conn, cleanup := setupTestDB(t)
defer cleanup()
@@ -183,7 +183,10 @@ func TestWriteCSV(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatCSV)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatCSV,
Delimiter: tt.delimiter,
@@ -192,10 +195,10 @@ func TestWriteCSV(t *testing.T) {
TimeZone: "",
}
- _, err = exporter.writeCSV(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if (err != nil) != tt.wantErr {
- t.Errorf("writeCSV() error = %v, wantErr %v", err, tt.wantErr)
+ t.Errorf("Export() error = %v, wantErr %v", err, tt.wantErr)
return
}
@@ -266,7 +269,10 @@ func TestWriteCSVTimeFormatting(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatCSV)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatCSV,
Delimiter: ',',
@@ -275,9 +281,9 @@ func TestWriteCSVTimeFormatting(t *testing.T) {
TimeZone: tt.timeZone,
}
- _, err = exporter.writeCSV(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeCSV() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
content, err := os.ReadFile(outputPath)
@@ -315,7 +321,10 @@ func TestWriteCSVDataTypes(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatCSV)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatCSV,
Delimiter: ',',
@@ -324,9 +333,9 @@ func TestWriteCSVDataTypes(t *testing.T) {
TimeZone: "",
}
- rowCount, err := exporter.writeCSV(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeCSV() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 1 {
@@ -432,14 +441,24 @@ func TestWriteCopyCSV(t *testing.T) {
tmpDir := t.TempDir()
outputPath := filepath.Join(tmpDir, "output.csv")
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatCSV)
+ if err != nil {
+ t.Fatalf("Failed to get csv exporter: %v", err)
+ }
+
options := ExportOptions{
Format: FormatCSV,
Delimiter: tt.delimiter,
Compression: "none",
}
- rowCount, err := exporter.writeCopyCSV(conn, tt.query, outputPath, options)
+ copyExp, ok := exporter.(CopyCapable)
+
+ if !ok {
+ t.Fatalf("Copy mode is not supported by this exporter")
+ }
+
+ rowCount, err := copyExp.ExportCopy(conn, tt.query, outputPath, options)
if (err != nil) != tt.wantErr {
t.Errorf("writeCopyCSV() error = %v, wantErr %v", err, tt.wantErr)
@@ -480,7 +499,10 @@ func TestWriteCSVLargeDataset(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatCSV)
+ if err != nil {
+ t.Fatalf("Failed to get csv exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatCSV,
Delimiter: ',',
@@ -490,11 +512,11 @@ func TestWriteCSVLargeDataset(t *testing.T) {
}
start := time.Now()
- rowCount, err := exporter.writeCSV(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
duration := time.Since(start)
if err != nil {
- t.Fatalf("writeCSV() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 10000 {
@@ -688,7 +710,10 @@ func TestWriteCSVNoHeader(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatCSV)
+ if err != nil {
+ t.Fatalf("Failed to get csv exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatCSV,
Delimiter: ',',
@@ -698,9 +723,9 @@ func TestWriteCSVNoHeader(t *testing.T) {
NoHeader: tt.noHeader,
}
- _, err = exporter.writeCSV(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeCSV() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
tt.checkFunc(t, outputPath, tt.noHeader)
@@ -805,7 +830,11 @@ func TestWriteCopyCSVNoHeader(t *testing.T) {
tmpDir := t.TempDir()
outputPath := filepath.Join(tmpDir, "output.csv")
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatCSV)
+ if err != nil {
+ t.Fatalf("Failed to get csv exporter: %v", err)
+ }
+
options := ExportOptions{
Format: FormatCSV,
Delimiter: ',',
@@ -813,7 +842,14 @@ func TestWriteCopyCSVNoHeader(t *testing.T) {
NoHeader: tt.noHeader,
}
- _, err := exporter.writeCopyCSV(conn, tt.query, outputPath, options)
+ copyExp, ok := exporter.(CopyCapable)
+
+ if !ok {
+ t.Fatalf("Copy mode is not supported by this exporter")
+ }
+
+ _, err = copyExp.ExportCopy(conn, tt.query, outputPath, options)
+
if err != nil {
t.Fatalf("writeCopyCSV() error: %v", err)
}
@@ -823,7 +859,7 @@ func TestWriteCopyCSVNoHeader(t *testing.T) {
}
}
-func BenchmarkWriteCSV(b *testing.B) {
+func BenchmarkExportCSV(b *testing.B) {
testURL := os.Getenv("DB_TEST_URL")
if testURL == "" {
b.Skip("Skipping benchmark: DB_TEST_URL not set")
@@ -837,7 +873,10 @@ func BenchmarkWriteCSV(b *testing.B) {
defer conn.Close(ctx)
tmpDir := b.TempDir()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatCSV)
+ if err != nil {
+ b.Fatalf("Failed to get csv exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatCSV,
Delimiter: ',',
@@ -855,7 +894,7 @@ func BenchmarkWriteCSV(b *testing.B) {
b.Fatalf("Query failed: %v", err)
}
- _, err = exporter.writeCSV(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
b.Fatalf("writeCSV failed: %v", err)
}
diff --git a/core/exporters/exporter.go b/core/exporters/exporter.go
new file mode 100644
index 0000000..ae39ba8
--- /dev/null
+++ b/core/exporters/exporter.go
@@ -0,0 +1,36 @@
+package exporters
+
+import (
+ "github.com/jackc/pgx/v5"
+)
+
+const (
+ FormatCSV = "csv"
+ FormatJSON = "json"
+ FormatXML = "xml"
+ FormatSQL = "sql"
+)
+
+// ExportOptions holds export configuration
+type ExportOptions struct {
+ Format string
+ Delimiter rune
+ TableName string
+ Compression string
+ TimeFormat string
+ TimeZone string
+ NoHeader bool
+ XmlRootElement string
+ XmlRowElement string
+ RowPerStatement int
+}
+
+// Exporter interface defines export operations
+type Exporter interface {
+ Export(rows pgx.Rows, outputPath string, options ExportOptions) (int, error)
+}
+
+// Optional capability interface for exporters that can use PostgreSQL COPY
+type CopyCapable interface {
+ ExportCopy(conn *pgx.Conn, query string, outputPath string, options ExportOptions) (int, error)
+}
diff --git a/exporters/common.go b/core/exporters/formatting.go
similarity index 100%
rename from exporters/common.go
rename to core/exporters/formatting.go
diff --git a/exporters/common_test.go b/core/exporters/formatting_test.go
similarity index 100%
rename from exporters/common_test.go
rename to core/exporters/formatting_test.go
diff --git a/exporters/json_exporter.go b/core/exporters/json_exporter.go
similarity index 89%
rename from exporters/json_exporter.go
rename to core/exporters/json_exporter.go
index 88ad45c..af1d411 100644
--- a/exporters/json_exporter.go
+++ b/core/exporters/json_exporter.go
@@ -8,12 +8,14 @@ import (
"strings"
"time"
- "github.com/fbz-tec/pgexport/logger"
+ "github.com/fbz-tec/pgxport/internal/logger"
"github.com/jackc/pgx/v5"
)
-// exportToJSON writes query results to a JSON file with buffered I/O
-func (e *dataExporter) writeJSON(rows pgx.Rows, jsonPath string, options ExportOptions) (int, error) {
+type jsonExporter struct{}
+
+// writes query results to a JSON file with buffered I/O
+func (e *jsonExporter) Export(rows pgx.Rows, jsonPath string, options ExportOptions) (int, error) {
start := time.Now()
logger.Debug("Preparing JSON export (indent=2 spaces, compression=%s)", options.Compression)
@@ -109,3 +111,7 @@ func (e *dataExporter) writeJSON(rows pgx.Rows, jsonPath string, options ExportO
return rowCount, nil
}
+
+func init() {
+ MustRegisterExporter(FormatJSON, func() Exporter { return &jsonExporter{} })
+}
diff --git a/exporters/json_exporter_test.go b/core/exporters/json_exporter_test.go
similarity index 90%
rename from exporters/json_exporter_test.go
rename to core/exporters/json_exporter_test.go
index 625463c..adb3a8f 100644
--- a/exporters/json_exporter_test.go
+++ b/core/exporters/json_exporter_test.go
@@ -12,7 +12,7 @@ import (
"github.com/jackc/pgx/v5"
)
-func TestWriteJSON(t *testing.T) {
+func TestExportJSON(t *testing.T) {
conn, cleanup := setupTestDB(t)
defer cleanup()
@@ -176,7 +176,10 @@ func TestWriteJSON(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatJSON)
+ if err != nil {
+ t.Fatalf("Failed to get json exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatJSON,
Compression: tt.compression,
@@ -184,10 +187,10 @@ func TestWriteJSON(t *testing.T) {
TimeZone: "",
}
- _, err = exporter.writeJSON(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if (err != nil) != tt.wantErr {
- t.Errorf("writeJSON() error = %v, wantErr %v", err, tt.wantErr)
+ t.Errorf("Export() error = %v, wantErr %v", err, tt.wantErr)
return
}
@@ -280,7 +283,10 @@ func TestWriteJSONTimeFormatting(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatJSON)
+ if err != nil {
+ t.Fatalf("Failed to get json exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatJSON,
Compression: "none",
@@ -288,9 +294,9 @@ func TestWriteJSONTimeFormatting(t *testing.T) {
TimeZone: tt.timeZone,
}
- _, err = exporter.writeJSON(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeJSON() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
content, err := os.ReadFile(outputPath)
@@ -334,7 +340,10 @@ func TestWriteJSONDataTypes(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatJSON)
+ if err != nil {
+ t.Fatalf("Failed to get json exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatJSON,
Compression: "none",
@@ -342,9 +351,9 @@ func TestWriteJSONDataTypes(t *testing.T) {
TimeZone: "",
}
- rowCount, err := exporter.writeJSON(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeJSON() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 1 {
@@ -402,7 +411,10 @@ func TestWriteJSONPrettyPrint(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatJSON)
+ if err != nil {
+ t.Fatalf("Failed to get json exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatJSON,
Compression: "none",
@@ -410,9 +422,9 @@ func TestWriteJSONPrettyPrint(t *testing.T) {
TimeZone: "",
}
- _, err = exporter.writeJSON(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeJSON() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
content, err := os.ReadFile(outputPath)
@@ -457,7 +469,10 @@ func TestWriteJSONLargeDataset(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatJSON)
+ if err != nil {
+ t.Fatalf("Failed to get json exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatJSON,
Compression: "none",
@@ -466,11 +481,11 @@ func TestWriteJSONLargeDataset(t *testing.T) {
}
start := time.Now()
- rowCount, err := exporter.writeJSON(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
duration := time.Since(start)
if err != nil {
- t.Fatalf("writeJSON() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 1000 {
@@ -505,7 +520,7 @@ func TestWriteJSONLargeDataset(t *testing.T) {
}
}
-func BenchmarkWriteJSON(b *testing.B) {
+func BenchmarkExportJSON(b *testing.B) {
testURL := os.Getenv("DB_TEST_URL")
if testURL == "" {
b.Skip("Skipping benchmark: DB_TEST_URL not set")
@@ -519,7 +534,10 @@ func BenchmarkWriteJSON(b *testing.B) {
defer conn.Close(ctx)
tmpDir := b.TempDir()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatJSON)
+ if err != nil {
+ b.Fatalf("Failed to get json exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatJSON,
Compression: "none",
@@ -536,7 +554,7 @@ func BenchmarkWriteJSON(b *testing.B) {
b.Fatalf("Query failed: %v", err)
}
- _, err = exporter.writeJSON(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
b.Fatalf("writeJSON failed: %v", err)
}
diff --git a/core/exporters/registry.go b/core/exporters/registry.go
new file mode 100644
index 0000000..c5f85b9
--- /dev/null
+++ b/core/exporters/registry.go
@@ -0,0 +1,44 @@
+package exporters
+
+import (
+ "fmt"
+ "sort"
+ "strings"
+)
+
+type ExporterFactory func() Exporter
+
+var exportersRegistry = map[string]ExporterFactory{}
+
+func RegisterExporter(format string, factory ExporterFactory) error {
+ format = strings.ToLower(strings.TrimSpace(format))
+ if _, exists := exportersRegistry[format]; exists {
+ return fmt.Errorf("exporter: format %q already registered", format)
+ }
+ exportersRegistry[format] = factory
+ return nil
+}
+
+func GetExporter(format string) (Exporter, error) {
+ factory, ok := exportersRegistry[strings.ToLower(strings.TrimSpace(format))]
+ if !ok {
+ return nil, fmt.Errorf("unsupported format: %q (available: %s)",
+ format, strings.Join(ListExporters(), ", "))
+ }
+ return factory(), nil
+}
+
+func ListExporters() []string {
+ formats := make([]string, 0, len(exportersRegistry))
+ for name := range exportersRegistry {
+ formats = append(formats, name)
+ }
+ sort.Strings(formats)
+ return formats
+}
+
+func MustRegisterExporter(format string, factory ExporterFactory) {
+ if err := RegisterExporter(format, factory); err != nil {
+ panic(err)
+ }
+}
diff --git a/exporters/sql_exporter.go b/core/exporters/sql_exporter.go
similarity index 91%
rename from exporters/sql_exporter.go
rename to core/exporters/sql_exporter.go
index 715c49c..9fefb8e 100644
--- a/exporters/sql_exporter.go
+++ b/core/exporters/sql_exporter.go
@@ -6,11 +6,13 @@ import (
"strings"
"time"
- "github.com/fbz-tec/pgexport/logger"
+ "github.com/fbz-tec/pgxport/internal/logger"
"github.com/jackc/pgx/v5"
)
-func (e *dataExporter) writeSQL(rows pgx.Rows, sqlPath string, options ExportOptions) (int, error) {
+type sqlExporter struct{}
+
+func (e *sqlExporter) Export(rows pgx.Rows, sqlPath string, options ExportOptions) (int, error) {
start := time.Now()
logger.Debug("Preparing SQL export (table=%s, compression=%s, rows-per-statement=%d)",
@@ -116,3 +118,7 @@ func writeBatchInsert(writer *bufio.Writer, table string, columns []string, rows
_, err := writer.WriteString(stmt.String())
return err
}
+
+func init() {
+ MustRegisterExporter(FormatSQL, func() Exporter { return &sqlExporter{} })
+}
diff --git a/exporters/sql_exporter_test.go b/core/exporters/sql_exporter_test.go
similarity index 92%
rename from exporters/sql_exporter_test.go
rename to core/exporters/sql_exporter_test.go
index e4ca14b..c17c1d8 100644
--- a/exporters/sql_exporter_test.go
+++ b/core/exporters/sql_exporter_test.go
@@ -12,7 +12,7 @@ import (
"github.com/jackc/pgx/v5"
)
-func TestWriteSQL(t *testing.T) {
+func TestExportSQL(t *testing.T) {
conn, cleanup := setupTestDB(t)
defer cleanup()
@@ -218,7 +218,10 @@ func TestWriteSQL(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: tt.tableName,
@@ -226,10 +229,10 @@ func TestWriteSQL(t *testing.T) {
RowPerStatement: 1,
}
- _, err = exporter.writeSQL(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if (err != nil) != tt.wantErr {
- t.Errorf("writeSQL() error = %v, wantErr %v", err, tt.wantErr)
+ t.Errorf("Export() error = %v, wantErr %v", err, tt.wantErr)
return
}
@@ -270,7 +273,10 @@ func TestWriteSQLDataTypes(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: "test_types",
@@ -278,9 +284,9 @@ func TestWriteSQLDataTypes(t *testing.T) {
RowPerStatement: 1,
}
- rowCount, err := exporter.writeSQL(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeSQL() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 1 {
@@ -358,7 +364,10 @@ func TestWriteSQLColumnOrder(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: "test_table",
@@ -366,9 +375,9 @@ func TestWriteSQLColumnOrder(t *testing.T) {
RowPerStatement: 1,
}
- _, err = exporter.writeSQL(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeSQL() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
content, err := os.ReadFile(outputPath)
@@ -442,7 +451,10 @@ func TestWriteSQLEscaping(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: "test_escape",
@@ -450,9 +462,9 @@ func TestWriteSQLEscaping(t *testing.T) {
RowPerStatement: 1,
}
- _, err = exporter.writeSQL(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeSQL() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
content, err := os.ReadFile(outputPath)
@@ -494,7 +506,10 @@ func TestWriteSQLLargeDataset(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: "large_table",
@@ -503,11 +518,11 @@ func TestWriteSQLLargeDataset(t *testing.T) {
}
start := time.Now()
- rowCount, err := exporter.writeSQL(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
duration := time.Since(start)
if err != nil {
- t.Fatalf("writeSQL() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 1000 {
@@ -553,7 +568,10 @@ func TestWriteSQLStatementFormat(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: "test_table",
@@ -561,9 +579,9 @@ func TestWriteSQLStatementFormat(t *testing.T) {
RowPerStatement: 1,
}
- _, err = exporter.writeSQL(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeSQL() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
content, err := os.ReadFile(outputPath)
@@ -622,7 +640,10 @@ func TestWriteSQLBuffering(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: "buffer_test",
@@ -630,9 +651,9 @@ func TestWriteSQLBuffering(t *testing.T) {
RowPerStatement: 1,
}
- rowCount, err := exporter.writeSQL(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeSQL() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 15000 {
@@ -851,7 +872,10 @@ func TestWriteSQLWithBatchInsert(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: tt.tableName,
@@ -859,9 +883,9 @@ func TestWriteSQLWithBatchInsert(t *testing.T) {
RowPerStatement: tt.insertBatch,
}
- rowCount, err := exporter.writeSQL(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeSQL() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != tt.expectedRows {
@@ -909,7 +933,10 @@ func TestWriteSQLBatchInsertLargeDataset(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ t.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: "large_batch_table",
@@ -918,11 +945,11 @@ func TestWriteSQLBatchInsertLargeDataset(t *testing.T) {
}
start := time.Now()
- rowCount, err := exporter.writeSQL(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
duration := time.Since(start)
if err != nil {
- t.Fatalf("writeSQL() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 10000 {
@@ -988,7 +1015,10 @@ func BenchmarkWriteSQLBatchComparison(b *testing.B) {
for _, bm := range benchmarks {
b.Run(bm.name, func(b *testing.B) {
tmpDir := b.TempDir()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ b.Fatalf("Failed to get sql exporter: %v", err)
+ }
query := fmt.Sprintf("SELECT generate_series(1, %d) as id, 'data_' || generate_series(1, %d) as data", bm.rowCount, bm.rowCount)
b.ResetTimer()
@@ -1007,7 +1037,7 @@ func BenchmarkWriteSQLBatchComparison(b *testing.B) {
RowPerStatement: bm.batchSize,
}
- _, err = exporter.writeSQL(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
b.Fatalf("writeSQL failed: %v", err)
}
@@ -1018,7 +1048,7 @@ func BenchmarkWriteSQLBatchComparison(b *testing.B) {
}
}
-func BenchmarkWriteSQL(b *testing.B) {
+func BenchmarkExportSQL(b *testing.B) {
testURL := os.Getenv("DB_TEST_URL")
if testURL == "" {
b.Skip("Skipping benchmark: DB_TEST_URL not set")
@@ -1032,7 +1062,10 @@ func BenchmarkWriteSQL(b *testing.B) {
defer conn.Close(ctx)
tmpDir := b.TempDir()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatSQL)
+ if err != nil {
+ b.Fatalf("Failed to get sql exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatSQL,
TableName: "bench_table",
@@ -1049,7 +1082,7 @@ func BenchmarkWriteSQL(b *testing.B) {
b.Fatalf("Query failed: %v", err)
}
- _, err = exporter.writeSQL(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
b.Fatalf("writeSQL failed: %v", err)
}
diff --git a/exporters/testing_helpers.go b/core/exporters/testing_helpers.go
similarity index 100%
rename from exporters/testing_helpers.go
rename to core/exporters/testing_helpers.go
diff --git a/exporters/xml_exporter.go b/core/exporters/xml_exporter.go
similarity index 92%
rename from exporters/xml_exporter.go
rename to core/exporters/xml_exporter.go
index 6355147..8275bc1 100644
--- a/exporters/xml_exporter.go
+++ b/core/exporters/xml_exporter.go
@@ -7,12 +7,14 @@ import (
"strings"
"time"
- "github.com/fbz-tec/pgexport/logger"
+ "github.com/fbz-tec/pgxport/internal/logger"
"github.com/jackc/pgx/v5"
)
-// exportToXML writes query results to an XML file with buffered I/O
-func (e *dataExporter) writeXML(rows pgx.Rows, xmlPath string, options ExportOptions) (int, error) {
+type xmlExporter struct{}
+
+// writes query results to an XML file with buffered I/O
+func (e *xmlExporter) Export(rows pgx.Rows, xmlPath string, options ExportOptions) (int, error) {
start := time.Now()
logger.Debug("Preparing XML export (indent=2 spaces, compression=%s)", options.Compression)
@@ -131,3 +133,7 @@ func (e *dataExporter) writeXML(rows pgx.Rows, xmlPath string, options ExportOpt
return rowCount, nil
}
+
+func init() {
+ MustRegisterExporter(FormatXML, func() Exporter { return &xmlExporter{} })
+}
diff --git a/exporters/xml_exporter_test.go b/core/exporters/xml_exporter_test.go
similarity index 88%
rename from exporters/xml_exporter_test.go
rename to core/exporters/xml_exporter_test.go
index 033b459..c827ae1 100644
--- a/exporters/xml_exporter_test.go
+++ b/core/exporters/xml_exporter_test.go
@@ -12,7 +12,7 @@ import (
"github.com/jackc/pgx/v5"
)
-func TestWriteXML(t *testing.T) {
+func TestExportXML(t *testing.T) {
conn, cleanup := setupTestDB(t)
defer cleanup()
@@ -180,7 +180,10 @@ func TestWriteXML(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatXML)
+ if err != nil {
+ t.Fatalf("Failed to get xml exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatXML,
Compression: tt.compression,
@@ -190,10 +193,10 @@ func TestWriteXML(t *testing.T) {
XmlRowElement: "row",
}
- _, err = exporter.writeXML(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if (err != nil) != tt.wantErr {
- t.Errorf("writeXML() error = %v, wantErr %v", err, tt.wantErr)
+ t.Errorf("Export() error = %v, wantErr %v", err, tt.wantErr)
return
}
@@ -264,7 +267,10 @@ func TestWriteXMLTimeFormatting(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatXML)
+ if err != nil {
+ t.Fatalf("Failed to get xml exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatXML,
Compression: "none",
@@ -274,9 +280,9 @@ func TestWriteXMLTimeFormatting(t *testing.T) {
XmlRowElement: "row",
}
- _, err = exporter.writeXML(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeXML() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
content, err := os.ReadFile(outputPath)
@@ -314,7 +320,10 @@ func TestWriteXMLDataTypes(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatXML)
+ if err != nil {
+ t.Fatalf("Failed to get xml exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatXML,
Compression: "none",
@@ -324,9 +333,9 @@ func TestWriteXMLDataTypes(t *testing.T) {
XmlRowElement: "row",
}
- rowCount, err := exporter.writeXML(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeXML() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 1 {
@@ -364,7 +373,10 @@ func TestWriteXMLStructure(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatXML)
+ if err != nil {
+ t.Fatalf("Failed to get xml exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatXML,
Compression: "none",
@@ -374,9 +386,9 @@ func TestWriteXMLStructure(t *testing.T) {
XmlRowElement: "row",
}
- _, err = exporter.writeXML(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeXML() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
content, err := os.ReadFile(outputPath)
@@ -420,7 +432,10 @@ func TestWriteXMLValidXML(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatXML)
+ if err != nil {
+ t.Fatalf("Failed to get xml exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatXML,
Compression: "none",
@@ -430,9 +445,9 @@ func TestWriteXMLValidXML(t *testing.T) {
XmlRowElement: "row",
}
- _, err = exporter.writeXML(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeXML() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
// Try to parse the XML to verify it's valid
@@ -478,7 +493,10 @@ func TestWriteXMLCustomTags(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatXML)
+ if err != nil {
+ t.Fatalf("Failed to get xml exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatXML,
Compression: "none",
@@ -488,9 +506,9 @@ func TestWriteXMLCustomTags(t *testing.T) {
XmlRowElement: "record",
}
- _, err = exporter.writeXML(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeXML() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
content, err := os.ReadFile(outputPath)
@@ -536,7 +554,10 @@ func TestWriteXMLLargeDataset(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatXML)
+ if err != nil {
+ t.Fatalf("Failed to get xml exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatXML,
Compression: "none",
@@ -547,11 +568,11 @@ func TestWriteXMLLargeDataset(t *testing.T) {
}
start := time.Now()
- rowCount, err := exporter.writeXML(rows, outputPath, options)
+ rowCount, err := exporter.Export(rows, outputPath, options)
duration := time.Since(start)
if err != nil {
- t.Fatalf("writeXML() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
if rowCount != 1000 {
@@ -604,7 +625,10 @@ func TestWriteXMLSpecialXMLCharacters(t *testing.T) {
}
defer rows.Close()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatXML)
+ if err != nil {
+ t.Fatalf("Failed to get xml exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatXML,
Compression: "none",
@@ -614,9 +638,9 @@ func TestWriteXMLSpecialXMLCharacters(t *testing.T) {
XmlRowElement: "row",
}
- _, err = exporter.writeXML(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
- t.Fatalf("writeXML() error: %v", err)
+ t.Fatalf("Export() error: %v", err)
}
// Verify the file is valid XML by parsing it
@@ -632,7 +656,7 @@ func TestWriteXMLSpecialXMLCharacters(t *testing.T) {
}
}
-func BenchmarkWriteXML(b *testing.B) {
+func BenchmarkExportXML(b *testing.B) {
testURL := os.Getenv("DB_TEST_URL")
if testURL == "" {
b.Skip("Skipping benchmark: DB_TEST_URL not set")
@@ -646,7 +670,10 @@ func BenchmarkWriteXML(b *testing.B) {
defer conn.Close(ctx)
tmpDir := b.TempDir()
- exporter := &dataExporter{}
+ exporter, err := GetExporter(FormatXML)
+ if err != nil {
+ b.Fatalf("Failed to get xml exporter: %v", err)
+ }
options := ExportOptions{
Format: FormatXML,
Compression: "none",
@@ -665,7 +692,7 @@ func BenchmarkWriteXML(b *testing.B) {
b.Fatalf("Query failed: %v", err)
}
- _, err = exporter.writeXML(rows, outputPath, options)
+ _, err = exporter.Export(rows, outputPath, options)
if err != nil {
b.Fatalf("writeXML failed: %v", err)
}
diff --git a/core/validation/query_safety.go b/core/validation/query_safety.go
new file mode 100644
index 0000000..afe2363
--- /dev/null
+++ b/core/validation/query_safety.go
@@ -0,0 +1,41 @@
+package validation
+
+import (
+ "fmt"
+ "strings"
+)
+
+// ValidateQuery checks if the query is safe for export (read-only)
+func ValidateQuery(query string) error {
+ // Normalize: uppercase and collapse all whitespace runs to single spaces
+ normalized := strings.ToUpper(strings.Join(strings.Fields(query), " "))
+
+ // List of forbidden SQL commands
+ forbiddenCommands := []string{
+ "DELETE",
+ "DROP",
+ "TRUNCATE",
+ "INSERT",
+ "UPDATE",
+ "ALTER",
+ "CREATE",
+ "GRANT",
+ "REVOKE",
+ }
+
+ // Check if query starts with forbidden command
+ for _, cmd := range forbiddenCommands {
+ if strings.HasPrefix(normalized, cmd) {
+ return fmt.Errorf("forbidden SQL command detected: %s (read-only mode)", cmd)
+ }
+ }
+
+ // Additional check: detect forbidden commands chained after a ';' separator (anchoring on ';' avoids false positives on identifiers like last_update)
+ for _, cmd := range forbiddenCommands {
+ if strings.Contains(normalized, "; "+cmd) || strings.Contains(normalized, ";"+cmd) {
+ return fmt.Errorf("forbidden SQL command detected in query: %s", cmd)
+ }
+ }
+
+ return nil
+}
diff --git a/core/validation/query_safety_test.go b/core/validation/query_safety_test.go
new file mode 100644
index 0000000..9da57a9
--- /dev/null
+++ b/core/validation/query_safety_test.go
@@ -0,0 +1,48 @@
+package validation
+
+import (
+ "testing"
+)
+
+func TestValidateQuery(t *testing.T) {
+ tests := []struct {
+ name string
+ query string
+ wantErr bool
+ }{
+ {
+ name: "valid SELECT",
+ query: "SELECT * FROM users",
+ wantErr: false,
+ },
+ {
+ name: "forbidden DELETE",
+ query: "DELETE FROM users",
+ wantErr: true,
+ },
+ {
+ name: "forbidden DROP",
+ query: "DROP TABLE users",
+ wantErr: true,
+ },
+ {
+ name: "chained DELETE",
+ query: "SELECT 1; DELETE FROM users",
+ wantErr: true,
+ },
+ {
+ name: "lowercase delete",
+ query: "delete from users",
+ wantErr: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ err := ValidateQuery(tt.query)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("ValidateQuery() error = %v, wantErr %v", err, tt.wantErr)
+ }
+ })
+ }
+}
diff --git a/exporters/exporter.go b/exporters/exporter.go
deleted file mode 100644
index 0f5e093..0000000
--- a/exporters/exporter.go
+++ /dev/null
@@ -1,99 +0,0 @@
-package exporters
-
-import (
- "fmt"
-
- "github.com/fbz-tec/pgexport/logger"
- "github.com/jackc/pgx/v5"
-)
-
-const (
- FormatCSV = "csv"
- FormatJSON = "json"
- FormatXML = "xml"
- FormatSQL = "sql"
-)
-
-// ExportOptions holds export configuration
-type ExportOptions struct {
- Format string
- Delimiter rune
- TableName string
- Compression string
- TimeFormat string
- TimeZone string
- NoHeader bool
- XmlRootElement string
- XmlRowElement string
- RowPerStatement int
-}
-
-// Exporter interface defines export operations
-type Exporter interface {
- Export(rows pgx.Rows, outputPath string, options ExportOptions) (int, error)
-}
-
-// Optional capability interface for exporters that can use PostgreSQL COPY
-type CopyCapable interface {
- ExportCopy(conn *pgx.Conn, query string, outputPath string, options ExportOptions) (int, error)
-}
-
-// dataExporter implements Exporter & CopyCapable interfaces
-type dataExporter struct{}
-
-// NewExporter creates a new exporter instance
-func NewExporter() Exporter {
- return &dataExporter{}
-}
-
-func NewCopyExporter() CopyCapable {
- return &dataExporter{}
-}
-
-// Export exports query results to the specified format
-func (e *dataExporter) Export(rows pgx.Rows, outputPath string, options ExportOptions) (int, error) {
-
- logger.Debug("Starting export to %s format โ output: %s", options.Format, outputPath)
-
- var rowCount int
- var err error
-
- switch options.Format {
- case FormatCSV:
- rowCount, err = e.writeCSV(rows, outputPath, options)
- case FormatJSON:
- rowCount, err = e.writeJSON(rows, outputPath, options)
- case FormatXML:
- rowCount, err = e.writeXML(rows, outputPath, options)
- case FormatSQL:
- rowCount, err = e.writeSQL(rows, outputPath, options)
- default:
- return 0, fmt.Errorf("unsupported format: %s", options.Format)
- }
-
- if err != nil {
- return rowCount, fmt.Errorf("error exporting to %s: %w", options.Format, err)
- }
-
- return rowCount, nil
-}
-
-func (e *dataExporter) ExportCopy(conn *pgx.Conn, query string, outputPath string, options ExportOptions) (int, error) {
-
- logger.Debug("Starting COPY export for %s format โ output: %s", options.Format, outputPath)
-
- var rowCount int
- var err error
- switch options.Format {
- case FormatCSV:
- rowCount, err = e.writeCopyCSV(conn, query, outputPath, options)
- default:
- return 0, fmt.Errorf("unsupported format: %s", options.Format)
- }
-
- if err != nil {
- return rowCount, fmt.Errorf("error exporting to %s: %w", options.Format, err)
- }
-
- return rowCount, nil
-}
diff --git a/go.mod b/go.mod
index d9898ca..37175c8 100644
--- a/go.mod
+++ b/go.mod
@@ -1,6 +1,6 @@
-module github.com/fbz-tec/pgexport
+module github.com/fbz-tec/pgxport
-go 1.24.9
+go 1.24.10
require (
github.com/jackc/pgx/v5 v5.7.6
diff --git a/logger/logger.go b/internal/logger/logger.go
similarity index 100%
rename from logger/logger.go
rename to internal/logger/logger.go
diff --git a/version.go b/internal/version/version.go
similarity index 68%
rename from version.go
rename to internal/version/version.go
index 926e9d9..165ef69 100644
--- a/version.go
+++ b/internal/version/version.go
@@ -1,30 +1,30 @@
-package main
-
-import (
- "fmt"
- "runtime"
-)
-
-// Version information (set via ldflags during build)
-var (
- Version = "dev"
- BuildTime = "unknown"
- GitCommit = "unknown"
-)
-
-// GetVersionInfo returns formatted version information
-func GetVersionInfo() string {
- return fmt.Sprintf(
- `Version: %s
-Build time: %s
-Git commit: %s
-Go version: %s
-OS/Arch: %s/%s`,
- Version,
- BuildTime,
- GitCommit,
- runtime.Version(),
- runtime.GOOS,
- runtime.GOARCH,
- )
-}
+package version
+
+import (
+ "fmt"
+ "runtime"
+)
+
// Version information, overridden at build time via
// -ldflags "-X .../internal/version.AppVersion=... " etc.
var (
	AppVersion = "dev"     // semantic or dev version string
	BuildTime  = "unknown" // UTC build timestamp
	GitCommit  = "unknown" // short git commit hash
)

// GetInfo returns a human-readable, multi-line summary of the build:
// version, build time, git commit, Go toolchain version, and OS/arch.
func GetInfo() string {
	return fmt.Sprintf(
		`Version: %s
Build time: %s
Git commit: %s
Go version: %s
OS/Arch: %s/%s`,
		AppVersion,
		BuildTime,
		GitCommit,
		runtime.Version(),
		runtime.GOOS,
		runtime.GOARCH,
	)
}
diff --git a/main.go b/main.go
index 643f4c0..47e8329 100644
--- a/main.go
+++ b/main.go
@@ -1,331 +1,7 @@
package main
-import (
- "context"
- "fmt"
- "os"
- "strings"
-
- "github.com/fbz-tec/pgexport/exporters"
- "github.com/fbz-tec/pgexport/logger"
- "github.com/jackc/pgx/v5"
- "github.com/spf13/cobra"
-)
-
-var (
- sqlQuery string
- sqlFile string
- outputPath string
- format string
- delimiter string
- connString string
- tableName string
- compression string
- timeFormat string
- timeZone string
- xmlRootElement string
- xmlRowElement string
- withCopy bool
- failOnEmpty bool
- noHeader bool
- verbose bool
- rowPerStatement int
-)
+import "github.com/fbz-tec/pgxport/cmd"
func main() {
-
- var rootCmd = &cobra.Command{
- Use: "pgxport",
- Short: "Export PostgreSQL query results to CSV, JSON, XML, or SQL formats",
- Long: `A powerful CLI tool to export PostgreSQL query results.
-It supports direct SQL queries or SQL files, with customizable output options.
-
-Supported output formats:
- โข CSV โ standard text export with customizable delimiter
- โข JSON โ structured export for API or data processing
- โข XML โ hierarchical export for interoperability
- โข SQL โ generate INSERT statements`,
- Example: ` # Export with inline query
- pgxport -s "SELECT * FROM users" -o users.csv
-
- # Export from SQL file with custom delimiter
- pgxport -F query.sql -o output.csv -d ";"
-
- # Use the high-performance COPY mode for large CSV exports
- pgxport -s "SELECT * FROM events" -o events.csv -f csv --with-copy
-
- # Export to JSON
- pgxport -s "SELECT * FROM products" -o products.json -f json
-
- # Export to XML
- pgxport -s "SELECT * FROM orders" -o orders.xml -f xml
-
- # Export to SQL insert statements
- pgxport -s "SELECT * FROM orders" -o orders.sql -f sql -t orders_table`,
- RunE: runExport,
- SilenceUsage: true,
- SilenceErrors: true,
- }
-
- // Version command
- var versionCmd = &cobra.Command{
- Use: "version",
- Short: "Print version information",
- Run: func(cmd *cobra.Command, args []string) {
- fmt.Println(GetVersionInfo())
- },
- }
-
- rootCmd.Flags().StringVarP(&sqlQuery, "sql", "s", "", "SQL query to execute")
- rootCmd.Flags().StringVarP(&sqlFile, "sqlfile", "F", "", "Path to SQL file containing the query")
- rootCmd.Flags().StringVarP(&outputPath, "output", "o", "", "Output file path (required)")
- rootCmd.Flags().StringVarP(&format, "format", "f", "csv", "Output format (csv, json, xml, sql)")
- rootCmd.Flags().StringVarP(&timeFormat, "time-format", "T", "yyyy-MM-dd HH:mm:ss", "Custom time format (e.g. yyyy-MM-ddTHH:mm:ss.SSS)")
- rootCmd.Flags().StringVarP(&timeZone, "time-zone", "Z", "", "Time zone for date/time formatting (e.g. UTC, Europe/Paris). Defaults to local time zone.")
- rootCmd.Flags().StringVarP(&delimiter, "delimiter", "d", ",", "CSV delimiter character")
- rootCmd.Flags().StringVarP(&connString, "dsn", "", "", "Database connection string (postgres://user:pass@host:port/dbname)")
- rootCmd.Flags().StringVarP(&tableName, "table", "t", "", "Table name for SQL insert exports")
- rootCmd.Flags().StringVarP(&compression, "compression", "z", "none", "Compression to apply to the output file (none, gzip, zip)")
- rootCmd.Flags().BoolVar(&withCopy, "with-copy", false, "Use PostgreSQL native COPY for CSV export (faster for large datasets)")
- rootCmd.Flags().BoolVar(&failOnEmpty, "fail-on-empty", false, "Exit with error if query returns 0 rows")
- rootCmd.Flags().BoolVar(&noHeader, "no-header", false, "Skip header row in CSV output")
- rootCmd.Flags().StringVarP(&xmlRootElement, "xml-root-tag", "", "results", "Sets the root element name for XML exports")
- rootCmd.Flags().StringVarP(&xmlRowElement, "xml-row-tag", "", "row", "Sets the row element name for XML exports")
- rootCmd.Flags().IntVarP(&rowPerStatement, "insert-batch", "", 1, "Number of rows per INSERT statement in SQL export")
- rootCmd.Flags().BoolVarP(&verbose, "verbose", "v", false, "Enable verbose output with detailed information")
-
- rootCmd.MarkFlagRequired("output")
- rootCmd.AddCommand(versionCmd)
-
- // Appliquer le flag verbose avant lโexรฉcution de la commande
- rootCmd.PersistentPreRun = func(cmd *cobra.Command, args []string) {
- logger.SetVerbose(verbose)
- if verbose {
- logger.Debug("Verbose mode enabled")
- }
- }
-
- if err := rootCmd.Execute(); err != nil {
- fmt.Fprintf(os.Stderr, "Error: %v\n", err)
- os.Exit(1)
- }
-
-}
-
-func runExport(cmd *cobra.Command, args []string) error {
-
- logger.Debug("Initializing pgxport execution environment")
- logger.Debug("Version: %s, Build: %s, Commit: %s", Version, BuildTime, GitCommit)
-
- logger.Debug("Validating export parameters")
-
- if err := validateExportParams(); err != nil {
- return err
- }
-
- logger.Debug("Export parameters validated successfully")
-
- var dbUrl string
- if connString != "" {
- logger.Debug("Using connection string from --dsn flag")
- dbUrl = connString
- } else {
- logger.Debug("Loading configuration from environment")
- config := LoadConfig()
- if err := config.Validate(); err != nil {
- return fmt.Errorf("configuration error: %w", err)
- }
- dbUrl = config.GetConnectionString()
- logger.Debug("Configuration loaded: host=%s port=%s database=%s user=%s",
- config.DBHost, config.DBPort, config.DBName, config.DBUser)
- }
-
- var query string
- var err error
- var rowCount int
- var rows pgx.Rows
-
- if sqlFile != "" {
- logger.Debug("Reading SQL from file: %s", sqlFile)
- query, err = readSQLFromFile(sqlFile)
- if err != nil {
- return fmt.Errorf("error reading SQL file: %w", err)
- }
- logger.Debug("SQL query loaded from file (%d characters)", len(query))
- } else {
- query = sqlQuery
- logger.Debug("Using inline SQL query (%d characters)", len(query))
- }
-
- if err := validateQuery(query); err != nil {
- return err
- }
-
- format = strings.ToLower(strings.TrimSpace(format))
-
- var delimRune rune = ','
- if format == "csv" {
- delimRune, err = parseDelimiter(delimiter)
- if err != nil {
- return fmt.Errorf("invalid delimiter: %w", err)
- }
- logger.Debug("CSV delimiter: %q", string(delimRune))
- }
-
- store := NewStore()
-
- if err := store.Open(dbUrl); err != nil {
- return fmt.Errorf("failed to connect to database: %w", err)
- }
-
- defer store.Close()
-
- options := exporters.ExportOptions{
- Format: format,
- Delimiter: delimRune,
- TableName: tableName,
- Compression: compression,
- TimeFormat: timeFormat,
- TimeZone: timeZone,
- NoHeader: noHeader,
- XmlRootElement: xmlRootElement,
- XmlRowElement: xmlRowElement,
- RowPerStatement: rowPerStatement,
- }
-
- if format == "csv" && withCopy {
- logger.Debug("Using PostgreSQL COPY mode for fast CSV export")
- exporter := exporters.NewCopyExporter()
- rowCount, err = exporter.ExportCopy(store.GetConnection(), query, outputPath, options)
- } else {
- logger.Debug("Using standard export mode for format: %s", format)
- rows, err = store.ExecuteQuery(context.Background(), query)
- if err != nil {
- return err
- }
- defer rows.Close()
- exporter := exporters.NewExporter()
- rowCount, err = exporter.Export(rows, outputPath, options)
- }
-
- if err != nil {
- return fmt.Errorf("export failed: %w", err)
- }
-
- return handleExportResult(rowCount, outputPath)
-}
-
-func validateExportParams() error {
- // Validate SQL query source
- if sqlQuery == "" && sqlFile == "" {
- return fmt.Errorf("error: Either --sql or --sqlfile must be provided")
- }
-
- if sqlQuery != "" && sqlFile != "" {
- return fmt.Errorf("error: Cannot use both --sql and --sqlfile at the same time")
- }
-
- // Normalize and validate format
- format = strings.ToLower(strings.TrimSpace(format))
- validFormats := []string{"csv", "json", "xml", "sql"}
-
- isValid := false
- for _, f := range validFormats {
- if format == f {
- isValid = true
- break
- }
- }
-
- if !isValid {
- return fmt.Errorf("error: Invalid format '%s'. Valid formats are: %s",
- format, strings.Join(validFormats, ", "))
- }
-
- compression = strings.ToLower(strings.TrimSpace(compression))
- if compression == "" {
- compression = "none"
- }
- validCompressions := []string{"none", "gzip", "zip"}
- compressionValid := false
- for _, c := range validCompressions {
- if compression == c {
- compressionValid = true
- break
- }
- }
-
- if !compressionValid {
- return fmt.Errorf("error: Invalid compression '%s'. Valid options are: %s",
- compression, strings.Join(validCompressions, ", "))
- }
-
- // Validate table name for SQL format
- if format == "sql" && strings.TrimSpace(tableName) == "" {
- return fmt.Errorf("error: --table (-t) is required when using SQL format")
- }
-
- if format == "sql" && rowPerStatement < 1 {
- return fmt.Errorf("error: --insert-batch must be at least 1")
- }
-
- // Validate time format if provided
- if timeFormat != "" {
- if err := exporters.ValidateTimeFormat(timeFormat); err != nil {
- return fmt.Errorf("error: Invalid time format '%s'. Use format like 'yyyy-MM-dd HH:mm:ss'", timeFormat)
- }
- }
-
- // Validate timezone if provided
- if timeZone != "" {
- if err := exporters.ValidateTimeZone(timeZone); err != nil {
- return fmt.Errorf("error: Invalid timezone '%s'. Use format like 'UTC' or 'Europe/Paris'", timeZone)
- }
- }
-
- return nil
-}
-
-func readSQLFromFile(filepath string) (string, error) {
- content, err := os.ReadFile(filepath)
- if err != nil {
- return "", fmt.Errorf("unable to read file: %w", err)
- }
- return string(content), nil
-}
-
-func parseDelimiter(delim string) (rune, error) {
- delim = strings.TrimSpace(delim)
-
- if delim == "" {
- return 0, fmt.Errorf("delimiter cannot be empty")
- }
-
- if delim == `\t` {
- return '\t', nil
- }
-
- runes := []rune(delim)
-
- if len(runes) != 1 {
- return 0, fmt.Errorf("delimiter must be a single character (use \\t for tab)")
- }
-
- return runes[0], nil
-}
-
-func handleExportResult(rowCount int, outputPath string) error {
- if rowCount == 0 {
-
- if failOnEmpty {
- return fmt.Errorf("export failed: query returned 0 rows")
- }
-
- logger.Warn("Query returned 0 rows. File created at %s but contains no data rows", outputPath)
-
- } else {
- logger.Success("Export completed: %d rows -> %s", rowCount, outputPath)
- }
-
- return nil
+ cmd.Execute()
}