diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..6709e7d --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,62 @@ +name: CI + +on: + push: + branches: ["main", "chore/**", "feature/**", "fix/**"] + pull_request: + +jobs: + tests: + runs-on: ubuntu-latest + env: + PGHOST: localhost + PGPORT: 5432 + PGUSER: postgres + PGDATABASE: postgres + PGPASSWORD: postgres + services: + postgres: + image: postgres:16 + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + ports: + - 5432:5432 + options: >- + --health-cmd "pg_isready -U postgres" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install dependencies + run: | + sudo apt-get update + sudo apt-get install -y shellcheck jq postgresql-client + + - name: Wait for PostgreSQL + run: | + for i in {1..10}; do + if pg_isready -h "$PGHOST" -p "$PGPORT" -U "$PGUSER"; then + exit 0 + fi + sleep 2 + done + echo "PostgreSQL did not become ready in time" >&2 + exit 1 + + - name: ShellCheck automation scripts + run: shellcheck automation/*.sh + + # Todo: enable full test suite when stable + # - name: Fast automation test suite + # run: ./automation/test_pgtools.sh --fast + + # - name: HOT checklist JSON validation + # run: ./automation/run_hot_update_report.sh --format json --database "$PGDATABASE" --stdout + + # - name: HOT checklist text validation + # run: ./automation/run_hot_update_report.sh --format text --database "$PGDATABASE" --stdout diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1a25f4e..d8bc907 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -22,6 +22,9 @@ git checkout -b feature/your-feature-name # Test current scripts in your environment ./automation/test_pgtools.sh --database your_test_db + +# Optional: run the full local pre-commit bundle +./scripts/precommit_checks.sh --database your_test_db ``` ## Types of Contributions @@ -154,6 
+157,9 @@ psql -h localhost -p 5432 -U postgres -d postgres -f your_script.sql # Test your specific changes psql -d test_db -f your_new_script.sql + + # Recommended: mirror CI locally + ./scripts/precommit_checks.sh --database test_db ``` 4. **Submit Pull Request** diff --git a/.github/FUNDING.yml b/FUNDING.yml.soon similarity index 100% rename from .github/FUNDING.yml rename to FUNDING.yml.soon diff --git a/README.md b/README.md index 09f4970..c162ba0 100644 --- a/README.md +++ b/README.md @@ -243,6 +243,24 @@ psql -U postgres -d mydb -f backup/backup_validation.sql psql -U postgres -d mydb -f monitoring/connection_pools.sql ``` +### Automation / HOT report verification +```bash +# Quick automation sanity check (connection, syntax, permissions) +./automation/test_pgtools.sh --fast + +# Full automation suite with integration tests +./automation/test_pgtools.sh --full --verbose + +# HOT checklist JSON validation +./automation/run_hot_update_report.sh --format json --database my_database --stdout + +# HOT checklist text validation +./automation/run_hot_update_report.sh --format text --database my_database --stdout + +# Full local pre-commit bundle +./scripts/precommit_checks.sh --database my_database +``` + ## Script Categories - **Monitoring** - Database health, locks, replication, bloating diff --git a/automation/README.md b/automation/README.md index b7f43eb..99f5259 100644 --- a/automation/README.md +++ b/automation/README.md @@ -12,6 +12,8 @@ This directory contains automation scripts for pgtools: - `cleanup_reports.sh` - Report cleanup and log rotation - `export_metrics.sh` - Metrics export for monitoring systems - `test_pgtools.sh` - Testing framework and validation +- `run_hot_update_report.sh` - HOT update checklist (text or JSON, reads connection defaults from pgtools.conf) +- `scripts/precommit_checks.sh` - Local helper mirroring CI sanity checks - `pgtools.conf.example` - Configuration template ## Quick Start @@ -25,3 +27,37 @@ cp 
automation/pgtools.conf.example automation/pgtools.conf ``` For detailed usage and configuration options, please refer to the complete documentation linked above. + +## Verification commands + +Run these before committing changes to automation scripts or HOT reporting logic: + +```bash +# Quick sanity check (connection, syntax, permissions) +./automation/test_pgtools.sh --fast + +# Full automation suite with integration tests +./automation/test_pgtools.sh --full --verbose + +# Verify HOT JSON workflow +./automation/run_hot_update_report.sh --format json --database my_database --stdout + +# Verify HOT text workflow +./automation/run_hot_update_report.sh --format text --database my_database --stdout + +# Full local bundle (shellcheck + automation + HOT) +./scripts/precommit_checks.sh --database my_database +``` + +## Connection configuration + +Most automation scripts, including `run_hot_update_report.sh`, source `automation/pgtools.conf` for their database settings. + +1. Copy the template: `cp automation/pgtools.conf.example automation/pgtools.conf`. +2. Populate standard libpq variables (PGHOST, PGPORT, PGUSER, PGDATABASE, optional PGPASSWORD or ~/.pgpass). +3. Override as needed: + - Command-line flags have highest priority (`--database analytics`). + - Environment variables (e.g., `PGHOST=staging-db`) override the config. + - Values in `pgtools.conf` act as defaults when nothing else is provided. + +This precedence keeps existing automation jobs stable while still letting ad-hoc runs target alternate servers or databases. 
diff --git a/automation/cleanup_reports.sh b/automation/cleanup_reports.sh index 9ea1197..e773bf0 100755 --- a/automation/cleanup_reports.sh +++ b/automation/cleanup_reports.sh @@ -31,6 +31,8 @@ COMPRESS_OLD="true" # Load configuration CONFIG_FILE="$SCRIPT_DIR/pgtools.conf" if [[ -f "$CONFIG_FILE" ]]; then + # shellcheck disable=SC1091 + # shellcheck source=pgtools.conf source "$CONFIG_FILE" KEEP_DAYS="${PGTOOLS_KEEP_REPORTS_DAYS:-$KEEP_DAYS}" fi @@ -180,7 +182,8 @@ clean_directory() { while IFS= read -r -d '' file; do ((file_count++)) if command -v stat > /dev/null 2>&1; then - local size=$(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null || echo "0") + local size + size=$(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null || echo "0") total_size=$((total_size + size)) fi @@ -258,12 +261,14 @@ clean_cron_logs() { # Keep only last 1000 lines of cron log if [[ "$DRY_RUN" == "true" ]]; then - local current_lines=$(wc -l < "$cron_log") + local current_lines + current_lines=$(wc -l < "$cron_log") if [[ "$current_lines" -gt 1000 ]]; then log "Would truncate cron.log (currently $current_lines lines)" fi else - local temp_log=$(mktemp) + local temp_log + temp_log=$(mktemp) tail -1000 "$cron_log" > "$temp_log" && mv "$temp_log" "$cron_log" if [[ "$VERBOSE" == "true" ]]; then log "Truncated cron.log to last 1000 lines" diff --git a/automation/export_metrics.sh b/automation/export_metrics.sh index 4462392..5eb711a 100755 --- a/automation/export_metrics.sh +++ b/automation/export_metrics.sh @@ -8,7 +8,6 @@ set -euo pipefail SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -PGTOOLS_ROOT="$(dirname "$SCRIPT_DIR")" # Color codes RED='\033[0;31m' @@ -68,6 +67,8 @@ EOF # Load configuration CONFIG_FILE="$SCRIPT_DIR/pgtools.conf" if [[ -f "$CONFIG_FILE" ]]; then + # shellcheck disable=SC1091 + # shellcheck source=pgtools.conf source "$CONFIG_FILE" fi @@ -129,7 +130,8 @@ check_database_connection() { # Collect basic metrics collect_metrics() { - 
local temp_file=$(mktemp) + local temp_file + temp_file=$(mktemp) # Basic database metrics psql -t -c " @@ -254,7 +256,8 @@ format_json() { # Format metrics for InfluxDB format_influx() { local metrics_file="$1" - local timestamp=$(date +%s)000000000 # nanoseconds + local timestamp + timestamp=$(date +%s)000000000 # nanoseconds while IFS=$'\t' read -r metric value; do if [[ -n "$metric" && -n "$value" ]]; then diff --git a/automation/pgtools_health_check.sh b/automation/pgtools_health_check.sh index f6f2f55..0a6542f 100755 --- a/automation/pgtools_health_check.sh +++ b/automation/pgtools_health_check.sh @@ -152,6 +152,8 @@ mkdir -p "$OUTPUT_DIR" # Load configuration if available if [[ -f "$CONFIG_FILE" ]]; then log "Loading configuration from $CONFIG_FILE" + # shellcheck disable=SC1091 + # shellcheck source=pgtools.conf source "$CONFIG_FILE" else warn "Configuration file not found: $CONFIG_FILE" @@ -211,6 +213,7 @@ TIMESTAMP=$(date '+%Y%m%d_%H%M%S') REPORT_PREFIX="pgtools_health_check_${TIMESTAMP}" # Define monitoring scripts to run +# shellcheck disable=SC2034 # referenced via nameref when selecting script set declare -A ESSENTIAL_SCRIPTS=( ["Connection Analysis"]="monitoring/connection_pools.sql" ["Lock Analysis"]="monitoring/locks.sql" @@ -219,6 +222,7 @@ declare -A ESSENTIAL_SCRIPTS=( ["Backup Validation"]="backup/backup_validation.sql" ) +# shellcheck disable=SC2034 # referenced via nameref when selecting script set declare -A FULL_SCRIPTS=( ["Table Bloating"]="monitoring/bloating.sql" ["Buffer Performance"]="monitoring/buffer_troubleshoot.sql" @@ -240,12 +244,19 @@ run_health_checks() { scripts_to_run="ESSENTIAL_SCRIPTS" else log "Running full health check" - scripts_to_run="FULL_SCRIPTS" - - # Add essential scripts to full run + # shellcheck disable=SC2034 # referenced through nameref + local -A combined_scripts=() + local key + + for key in "${!FULL_SCRIPTS[@]}"; do + # shellcheck disable=SC2034 + combined_scripts["$key"]="${FULL_SCRIPTS[$key]}" + done + # 
shellcheck disable=SC2034 for key in "${!ESSENTIAL_SCRIPTS[@]}"; do - FULL_SCRIPTS["$key"]="${ESSENTIAL_SCRIPTS[$key]}" + combined_scripts["$key"]="${ESSENTIAL_SCRIPTS[$key]}" done + scripts_to_run="combined_scripts" fi local -n scripts_ref=$scripts_to_run @@ -258,7 +269,8 @@ run_health_checks() { # Create individual output files for script_name in "${!scripts_ref[@]}"; do local script_path="${PGTOOLS_ROOT}/${scripts_ref[$script_name]}" - local output_file="${OUTPUT_DIR}/${REPORT_PREFIX}_$(echo "$script_name" | tr ' ' '_' | tr '[:upper:]' '[:lower:]').txt" + local output_file + output_file="${OUTPUT_DIR}/${REPORT_PREFIX}_$(echo "$script_name" | tr ' ' '_' | tr '[:upper:]' '[:lower:]').txt" if [[ -f "$script_path" ]]; then if run_script "$script_path" "$script_name" "$output_file"; then @@ -318,12 +330,14 @@ EOF # Append individual script outputs for output_file in "${OUTPUT_DIR}/${REPORT_PREFIX}"_*.txt; do if [[ -f "$output_file" ]]; then - echo "--- $(basename "$output_file" .txt | sed 's/^.*_//; s/_/ /g') ---" >> "$report_file" - echo >> "$report_file" - cat "$output_file" >> "$report_file" - echo >> "$report_file" - echo "=============================================================================" >> "$report_file" - echo >> "$report_file" + { + echo "--- $(basename "$output_file" .txt | sed 's/^.*_//; s/_/ /g') ---" + echo + cat "$output_file" + echo + echo "=============================================================================" + echo + } >> "$report_file" fi done } @@ -360,11 +374,12 @@ EOF # Process individual script outputs for output_file in "${OUTPUT_DIR}/${REPORT_PREFIX}"_*.txt; do if [[ -f "$output_file" ]]; then - local section_name=$(basename "$output_file" .txt | sed 's/^.*_//; s/_/ /g') + local section_name + section_name=$(basename "$output_file" .txt | sed 's/^.*_//; s/_/ /g') cat >> "$report_file" << EOF
"
- cat "$temp_output" | sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g'
+ sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g' "$temp_output"
echo ""
echo "