diff --git a/.gitattributes b/.gitattributes index 94e0086..5aeb6cb 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,4 @@ +SECURITY.md linguist-generated=true pk_generated_parent.pom linguist-generated=true dependencies.md linguist-generated=true doc/changes/changelog.md linguist-generated=true @@ -6,8 +7,6 @@ doc/changes/changelog.md linguist-genera .github/workflows/ci-build-next-java.yml linguist-generated=true .github/workflows/dependencies_check.yml linguist-generated=true .github/workflows/dependencies_update.yml linguist-generated=true -.github/workflows/release_droid_print_quick_checksum.yml linguist-generated=true -.github/workflows/release_droid_upload_github_release_assets.yml linguist-generated=true -.github/workflows/release_droid_prepare_original_checksum.yml linguist-generated=true +.github/workflows/release.yml linguist-generated=true .settings/org.eclipse.jdt.core.prefs linguist-generated=true .settings/org.eclipse.jdt.ui.prefs linguist-generated=true diff --git a/.github/workflows/broken_links_checker.yml b/.github/workflows/broken_links_checker.yml index 0fbcad5..09e4bac 100644 --- a/.github/workflows/broken_links_checker.yml +++ b/.github/workflows/broken_links_checker.yml @@ -1,34 +1,44 @@ -# Generated by Project Keeper -# https://github.com/exasol/project-keeper/blob/main/project-keeper/src/main/resources/templates/.github/workflows/broken_links_checker.yml +# This file was generated by Project Keeper. name: Broken Links Checker - on: schedule: - - cron: "0 5 * * 0" - push: - branches: - - main - pull_request: - + - { + cron: 0 5 * * 0 + } + workflow_dispatch: null jobs: linkChecker: runs-on: ubuntu-latest - concurrency: - group: ${{ github.workflow }}-${{ github.ref }} + permissions: { + contents: read + } + defaults: + run: { + shell: bash + } + concurrency: { + group: '${{ github.workflow }}-${{ github.ref }}', cancel-in-progress: true + } steps: - - uses: actions/checkout@v4 - - name: Configure broken links checker + - { + id: checkout, + uses: actions/checkout@v4 + } + - id: configure-broken-links-checker + name: Configure broken links checker run: | mkdir -p ./target echo '{"aliveStatusCodes": [429, 200], "ignorePatterns": [' \ - '{"pattern": "^https?://(www|dev).mysql.com/"},' \ - '{"pattern": "^https?://(www.)?opensource.org"}' \ - '{"pattern": "^https?://(www.)?eclipse.org"}' \ - '{"pattern": "^https?://projects.eclipse.org"}' \ - ']}' > ./target/broken_links_checker.json - - uses: gaurav-nelson/github-action-markdown-link-check@v1 - with: - use-quiet-mode: "yes" - use-verbose-mode: "yes" + '{"pattern": "^https?://(www|dev).mysql.com/"},' \ + '{"pattern": "^https?://(www.)?opensource.org"}' \ + '{"pattern": "^https?://(www.)?eclipse.org"}' \ + '{"pattern": "^https?://projects.eclipse.org"}' \ + ']}' > ./target/broken_links_checker.json + - id: run-broken-links-checker + uses: gaurav-nelson/github-action-markdown-link-check@v1 + with: { + use-quiet-mode: yes, + use-verbose-mode: yes, config-file: ./target/broken_links_checker.json + } diff --git a/.github/workflows/ci-build-next-java.yml b/.github/workflows/ci-build-next-java.yml deleted file mode 100644 index e3acdb7..0000000 --- a/.github/workflows/ci-build-next-java.yml +++ /dev/null @@ -1,37 +0,0 @@ -# Generated by Project Keeper -# https://github.com/exasol/project-keeper/blob/main/project-keeper/src/main/resources/templates/.github/workflows/ci-build-next-java.yml -name: CI Build next Java -on: - push: - branches: - - main - pull_request: - -jobs: - java-17-compatibility: - runs-on: 
ubuntu-latest - concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - steps: - - name: Checkout the repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Set up JDK 17 - uses: actions/setup-java@v4 - with: - distribution: "temurin" - java-version: 17 - cache: "maven" - - name: Run tests and build with Maven - run: | - mvn --batch-mode --update-snapshots clean package -DtrimStackTrace=false \ - -Djava.version=17 \ - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn - - name: Publish Test Report for Java 17 - uses: scacap/action-surefire-report@v1 - if: ${{ always() && github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]' }} - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - fail_if_no_tests: false diff --git a/.github/workflows/ci-build.yml b/.github/workflows/ci-build.yml index 73fcdcb..239f381 100644 --- a/.github/workflows/ci-build.yml +++ b/.github/workflows/ci-build.yml @@ -1,78 +1,232 @@ -# Generated by Project Keeper -# https://github.com/exasol/project-keeper/blob/main/project-keeper/src/main/resources/templates/.github/workflows/ci-build-db-version-matrix.yml +# This file was generated by Project Keeper. name: CI Build on: push: - branches: - - main + branches: [ + main + ] + pull_request: - + types: [ + opened, + synchronize, + reopened, + ready_for_review + ] + jobs: matrix-build: - runs-on: ubuntu-20.04 - concurrency: - group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.exasol_db_version }} + runs-on: ubuntu-24.04 + defaults: + run: { + shell: bash + } + permissions: { + contents: read + } + concurrency: { + group: '${{ github.workflow }}-${{ github.ref }}-${{ matrix.exasol_db_version }}', cancel-in-progress: true + } strategy: fail-fast: false matrix: - exasol_db_version: ["7.1.25", "8.24.0"] - env: - DEFAULT_EXASOL_DB_VERSION: "7.1.25" + exasol_db_version: [ + 8.34.0, + 8.29.9 + ] + + env: { + DEFAULT_EXASOL_DB_VERSION: 8.34.0 + } steps: - name: Free Disk Space + id: free-disk-space if: ${{ false }} run: | sudo rm -rf /usr/local/lib/android sudo rm -rf /usr/share/dotnet - name: Checkout the repository + id: checkout uses: actions/checkout@v4 - with: + with: { fetch-depth: 0 + } - name: Set up JDKs + id: setup-java uses: actions/setup-java@v4 with: - distribution: "temurin" - java-version: | + distribution: temurin + java-version: |- 11 17 - cache: "maven" + cache: maven - name: Cache SonarCloud packages + id: cache-sonar uses: actions/cache@v4 - with: - path: ~/.sonar/cache - key: ${{ runner.os }}-sonar - restore-keys: ${{ runner.os }}-sonar - - name: Enable testcontainer reuse + with: { + path: ~/.sonar/cache, + key: '${{ runner.os }}-sonar', + restore-keys: '${{ runner.os }}-sonar' + } + - { + name: Enable testcontainer reuse, + id: enable-testcontainer-reuse, run: echo 'testcontainers.reuse.enable=true' > "$HOME/.testcontainers.properties" + } + - { + name: Fix VM Crash in UDFs, + id: fix-vm-crash, + run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0 + } - name: Run tests and build with Maven + id: build-pk-verify run: | mvn --batch-mode clean verify \ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ -DtrimStackTrace=false \ -Dcom.exasol.dockerdb.image=${{ matrix.exasol_db_version }} - env: - # Set additional environment variable as in scala projects the scalatest plugin does not forward - # the system property -Dcom.exasol.dockerdb.image to the test's 
implementation. - EXASOL_DB_VERSION: ${{ matrix.exasol_db_version }} - - name: Publish Test Report for Exasol ${{ matrix.exasol_db_version }} - uses: scacap/action-surefire-report@v1 - if: ${{ always() && github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]' }} - with: - github_token: ${{ secrets.GITHUB_TOKEN }} + env: { + EXASOL_DB_VERSION: '${{ matrix.exasol_db_version }}' + } - name: Sonar analysis + id: sonar-analysis if: ${{ env.SONAR_TOKEN != null && matrix.exasol_db_version == env.DEFAULT_EXASOL_DB_VERSION }} run: | mvn --batch-mode org.sonarsource.scanner.maven:sonar-maven-plugin:sonar \ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ -DtrimStackTrace=false \ -Dsonar.token=$SONAR_TOKEN - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - + env: { + GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}', + SONAR_TOKEN: '${{ secrets.SONAR_TOKEN }}' + } + - name: Verify Release Artifacts + id: verify-release-artifacts + run: "print_message() {\n local -r message=$1\n echo \"$message\"\n echo \"$message\" >> \"$GITHUB_STEP_SUMMARY\"\n}\n\nprint_message \"### Release Artifacts\"\n\nIFS=$'\\n' artifacts_array=($ARTIFACTS)\nmissing_files=()\nfor file in \"${artifacts_array[@]}\";\ndo \n echo \"Checking if file $file exists...\"\n if ! [[ -f \"$file\" ]]; then\n print_message \"* ⚠️ \\`$file\\` does not exist ⚠️\"\n echo \"Content of directory $(dirname \"$file\"):\"\n ls \"$(dirname \"$file\")\"\n missing_files+=(\"$file\")\n else\n print_message \"* \\`$file\\` ✅\" \n fi\ndone\nprint_message \"\"\nnumber_of_missing_files=${#missing_files[@]}\nif [[ $number_of_missing_files -gt 0 ]]; then\n print_message \"⚠️ $number_of_missing_files release artifact(s) missing ⚠️\"\n exit 1\nfi\n" + env: { + ARTIFACTS: '${{ steps.build-pk-verify.outputs.release-artifacts }}' + } + - name: Upload artifacts + id: upload-artifacts + uses: actions/upload-artifact@v4 + with: { + name: 'artifacts-exasol-${{ matrix.exasol_db_version }}', + path: '${{ steps.build-pk-verify.outputs.release-artifacts }}', + retention-days: 5 + } + - name: Configure broken links checker + id: configure-link-check + run: | + mkdir -p ./target + echo '{"aliveStatusCodes": [429, 200], "ignorePatterns": [' \ + '{"pattern": "^https?://(www|dev).mysql.com/"},' \ + '{"pattern": "^https?://(www.)?opensource.org"}' \ + '{"pattern": "^https?://(www.)?eclipse.org"}' \ + '{"pattern": "^https?://projects.eclipse.org"}' \ + ']}' > ./target/broken_links_checker.json + - uses: gaurav-nelson/github-action-markdown-link-check@v1 + id: run-link-check + with: { + use-quiet-mode: yes, + use-verbose-mode: yes, + config-file: ./target/broken_links_checker.json + } + next-java-compatibility: + runs-on: ubuntu-latest + defaults: + run: { + shell: bash + } + permissions: { + contents: read + } + concurrency: { + group: '${{ github.workflow }}-next-java-${{ github.ref }}', + cancel-in-progress: true + } + steps: + - name: Checkout the repository + id: checkout + uses: actions/checkout@v4 + with: { + fetch-depth: 0 + } + - name: Set up JDK 17 + id: setup-java + uses: actions/setup-java@v4 + with: { + distribution: temurin, + java-version: '17', + cache: maven + } + - { + name: Run tests and build with Maven 17, + id: build-next-java, + run: mvn --batch-mode clean package -DtrimStackTrace=false -Djava.version=17 + } build: - needs: matrix-build + needs: [ + matrix-build, + next-java-compatibility + ] runs-on: ubuntu-latest + defaults: + 
run: { + shell: bash + } + permissions: { + contents: read, + issues: read + } + outputs: { + release-required: '${{ steps.check-release.outputs.release-required }}' + } steps: - - run: echo "Build successful" + - name: Checkout the repository + id: checkout + uses: actions/checkout@v4 + with: { + fetch-depth: 0 + } + - name: Set up JDKs + id: setup-java + uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: |- + 11 + 17 + cache: maven + - name: Check if release is needed + id: check-release + if: ${{ github.ref == 'refs/heads/main' }} + run: | + if mvn --batch-mode com.exasol:project-keeper-maven-plugin:verify-release --projects .; then + echo "### ✅ Release preconditions met, start release" >> "$GITHUB_STEP_SUMMARY" + echo "release-required=true" >> "$GITHUB_OUTPUT" + else + echo "### 🛑 Not all release preconditions met, skipping release" >> "$GITHUB_STEP_SUMMARY" + echo "See log output for details." >> "$GITHUB_STEP_SUMMARY" + echo "release-required=false" >> "$GITHUB_OUTPUT" + fi + env: { + GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}' + } + start_release: + needs: build + if: ${{ github.ref == 'refs/heads/main' && needs.build.outputs.release-required == 'true' }} + concurrency: { + cancel-in-progress: false, + group: release + } + secrets: inherit + permissions: { + contents: write, + actions: read, + issues: read + } + uses: ./.github/workflows/release.yml + with: { + started-from-ci: true + } diff --git a/.github/workflows/dependencies_check.yml b/.github/workflows/dependencies_check.yml index 61b3baf..0832e80 100644 --- a/.github/workflows/dependencies_check.yml +++ b/.github/workflows/dependencies_check.yml @@ -1,62 +1,80 @@ -# Generated by Project Keeper -# https://github.com/exasol/project-keeper/blob/main/project-keeper/src/main/resources/templates/.github/workflows/dependencies_check.yml -name: Report Security Issues for Repository +# This file was generated by Project Keeper. 
+name: Report Security Issues on: - workflow_dispatch: + workflow_dispatch: null schedule: - - cron: "0 2 * * *" - + - { + cron: 0 2 * * * + } jobs: report_security_issues: runs-on: ubuntu-latest - permissions: - contents: read + defaults: + run: { + shell: bash + } + permissions: { + contents: read, issues: write - outputs: - created-issues: ${{ steps.security-issues.outputs.created-issues }} + } + outputs: { + created-issues: '${{ steps.security-issues.outputs.created-issues }}' + } + concurrency: { + group: '${{ github.workflow }}-report_security_issues', + cancel-in-progress: true + } steps: - - uses: actions/checkout@v4 - + - { + name: Checkout, + id: checkout, + uses: actions/checkout@v4 + } - name: Set up JDKs + id: setup-jdks uses: actions/setup-java@v4 with: - distribution: "temurin" - java-version: | + distribution: temurin + java-version: |- 11 17 - cache: "maven" - + cache: maven - name: Generate ossindex report + id: ossindex-report run: | mvn --batch-mode org.sonatype.ossindex.maven:ossindex-maven-plugin:audit \ org.sonatype.ossindex.maven:ossindex-maven-plugin:audit-aggregate \ -Dossindex.reportFile=$(pwd)/ossindex-report.json \ -Dossindex.fail=false - - - name: Report Security Issues - id: security-issues - uses: exasol/python-toolbox/.github/actions/security-issues@main - with: - format: "maven" - command: "cat ossindex-report.json" - github-token: ${{ secrets.GITHUB_TOKEN }} - + - name: Create GitHub Issues + id: create-security-issues + uses: exasol/python-toolbox/.github/actions/security-issues@1.1.0 + with: { + format: maven, + command: cat ossindex-report.json, + github-token: '${{ secrets.GITHUB_TOKEN }}' + } - name: Output security issues (Debugging) + id: debug-print-security-issues run: | echo "$CREATED_ISSUES" > test.jsonl cat test.jsonl - env: - CREATED_ISSUES: ${{ steps.security-issues.outputs.created-issues }} - + env: { + CREATED_ISSUES: '${{ steps.security-issues.outputs.created-issues }}' + } start_dependency_udpate: needs: report_security_issues if: ${{ needs.report_security_issues.outputs.created-issues }} - concurrency: - cancel-in-progress: true - group: "dependency_update" - permissions: - contents: write + concurrency: { + group: '${{ github.workflow }}-start_dependency_update', + cancel-in-progress: false + } + secrets: inherit + permissions: { + contents: write, pull-requests: write + } uses: ./.github/workflows/dependencies_update.yml - with: - vulnerability_issues: ${{ needs.report_security_issues.outputs.created-issues }} + with: { + vulnerability_issues: '${{ needs.report_security_issues.outputs.created-issues }}' + } diff --git a/.github/workflows/dependencies_update.yml b/.github/workflows/dependencies_update.yml index d19b11c..c901506 100644 --- a/.github/workflows/dependencies_update.yml +++ b/.github/workflows/dependencies_update.yml @@ -1,103 +1,176 @@ -# Generated by Project Keeper -# https://github.com/exasol/project-keeper/blob/main/project-keeper/src/main/resources/templates/.github/workflows/dependencies_update.yml +# This file was generated by Project Keeper. 
name: Update dependencies on: workflow_call: inputs: - vulnerability_issues: - description: "GitHub issues for vulnerable dependencies as JSONL" - required: false + vulnerability_issues: { + description: GitHub issues for vulnerable dependencies as JSONL, + required: true, type: string - workflow_dispatch: - + } + workflow_dispatch: null jobs: update_dependencies: runs-on: ubuntu-latest - permissions: - contents: write + defaults: + run: { + shell: bash + } + permissions: { + contents: write, pull-requests: write - + } + concurrency: { + group: '${{ github.workflow }}', + cancel-in-progress: false + } steps: - uses: actions/checkout@v4 - + id: checkout + with: { + fetch-depth: 0 + } - name: Set up JDKs + id: setup-jdks uses: actions/setup-java@v4 with: - distribution: "temurin" - java-version: | + distribution: temurin + java-version: |- 11 17 - cache: "maven" - + cache: maven - name: Print issues + id: debug-print-issues run: | echo "Issues from Action input: $ISSUES" - env: - ISSUES: ${{ inputs.vulnerability_issues }} - + env: { + ISSUES: '${{ inputs.vulnerability_issues }}' + } - name: Fail if not running on a branch + id: check-branch if: ${{ !startsWith(github.ref, 'refs/heads/') }} uses: actions/github-script@v7 with: script: | core.setFailed('Not running on a branch, github.ref is ${{ github.ref }}. Please start this workflow only on main or a branch') - - name: Update dependencies + id: update-dependencies run: | mvn --batch-mode com.exasol:project-keeper-maven-plugin:update-dependencies --projects . \ -Dproject-keeper:vulnerabilities="$CREATED_ISSUES" - env: - CREATED_ISSUES: ${{ inputs.vulnerability_issues }} - - - name: Project Keeper Fix - run: | - mvn --batch-mode com.exasol:project-keeper-maven-plugin:fix --projects . - - - name: Project Keeper Fix for updated Project Keeper version - # Calling PK fix a second time is necessary because the first invocation potentially updated PK itself. - # So we need to run PK fix again with the latest PK version. - run: | - mvn --batch-mode com.exasol:project-keeper-maven-plugin:fix --projects . - - - name: Generate PR comment + env: { + CREATED_ISSUES: '${{ inputs.vulnerability_issues }}' + } + - name: Generate Pull Request comment id: pr-comment run: | echo 'comment<<EOF' >> "$GITHUB_OUTPUT" - echo 'This Pull Request was created by `dependencies_update.yml` workflow' >> "$GITHUB_OUTPUT" - echo $CREATED_ISSUES | jq --raw-output '. | "Closes " + .issue_url + " (" + .cve + ")"' >> "$GITHUB_OUTPUT" + echo 'This Pull Request was created by [`dependencies_update.yml`](https://github.com/exasol/project-keeper/blob/main/project-keeper/src/main/resources/templates/.github/workflows/dependencies_update.yml) workflow.' >> "$GITHUB_OUTPUT" + if [ -n "$CREATED_ISSUES" ]; then + echo 'It updates dependencies to fix the following vulnerabilities:' >> "$GITHUB_OUTPUT" + echo $CREATED_ISSUES | jq --raw-output '. | "* Closes " + .issue_url + " (" + .cve + ")"' >> "$GITHUB_OUTPUT" + else + echo 'It updates dependencies.' >> "$GITHUB_OUTPUT" + fi + echo >> "$GITHUB_OUTPUT" + echo '# ⚠️ Notes ⚠️' >> "$GITHUB_OUTPUT" + echo '## Run PK fix manually' >> "$GITHUB_OUTPUT" + echo 'Due to restrictions workflow `dependencies_update.yml` cannot update other workflows, see https://github.com/exasol/project-keeper/issues/578 for details.'
>> "$GITHUB_OUTPUT" + echo 'Please checkout this PR locally and run `mvn com.exasol:project-keeper-maven-plugin:fix --projects .`' >> "$GITHUB_OUTPUT" + echo '## This PR does not trigger CI workflows' >> "$GITHUB_OUTPUT" + echo 'Please click the **Close pull request** button and then **Reopen pull request** to trigger running checks.' >> "$GITHUB_OUTPUT" + echo 'See https://github.com/exasol/project-keeper/issues/534 for details.' >> "$GITHUB_OUTPUT" echo 'EOF' >> "$GITHUB_OUTPUT" - env: - CREATED_ISSUES: ${{ inputs.vulnerability_issues }} + cat "$GITHUB_OUTPUT" + env: { + CREATED_ISSUES: '${{ inputs.vulnerability_issues }}' + } + - name: Generate Pull Request Title + id: pr-title + run: | + if [ -n "$CREATED_ISSUES" ]; then + echo "Security issues are available" + echo "title=🔐 Update dependencies to fix vulnerabilities" >> "$GITHUB_OUTPUT" + else + echo "Security issues are not available" + echo "title=Update dependencies" >> "$GITHUB_OUTPUT" + fi + + cat "$GITHUB_OUTPUT" + env: { + CREATED_ISSUES: '${{ inputs.vulnerability_issues }}' + } - name: Configure git + id: configure-git run: | git config --global user.email "opensource@exasol.com" git config --global user.name "Automatic Dependency Updater" - - name: Create branch + id: create-branch if: ${{ github.ref == 'refs/heads/main' }} run: | branch_name="dependency-update/$(date "+%Y%m%d%H%M%S")" echo "Creating branch $branch_name" git checkout -b "$branch_name" - - name: Commit changes & push + id: publish-branch if: ${{ startsWith(github.ref, 'refs/heads/' ) }} run: | branch_name=$(git rev-parse --abbrev-ref HEAD) - echo "Current branch: $branch_name, local changes:" + echo "Current branch: $branch_name" + echo "git diff --stat" git diff --stat + echo "git diff --numstat" git diff --numstat + echo "git diff --name-status" + git diff --name-status + echo "Adding untracked files:" + git add . --verbose --all echo "Committing changes..." - git commit --all --message "Update dependencies" + git commit --message "$TITLE" echo "Pushing branch $branch_name..." - git push --set-upstream origin $branch_name + git push --set-upstream origin "$branch_name" echo "Done." 
- + env: { + TITLE: '${{ steps.pr-title.outputs.title }}' + } - name: Create pull request + id: create-pr if: ${{ github.ref == 'refs/heads/main' }} run: | - gh pr create --base main --title "Update dependencies" --body "$COMMENT" - env: - COMMENT: ${{ steps.pr-comment.outputs.comment }} - GH_TOKEN: ${{ github.token }} + pr_url=$(gh pr create --base main --title "$TITLE" --body "$COMMENT") + echo "Created Pull Request: $pr_url" + echo "pr_url=$pr_url" >> "$GITHUB_OUTPUT" + env: { + COMMENT: '${{ steps.pr-comment.outputs.comment }}', + TITLE: '${{ steps.pr-title.outputs.title }}', + GH_TOKEN: '${{ github.token }}' + } + - name: Report failure Status to Slack channel + id: report-failure-slack + if: ${{ always() }} + uses: ravsamhq/notify-slack-action@v2 + with: { + status: '${{ job.status }}', + token: '${{ secrets.GITHUB_TOKEN }}', + notification_title: 'Dependency check in {repo} has {status_message}', + message_format: '{emoji} *{workflow}* {status_message} in <{repo_url}|{repo}>', + notify_when: 'failure,cancelled,warnings' + } + env: { + SLACK_WEBHOOK_URL: '${{ secrets.INTEGRATION_TEAM_SLACK_NOTIFICATION_WEBHOOK }}' + } + - name: Report new Pull Request to Slack channel + id: report-pr-slack + if: ${{ steps.create-pr.outputs.pr_url }} + uses: ravsamhq/notify-slack-action@v2 + with: { + status: '${{ job.status }}', + token: '${{ secrets.GITHUB_TOKEN }}', + notification_title: 'Dependency update for {repo} created a Pull Request', + message_format: '{workflow} created Pull Request ${{ steps.create-pr.outputs.pr_url }}' + } + env: { + SLACK_WEBHOOK_URL: '${{ secrets.INTEGRATION_TEAM_SLACK_NOTIFICATION_WEBHOOK }}' + } diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..750fe45 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,219 @@ +# This file was generated by Project Keeper. +name: Release +on: + workflow_call: + inputs: + started-from-ci: { + description: 'Marks this release as started from CI, skipping precondition check', + type: boolean, + required: true, + default: false + } + workflow_dispatch: + inputs: + skip-maven-central: { + description: Skip deployment to Maven Central, + required: true, + type: boolean, + default: false + } + skip-github-release: { + description: Skip creating the GitHub release, + required: true, + type: boolean, + default: false + } +jobs: + release: + runs-on: ubuntu-latest + defaults: + run: { + shell: bash + } + concurrency: { + group: '${{ github.workflow }}', + cancel-in-progress: false + } + permissions: { + contents: write, + actions: read, + issues: read + } + steps: + - name: Checkout the repository + id: checkout + uses: actions/checkout@v4 + with: { + fetch-depth: 0 + } + - name: Set up Maven Central Repository + id: configure-maven-central-credentials + if: ${{ false }} + uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: |- + 11 + 17 + cache: maven + server-id: ossrh + server-username: MAVEN_USERNAME + server-password: MAVEN_PASSWORD + gpg-private-key: ${{ secrets.OSSRH_GPG_SECRET_KEY }} + gpg-passphrase: MAVEN_GPG_PASSPHRASE + - name: Set up JDKs + id: setup-jdks + if: ${{ ! false }} + uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: |- + 11 + 17 + cache: maven + - name: Fail if not running on main branch + id: check-main-branch + if: ${{ github.ref != 'refs/heads/main' }} + uses: actions/github-script@v7 + with: + script: | + core.setFailed('Not running on main branch, github.ref is ${{ github.ref }}. 
Please start this workflow only on main') + - name: Check CI build of this commit succeeded + id: check-ci-build-status + if: ${{ ! inputs.started-from-ci }} + run: | + echo "Commit SHA: $COMMIT_SHA" + gh run list --workflow ci-build.yml --branch main --event push --commit $COMMIT_SHA + ci_build_status=$(gh run list --workflow ci-build.yml --branch main --event push --commit $COMMIT_SHA --json conclusion --template '{{range .}}{{.conclusion}}{{"\n"}}{{end}}') + echo "CI build status at commit $COMMIT_SHA was '$ci_build_status'" + if [[ "$ci_build_status" != "success" ]]; then + gh run list --workflow ci-build.yml --commit $COMMIT_SHA >> $GITHUB_STEP_SUMMARY + echo "Status of CI build for commit $COMMIT_SHA was '$ci_build_status', expected 'success'" >> $GITHUB_STEP_SUMMARY + cat $GITHUB_STEP_SUMMARY + exit 1 + fi + env: { + COMMIT_SHA: '${{ github.sha }}', + GH_TOKEN: '${{ github.token }}' + } + - name: Verify release preconditions + id: verify-release + run: | + mvn --batch-mode com.exasol:project-keeper-maven-plugin:verify-release --projects . + echo "$GITHUB_OUTPUT" + env: { + GITHUB_TOKEN: '${{ github.token }}' + } + - { + name: Build project, + id: build, + run: mvn --batch-mode -DskipTests clean verify + } + - { + name: List secret GPG keys, + id: list-secret-gpg-keys, + if: '${{ false && (! inputs.skip-maven-central) }}', + run: gpg --list-secret-keys + } + - name: Publish to Central Repository + id: deploy-maven-central + if: ${{ false && (! inputs.skip-maven-central) }} + run: | + echo "#### Maven Central Release" >> "$GITHUB_STEP_SUMMARY" + mvn --batch-mode -Dgpg.skip=false -DskipTests deploy + echo "Published to Maven Central ✅" >> "$GITHUB_STEP_SUMMARY" + env: { + MAVEN_USERNAME: '${{ secrets.OSSRH_USERNAME }}', + MAVEN_PASSWORD: '${{ secrets.OSSRH_PASSWORD }}', + MAVEN_GPG_PASSPHRASE: '${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }}' + } + - name: Calculate Artifact Checksums + id: artifact-checksum + if: ${{ ! inputs.skip-github-release }} + run: | + echo "Calculating sha256 checksum for artifact files" + echo "artifacts<<EOF" >> "$GITHUB_OUTPUT" + IFS=$'\n' artifacts_array=($ARTIFACTS) + for file in "${artifacts_array[@]}"; + do + full_path=$(realpath "$file") + echo "Calculate sha256sum for file '$full_path'" + file_dir="$(dirname "$full_path")" + file_name=$(basename "$full_path") + pushd "$file_dir" + checksum_file_name="${file_name}.sha256" + sha256sum "$file_name" > "$checksum_file_name" + echo "$full_path" >> "$GITHUB_OUTPUT" + echo "${file_dir}/$checksum_file_name" >> "$GITHUB_OUTPUT" + popd + done + echo "EOF" >> "$GITHUB_OUTPUT" + echo "Full artifact file list" + cat "$GITHUB_OUTPUT" + env: { + ARTIFACTS: '${{ steps.verify-release.outputs.release-artifacts }}' + } + - name: Create GitHub Release + id: create-github-release + if: ${{ !
inputs.skip-github-release }} + run: | + echo "### GitHub Release" >> "$GITHUB_STEP_SUMMARY" + IFS=$'\n' artifacts_array=($ARTIFACTS) + echo "#### Attaching Release Artifacts" >> "$GITHUB_STEP_SUMMARY" + for file in "${artifacts_array[@]}"; + do + echo "Attaching artifact '$file'" + echo "* \`$file\`" >> "$GITHUB_STEP_SUMMARY" + done + echo "" >> "$GITHUB_STEP_SUMMARY" + release_url=$(gh release create --latest --title "$TITLE" --notes "$NOTES" --target main $TAG "${artifacts_array[@]}") + echo "Created release $TAG with title '$TITLE' at $release_url ✅" >> "$GITHUB_STEP_SUMMARY" + echo "release-url=$release_url" >> "$GITHUB_OUTPUT" + + # [impl->dsn~release-workflow.create-golang-tags~1] + echo "#### Creating Additional Tags" >> "$GITHUB_STEP_SUMMARY" + IFS=$'\n' tags_array=($ADDITIONAL_TAGS) + for tag in "${tags_array[@]}"; + do + echo "Creating tag '$tag'" + git tag "$tag" + git push origin "$tag" + echo "* \`$tag\`" >> "$GITHUB_STEP_SUMMARY" + done + + git fetch --tags origin + env: { + GH_TOKEN: '${{ github.token }}', + TAG: '${{ steps.verify-release.outputs.release-tag }}', + ADDITIONAL_TAGS: '${{ steps.verify-release.outputs.additional-release-tags }}', + NOTES: '${{ steps.verify-release.outputs.release-notes }}', + TITLE: '${{ steps.verify-release.outputs.release-title }}', + ARTIFACTS: '${{ steps.artifact-checksum.outputs.artifacts }}' + } + - name: Report failure Status to Slack channel + id: report-failure-status-slack + if: ${{ always() }} + uses: ravsamhq/notify-slack-action@v2 + with: { + status: '${{ job.status }}', + token: '${{ github.token }}', + notification_title: 'Release build in {repo} has {status_message}', + message_format: '{emoji} *{workflow}* {status_message} in <{repo_url}|{repo}>', + notify_when: 'failure,cancelled,warnings,skipped' + } + env: { + SLACK_WEBHOOK_URL: '${{ secrets.INTEGRATION_TEAM_SLACK_NOTIFICATION_WEBHOOK }}' + } + - name: Report new release to Slack channel + id: report-new-release-slack + if: ${{ steps.create-github-release.outputs.release-url }} + uses: ravsamhq/notify-slack-action@v2 + with: { + status: '${{ job.status }}', + token: '${{ github.token }}', + notification_title: 'Release build for {repo} created a new release', + message_format: '{workflow} created release ${{ steps.create-github-release.outputs.release-url }}' + } + env: { + SLACK_WEBHOOK_URL: '${{ secrets.INTEGRATION_TEAM_SLACK_NOTIFICATION_WEBHOOK }}' + } diff --git a/.github/workflows/release_droid_prepare_original_checksum.yml b/.github/workflows/release_droid_prepare_original_checksum.yml deleted file mode 100644 index 2ff28b3..0000000 --- a/.github/workflows/release_droid_prepare_original_checksum.yml +++ /dev/null @@ -1,39 +0,0 @@ -# Generated by Project Keeper -# https://github.com/exasol/project-keeper/blob/main/project-keeper/src/main/resources/templates/.github/workflows/release_droid_prepare_original_checksum.yml -name: Release Droid - Prepare Original Checksum -on: - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-20.04 - steps: - - name: Free Disk Space - if: ${{ false }} - run: | - sudo rm -rf /usr/local/lib/android - sudo rm -rf /usr/share/dotnet - - name: Checkout the repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Set up JDKs - uses: actions/setup-java@v4 - with: - distribution: "temurin" - java-version: | - 11 - 17 - cache: "maven" - - name: Enable testcontainer reuse - run: echo 'testcontainers.reuse.enable=true' > "$HOME/.testcontainers.properties" - - name: Run tests and build with Maven - run: mvn --batch-mode clean 
verify --file pom.xml - - name: Prepare checksum - run: find target -maxdepth 1 -name *.jar -exec sha256sum "{}" + > original_checksum - - name: Upload checksum to the artifactory - uses: actions/upload-artifact@v4 - with: - name: original_checksum - retention-days: 5 - path: original_checksum diff --git a/.github/workflows/release_droid_print_quick_checksum.yml b/.github/workflows/release_droid_print_quick_checksum.yml deleted file mode 100644 index 86979cd..0000000 --- a/.github/workflows/release_droid_print_quick_checksum.yml +++ /dev/null @@ -1,26 +0,0 @@ -# Generated by Project Keeper -# https://github.com/exasol/project-keeper/blob/main/project-keeper/src/main/resources/templates/.github/workflows/release_droid_print_quick_checksum.yml -name: Release Droid - Print Quick Checksum -on: - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout the repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Set up JDKs - uses: actions/setup-java@v4 - with: - distribution: "temurin" - java-version: | - 11 - 17 - cache: "maven" - - name: Build with Maven skipping tests - run: mvn --batch-mode clean verify -DskipTests - - name: Print checksum - run: echo 'checksum_start==';find target -maxdepth 1 -name *.jar -exec sha256sum "{}" + | xargs;echo '==checksum_end' diff --git a/.github/workflows/release_droid_upload_github_release_assets.yml b/.github/workflows/release_droid_upload_github_release_assets.yml deleted file mode 100644 index b19f7cf..0000000 --- a/.github/workflows/release_droid_upload_github_release_assets.yml +++ /dev/null @@ -1,47 +0,0 @@ -# Generated by Project Keeper -# https://github.com/exasol/project-keeper/blob/main/project-keeper/src/main/resources/templates/.github/workflows/release_droid_upload_github_release_assets.yml -name: Release Droid - Upload GitHub Release Assets -on: - workflow_dispatch: - inputs: - upload_url: - description: "Assets upload URL" - required: true - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout the repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Set up JDKs - uses: actions/setup-java@v4 - with: - distribution: "temurin" - java-version: | - 11 - 17 - cache: "maven" - - name: Build with Maven skipping tests - run: mvn --batch-mode clean verify -DskipTests - - name: Generate sha256sum files - run: | - cd target - find . -maxdepth 1 -name \*.jar -exec bash -c 'sha256sum {} > {}.sha256' \; - - name: Upload assets to the GitHub release draft - uses: shogo82148/actions-upload-release-asset@v1 - with: - upload_url: ${{ github.event.inputs.upload_url }} - asset_path: target/*.jar - - name: Upload sha256sum files - uses: shogo82148/actions-upload-release-asset@v1 - with: - upload_url: ${{ github.event.inputs.upload_url }} - asset_path: target/*.sha256 - - name: Upload error-code-report - uses: shogo82148/actions-upload-release-asset@v1 - with: - upload_url: ${{ github.event.inputs.upload_url }} - asset_path: target/error_code_report.json diff --git a/.project-keeper.yml b/.project-keeper.yml index 7ae66b2..aa0fe9e 100644 --- a/.project-keeper.yml +++ b/.project-keeper.yml @@ -8,10 +8,22 @@ sources: version: fromSource: pom.xml build: - runnerOs: ubuntu-20.04 + runnerOs: ubuntu-24.04 freeDiskSpace: false exasolDbVersions: - - "7.1.25" - - "8.24.0" + - "8.34.0" + - "8.29.9" # Added to test support for TIMESTAMP without nano precision. 
Remove if/when 7.1 is re-enabled + # - "7.1.30" # Exasol 7.1.x does not support the latest Ubuntu version + workflows: + - name: ci-build.yml + stepCustomizations: + - action: INSERT_AFTER + job: matrix-build + stepId: enable-testcontainer-reuse + content: + name: Fix VM Crash in UDFs + id: fix-vm-crash + run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0 + linkReplacements: - - "https://jdbc.postgresql.org/about/license.html|https://jdbc.postgresql.org/license/" \ No newline at end of file + - "https://jdbc.postgresql.org/about/license.html|https://jdbc.postgresql.org/license/" diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs index bb40c3f..7644ed3 100644 --- a/.settings/org.eclipse.jdt.core.prefs +++ b/.settings/org.eclipse.jdt.core.prefs @@ -1,15 +1,19 @@ eclipse.preferences.version=1 +org.eclipse.jdt.core.builder.annotationPath.allLocations=disabled org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=disabled org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore org.eclipse.jdt.core.compiler.annotation.nonnull=org.eclipse.jdt.annotation.NonNull org.eclipse.jdt.core.compiler.annotation.nonnull.secondary= org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annotation.NonNullByDefault org.eclipse.jdt.core.compiler.annotation.nonnullbydefault.secondary= +org.eclipse.jdt.core.compiler.annotation.notowning=org.eclipse.jdt.annotation.NotOwning org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable org.eclipse.jdt.core.compiler.annotation.nullable.secondary= org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled +org.eclipse.jdt.core.compiler.annotation.owning=org.eclipse.jdt.annotation.Owning +org.eclipse.jdt.core.compiler.annotation.resourceanalysis=disabled org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled -org.eclipse.jdt.core.compiler.codegen.methodParameters=do not generate +org.eclipse.jdt.core.compiler.codegen.methodParameters=generate org.eclipse.jdt.core.compiler.codegen.targetPlatform=11 org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve org.eclipse.jdt.core.compiler.compliance=11 @@ -17,6 +21,7 @@ org.eclipse.jdt.core.compiler.debug.lineNumber=generate org.eclipse.jdt.core.compiler.debug.localVariable=generate org.eclipse.jdt.core.compiler.debug.sourceFile=generate org.eclipse.jdt.core.compiler.problem.APILeak=warning +org.eclipse.jdt.core.compiler.problem.annotatedTypeArgumentToUnannotated=info org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning org.eclipse.jdt.core.compiler.problem.assertIdentifier=error org.eclipse.jdt.core.compiler.problem.autoboxing=ignore @@ -39,8 +44,10 @@ org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=warning org.eclipse.jdt.core.compiler.problem.includeNullInfoFromAsserts=disabled org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning +org.eclipse.jdt.core.compiler.problem.incompatibleOwningContract=warning org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=warning org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore +org.eclipse.jdt.core.compiler.problem.insufficientResourceAnalysis=warning org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=warning org.eclipse.jdt.core.compiler.problem.missingDefaultCase=ignore @@ -56,15 +63,15 @@ 
org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=warning org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning org.eclipse.jdt.core.compiler.problem.nonnullTypeVariableFromLegacyInvocation=warning -org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=error +org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=warning org.eclipse.jdt.core.compiler.problem.nullReference=warning -org.eclipse.jdt.core.compiler.problem.nullSpecViolation=error -org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning +org.eclipse.jdt.core.compiler.problem.nullSpecViolation=warning +org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=ignore org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore org.eclipse.jdt.core.compiler.problem.pessimisticNullAnalysisForFreeTypeVariables=warning org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore -org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore +org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning org.eclipse.jdt.core.compiler.problem.potentiallyUnclosedCloseable=ignore org.eclipse.jdt.core.compiler.problem.rawTypeReference=warning org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=warning @@ -78,7 +85,8 @@ org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=warning org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled -org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=disabled +org.eclipse.jdt.core.compiler.problem.suppressWarningsNotFullyAnalysed=info +org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=enabled org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore org.eclipse.jdt.core.compiler.problem.terminalDeprecation=warning org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning @@ -233,7 +241,7 @@ org.eclipse.jdt.core.formatter.indent_empty_lines=false org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true -org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false +org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true org.eclipse.jdt.core.formatter.indentation.size=4 org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant=insert org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field=insert @@ -439,7 +447,7 @@ org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constan org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert org.eclipse.jdt.core.formatter.join_lines_in_comments=true -org.eclipse.jdt.core.formatter.join_wrapped_lines=true +org.eclipse.jdt.core.formatter.join_wrapped_lines=false org.eclipse.jdt.core.formatter.keep_annotation_declaration_on_one_line=one_line_never org.eclipse.jdt.core.formatter.keep_anonymous_type_declaration_on_one_line=one_line_never 
org.eclipse.jdt.core.formatter.keep_code_block_on_one_line=one_line_never diff --git a/.settings/org.eclipse.jdt.ui.prefs b/.settings/org.eclipse.jdt.ui.prefs index 1add06a..54d02ac 100644 --- a/.settings/org.eclipse.jdt.ui.prefs +++ b/.settings/org.eclipse.jdt.ui.prefs @@ -76,6 +76,7 @@ sp_cleanup.add_missing_nls_tags=false sp_cleanup.add_missing_override_annotations=true sp_cleanup.add_missing_override_annotations_interface_methods=true sp_cleanup.add_serial_version_id=false +sp_cleanup.also_simplify_lambda=false sp_cleanup.always_use_blocks=true sp_cleanup.always_use_parentheses_in_expressions=true sp_cleanup.always_use_this_for_non_static_field_access=true @@ -130,6 +131,7 @@ sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false sp_cleanup.operand_factorization=false sp_cleanup.organize_imports=true sp_cleanup.overridden_assignment=false +sp_cleanup.overridden_assignment_move_decl=false sp_cleanup.plain_replacement=false sp_cleanup.precompile_regex=false sp_cleanup.primitive_comparison=false @@ -159,10 +161,12 @@ sp_cleanup.remove_unnecessary_casts=true sp_cleanup.remove_unnecessary_nls_tags=true sp_cleanup.remove_unused_imports=true sp_cleanup.remove_unused_local_variables=false +sp_cleanup.remove_unused_method_parameters=false sp_cleanup.remove_unused_private_fields=true sp_cleanup.remove_unused_private_members=false sp_cleanup.remove_unused_private_methods=true sp_cleanup.remove_unused_private_types=true +sp_cleanup.replace_deprecated_calls=false sp_cleanup.return_expression=false sp_cleanup.simplify_lambda_expression_and_method_ref=false sp_cleanup.single_used_field=false @@ -174,6 +178,8 @@ sp_cleanup.strictly_equal_or_different=false sp_cleanup.stringbuffer_to_stringbuilder=false sp_cleanup.stringbuilder=false sp_cleanup.stringbuilder_for_local_vars=false +sp_cleanup.stringconcat_stringbuffer_stringbuilder=false +sp_cleanup.stringconcat_to_textblock=false sp_cleanup.substring=false sp_cleanup.switch=false sp_cleanup.system_property=false diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..464c1ea --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,11 @@ +{ + "configurations": [ + { + "type": "java", + "name": "Generate capability report", + "request": "launch", + "mainClass": "com.exasol.adapter.dialects.postgresql.docgeneration.PostgresAutogeneratedResourceVerifier", + "vmArgs": "-DprojectDir=${workspaceFolder} -DfixAutogeneratedResources=true" + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json index 3bfac9a..2c5a293 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,21 +1,26 @@ { - "editor.formatOnSave": true, - "editor.codeActionsOnSave": { - "source.organizeImports": true, - "source.generate.finalModifiers": true, - "source.fixAll": true - }, - "java.codeGeneration.useBlocks": true, - "java.saveActions.organizeImports": true, - "java.sources.organizeImports.starThreshold": 3, - "java.sources.organizeImports.staticStarThreshold": 3, - "java.test.config": { - "vmArgs": [ - "-Djava.util.logging.config.file=src/test/resources/logging.properties" - ] - }, - "sonarlint.connectedMode.project": { - "connectionId": "exasol", - "projectKey": "com.exasol:postgresql-virtual-schema" - } + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": "explicit", + "source.generate.finalModifiers": "explicit", + "source.fixAll": "explicit" + }, + "java.codeGeneration.useBlocks": true, + "java.saveActions.organizeImports": true, + 
"java.sources.organizeImports.starThreshold": 3, + "java.sources.organizeImports.staticStarThreshold": 3, + "java.test.config": { + "vmArgs": [ + "-Djava.util.logging.config.file=src/test/resources/logging.properties", + "-Dcom.exasol.dockerdb.image=8.34.0" + ], + "env": { + "TESTCONTAINERS_RYUK_DISABLED": "true" + } + }, + "sonarlint.connectedMode.project": { + "connectionId": "exasol", + "projectKey": "com.exasol:postgresql-virtual-schema" + }, + "java.jdt.ls.vmargs": "-XX:+UseParallelGC -XX:GCTimeRatio=4 -XX:AdaptiveSizePolicyWeight=90 -Dsun.zip.disableMemoryMapping=true -Xmx12G -Xms100m" } diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..d4616aa --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,25 @@ +# Security + +If you believe you have found a new security vulnerability in this repository, please report it to us as follows. + +## Reporting Security Issues + +* Please do **not** report security vulnerabilities through public GitHub issues. + +* Please create a draft security advisory on the Github page: the reporting form is under `> Security > Advisories`. The URL is https://github.com/exasol/postgresql-virtual-schema/security/advisories/new. + +* If you prefer to email, please send your report to `infosec@exasol.com`. + +## Guidelines + +* When reporting a vulnerability, please include as much information as possible, including the complete steps to reproduce the issue. + +* Avoid sending us executables. + +* Feel free to include any script you wrote and used but avoid sending us scripts that download and run binaries. + +* We will prioritise reports that show how the exploits work in realistic environments. + +* We prefer all communications to be in English. + +* We do not offer financial rewards. We are happy to acknowledge your research publicly when possible. 
diff --git a/dependencies.md b/dependencies.md index 48210b0..2aa1d84 100644 --- a/dependencies.md +++ b/dependencies.md @@ -14,7 +14,7 @@ | Dependency | License | | ----------------------------------------------- | ---------------------------------------------- | | [Virtual Schema Common JDBC][0] | [MIT License][1] | -| [Hamcrest][6] | [BSD License 3][7] | +| [Hamcrest][6] | [BSD-3-Clause][7] | | [JUnit Jupiter (Aggregator)][8] | [Eclipse Public License v2.0][9] | | [mockito-junit-jupiter][10] | [MIT][11] | | [Test containers for Exasol on Docker][12] | [MIT License][13] | @@ -26,31 +26,38 @@ | [Markdown Generator][22] | [The Apache Software License, Version 2.0][23] | | [Autogenerated resource verifier][24] | [MIT License][25] | | [virtual-schema-shared-integration-tests][26] | [MIT License][27] | -| [JaCoCo :: Agent][28] | [Eclipse Public License 2.0][29] | +| [SLF4J JDK14 Provider][28] | [MIT][29] | +| [JaCoCo :: Agent][30] | [EPL-2.0][31] | ## Plugin Dependencies -| Dependency | License | -| ------------------------------------------------------- | --------------------------------- | -| [SonarQube Scanner for Maven][30] | [GNU LGPL 3][31] | -| [Apache Maven Toolchains Plugin][32] | [Apache License, Version 2.0][33] | -| [Apache Maven Compiler Plugin][34] | [Apache-2.0][33] | -| [Apache Maven Enforcer Plugin][35] | [Apache-2.0][33] | -| [Maven Flatten Plugin][36] | [Apache Software Licenese][33] | -| [org.sonatype.ossindex.maven:ossindex-maven-plugin][37] | [ASL2][23] | -| [Maven Surefire Plugin][38] | [Apache-2.0][33] | -| [Versions Maven Plugin][39] | [Apache License, Version 2.0][33] | -| [duplicate-finder-maven-plugin Maven Mojo][40] | [Apache License 2.0][41] | -| [Apache Maven Assembly Plugin][42] | [Apache-2.0][33] | -| [Apache Maven JAR Plugin][43] | [Apache License, Version 2.0][33] | -| [Artifact reference checker and unifier][44] | [MIT License][45] | -| [Project Keeper Maven plugin][46] | [The MIT License][47] | -| [Apache Maven Dependency Plugin][48] | [Apache-2.0][33] | -| [Exec Maven Plugin][49] | [Apache License 2][33] | -| [Maven Failsafe Plugin][50] | [Apache-2.0][33] | -| [JaCoCo :: Maven Plugin][51] | [Eclipse Public License 2.0][29] | -| [error-code-crawler-maven-plugin][52] | [MIT License][53] | -| [Reproducible Build Maven Plugin][54] | [Apache 2.0][23] | +| Dependency | License | +| ------------------------------------------------------- | ------------------------------------------- | +| [Apache Maven Clean Plugin][32] | [Apache-2.0][33] | +| [Apache Maven Install Plugin][34] | [Apache-2.0][33] | +| [Apache Maven Resources Plugin][35] | [Apache-2.0][33] | +| [Apache Maven Site Plugin][36] | [Apache-2.0][33] | +| [SonarQube Scanner for Maven][37] | [GNU LGPL 3][38] | +| [Apache Maven Toolchains Plugin][39] | [Apache-2.0][33] | +| [Apache Maven Compiler Plugin][40] | [Apache-2.0][33] | +| [Apache Maven Enforcer Plugin][41] | [Apache-2.0][33] | +| [Maven Flatten Plugin][42] | [Apache Software Licenese][33] | +| [org.sonatype.ossindex.maven:ossindex-maven-plugin][43] | [ASL2][23] | +| [Maven Surefire Plugin][44] | [Apache-2.0][33] | +| [Versions Maven Plugin][45] | [Apache License, Version 2.0][33] | +| [duplicate-finder-maven-plugin Maven Mojo][46] | [Apache License 2.0][47] | +| [Apache Maven Artifact Plugin][48] | [Apache-2.0][33] | +| [Apache Maven Assembly Plugin][49] | [Apache-2.0][33] | +| [Apache Maven JAR Plugin][50] | [Apache-2.0][33] | +| [Artifact reference checker and unifier][51] | [MIT License][52] | +| [Project Keeper Maven plugin][53] | [The MIT 
License][54] | +| [Apache Maven Dependency Plugin][55] | [Apache-2.0][33] | +| [Exec Maven Plugin][56] | [Apache License 2][33] | +| [Maven Failsafe Plugin][57] | [Apache-2.0][33] | +| [JaCoCo :: Maven Plugin][58] | [EPL-2.0][31] | +| [Quality Summarizer Maven Plugin][59] | [MIT License][60] | +| [error-code-crawler-maven-plugin][61] | [MIT License][62] | +| [Git Commit Id Maven Plugin][63] | [GNU Lesser General Public License 3.0][64] | [0]: https://github.com/exasol/virtual-schema-common-jdbc/ [1]: https://github.com/exasol/virtual-schema-common-jdbc/blob/main/LICENSE @@ -59,7 +66,7 @@ [4]: https://jdbc.postgresql.org [5]: https://jdbc.postgresql.org/license/ [6]: http://hamcrest.org/JavaHamcrest/ -[7]: http://opensource.org/licenses/BSD-3-Clause +[7]: https://raw.githubusercontent.com/hamcrest/JavaHamcrest/master/LICENSE [8]: https://junit.org/junit5/ [9]: https://www.eclipse.org/legal/epl-v20.html [10]: https://github.com/mockito/mockito @@ -80,30 +87,40 @@ [25]: https://github.com/exasol/autogenerated-resource-verifier-java/blob/main/LICENSE [26]: https://github.com/exasol/virtual-schema-shared-integration-tests/ [27]: https://github.com/exasol/virtual-schema-shared-integration-tests/blob/main/LICENSE -[28]: https://www.eclemma.org/jacoco/index.html -[29]: https://www.eclipse.org/legal/epl-2.0/ -[30]: http://sonarsource.github.io/sonar-scanner-maven/ -[31]: http://www.gnu.org/licenses/lgpl.txt -[32]: https://maven.apache.org/plugins/maven-toolchains-plugin/ +[28]: http://www.slf4j.org +[29]: https://opensource.org/license/mit +[30]: https://www.eclemma.org/jacoco/index.html +[31]: https://www.eclipse.org/legal/epl-2.0/ +[32]: https://maven.apache.org/plugins/maven-clean-plugin/ [33]: https://www.apache.org/licenses/LICENSE-2.0.txt -[34]: https://maven.apache.org/plugins/maven-compiler-plugin/ -[35]: https://maven.apache.org/enforcer/maven-enforcer-plugin/ -[36]: https://www.mojohaus.org/flatten-maven-plugin/ -[37]: https://sonatype.github.io/ossindex-maven/maven-plugin/ -[38]: https://maven.apache.org/surefire/maven-surefire-plugin/ -[39]: https://www.mojohaus.org/versions/versions-maven-plugin/ -[40]: https://basepom.github.io/duplicate-finder-maven-plugin -[41]: http://www.apache.org/licenses/LICENSE-2.0.html -[42]: https://maven.apache.org/plugins/maven-assembly-plugin/ -[43]: https://maven.apache.org/plugins/maven-jar-plugin/ -[44]: https://github.com/exasol/artifact-reference-checker-maven-plugin/ -[45]: https://github.com/exasol/artifact-reference-checker-maven-plugin/blob/main/LICENSE -[46]: https://github.com/exasol/project-keeper/ -[47]: https://github.com/exasol/project-keeper/blob/main/LICENSE -[48]: https://maven.apache.org/plugins/maven-dependency-plugin/ -[49]: https://www.mojohaus.org/exec-maven-plugin -[50]: https://maven.apache.org/surefire/maven-failsafe-plugin/ -[51]: https://www.jacoco.org/jacoco/trunk/doc/maven.html -[52]: https://github.com/exasol/error-code-crawler-maven-plugin/ -[53]: https://github.com/exasol/error-code-crawler-maven-plugin/blob/main/LICENSE -[54]: http://zlika.github.io/reproducible-build-maven-plugin +[34]: https://maven.apache.org/plugins/maven-install-plugin/ +[35]: https://maven.apache.org/plugins/maven-resources-plugin/ +[36]: https://maven.apache.org/plugins/maven-site-plugin/ +[37]: http://docs.sonarqube.org/display/PLUG/Plugin+Library/sonar-scanner-maven/sonar-maven-plugin +[38]: http://www.gnu.org/licenses/lgpl.txt +[39]: https://maven.apache.org/plugins/maven-toolchains-plugin/ +[40]: 
https://maven.apache.org/plugins/maven-compiler-plugin/ +[41]: https://maven.apache.org/enforcer/maven-enforcer-plugin/ +[42]: https://www.mojohaus.org/flatten-maven-plugin/ +[43]: https://sonatype.github.io/ossindex-maven/maven-plugin/ +[44]: https://maven.apache.org/surefire/maven-surefire-plugin/ +[45]: https://www.mojohaus.org/versions/versions-maven-plugin/ +[46]: https://basepom.github.io/duplicate-finder-maven-plugin +[47]: http://www.apache.org/licenses/LICENSE-2.0.html +[48]: https://maven.apache.org/plugins/maven-artifact-plugin/ +[49]: https://maven.apache.org/plugins/maven-assembly-plugin/ +[50]: https://maven.apache.org/plugins/maven-jar-plugin/ +[51]: https://github.com/exasol/artifact-reference-checker-maven-plugin/ +[52]: https://github.com/exasol/artifact-reference-checker-maven-plugin/blob/main/LICENSE +[53]: https://github.com/exasol/project-keeper/ +[54]: https://github.com/exasol/project-keeper/blob/main/LICENSE +[55]: https://maven.apache.org/plugins/maven-dependency-plugin/ +[56]: https://www.mojohaus.org/exec-maven-plugin +[57]: https://maven.apache.org/surefire/maven-failsafe-plugin/ +[58]: https://www.jacoco.org/jacoco/trunk/doc/maven.html +[59]: https://github.com/exasol/quality-summarizer-maven-plugin/ +[60]: https://github.com/exasol/quality-summarizer-maven-plugin/blob/main/LICENSE +[61]: https://github.com/exasol/error-code-crawler-maven-plugin/ +[62]: https://github.com/exasol/error-code-crawler-maven-plugin/blob/main/LICENSE +[63]: https://github.com/git-commit-id/git-commit-id-maven-plugin +[64]: http://www.gnu.org/licenses/lgpl-3.0.txt diff --git a/doc/changes/changelog.md b/doc/changes/changelog.md index 2eabc8e..f71ae68 100644 --- a/doc/changes/changelog.md +++ b/doc/changes/changelog.md @@ -1,5 +1,6 @@ # Changes +* [3.1.0](changes_3.1.0.md) * [3.0.0](changes_3.0.0.md) * [2.2.2](changes_2.2.2.md) * [2.2.1](changes_2.2.1.md) diff --git a/doc/changes/changes_3.1.0.md b/doc/changes/changes_3.1.0.md new file mode 100644 index 0000000..d74bd2f --- /dev/null +++ b/doc/changes/changes_3.1.0.md @@ -0,0 +1,71 @@ +# Virtual Schema for PostgreSQL 3.1.0, released 2025-06-06 + +Code name: Timestamp precision + +## Summary + +This release improves support for TIMESTAMP columns with fractional second precision (FSP). +The specified FSP will be maintained in newer Exasol versions (>= 8.32.0). + +This release also contains a security update. We updated the dependencies of the project to fix transitive security issues. + +We also added an exception for the OSSIndex for CVE-2024-55551, which is a false positive in Exasol's JDBC driver. +This issue was fixed quite a while ago, but the OSSIndex unfortunately does not yet list the fix version 24.2.1 (2024-12-10).
+ +## Security + +* #83: Upgraded dependencies + +## Features + +* #80: Enabled Timestamp precision support +* #79: Re-enabled `current_schema` pushdown + +## Dependency Updates + +### Compile Dependency Updates + +* Updated `com.exasol:virtual-schema-common-jdbc:12.0.0` to `13.0.0` +* Updated `org.postgresql:postgresql:42.7.2` to `42.7.6` + +### Test Dependency Updates + +* Updated `com.exasol:exasol-testcontainers:7.0.1` to `7.1.5` +* Updated `com.exasol:hamcrest-resultset-matcher:1.6.4` to `1.7.1` +* Updated `com.exasol:test-db-builder-java:3.5.3` to `3.6.1` +* Updated `com.exasol:udf-debugging-java:0.6.11` to `0.6.16` +* Updated `com.exasol:virtual-schema-common-jdbc:12.0.0` to `13.0.0` +* Updated `com.exasol:virtual-schema-shared-integration-tests:3.0.0` to `3.0.1` +* Updated `org.hamcrest:hamcrest:2.2` to `3.0` +* Updated `org.jacoco:org.jacoco.agent:0.8.11` to `0.8.13` +* Updated `org.junit.jupiter:junit-jupiter:5.10.1` to `5.13.0` +* Updated `org.mockito:mockito-junit-jupiter:5.10.0` to `5.18.0` +* Added `org.slf4j:slf4j-jdk14:2.0.17` +* Updated `org.testcontainers:junit-jupiter:1.19.4` to `1.21.1` +* Updated `org.testcontainers:postgresql:1.19.4` to `1.21.1` + +### Plugin Dependency Updates + +* Updated `com.exasol:artifact-reference-checker-maven-plugin:0.4.2` to `0.4.3` +* Updated `com.exasol:error-code-crawler-maven-plugin:2.0.0` to `2.0.3` +* Updated `com.exasol:project-keeper-maven-plugin:4.0.0` to `5.1.0` +* Added `com.exasol:quality-summarizer-maven-plugin:0.2.0` +* Added `io.github.git-commit-id:git-commit-id-maven-plugin:9.0.1` +* Removed `io.github.zlika:reproducible-build-maven-plugin:0.16` +* Added `org.apache.maven.plugins:maven-artifact-plugin:3.6.0` +* Updated `org.apache.maven.plugins:maven-assembly-plugin:3.6.0` to `3.7.1` +* Updated `org.apache.maven.plugins:maven-clean-plugin:3.2.0` to `3.4.1` +* Updated `org.apache.maven.plugins:maven-compiler-plugin:3.12.1` to `3.14.0` +* Updated `org.apache.maven.plugins:maven-dependency-plugin:3.6.1` to `3.8.1` +* Updated `org.apache.maven.plugins:maven-enforcer-plugin:3.4.1` to `3.5.0` +* Updated `org.apache.maven.plugins:maven-failsafe-plugin:3.2.5` to `3.5.3` +* Updated `org.apache.maven.plugins:maven-install-plugin:3.1.2` to `3.1.4` +* Updated `org.apache.maven.plugins:maven-jar-plugin:3.3.0` to `3.4.2` +* Updated `org.apache.maven.plugins:maven-site-plugin:3.12.1` to `3.21.0` +* Updated `org.apache.maven.plugins:maven-surefire-plugin:3.2.5` to `3.5.3` +* Updated `org.apache.maven.plugins:maven-toolchains-plugin:3.1.0` to `3.2.0` +* Updated `org.codehaus.mojo:exec-maven-plugin:3.1.0` to `3.5.0` +* Updated `org.codehaus.mojo:flatten-maven-plugin:1.6.0` to `1.7.0` +* Updated `org.codehaus.mojo:versions-maven-plugin:2.16.2` to `2.18.0` +* Updated `org.jacoco:jacoco-maven-plugin:0.8.11` to `0.8.13` +* Updated `org.sonarsource.scanner.maven:sonar-maven-plugin:3.10.0.2594` to `5.1.0.4751` diff --git a/doc/generated/capabilities.md b/doc/generated/capabilities.md index 4510812..08622d5 100644 --- a/doc/generated/capabilities.md +++ b/doc/generated/capabilities.md @@ -150,6 +150,7 @@ Capabilities tell the Exasol which SQL features / keywords a Virtual Schema adap | TAN | ✓ | | TANH | ✓ | | TRUNC | ✓ | +| WIDTH_BUCKET | ✓ | | ASCII | ✓ | | BIT_LENGTH | ✓ | | CHR | ✓ | diff --git a/doc/user_guide/postgresql_user_guide.md b/doc/user_guide/postgresql_user_guide.md index 0bf401f..8a41117 100644 --- a/doc/user_guide/postgresql_user_guide.md +++ b/doc/user_guide/postgresql_user_guide.md @@ -47,7 +47,7 @@ The SQL statement below creates 
the adapter script, defines the Java class that --/ CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtual-schema-dist-12.0.0-postgresql-3.0.0.jar; + %jar /buckets///virtual-schema-dist-13.0.0-postgresql-3.1.0.jar; %jar /buckets///postgresql-.jar; / ``` @@ -90,7 +90,7 @@ CREATE VIRTUAL SCHEMA | Variable | Description | |----------|-------------| | `` | Name of the virtual schema you want to use. | -| `` | Name of the catalog, usally equivalent to the name of the PostgreSQL database. | +| `` | Name of the catalog, usually equivalent to the name of the PostgreSQL database. | | `` | Name of the database schema you want to use in the PostgreSQL database. | See also section [Remote logging](../developers_guide/developers_guide.md#remote-logging) in the developers guide. diff --git a/pk_generated_parent.pom b/pk_generated_parent.pom index d53d4fc..f639150 100644 --- a/pk_generated_parent.pom +++ b/pk_generated_parent.pom @@ -3,11 +3,12 @@ 4.0.0 com.exasol postgresql-virtual-schema-generated-parent - 3.0.0 + 3.1.0 pom UTF-8 UTF-8 + ${git.commit.time} 11 exasol https://sonarcloud.io @@ -37,22 +38,42 @@ org.jacoco org.jacoco.agent - 0.8.11 + 0.8.13 test runtime + + org.apache.maven.plugins + maven-clean-plugin + 3.4.1 + + + org.apache.maven.plugins + maven-install-plugin + 3.1.4 + + + org.apache.maven.plugins + maven-resources-plugin + 3.3.1 + + + org.apache.maven.plugins + maven-site-plugin + 3.21.0 + org.sonarsource.scanner.maven sonar-maven-plugin - 3.10.0.2594 + 5.1.0.4751 org.apache.maven.plugins maven-toolchains-plugin - 3.1.0 + 3.2.0 @@ -71,22 +92,21 @@ org.apache.maven.plugins maven-compiler-plugin - 3.12.1 + 3.14.0 ${java.version} ${java.version} true - - -Xlint:all,-processing - + -Xlint:all + -Werror org.apache.maven.plugins maven-enforcer-plugin - 3.4.1 + 3.5.0 enforce-maven @@ -96,7 +116,7 @@ - 3.6.3 + 3.8.7 17 @@ -109,7 +129,7 @@ org.codehaus.mojo flatten-maven-plugin - 1.6.0 + 1.7.0 true oss @@ -148,7 +168,7 @@ org.apache.maven.plugins maven-surefire-plugin - 3.2.5 + 3.5.3 @@ -159,11 +179,11 @@ org.codehaus.mojo versions-maven-plugin - 2.16.2 + 2.18.0 display-updates - package + verify display-plugin-updates display-dependency-updates @@ -180,6 +200,7 @@ true true true + false false true true @@ -212,8 +233,22 @@ org.apache.maven.plugins - maven-assembly-plugin + maven-artifact-plugin 3.6.0 + + + check-build-plan + verify + + check-buildplan + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 3.7.1 src/assembly/all-dependencies.xml @@ -239,7 +274,7 @@ org.apache.maven.plugins maven-jar-plugin - 3.3.0 + 3.4.2 default-jar @@ -250,7 +285,7 @@ com.exasol artifact-reference-checker-maven-plugin - 0.4.2 + 0.4.3 verify @@ -263,7 +298,7 @@ org.apache.maven.plugins maven-dependency-plugin - 3.6.1 + 3.8.1 copy-jacoco @@ -283,7 +318,7 @@ org.apache.maven.plugins maven-failsafe-plugin - 3.2.5 + 3.5.3 -Djava.util.logging.config.file=src/test/resources/logging.properties ${argLine} @@ -305,7 +340,7 @@ org.jacoco jacoco-maven-plugin - 0.8.11 + 0.8.13 prepare-agent @@ -349,10 +384,23 @@ + + com.exasol + quality-summarizer-maven-plugin + 0.2.0 + + + summarize-metrics + + summarize + + + + com.exasol error-code-crawler-maven-plugin - 2.0.0 + 2.0.3 verify @@ -363,18 +411,25 @@ - io.github.zlika - reproducible-build-maven-plugin - 0.16 + io.github.git-commit-id + git-commit-id-maven-plugin + 9.0.1 - strip-jar - package + get-the-git-infos - strip-jar + revision + initialize + + true + UTC + + git.commit.time + 
+ diff --git a/pom.xml b/pom.xml index 7000a2e..17726bd 100644 --- a/pom.xml +++ b/pom.xml @@ -2,13 +2,13 @@ 4.0.0 postgresql-virtual-schema - 3.0.0 + 3.1.0 Virtual Schema for PostgreSQL Virtual Schema for connecting PostgreSQL as data source to Exasol https://github.com/exasol/postgresql-virtual-schema/ - 12.0.0 - 1.19.4 + 13.0.0 + 1.21.1 @@ -24,7 +24,7 @@ org.postgresql postgresql - 42.7.2 + 42.7.6 @@ -37,26 +37,26 @@ org.hamcrest hamcrest - 2.2 + 3.0 test org.junit.jupiter junit-jupiter - 5.10.1 + 5.13.0 test org.mockito mockito-junit-jupiter - 5.10.0 + 5.18.0 test com.exasol exasol-testcontainers - 7.0.1 + 7.1.5 test @@ -74,19 +74,19 @@ com.exasol test-db-builder-java - 3.5.3 + 3.6.1 test com.exasol hamcrest-resultset-matcher - 1.6.4 + 1.7.1 test com.exasol udf-debugging-java - 0.6.11 + 0.6.16 test @@ -104,7 +104,14 @@ com.exasol virtual-schema-shared-integration-tests - 3.0.0 + 3.0.1 + test + + + + org.slf4j + slf4j-jdk14 + 2.0.17 test @@ -120,7 +127,7 @@ com.exasol project-keeper-maven-plugin - 4.0.0 + 5.1.0 @@ -151,7 +158,7 @@ org.codehaus.mojo exec-maven-plugin - 3.1.0 + 3.5.0 validate-capabilities-report @@ -179,12 +186,22 @@ false + + org.sonatype.ossindex.maven + ossindex-maven-plugin + + + + CVE-2024-55551 + + + postgresql-virtual-schema-generated-parent com.exasol - 3.0.0 + 3.1.0 pk_generated_parent.pom diff --git a/release_config.yml b/release_config.yml deleted file mode 100644 index 45f75e8..0000000 --- a/release_config.yml +++ /dev/null @@ -1,3 +0,0 @@ -release-platforms: - - GitHub -language: Java diff --git a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLColumnMetadataReader.java b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLColumnMetadataReader.java index 3703386..d4eed61 100644 --- a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLColumnMetadataReader.java +++ b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLColumnMetadataReader.java @@ -3,6 +3,7 @@ import java.sql.*; import java.util.logging.Logger; +import com.exasol.ExaMetadata; import com.exasol.adapter.AdapterProperties; import com.exasol.adapter.dialects.IdentifierConverter; import com.exasol.adapter.dialects.postgresql.PostgreSQLIdentifierMapping.CaseFolding; @@ -22,24 +23,26 @@ public class PostgreSQLColumnMetadataReader extends BaseColumnMetadataReader { * * @param connection JDBC connection to the remote data source * @param properties user-defined adapter properties + * @param exaMetadata metadata of the Exasol database * @param identifierConverter converter between source and Exasol identifiers */ public PostgreSQLColumnMetadataReader(final Connection connection, final AdapterProperties properties, + final ExaMetadata exaMetadata, final IdentifierConverter identifierConverter) { - super(connection, properties, identifierConverter); + super(connection, properties, exaMetadata, identifierConverter); } @Override public DataType mapJdbcType(final JDBCTypeDescription jdbcTypeDescription) { switch (jdbcTypeDescription.getJdbcType()) { - case Types.OTHER: - return mapJdbcTypeOther(jdbcTypeDescription); - case Types.SQLXML: - case Types.DISTINCT: - case Types.BINARY: - return DataType.createMaximumSizeVarChar(DataType.ExaCharset.UTF8); - default: - return super.mapJdbcType(jdbcTypeDescription); + case Types.OTHER: + return mapJdbcTypeOther(jdbcTypeDescription); + case Types.SQLXML: + case Types.DISTINCT: + case Types.BINARY: + return DataType.createMaximumSizeVarChar(DataType.ExaCharset.UTF8); + default: + return 
super.mapJdbcType(jdbcTypeDescription);
         }
     }
@@ -71,4 +74,4 @@ public String readColumnName(final ResultSet columns) throws SQLException {
     CaseFolding getIdentifierMapping() {
         return PostgreSQLIdentifierMapping.from(this.properties);
     }
-}
\ No newline at end of file
+}
diff --git a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLMetadataReader.java b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLMetadataReader.java
index 7b1527c..2985ffb 100644
--- a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLMetadataReader.java
+++ b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLMetadataReader.java
@@ -2,6 +2,7 @@
 
 import java.sql.Connection;
 
+import com.exasol.ExaMetadata;
 import com.exasol.adapter.AdapterProperties;
 import com.exasol.adapter.dialects.IdentifierConverter;
 import com.exasol.adapter.jdbc.*;
@@ -16,23 +17,25 @@ public class PostgreSQLMetadataReader extends AbstractRemoteMetadataReader {
      * @param connection connection to the PostgreSQL database
      * @param properties user-defined adapter properties
      */
-    public PostgreSQLMetadataReader(final Connection connection, final AdapterProperties properties) {
-        super(connection, properties);
+    public PostgreSQLMetadataReader(final Connection connection, final AdapterProperties properties,
+            final ExaMetadata exaMetadata) {
+        super(connection, properties, exaMetadata);
     }
 
     @Override
     public BaseTableMetadataReader createTableMetadataReader() {
         return new PostgreSQLTableMetadataReader(this.connection, getColumnMetadataReader(), this.properties,
-                getIdentifierConverter());
+                this.exaMetadata, getIdentifierConverter());
     }
 
     @Override
     public ColumnMetadataReader createColumnMetadataReader() {
-        return new PostgreSQLColumnMetadataReader(this.connection, this.properties, getIdentifierConverter());
+        return new PostgreSQLColumnMetadataReader(this.connection, this.properties, exaMetadata,
+                getIdentifierConverter());
     }
 
     @Override
     public IdentifierConverter createIdentifierConverter() {
         return new PostgreSQLIdentifierConverter(this.properties);
     }
-}
\ No newline at end of file
+}
diff --git a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialect.java b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialect.java
index fc9d189..b50a0d0 100644
--- a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialect.java
+++ b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialect.java
@@ -13,6 +13,7 @@
 import java.util.*;
 import java.util.function.Predicate;
 
+import com.exasol.ExaMetadata;
 import com.exasol.adapter.AdapterProperties;
 import com.exasol.adapter.capabilities.Capabilities;
 import com.exasol.adapter.capabilities.ScalarFunctionCapability;
@@ -97,10 +98,11 @@ private static ScalarFunctionCapability[] getEnabledScalarFunctionCapabilities()
      * @param connectionFactory factory for the JDBC connection to the remote data source
      * @param properties        user-defined adapter properties
      */
-    public PostgreSQLSqlDialect(final ConnectionFactory connectionFactory, final AdapterProperties properties) {
-        super(connectionFactory, properties, //
+    public PostgreSQLSqlDialect(final ConnectionFactory connectionFactory, final AdapterProperties properties,
+            final ExaMetadata exaMetadata) {
+        super(connectionFactory, properties, exaMetadata,
                 Set.of(CATALOG_NAME_PROPERTY, SCHEMA_NAME_PROPERTY, IGNORE_ERRORS_PROPERTY,
-                        PostgreSQLIdentifierMapping.PROPERTY), //
+                        PostgreSQLIdentifierMapping.PROPERTY),
                 List.of(PostgreSQLIdentifierMapping.validator()));
     }
 
@@ -112,7 +114,8 @@ public String getName() {
     @Override
     protected RemoteMetadataReader createRemoteMetadataReader() {
         try {
-            return new PostgreSQLMetadataReader(this.connectionFactory.getConnection(), this.properties);
+            return new PostgreSQLMetadataReader(this.connectionFactory.getConnection(), this.properties,
+                    this.exaMetadata);
         } catch (final SQLException exception) {
             throw new RemoteMetadataReaderException(ExaError.messageBuilder("E-VSPG-3")
                     .message("Unable to create PostgreSQL remote metadata reader. Caused by: {{cause}}",
diff --git a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectFactory.java b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectFactory.java
index 558b346..42d8a8a 100644
--- a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectFactory.java
+++ b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectFactory.java
@@ -1,5 +1,6 @@
 package com.exasol.adapter.dialects.postgresql;
 
+import com.exasol.ExaMetadata;
 import com.exasol.adapter.AdapterProperties;
 import com.exasol.adapter.dialects.SqlDialect;
 import com.exasol.adapter.dialects.SqlDialectFactory;
@@ -10,21 +11,22 @@
  * Factory for the PostgreSQL SQL dialect.
  */
 public class PostgreSQLSqlDialectFactory implements SqlDialectFactory {
-
+
     @Override
     public String getSqlDialectName() {
         return PostgreSQLSqlDialect.NAME;
     }
 
     @Override
-    public SqlDialect createSqlDialect(final ConnectionFactory connectionFactory, final AdapterProperties properties) {
-        return new PostgreSQLSqlDialect(connectionFactory, properties);
+    public SqlDialect createSqlDialect(final ConnectionFactory connectionFactory, final AdapterProperties properties,
+            final ExaMetadata exaMetadata) {
+        return new PostgreSQLSqlDialect(connectionFactory, properties, exaMetadata);
     }
-
+
     @Override
     public String getSqlDialectVersion() {
         final VersionCollector versionCollector = new VersionCollector(
                 "META-INF/maven/com.exasol/virtual-schema-jdbc-adapter/pom.properties");
         return versionCollector.getVersionNumber();
     }
-}
\ No newline at end of file
+}
diff --git a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLTableMetadataReader.java b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLTableMetadataReader.java
index ce9925b..d0650ea 100644
--- a/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLTableMetadataReader.java
+++ b/src/main/java/com/exasol/adapter/dialects/postgresql/PostgreSQLTableMetadataReader.java
@@ -5,6 +5,7 @@ import java.sql.Connection;
 import java.util.logging.Logger;
 
+import com.exasol.ExaMetadata;
 import com.exasol.adapter.AdapterProperties;
 import com.exasol.adapter.dialects.IdentifierConverter;
 import com.exasol.adapter.dialects.postgresql.PostgreSQLIdentifierMapping.CaseFolding;
@@ -26,8 +27,9 @@ public class PostgreSQLTableMetadataReader extends BaseTableMetadataReader {
      * @param identifierConverter converter between source and Exasol identifiers
      */
     public PostgreSQLTableMetadataReader(final Connection connection, final ColumnMetadataReader columnMetadataReader,
-            final AdapterProperties properties, final IdentifierConverter identifierConverter) {
-        super(connection, columnMetadataReader, properties, identifierConverter);
+            final AdapterProperties properties, final ExaMetadata exaMetadata,
+            final IdentifierConverter identifierConverter) {
+        super(connection, columnMetadataReader, properties, exaMetadata, identifierConverter);
     }
 
     /**
@@ -84,4 +86,4 @@ private boolean containsUppercaseCharacter(final String
tableName) { } return false; } -} \ No newline at end of file +} diff --git a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLColumnMetadataReaderTest.java b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLColumnMetadataReaderTest.java index 8d1c376..1672d0f 100644 --- a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLColumnMetadataReaderTest.java +++ b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLColumnMetadataReaderTest.java @@ -2,6 +2,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.when; import java.sql.Types; import java.util.HashMap; @@ -9,18 +10,25 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import com.exasol.ExaMetadata; import com.exasol.adapter.AdapterProperties; import com.exasol.adapter.dialects.BaseIdentifierConverter; import com.exasol.adapter.dialects.postgresql.PostgreSQLIdentifierMapping.CaseFolding; import com.exasol.adapter.jdbc.JDBCTypeDescription; import com.exasol.adapter.metadata.DataType; +@ExtendWith(MockitoExtension.class) class PostgreSQLColumnMetadataReaderTest { private PostgreSQLColumnMetadataReader columnMetadataReader; private Map rawProperties; + @Mock + ExaMetadata exaMetadataMock; @BeforeEach void beforeEach() { @@ -29,7 +37,8 @@ void beforeEach() { } private PostgreSQLColumnMetadataReader createDefaultPostgreSQLColumnMetadataReader() { - return new PostgreSQLColumnMetadataReader(null, AdapterProperties.emptyProperties(), + when(exaMetadataMock.getDatabaseVersion()).thenReturn("8.34.0"); + return new PostgreSQLColumnMetadataReader(null, AdapterProperties.emptyProperties(), exaMetadataMock, BaseIdentifierConverter.createDefault()); } @@ -63,17 +72,17 @@ void testGetDefaultPostgreSQLIdentifierMapping() { void testGetPreserveCasePostgreSQLIdentifierMapping() { this.rawProperties.put("POSTGRESQL_IDENTIFIER_MAPPING", "PRESERVE_ORIGINAL_CASE"); final AdapterProperties adapterProperties = new AdapterProperties(this.rawProperties); - final PostgreSQLColumnMetadataReader columnMetadataReader = new PostgreSQLColumnMetadataReader(null, - adapterProperties, BaseIdentifierConverter.createDefault()); - assertThat(columnMetadataReader.getIdentifierMapping(), equalTo(CaseFolding.PRESERVE_ORIGINAL_CASE)); + final PostgreSQLColumnMetadataReader testee = new PostgreSQLColumnMetadataReader(null, + adapterProperties, exaMetadataMock, BaseIdentifierConverter.createDefault()); + assertThat(testee.getIdentifierMapping(), equalTo(CaseFolding.PRESERVE_ORIGINAL_CASE)); } @Test void testGetConverToUpperPostgreSQLIdentifierMapping() { this.rawProperties.put("POSTGRESQL_IDENTIFIER_MAPPING", "CONVERT_TO_UPPER"); final AdapterProperties adapterProperties = new AdapterProperties(this.rawProperties); - final PostgreSQLColumnMetadataReader columnMetadataReader = new PostgreSQLColumnMetadataReader(null, - adapterProperties, BaseIdentifierConverter.createDefault()); - assertThat(columnMetadataReader.getIdentifierMapping(), equalTo(CaseFolding.CONVERT_TO_UPPER)); + final PostgreSQLColumnMetadataReader testee = new PostgreSQLColumnMetadataReader(null, + adapterProperties, exaMetadataMock, BaseIdentifierConverter.createDefault()); + assertThat(testee.getIdentifierMapping(), 
equalTo(CaseFolding.CONVERT_TO_UPPER)); } } diff --git a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLMetadataReaderTest.java b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLMetadataReaderTest.java index 082675a..065731c 100644 --- a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLMetadataReaderTest.java +++ b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLMetadataReaderTest.java @@ -1,23 +1,32 @@ package com.exasol.adapter.dialects.postgresql; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertAll; +import static org.mockito.Mockito.when; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import com.exasol.ExaMetadata; import com.exasol.adapter.AdapterProperties; import com.exasol.adapter.dialects.IdentifierCaseHandling; import com.exasol.adapter.dialects.IdentifierConverter; +@ExtendWith(MockitoExtension.class) class PostgreSQLMetadataReaderTest { private PostgreSQLMetadataReader reader; + @Mock + ExaMetadata exaMetadataMock; @BeforeEach void beforeEach() { - this.reader = new PostgreSQLMetadataReader(null, AdapterProperties.emptyProperties()); + when(exaMetadataMock.getDatabaseVersion()).thenReturn("8.34.0"); + this.reader = new PostgreSQLMetadataReader(null, AdapterProperties.emptyProperties(), exaMetadataMock); } @Test @@ -39,4 +48,4 @@ void testGetIdentifierConverter() { () -> assertThat(identifierConverter.getUnquotedIdentifierHandling(), equalTo(IdentifierCaseHandling.INTERPRET_AS_LOWER))); } -} \ No newline at end of file +} diff --git a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLScalarFunctionsIT.java b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLScalarFunctionsIT.java index 355c7fb..abee7d5 100644 --- a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLScalarFunctionsIT.java +++ b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLScalarFunctionsIT.java @@ -55,18 +55,18 @@ public VirtualSchemaTestSetupProvider getVirtualSchemaTestSetupProvider() { @Override public String getExternalTypeFor(final DataType exasolType) { switch (exasolType.getExaDataType()) { - case VARCHAR: - return "VARCHAR(" + exasolType.getSize() + ")"; - case DOUBLE: - return "DOUBLE PRECISION"; - case DECIMAL: - if (exasolType.getScale() == 0) { - return "INTEGER"; - } else { + case VARCHAR: + return "VARCHAR(" + exasolType.getSize() + ")"; + case DOUBLE: + return "DOUBLE PRECISION"; + case DECIMAL: + if (exasolType.getScale() == 0) { + return "INTEGER"; + } else { + return exasolType.toString(); + } + default: return exasolType.toString(); - } - default: - return exasolType.toString(); } } @@ -88,11 +88,9 @@ public Set getDialectSpecificExcludes() { // expected was a value close to <1972-01-01> (tolerance: +/- <0.00010>) but was // "1972-01-01T00:00:00Z" "add_years", - // Check 'current_schema' functionality, re-enable tests after resolution #79 - // currently a bug in the compiler, compiler always expects 'VARCHAR(1) ASCII' see - // https://github.com/exasol/postgresql-virtual-schema/issues/79 - // https://exasol.atlassian.net/browse/SPOT-19716 - "current_schema"); + // expected was a value close to <1970-01-01 00:00:01.0> 
(tolerance: +/- <0.00010>) but was + // "1970-01-01T00:00:00Z"' + "add_seconds(\"DATE_C5\", \"DOUBLE_PRECISION_C0\")"); } @Override @@ -126,12 +124,13 @@ public void close() { // protected virtual method, must be overridden @Override - protected void beforeAllSetup() throws SQLException { + protected void beforeAllSetup() { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); } // protected virtual method, must be overridden @Override - protected void afterAllTeardown() throws SQLException { + protected void afterAllTeardown() { + // Nothing to do } } diff --git a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectFactoryTest.java b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectFactoryTest.java index da3ad98..18cd688 100644 --- a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectFactoryTest.java +++ b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectFactoryTest.java @@ -1,8 +1,8 @@ package com.exasol.adapter.dialects.postgresql; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.MatcherAssert.assertThat; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -24,7 +24,7 @@ void testGetName() { @Test void testCreateDialect() { - assertThat(this.factory.createSqlDialect(null, AdapterProperties.emptyProperties()), + assertThat(this.factory.createSqlDialect(null, AdapterProperties.emptyProperties(), null), instanceOf(PostgreSQLSqlDialect.class)); } -} \ No newline at end of file +} diff --git a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectIT.java b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectIT.java index 5ceb7d1..67976d6 100644 --- a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectIT.java +++ b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectIT.java @@ -1,28 +1,30 @@ package com.exasol.adapter.dialects.postgresql; -import static com.exasol.matcher.ResultSetMatcher.matchesResultSet; import static com.exasol.matcher.ResultSetStructureMatcher.table; import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assumptions.assumeFalse; +import static org.junit.jupiter.api.Assumptions.assumeTrue; import java.sql.*; import java.text.ParseException; import java.text.SimpleDateFormat; -import java.util.*; -import java.util.Date; -import java.util.stream.Collectors; +import java.util.Collections; +import java.util.Map; -import org.hamcrest.MatcherAssert; +import org.hamcrest.Matcher; import org.junit.jupiter.api.*; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; import com.exasol.closeafterall.CloseAfterAll; import com.exasol.closeafterall.CloseAfterAllExtension; +import com.exasol.containers.ExasolDockerImageReference; import com.exasol.dbbuilder.dialects.DatabaseObjectException; -import com.exasol.dbbuilder.dialects.Schema; import com.exasol.dbbuilder.dialects.exasol.VirtualSchema; import com.exasol.matcher.TypeMatchMode; @@ -37,14 +39,13 @@ class 
PostgreSQLSqlDialectIT { private static final String TABLE_POSTGRES_MIXED_CASE = "Table_Postgres_Mixed_Case"; private static final String TABLE_POSTGRES_LOWER_CASE = "table_postgres_lower_case"; private static final String TABLE_POSTGRES_ALL_DATA_TYPES = "table_postgres_all_data_types"; - private static Schema exasolSchema; private static VirtualSchema virtualSchemaPostgres; private static VirtualSchema virtualSchemaPostgresUppercaseTable; private static final String TABLE_JOIN_1 = "TABLE_JOIN_1"; private static final String TABLE_JOIN_2 = "TABLE_JOIN_2"; private static VirtualSchema virtualSchemaPostgresPreserveOriginalCase; - private static String QUALIFIED_TABLE_JOIN_NAME_1; - private static String QUALIFIED_TABLE_JOIN_NAME_2; + private static String qualifiedTableJoinName1; + private static String qualifiedTableJoinName2; private static Statement statementExasol; @BeforeAll @@ -63,9 +64,8 @@ static void beforeAll() throws SQLException { Map.of("IGNORE_ERRORS", "POSTGRESQL_UPPERCASE_TABLES")); virtualSchemaPostgresPreserveOriginalCase = SETUP.createVirtualSchema(SCHEMA_POSTGRES_UPPERCASE_TABLE, Map.of("POSTGRESQL_IDENTIFIER_MAPPING", "PRESERVE_ORIGINAL_CASE")); - QUALIFIED_TABLE_JOIN_NAME_1 = virtualSchemaPostgres.getName() + "." + TABLE_JOIN_1; - QUALIFIED_TABLE_JOIN_NAME_2 = virtualSchemaPostgres.getName() + "." + TABLE_JOIN_2; - exasolSchema = SETUP.getExasolFactory().createSchema("EXASOL_TEST_SCHEMA"); + qualifiedTableJoinName1 = virtualSchemaPostgres.getName() + "." + TABLE_JOIN_1; + qualifiedTableJoinName2 = virtualSchemaPostgres.getName() + "." + TABLE_JOIN_2; } private static void createPostgresTestTableSimple(final Statement statementPostgres) throws SQLException { @@ -109,6 +109,9 @@ private static void createPostgresTestTableAllDataTypes(final Statement statemen + "myTime TIME, " // + "myTimeWithTimeZone TIME WITH TIME ZONE, " // + "myTimestamp TIMESTAMP, " // + + "myTimestamp0 TIMESTAMP(0), " // + + "myTimestamp3 TIMESTAMP(3), " // + + "myTimestamp6 TIMESTAMP(6), " // + "myTimestampWithTimeZone TIMESTAMP WITH TIME ZONE, " // + "myTsquery TSQUERY, " // + "myTsvector TSVECTOR, " // @@ -148,6 +151,9 @@ private static void createPostgresTestTableAllDataTypes(final Statement statemen + "'11:11:11', " // myTime + "'11:11:11 +01:00', " // myTimeWithTimeZone + "'2010-01-01 11:11:11', " // myTimestamp + + "'2010-01-01 11:11:11', " // myTimestamp0 + + "'2010-01-01 11:11:11.123', " // myTimestamp3 + + "'2010-01-01 11:11:11.123456', " // myTimestamp6 + "'2010-01-01 11:11:11 +01:00', " // myTimestampwithtimezone + "'fat & rat'::tsquery, " // myTsquery + "to_tsvector('english', 'The Fat Rats'), " // myTsvector @@ -177,127 +183,124 @@ private static void createTestTablesForJoinTests(final String schemaName) throws } @Test - void testSelectSingleColumn() throws SQLException { - final ResultSet actualResultSet = statementExasol - .executeQuery("SELECT * FROM " + virtualSchemaPostgres.getName() + "." + TABLE_POSTGRES_SIMPLE); - assertThat(actualResultSet, table().row(1).matches(TypeMatchMode.NO_JAVA_TYPE_CHECK)); + void testSelectSingleColumn() { + assertResult("SELECT * FROM " + virtualSchemaPostgres.getName() + "." 
+ TABLE_POSTGRES_SIMPLE, + table().row(1).matches(TypeMatchMode.NO_JAVA_TYPE_CHECK)); } @Test - void testInnerJoin() throws SQLException { - final String query = "SELECT * FROM " + QUALIFIED_TABLE_JOIN_NAME_1 + " a INNER JOIN " - + QUALIFIED_TABLE_JOIN_NAME_2 + " b ON a.x=b.x"; - final ResultSet expected = getExpectedResultSet(List.of("x INT", "y VARCHAR(100)", "a INT", "b VARCHAR(100)"), // - List.of("2,'bbb', 2,'bbb'")); - assertThat(getActualResultSet(query), matchesResultSet(expected)); + void testInnerJoin() { + final String query = "SELECT * FROM " + qualifiedTableJoinName1 + " a INNER JOIN " + + qualifiedTableJoinName2 + " b ON a.x=b.x"; + assertResult(query, + table("BIGINT", "VARCHAR", "BIGINT", "VARCHAR").row(2L, "bbb", 2L, "bbb").matches()); + } + + private void assertResult(final String query, final Matcher matcher) { + try (ResultSet resultSet = getActualResultSet(query)) { + assertThat(resultSet, matcher); + } catch (final SQLException exception) { + throw new IllegalStateException(String.format("Failed to execute query '%s'", query)); + } + } + + private void assertEmptyResult(final String query) { + try (ResultSet resultSet = getActualResultSet(query)) { + assertThat(resultSet.next(), is(false)); + } catch (final SQLException exception) { + throw new IllegalStateException(String.format("Failed to execute query '%s'", query)); + } + } + + private ResultSet getActualResultSet(final String query) throws SQLException { + return statementExasol.executeQuery(query); } @Test - void testInnerJoinWithProjection() throws SQLException { - final String query = "SELECT b.y || " + QUALIFIED_TABLE_JOIN_NAME_1 + ".y FROM " + QUALIFIED_TABLE_JOIN_NAME_1 - + " INNER JOIN " + QUALIFIED_TABLE_JOIN_NAME_2 + " b ON " + QUALIFIED_TABLE_JOIN_NAME_1 + ".x=b.x"; - final ResultSet expected = getExpectedResultSet(List.of("y VARCHAR(100)"), // - List.of("'bbbbbb'")); - assertThat(getActualResultSet(query), matchesResultSet(expected)); + void testInnerJoinWithProjection() { + final String query = "SELECT b.y || " + qualifiedTableJoinName1 + ".y FROM " + qualifiedTableJoinName1 + + " INNER JOIN " + qualifiedTableJoinName2 + " b ON " + qualifiedTableJoinName1 + ".x=b.x"; + assertResult(query, table("VARCHAR").row("bbbbbb").matches()); } @Test - void testLeftJoin() throws SQLException { - final String query = "SELECT * FROM " + QUALIFIED_TABLE_JOIN_NAME_1 + " a LEFT OUTER JOIN " - + QUALIFIED_TABLE_JOIN_NAME_2 + " b ON a.x=b.x ORDER BY a.x"; - final ResultSet expected = getExpectedResultSet(List.of("x INT", "y VARCHAR(100)", "a INT", "b VARCHAR(100)"), // - List.of("1, 'aaa', null, null", // - "2, 'bbb', 2, 'bbb'")); - assertThat(getActualResultSet(query), matchesResultSet(expected)); + void testLeftJoin() { + final String query = "SELECT * FROM " + qualifiedTableJoinName1 + " a LEFT OUTER JOIN " + + qualifiedTableJoinName2 + " b ON a.x=b.x ORDER BY a.x"; + assertResult(query, table("BIGINT", "VARCHAR", "BIGINT", "VARCHAR").row(1L, "aaa", null, null) + .row(2L, "bbb", 2L, "bbb").matches()); } @Test - void testRightJoin() throws SQLException { - final String query = "SELECT * FROM " + QUALIFIED_TABLE_JOIN_NAME_1 + " a RIGHT OUTER JOIN " - + QUALIFIED_TABLE_JOIN_NAME_2 + " b ON a.x=b.x ORDER BY a.x"; - final ResultSet expected = getExpectedResultSet(List.of("x INT", "y VARCHAR(100)", "a INT", "b VARCHAR(100)"), // - List.of("2, 'bbb', 2, 'bbb'", // - "null, null, 3, 'ccc'")); - assertThat(getActualResultSet(query), matchesResultSet(expected)); + void testRightJoin() { + final String query = "SELECT * FROM " 
+ qualifiedTableJoinName1 + " a RIGHT OUTER JOIN " + + qualifiedTableJoinName2 + " b ON a.x=b.x ORDER BY a.x"; + assertResult(query, table("BIGINT", "VARCHAR", "BIGINT", "VARCHAR").row(2L, "bbb", 2L, "bbb") + .row(null, null, 3L, "ccc").matches()); } @Test - void testFullOuterJoin() throws SQLException { - final String query = "SELECT * FROM " + QUALIFIED_TABLE_JOIN_NAME_1 + " a FULL OUTER JOIN " - + QUALIFIED_TABLE_JOIN_NAME_2 + " b ON a.x=b.x ORDER BY a.x"; - final ResultSet expected = getExpectedResultSet(List.of("x INT", "y VARCHAR(100)", "a INT", "b VARCHAR(100)"), // - List.of("1, 'aaa', null, null", // - "2, 'bbb', 2, 'bbb'", // - "null, null, 3, 'ccc'")); - assertThat(getActualResultSet(query), matchesResultSet(expected)); + void testFullOuterJoin() { + final String query = "SELECT * FROM " + qualifiedTableJoinName1 + " a FULL OUTER JOIN " + + qualifiedTableJoinName2 + " b ON a.x=b.x ORDER BY a.x"; + assertResult(query, table("BIGINT", "VARCHAR", "BIGINT", "VARCHAR") + .row(1L, "aaa", null, null) + .row(2L, "bbb", 2L, "bbb") + .row(null, null, 3L, "ccc").matches()); } @Test - void testRightJoinWithComplexCondition() throws SQLException { - final String query = "SELECT * FROM " + QUALIFIED_TABLE_JOIN_NAME_1 + " a RIGHT OUTER JOIN " - + QUALIFIED_TABLE_JOIN_NAME_2 + " b ON a.x||a.y=b.x||b.y ORDER BY a.x"; - final ResultSet expected = getExpectedResultSet(List.of("x INT", "y VARCHAR(100)", "a INT", "b VARCHAR(100)"), // - List.of("2, 'bbb', 2, 'bbb'", // - "null, null, 3, 'ccc'")); - assertThat(getActualResultSet(query), matchesResultSet(expected)); + void testRightJoinWithComplexCondition() { + final String query = "SELECT * FROM " + qualifiedTableJoinName1 + " a RIGHT OUTER JOIN " + + qualifiedTableJoinName2 + " b ON a.x||a.y=b.x||b.y ORDER BY a.x"; + assertResult(query, table("BIGINT", "VARCHAR", "BIGINT", "VARCHAR") + .row(2L, "bbb", 2L, "bbb") + .row(null, null, 3L, "ccc").matches()); } @Test - void testFullOuterJoinWithComplexCondition() throws SQLException { - final String query = "SELECT * FROM " + QUALIFIED_TABLE_JOIN_NAME_1 + " a FULL OUTER JOIN " - + QUALIFIED_TABLE_JOIN_NAME_2 + " b ON a.x-b.x=0 ORDER BY a.x"; - final ResultSet expected = getExpectedResultSet(List.of("x INT", "y VARCHAR(100)", "a INT", "b VARCHAR(100)"), // - List.of("1, 'aaa', null, null", // - "2, 'bbb', 2, 'bbb'", // - "null, null, 3, 'ccc'")); - assertThat(getActualResultSet(query), matchesResultSet(expected)); + void testFullOuterJoinWithComplexCondition() { + final String query = "SELECT * FROM " + qualifiedTableJoinName1 + " a FULL OUTER JOIN " + + qualifiedTableJoinName2 + " b ON a.x-b.x=0 ORDER BY a.x"; + assertResult(query, table("BIGINT", "VARCHAR", "BIGINT", "VARCHAR") + .row(1L, "aaa", null, null) + .row(2L, "bbb", 2L, "bbb") + .row(null, null, 3L, "ccc").matches()); } @Test - void testYearScalarFunctionFromTimeStamp() throws SQLException { + void testYearScalarFunctionFromTimeStamp() { final String query = "SELECT year(\"MYTIMESTAMP\") FROM " + virtualSchemaPostgres.getName() + "." + TABLE_POSTGRES_ALL_DATA_TYPES; - final ResultSet actualResultSet = getActualResultSet(query); - final Short yearShort = 2010; - assertThat(actualResultSet, table().row(yearShort).matches()); + assertResult(query, table().row((short) 2010).matches()); } @Test - void testYearScalarFunctionFromDate() throws SQLException { + void testYearScalarFunctionFromDate() { final String query = "SELECT year(\"MYDATE\") FROM " + virtualSchemaPostgres.getName() + "." 
+ TABLE_POSTGRES_ALL_DATA_TYPES; - final ResultSet actualResultSet = getActualResultSet(query); - final Short yearShort = 2010; - assertThat(actualResultSet, table().row(yearShort).matches()); + assertResult(query, table().row((short) 2010).matches()); } - // Check 'current_schema' functionality, re-enable tests after resolution - // currently a bug in the compiler, compiler always expects 'VARCHAR(1) ASCII' see - // https://github.com/exasol/postgresql-virtual-schema/issues/79 - // https://exasol.atlassian.net/browse/SPOT-19716 - @Disabled("Currently a bug in the compiler, compiler always expects 'VARCHAR(1) ASCII'") @Test - void testCurrentSchemaScalarFunction() throws SQLException { + void testCurrentSchemaScalarFunction() { final String query = " SELECT current_schema FROM " + virtualSchemaPostgres.getName() + "." + TABLE_POSTGRES_ALL_DATA_TYPES; - final ResultSet actualResultSet = getActualResultSet(query); - assertThat(actualResultSet, table().row(TABLE_POSTGRES_ALL_DATA_TYPES).matches()); + assertResult(query, table().row("public").matches()); } @Test - void testFloatDivFunction() throws SQLException { - final String query = " SELECT MYINTEGER / MYINTEGER FROM " + virtualSchemaPostgres.getName() + "." + void testFloatDivFunction() { + final String query = "SELECT MYINTEGER / MYINTEGER FROM " + virtualSchemaPostgres.getName() + "." + TABLE_POSTGRES_ALL_DATA_TYPES; - final ResultSet actualResultSet = getActualResultSet(query); - assertThat(actualResultSet, table("DOUBLE PRECISION").row(1.0).matches()); + assertResult(query, table("DOUBLE PRECISION").row(1.0).matches()); } @Test - void testCountAll() throws SQLException { + void testCountAll() { final String qualifiedExpectedTableName = virtualSchemaPostgres.getName() + "." + TABLE_POSTGRES_SIMPLE; final String query = "SELECT COUNT(*) FROM " + qualifiedExpectedTableName; - final ResultSet actualResultSet = getActualResultSet(query); - assertThat(actualResultSet, table("BIGINT").row(1L).matches()); + assertResult(query, table("BIGINT").row(1L).matches()); } @Test @@ -324,10 +327,9 @@ void testQueryUpperCaseTableThrowsException() { } @Test - void testQueryLowerCaseTable() throws SQLException { - final ResultSet result = statementExasol.executeQuery( + void testQueryLowerCaseTable() { + assertEmptyResult( "SELECT x FROM " + virtualSchemaPostgresUppercaseTable.getName() + "." 
+ TABLE_POSTGRES_LOWER_CASE); - assertThat(result.next(), equalTo(false)); } @Test @@ -362,246 +364,236 @@ void testPreserveCaseQueryLowerCaseTableThrowsException() { } @Test - void testPreserveCaseQueryLowerCaseTableWithQuotes() throws SQLException { - final ResultSet result = statementExasol.executeQuery("SELECT \"x\" FROM " + void testPreserveCaseQueryLowerCaseTableWithQuotes() { + assertEmptyResult("SELECT \"x\" FROM " + virtualSchemaPostgresPreserveOriginalCase.getName() + ".\"" + TABLE_POSTGRES_LOWER_CASE + "\""); - assertThat(result.next(), equalTo(false)); } @Test - void testPreserveCaseQueryUpperCaseTableWithQuotes() throws SQLException { - final ResultSet result = statementExasol.executeQuery("SELECT \"Y\" FROM " + void testPreserveCaseQueryUpperCaseTableWithQuotes() { + assertEmptyResult("SELECT \"Y\" FROM " + virtualSchemaPostgresPreserveOriginalCase.getName() + ".\"" + TABLE_POSTGRES_MIXED_CASE + "\""); - assertThat(result.next(), equalTo(false)); } @Test - void testDatatypeBigint() throws SQLException { + void testDatatypeBigint() { assertSingleValue("myBigint", "DECIMAL(19,0)", "10000000000"); } @Test - void testPreserveCaseQueryUpperCaseTableWithQuotesLowerCaseColumn() throws SQLException { - final ResultSet result = statementExasol.executeQuery("SELECT \"x\" FROM " + void testPreserveCaseQueryUpperCaseTableWithQuotesLowerCaseColumn() { + assertEmptyResult("SELECT \"x\" FROM " + virtualSchemaPostgresPreserveOriginalCase.getName() + ".\"" + TABLE_POSTGRES_MIXED_CASE + "\""); - assertThat(result.next(), equalTo(false)); } @Test - void testDatatypeBigSerial() throws SQLException { + void testDatatypeBigSerial() { assertSingleValue("myBigserial", "DECIMAL(19,0)", "1"); } @Test - void testDatatypeBit() throws SQLException { + void testDatatypeBit() { assertSingleValue("myBit", "BOOLEAN", true); } @Test - void testDatatypeBitVar() throws SQLException { + void testDatatypeBitVar() { assertSingleValue("myBitvar", "VARCHAR(5) UTF8", "0"); } @Test - void testDatatypeBoolean() throws SQLException { + void testDatatypeBoolean() { assertSingleValue("myBoolean", "BOOLEAN", false); } @Test - void testDatatypeBox() throws SQLException { + void testDatatypeBox() { assertSingleValue("myBox", "VARCHAR(2000000) UTF8", "(4,16),(1,8)"); } @Test - void testDatatypeBytea() throws SQLException { + void testDatatypeBytea() { assertSingleValue("myBytea", "VARCHAR(2000000) UTF8", "bytea NOT SUPPORTED"); } @Test - void testDatatypeCharacter() throws SQLException { + void testDatatypeCharacter() { final String empty = " "; final String expected = "hajksdf" + String.join("", Collections.nCopies(993, empty)); assertSingleValue("myCharacter", "CHAR(1000) UTF8", expected); } @Test - void testDatatypeCharacterVar() throws SQLException { + void testDatatypeCharacterVar() { assertSingleValue("myCharactervar", "VARCHAR(1000) UTF8", "hjkdhjgfh"); } @Test - void testDatatypeCidr() throws SQLException { + void testDatatypeCidr() { assertSingleValue("myCidr", "VARCHAR(2000000) UTF8", "192.168.100.128/25"); } @Test - void testDatatypeCircle() throws SQLException { + void testDatatypeCircle() { assertSingleValue("myCircle", "VARCHAR(2000000) UTF8", "<(1,5),3>"); } @Test - void testDatatypeDate() throws SQLException, ParseException { - final Date expectedDate = new SimpleDateFormat("yyyy-MM-dd").parse("2010-01-01"); + void testDatatypeDate() throws ParseException { + final java.util.Date expectedDate = new SimpleDateFormat("yyyy-MM-dd").parse("2010-01-01"); assertSingleValue("myDate", "DATE", expectedDate); } @Test - 
void testDatatypeDouble() throws SQLException { + void testDatatypeDouble() { assertSingleValue("myDouble", "DOUBLE", "192189234.1723854"); } @Test - void testDatatypeInet() throws SQLException { + void testDatatypeInet() { assertSingleValue("myInet", "VARCHAR(2000000) UTF8", "192.168.100.128/32"); } @Test - void testDatatypeInteger() throws SQLException { + void testDatatypeInteger() { assertSingleValue("myInteger", "DECIMAL(10,0)", "7189234"); } @Test - void testDatatypeIntervalYM() throws SQLException { + void testDatatypeIntervalYM() { assertSingleValue("myInterval", "VARCHAR(2000000) UTF8", "1 year"); } @Test - void testDatatypeJSON() throws SQLException { + void testDatatypeJSON() { assertSingleValue("myJson", "VARCHAR(2000000) UTF8", "{\"bar\": \"baz\", \"balance\": 7.77, \"active\": false}"); } @Test - void testDatatypeJSONB() throws SQLException { + void testDatatypeJSONB() { assertSingleValue("myJsonb", "VARCHAR(2000000) UTF8", "{\"bar\": \"baz\", \"active\": false, \"balance\": 7.77}"); } @Test - void testDatatypeLine() throws SQLException { + void testDatatypeLine() { assertSingleValue("myLine", "VARCHAR(2000000) UTF8", "{1,2,3}"); } @Test - void testDatatypeLSeg() throws SQLException { + void testDatatypeLSeg() { assertSingleValue("myLseg", "VARCHAR(2000000) UTF8", "[(1,2),(3,4)]"); } @Test - void testDatatypeMACAddr() throws SQLException { + void testDatatypeMACAddr() { assertSingleValue("myMacaddr", "VARCHAR(2000000) UTF8", "08:00:2b:01:02:03"); } @Test - void testDatatypeMoney() throws SQLException { + void testDatatypeMoney() { assertSingleValue("myMoney", "DOUBLE", 100.01); } @Test - void testDatatypeNumeric() throws SQLException { + void testDatatypeNumeric() { assertSingleValue("myNumeric", "VARCHAR(2000000) UTF8", 24.2300000000); } @Test - void testDatatypePath() throws SQLException { + void testDatatypePath() { assertSingleValue("myPath", "VARCHAR(2000000) UTF8", "[(1,2),(3,4)]"); } @Test - void testDatatypePoint() throws SQLException { + void testDatatypePoint() { assertSingleValue("myPoint", "VARCHAR(2000000) UTF8", "(1,3)"); } @Test - void testDatatypePolygon() throws SQLException { + void testDatatypePolygon() { assertSingleValue("myPolygon", "VARCHAR(2000000) UTF8", "((1,2),(2,4),(3,7))"); } @Test - void testDatatypeReal() throws SQLException { + void testDatatypeReal() { assertSingleValue("myReal", "DOUBLE", 10.12); } @Test - void testDatatypeSmallInt() throws SQLException { + void testDatatypeSmallInt() { assertSingleValue("mySmallint", "DECIMAL(5,0)", 100); } @Test - void testDatatypeText() throws SQLException { + void testDatatypeText() { assertSingleValue("myText", "VARCHAR(2000000) UTF8", "This cat is super cute"); } @Test - void testDatatypeTime() throws SQLException { + void testDatatypeTime() { assertSingleValue("myTime", "VARCHAR(2000000) UTF8", "1970-01-01 11:11:11.0"); } @Test - void testDatatypeTimeWithTimezone() throws SQLException { + void testDatatypeTimeWithTimezone() { assertSingleValue("myTimeWithTimeZone", "VARCHAR(2000000) UTF8", "1970-01-01 11:11:11.0"); } + @ParameterizedTest + @CsvSource({ + "myTimestamp, TIMESTAMP, 2010-01-01 11:11:11", + "myTimestamp0, TIMESTAMP, 2010-01-01 11:11:11", + "myTimestamp3, TIMESTAMP, 2010-01-01 11:11:11.123", + "myTimestampwithtimezone, TIMESTAMP, 2010-01-01 11:11:11", + }) + void testDatatypeTimestamp(final String column, final String expectedType, final String expectedTimestamp) { + assertSingleValue(column, expectedType, Timestamp.valueOf(expectedTimestamp)); + } + @Test - void testDatatypeTimestamp() 
throws SQLException, ParseException { - final Timestamp expectedDate = new Timestamp( - new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2010-01-01 11:11:11").getTime()); - assertSingleValue("myTimestamp", "TIMESTAMP", expectedDate); + void testDatatypeTimestampWithPrecision6() { + assumeTrue(supportTimestampPrecision()); + assertSingleValue("myTimestamp6", "TIMESTAMP", Timestamp.valueOf("2010-01-01 11:11:11.123456")); } @Test - void testDatatypeTimestampWithTimezone() throws SQLException, ParseException { - final Timestamp expectedDate = new Timestamp( - new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2010-01-01 11:11:11").getTime()); - assertSingleValue("myTimestampwithtimezone", "TIMESTAMP", expectedDate); + void testDatatypeTimestampWithoutPrecision6() { + assumeFalse(supportTimestampPrecision()); + assertSingleValue("myTimestamp6", "TIMESTAMP", Timestamp.valueOf("2010-01-01 11:11:11.123")); } @Test - void testDatatypeTsQuery() throws SQLException { + void testDatatypeTsQuery() { assertSingleValue("myTsquery", "VARCHAR(2000000) UTF8", "'fat' & 'rat'"); } @Test - void testDatatypeTsvector() throws SQLException { + void testDatatypeTsvector() { assertSingleValue("myTsvector", "VARCHAR(2000000) UTF8", "'fat':2 'rat':3"); } @Test - void testDatatypeUUID() throws SQLException { + void testDatatypeUUID() { assertSingleValue("myUuid", "VARCHAR(2000000) UTF8", "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11"); } @Test - void testDatatypeXML() throws SQLException { + void testDatatypeXML() { assertSingleValue("myXml", "VARCHAR(2000000) UTF8", "Manual..."); } - private void assertSingleValue(final String columnName, final String expectedColumnType, final Object expectedValue) - throws SQLException { - final ResultSet actual = statementExasol.executeQuery("SELECT " + columnName + " FROM " - + virtualSchemaPostgres.getName() + "." + TABLE_POSTGRES_ALL_DATA_TYPES); - MatcherAssert.assertThat(actual, table().row(expectedValue).matches(TypeMatchMode.NO_JAVA_TYPE_CHECK)); + private void assertSingleValue(final String columnName, final String expectedColumnType, + final Object expectedValue) { + final String query = "SELECT " + columnName + " FROM " + + virtualSchemaPostgres.getName() + "." 
+ TABLE_POSTGRES_ALL_DATA_TYPES; + assertResult(query, table().row(expectedValue).matches(TypeMatchMode.NO_JAVA_TYPE_CHECK)); } - // TODO refactor to use table().row().matches() - private ResultSet getExpectedResultSet(final List expectedColumns, final List expectedRows) - throws SQLException { - final String expectedValues = expectedRows.stream().map(row -> "(" + row + ")") - .collect(Collectors.joining(",")); - final String qualifiedExpectedTableName = exasolSchema.getName() + ".EXPECTED"; - final String createTableStatement = "CREATE OR REPLACE TABLE " + qualifiedExpectedTableName + "(" - + String.join(", ", expectedColumns) + ");"; - statementExasol.execute(createTableStatement); - final String insertIntoTableStatement = "INSERT INTO " + qualifiedExpectedTableName + " VALUES " - + expectedValues + ";"; - statementExasol.execute(insertIntoTableStatement); - final String selectStatement = "SELECT * FROM " + qualifiedExpectedTableName + ";"; - return statementExasol.executeQuery(selectStatement); + private boolean supportTimestampPrecision() { + final ExasolDockerImageReference reference = SETUP.getExasolContainer().getDockerImageReference(); + return reference.getMajor() > 8 || (reference.getMajor() == 8 && reference.getMinor() >= 32); } - - private ResultSet getActualResultSet(final String query) throws SQLException { - return statementExasol.executeQuery(query); - } - } diff --git a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectTest.java b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectTest.java index 1f8e22f..c0bcd1f 100644 --- a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectTest.java +++ b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLSqlDialectTest.java @@ -5,11 +5,13 @@ import static com.exasol.adapter.capabilities.LiteralCapability.*; import static com.exasol.adapter.capabilities.MainCapability.*; import static com.exasol.adapter.capabilities.PredicateCapability.*; +import static java.util.Collections.emptyMap; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.*; import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.when; import java.sql.Connection; @@ -26,6 +28,7 @@ import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import com.exasol.ExaMetadata; import com.exasol.adapter.AdapterProperties; import com.exasol.adapter.capabilities.Capabilities; import com.exasol.adapter.dialects.SqlDialect; @@ -38,11 +41,14 @@ class PostgreSQLSqlDialectTest { private PostgreSQLSqlDialect dialect; @Mock - private ConnectionFactory connectionFactoryMock; + ConnectionFactory connectionFactoryMock; + @Mock + ExaMetadata exaMetadataMock; @BeforeEach void beforeEach() { - this.dialect = new PostgreSQLSqlDialect(this.connectionFactoryMock, AdapterProperties.emptyProperties()); + lenient().when(exaMetadataMock.getDatabaseVersion()).thenReturn("8.34.0"); + this.dialect = testee(emptyMap()); } @Test @@ -86,9 +92,9 @@ void testGetCapabilities() { ); } - @CsvSource({ "ABC, \"abc\"", // - "AbCde, \"abcde\"", // - "\"tableName, \"\"\"tablename\"" // + @CsvSource({ "ABC, \"abc\"", + "AbCde, \"abcde\"", + "\"tableName, \"\"\"tablename\"" }) @ParameterizedTest void testApplyQuote(final String unquoted, final String quoted) { @@ 
-110,17 +116,17 @@ void testGetLiteralStringNull() { @Test void testPostgreSQLIdentifierMappingConsistency() throws PropertyValidationException { - final SqlDialect sqlDialect = new PostgreSQLSqlDialect(null, new AdapterProperties(Map.of( // - CONNECTION_NAME_PROPERTY, "MY_CONN", // - "POSTGRESQL_IDENTIFIER_MAPPING", "CONVERT_TO_UPPER"))); + final SqlDialect sqlDialect = testee(Map.of( + CONNECTION_NAME_PROPERTY, "MY_CONN", + "POSTGRESQL_IDENTIFIER_MAPPING", "CONVERT_TO_UPPER")); sqlDialect.validateProperties(); } @Test void testPostgreSQLIdentifierMappingInvalidPropertyValueThrowsException() { - final SqlDialect sqlDialect = new PostgreSQLSqlDialect(null, new AdapterProperties(Map.of( // - CONNECTION_NAME_PROPERTY, "MY_CONN", // - "POSTGRESQL_IDENTIFIER_MAPPING", "CONVERT"))); + final SqlDialect sqlDialect = testee(Map.of( + CONNECTION_NAME_PROPERTY, "MY_CONN", + "POSTGRESQL_IDENTIFIER_MAPPING", "CONVERT")); final PropertyValidationException exception = assertThrows(PropertyValidationException.class, sqlDialect::validateProperties); assertThat(exception.getMessage(), containsString("E-VSPG-4")); @@ -128,9 +134,9 @@ void testPostgreSQLIdentifierMappingInvalidPropertyValueThrowsException() { @Test void testIgnoreErrorsConsistency() { - final SqlDialect sqlDialect = new PostgreSQLSqlDialect(null, new AdapterProperties(Map.of( // - CONNECTION_NAME_PROPERTY, "MY_CONN", // - "IGNORE_ERRORS", "ORACLE_ERROR"))); + final SqlDialect sqlDialect = testee(Map.of( + CONNECTION_NAME_PROPERTY, "MY_CONN", + "IGNORE_ERRORS", "ORACLE_ERROR")); final PropertyValidationException exception = assertThrows(PropertyValidationException.class, sqlDialect::validateProperties); assertThat(exception.getMessage(), containsString("E-VSPG-5")); @@ -138,17 +144,25 @@ void testIgnoreErrorsConsistency() { @Test void testValidateCatalogProperty() throws PropertyValidationException { - final SqlDialect sqlDialect = new PostgreSQLSqlDialect(null, new AdapterProperties(Map.of( // - CONNECTION_NAME_PROPERTY, "MY_CONN", // - CATALOG_NAME_PROPERTY, "MY_CATALOG"))); + final SqlDialect sqlDialect = testee(Map.of( + CONNECTION_NAME_PROPERTY, "MY_CONN", + CATALOG_NAME_PROPERTY, "MY_CATALOG")); sqlDialect.validateProperties(); } @Test void testValidateSchemaProperty() throws PropertyValidationException { - final SqlDialect sqlDialect = new PostgreSQLSqlDialect(null, new AdapterProperties(Map.of( // - CONNECTION_NAME_PROPERTY, "MY_CONN", // - SCHEMA_NAME_PROPERTY, "MY_SCHEMA"))); + final SqlDialect sqlDialect = testee(Map.of( + CONNECTION_NAME_PROPERTY, "MY_CONN", + SCHEMA_NAME_PROPERTY, "MY_SCHEMA")); sqlDialect.validateProperties(); } -} \ No newline at end of file + + private PostgreSQLSqlDialect testee(final Map properties) { + return testee(new AdapterProperties(properties)); + } + + private PostgreSQLSqlDialect testee(final AdapterProperties properties) { + return new PostgreSQLSqlDialect(connectionFactoryMock, properties, exaMetadataMock); + } +} diff --git a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLTableMetadataReaderTest.java b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLTableMetadataReaderTest.java index c875eac..f03f76e 100644 --- a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLTableMetadataReaderTest.java +++ b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgreSQLTableMetadataReaderTest.java @@ -27,7 +27,7 @@ class PostgreSQLTableMetadataReaderTest { void beforeEach() { this.rawProperties = new HashMap<>(); final AdapterProperties properties = new 
AdapterProperties(this.rawProperties); - this.reader = new PostgreSQLTableMetadataReader(null, null, properties, + this.reader = new PostgreSQLTableMetadataReader(null, null, properties, null, BaseIdentifierConverter.createDefault()); } @@ -67,4 +67,4 @@ void testIsUppercaseTableIncludedByMappingWithConvertToUpperNotIgnoringUppercase () -> this.reader.isTableIncludedByMapping("\"FooBar\"")); assertThat(exception.getMessage(), containsString("E-VSPG-6")); } -} \ No newline at end of file +} diff --git a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgresSQLSqlGenerationVisitorTest.java b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgresSQLSqlGenerationVisitorTest.java index 9728974..041afd1 100644 --- a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgresSQLSqlGenerationVisitorTest.java +++ b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgresSQLSqlGenerationVisitorTest.java @@ -33,17 +33,18 @@ class PostgresSQLSqlGenerationVisitorTest { @BeforeEach void beforeEach(@Mock final ConnectionFactory connectionFactoryMock) { - final SqlDialect dialect = new PostgreSQLSqlDialect(connectionFactoryMock, AdapterProperties.emptyProperties()); + final SqlDialect dialect = new PostgreSQLSqlDialect(connectionFactoryMock, AdapterProperties.emptyProperties(), + null); final SqlGenerationContext context = new SqlGenerationContext("test_catalog", "test_schema", false); this.visitor = new PostgresSQLSqlGenerationVisitor(dialect, context); } - @CsvSource({ "ADD_DAYS, days", // - "ADD_HOURS, hours", // - "ADD_MINUTES, mins", // - "ADD_SECONDS, secs", // - "ADD_YEARS, years", // - "ADD_WEEKS, weeks", // + @CsvSource({ "ADD_DAYS, days", + "ADD_HOURS, hours", + "ADD_MINUTES, mins", + "ADD_SECONDS, secs", + "ADD_YEARS, years", + "ADD_WEEKS, weeks", "ADD_MONTHS, months" }) @ParameterizedTest void testVisitSqlFunctionScalarAddDate(final ScalarFunction scalarFunction, final String expected) @@ -64,11 +65,11 @@ private SqlFunctionScalar createSqlFunctionScalarForDateTest(final ScalarFunctio return new SqlFunctionScalar(scalarFunction, arguments); } - @CsvSource({ "SECOND, SECOND, 2", // - "MINUTE, MINUTE, 2", // - "DAY, DAY, 2", // - "WEEK, WEEK, 2", // - "MONTH, MONTH, 2", // + @CsvSource({ "SECOND, SECOND, 2", + "MINUTE, MINUTE, 2", + "DAY, DAY, 2", + "WEEK, WEEK, 2", + "MONTH, MONTH, 2", "YEAR, YEAR, 4" }) @ParameterizedTest void testVisitSqlFunctionScalarDatetime(final ScalarFunction scalarFunction, final String expected, @@ -93,12 +94,12 @@ void testVisitSqlSelectListAnyValue() throws AdapterException { @Test void testVisitSqlStatementSelect() throws AdapterException { final SqlStatementSelect select = (SqlStatementSelect) DialectTestData.getTestSqlNode(); - assertThat(this.visitor.visit(select), // - equalTo("SELECT \"user_id\", " // - + "COUNT(\"url\") FROM \"test_schema\".\"clicks\" " // - + "WHERE 1 < \"user_id\" " // - + "GROUP BY \"user_id\" " // - + "HAVING 1 < COUNT(\"url\") " // + assertThat(this.visitor.visit(select), + equalTo("SELECT \"user_id\", " + + "COUNT(\"url\") FROM \"test_schema\".\"clicks\" " + + "WHERE 1 < \"user_id\" " + + "GROUP BY \"user_id\" " + + "HAVING 1 < COUNT(\"url\") " + "ORDER BY \"user_id\" LIMIT 10")); } @@ -116,4 +117,4 @@ void testVisitSqlFunctionAggregateGroupConcat() throws AdapterException { .builder(argument).separator(new SqlLiteralString("'")).orderBy(orderBy).build(); assertThat(this.visitor.visit(sqlFunctionAggregateGroupConcat), equalTo("STRING_AGG(E'test', E'''') ")); } -} \ No newline at end of file +} diff --git 
index 6ae21a5..804684e 100644
--- a/src/test/java/com/exasol/adapter/dialects/postgresql/PostgresVirtualSchemaIntegrationTestSetup.java
+++ b/src/test/java/com/exasol/adapter/dialects/postgresql/PostgresVirtualSchemaIntegrationTestSetup.java
@@ -1,6 +1,7 @@
 package com.exasol.adapter.dialects.postgresql;
 
 import static com.exasol.dbbuilder.dialects.exasol.AdapterScript.Language.JAVA;
+import static java.util.Objects.requireNonNull;
 
 import java.io.Closeable;
 import java.io.FileNotFoundException;
@@ -27,12 +28,12 @@
  * This class contains the common integration test setup for all PostgreSQL virtual schemas.
  */
 public class PostgresVirtualSchemaIntegrationTestSetup implements Closeable {
-    private static final String VIRTUAL_SCHEMAS_JAR_NAME_AND_VERSION = "virtual-schema-dist-12.0.0-postgresql-3.0.0.jar";
+    private static final String VIRTUAL_SCHEMAS_JAR_NAME_AND_VERSION = "virtual-schema-dist-13.0.0-postgresql-3.1.0.jar";
     private static final Path PATH_TO_VIRTUAL_SCHEMAS_JAR = Path.of("target", VIRTUAL_SCHEMAS_JAR_NAME_AND_VERSION);
     private static final String SCHEMA_EXASOL = "SCHEMA_EXASOL";
     private static final String ADAPTER_SCRIPT_EXASOL = "ADAPTER_SCRIPT_EXASOL";
-    private static final String EXASOL_DOCKER_IMAGE_REFERENCE = "8.24.0";
-    private static final String POSTGRES_CONTAINER_NAME = "postgres:14.2";
+    private static final String EXASOL_DOCKER_IMAGE_REFERENCE = "8.34.0";
+    private static final String POSTGRES_CONTAINER_NAME = "postgres:17.5";
     private static final String JDBC_DRIVER_NAME = "postgresql.jar";
     private static final Path JDBC_DRIVER_PATH = Path.of("target/postgresql-driver/" + JDBC_DRIVER_NAME);
 
@@ -41,10 +42,12 @@ public class PostgresVirtualSchemaIntegrationTestSetup implements Closeable {
     private final Statement postgresStatement;
     private final PostgreSQLContainer<? extends PostgreSQLContainer<?>> postgresqlContainer = new PostgreSQLContainer<>(
            POSTGRES_CONTAINER_NAME);
+    @SuppressWarnings("resource") // Will be closed in close() method
     private final ExasolContainer<? extends ExasolContainer<?>> exasolContainer = new ExasolContainer<>(
            EXASOL_DOCKER_IMAGE_REFERENCE).withRequiredServices(ExasolService.BUCKETFS, ExasolService.UDF)
            .withReuse(true);
     private final Connection exasolConnection;
+    private final UdfTestSetup udfTestSetup;
     private final Statement exasolStatement;
     private final AdapterScript adapterScript;
     private final ConnectionDefinition connectionDefinition;
@@ -64,9 +67,8 @@ public class PostgresVirtualSchemaIntegrationTestSetup implements Closeable {
         this.exasolStatement = this.exasolConnection.createStatement();
         this.postgresConnection = this.postgresqlContainer.createConnection("");
         this.postgresStatement = this.postgresConnection.createStatement();
-        final String hostIpAddress = getTestHostIpFromInsideExasol();
-        assert (hostIpAddress != null);
-        final UdfTestSetup udfTestSetup = new UdfTestSetup(hostIpAddress, this.exasolContainer.getDefaultBucket(),
+        final String hostIpAddress = requireNonNull(getTestHostIpFromInsideExasol());
+        this.udfTestSetup = new UdfTestSetup(hostIpAddress, this.exasolContainer.getDefaultBucket(),
                 this.exasolConnection);
         this.exasolFactory = new ExasolObjectFactory(this.exasolContainer.createConnection(""),
                 ExasolObjectConfiguration.builder().withJvmOptions(udfTestSetup.getJvmOptions()).build());
@@ -78,16 +80,12 @@ public class PostgresVirtualSchemaIntegrationTestSetup implements Closeable {
                 + this.postgresqlContainer.getDatabaseName();
         this.connectionDefinition = this.exasolFactory.createConnectionDefinition("POSGRES_CONNECTION",
                 connectionString, this.postgresqlContainer.getUsername(), this.postgresqlContainer.getPassword());
-        } catch (final SQLException | BucketAccessException | TimeoutException exception) {
+        } catch (final SQLException exception) {
             throw new IllegalStateException("Failed to created PostgreSQL test setup.", exception);
-        } catch (final InterruptedException exception) {
-            Thread.currentThread().interrupt();
-            throw new IllegalStateException("Thread was interrupted");
         }
     }
 
-    private static void uploadDriverToBucket(final ExasolContainer<? extends ExasolContainer<?>> container)
-            throws InterruptedException, TimeoutException, BucketAccessException {
+    private static void uploadDriverToBucket(final ExasolContainer<? extends ExasolContainer<?>> container) {
         try {
             container.getDriverManager().install( //
                     JdbcDriver.builder("POSTGRES_JDBC_DRIVER") //
@@ -157,6 +155,7 @@ public ExasolObjectFactory getExasolFactory() {
     public void close() {
         try {
             this.exasolStatement.close();
+            this.udfTestSetup.close();
             this.exasolConnection.close();
             this.postgresStatement.close();
             this.postgresConnection.close();
diff --git a/src/test/java/com/exasol/adapter/dialects/postgresql/docgeneration/CapabilitiesReport.java b/src/test/java/com/exasol/adapter/dialects/postgresql/docgeneration/CapabilitiesReport.java
index 02bce85..4bee33a 100644
--- a/src/test/java/com/exasol/adapter/dialects/postgresql/docgeneration/CapabilitiesReport.java
+++ b/src/test/java/com/exasol/adapter/dialects/postgresql/docgeneration/CapabilitiesReport.java
@@ -20,7 +20,7 @@ public CapabilitiesReport(final SqlDialectFactory dialectFactory) {
     @Override
     public String generateContent() {
         final LinedStringBuilder reportBuilder = new LinedStringBuilder();
-        final SqlDialect sqlDialect = this.dialectFactory.createSqlDialect(null, null);
+        final SqlDialect sqlDialect = this.dialectFactory.createSqlDialect(null, null, null);
         final Capabilities capabilities = sqlDialect.getCapabilities();
         reportBuilder.appendLine(
                 " ");
diff --git a/src/test/resources/integration/scalarFunctionsParameterCache.yml b/src/test/resources/integration/scalarFunctionsParameterCache.yml
index cfeb94b..92cc403 100644
--- a/src/test/resources/integration/scalarFunctionsParameterCache.yml
+++ b/src/test/resources/integration/scalarFunctionsParameterCache.yml
@@ -2,9 +2,8 @@ abs: ['"DOUBLE_PRECISION_C0"', '"INTEGER_C1"', '"DECIMAL18__3_C2"']
 acos: ['"DOUBLE_PRECISION_C0"']
 add_hours: ['"DATE_C5", "INTEGER_C1"', '"TIMESTAMP_C6", "INTEGER_C1"']
 add_minutes: ['"DATE_C5", "INTEGER_C1"', '"TIMESTAMP_C6", "INTEGER_C1"']
-add_seconds: ['"DATE_C5", "DOUBLE_PRECISION_C0"', '"DATE_C5", "INTEGER_C1"', '"DATE_C5",
-  "DECIMAL18__3_C2"', '"TIMESTAMP_C6", "DOUBLE_PRECISION_C0"', '"TIMESTAMP_C6",
-  "INTEGER_C1"', '"TIMESTAMP_C6", "DECIMAL18__3_C2"']
+add_seconds: ['"DATE_C5", "INTEGER_C1"', '"DATE_C5", "DECIMAL18__3_C2"', '"TIMESTAMP_C6",
+  "DOUBLE_PRECISION_C0"', '"TIMESTAMP_C6", "INTEGER_C1"', '"TIMESTAMP_C6", "DECIMAL18__3_C2"']
 ascii: ['"VARCHAR2_C4"']
 asin: ['"DOUBLE_PRECISION_C0"']
 atan: ['"DOUBLE_PRECISION_C0"', '"INTEGER_C1"', '"DECIMAL18__3_C2"']
diff --git a/src/test/resources/logging.properties b/src/test/resources/logging.properties
index 8c97abe..8d41bf2 100644
--- a/src/test/resources/logging.properties
+++ b/src/test/resources/logging.properties
@@ -2,5 +2,5 @@ handlers=java.util.logging.ConsoleHandler
 .level=INFO
 java.util.logging.ConsoleHandler.level=ALL
 java.util.logging.ConsoleHandler.formatter=java.util.logging.SimpleFormatter
-java.util.logging.SimpleFormatter.format=%1$tF %1$tT.%1$tL [%4$-7s] %5$s %n
+java.util.logging.SimpleFormatter.format=%1$tF %1$tT.%1$tL [%4$-7s] %5$s %6$s%n
 com.exasol.level=ALL