diff --git a/.github/workflows/docs-pages.yaml b/.github/workflows/docs-pages.yaml
new file mode 100644
index 00000000000..b40bb391062
--- /dev/null
+++ b/.github/workflows/docs-pages.yaml
@@ -0,0 +1,43 @@
+name: "Docs / Publish"
+# For more information,
+# see https://sphinx-theme.scylladb.com/stable/deployment/production.html#available-workflows
+
+on:
+ push:
+ branches:
+ - scylla-3.x
+ paths:
+ - 'docs/**'
+ - 'faq/**'
+ - 'manual/**'
+ - 'changelog/**'
+ - 'upgrade_guide/**'
+ workflow_dispatch:
+
+jobs:
+ release:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Set up Python
+ uses: actions/setup-python@v3
+ with:
+        python-version: "3.7"
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v1
+ with:
+        java-version: "1.8"
+ - name: Set up env
+ run: make -C docs setupenv
+ - name: Build redirects
+ run: make -C docs redirects
+ - name: Build docs
+ run: make -C docs multiversion
+ - name: Deploy docs to GitHub Pages
+ run: ./docs/_utils/deploy.sh
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml
new file mode 100644
index 00000000000..73f1e34c65b
--- /dev/null
+++ b/.github/workflows/docs-pr.yaml
@@ -0,0 +1,36 @@
+name: "Docs / Build PR"
+# For more information,
+# see https://sphinx-theme.scylladb.com/stable/deployment/production.html#available-workflows
+
+on:
+ pull_request:
+ branches:
+ - scylla-3.x
+ paths:
+ - 'docs/**'
+ - 'faq/**'
+ - 'manual/**'
+ - 'changelog/**'
+ - 'upgrade_guide/**'
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Set up Python
+ uses: actions/setup-python@v3
+ with:
+        python-version: "3.7"
+ - name: Set up JDK 1.8
+ uses: actions/setup-java@v1
+ with:
+        java-version: "1.8"
+ - name: Set up env
+ run: make -C docs setupenv
+ - name: Build docs
+ run: make -C docs test
\ No newline at end of file
diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml
deleted file mode 100644
index 27ebb4ce1bf..00000000000
--- a/.github/workflows/pages.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-name: "CI Docs"
-
-on:
- push:
- branches:
- - scylla-3.x
- paths:
- - 'docs/**'
- - 'faq/**'
- - 'manual/**'
- - 'changelog/**'
- - 'upgrade_guide/**'
-jobs:
- release:
- name: Build
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- with:
- persist-credentials: false
- fetch-depth: 0
- - name: Set up Python
- uses: actions/setup-python@v1
- with:
- python-version: 3.7
- - name: Set up JDK 1.8
- uses: actions/setup-java@v1
- with:
- java-version: 1.8
- - name: Build Sphinx docs
- run: |
- export PATH=$PATH:~/.local/bin
- cd docs
- make multiversion
- - name: Deploy
- run : ./docs/_utils/deploy.sh
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/tests-reports-4x@v1.yml b/.github/workflows/tests-reports-4x@v1.yml
new file mode 100644
index 00000000000..02921c9a9cd
--- /dev/null
+++ b/.github/workflows/tests-reports-4x@v1.yml
@@ -0,0 +1,24 @@
+# This is a workflow that runs after 'Tests'
+# workflow, because the 'Tests' (PR) workflow
+# has insufficient permissions to write
+# GitHub Actions checks.
+name: 'Tests Reports (Driver 4.x)'
+on:
+ workflow_run:
+ workflows: ['Tests (Driver 4.x)']
+ types:
+ - completed
+jobs:
+ report:
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+
+ steps:
+ - name: Generate test report
+ uses: dorny/test-reporter@v1
+ with:
+ artifact: 'test-results'
+ name: 'Test report'
+ path: '**/TEST-TestSuite.xml'
+ reporter: java-junit
+ list-tests: failed
\ No newline at end of file
diff --git a/.github/workflows/tests-reports@v1.yml b/.github/workflows/tests-reports@v1.yml
new file mode 100644
index 00000000000..a6d1b3b5b9e
--- /dev/null
+++ b/.github/workflows/tests-reports@v1.yml
@@ -0,0 +1,24 @@
+# This is a workflow that runs after 'Tests'
+# workflow, because the 'Tests' (PR) workflow
+# has insufficient permissions to write
+# GitHub Actions checks.
+name: 'Tests Reports'
+on:
+ workflow_run:
+ workflows: ['Tests']
+ types:
+ - completed
+jobs:
+ report:
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+
+ steps:
+ - name: Generate test report
+ uses: dorny/test-reporter@v1
+ with:
+ artifact: 'test-results'
+ name: 'Test report'
+ path: '**/TEST-TestSuite.xml'
+ reporter: java-junit
+ list-tests: failed
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
deleted file mode 100644
index 5baa4e9d96f..00000000000
--- a/.github/workflows/tests.yml
+++ /dev/null
@@ -1,85 +0,0 @@
-name: Tests
-
-on:
- push:
- branches: [ scylla-3.x ]
- pull_request:
- branches: [ scylla-3.x ]
-
-jobs:
- run-unit-tests:
- name: Run unit tests
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
-
- - name: Set up JDK 8
- uses: actions/setup-java@v2
- with:
- java-version: '8'
- distribution: 'adopt'
-
- - name: Run unit tests
- run: mvn -B test
-
- run-cassandra-integration-tests:
- name: Run integration tests on Cassandra
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
-
- - name: Set up JDK 8
- uses: actions/setup-java@v2
- with:
- java-version: '8'
- distribution: 'adopt'
-
- - name: Setup environment (Integration test on Cassandra 3.11.11)
- run: |
- sudo apt-get update
- sudo apt-get install -y python3 python3-pip python-is-python3 python3-boto3
- sudo pip3 install https://github.com/scylladb/scylla-ccm/archive/master.zip
-
- - name: Run integration tests on Cassandra 3.11.11
- run: mvn -B verify -Pshort -Dcassandra.version=3.11.11
-
- - uses: actions/upload-artifact@v2
- if: ${{ failure() }}
- with:
- name: ccm-logs-cassandra-3.11.11
- path: /tmp/*-0/ccm*/node*/logs/*
-
- run-scylla-integration-tests:
- name: Run integration tests on Scylla
- runs-on: ubuntu-latest
-
- strategy:
- matrix:
- scylla-version: ['4.4.4', '4.3.6']
-
- steps:
- - uses: actions/checkout@v2
-
- - name: Set up JDK 8
- uses: actions/setup-java@v2
- with:
- java-version: '8'
- distribution: 'adopt'
-
- - name: Setup environment (Integration test on Scylla ${{ matrix.scylla-version }})
- run: |
- sudo apt-get update
- sudo apt-get install -y python3 python3-pip python-is-python3 python3-boto3
- sudo pip3 install https://github.com/scylladb/scylla-ccm/archive/master.zip
- sudo sh -c "echo 2097152 >> /proc/sys/fs/aio-max-nr"
-
- - name: Run integration tests on Scylla (${{ matrix.scylla-version }})
- run: mvn -B verify -Pshort -Dscylla.version=${{ matrix.scylla-version }}
-
- - uses: actions/upload-artifact@v2
- if: ${{ failure() }}
- with:
- name: ccm-logs-scylla-${{ matrix.scylla-version }}
- path: /tmp/*-0/ccm*/node*/logs/*
\ No newline at end of file
diff --git a/.github/workflows/tests@v1.yml b/.github/workflows/tests@v1.yml
new file mode 100644
index 00000000000..80702698380
--- /dev/null
+++ b/.github/workflows/tests@v1.yml
@@ -0,0 +1,227 @@
+name: Tests
+
+on:
+ push:
+ branches: [ scylla-3.*x ]
+ pull_request:
+ branches: [ scylla-3.*x ]
+ workflow_dispatch:
+
+jobs:
+ build:
+ name: Build
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+
+ strategy:
+ matrix:
+ java-version: [8, 11]
+ fail-fast: false
+
+ steps:
+ - name: Checkout source
+ uses: actions/checkout@v2
+
+ - name: Set up JDK ${{ matrix.java-version }}
+ uses: actions/setup-java@v2
+ with:
+ java-version: ${{ matrix.java-version }}
+ distribution: 'adopt'
+
+ - name: Compile source and tests
+ run: mvn -B compile test-compile -Dfmt.skip=true -Dclirr.skip=true -Danimal.sniffer.skip=true
+
+ verify:
+ name: Full verify
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+
+ strategy:
+ matrix:
+ java-version: [8, 11]
+ fail-fast: false
+
+ steps:
+ - name: Checkout source
+ uses: actions/checkout@v2
+
+ - name: Set up JDK ${{ matrix.java-version }}
+ uses: actions/setup-java@v2
+ with:
+ java-version: ${{ matrix.java-version }}
+ distribution: 'adopt'
+
+ - name: Full verify
+ run: mvn -B verify -DskipTests
+
+ unit-tests:
+ name: Unit tests
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+
+ steps:
+ - name: Checkout source
+ uses: actions/checkout@v2
+
+ - name: Set up JDK 8
+ uses: actions/setup-java@v2
+ with:
+ java-version: '8'
+ distribution: 'adopt'
+
+ - name: Run unit tests
+ run: mvn -B test -Dfmt.skip=true -Dclirr.skip=true -Danimal.sniffer.skip=true
+
+ - name: Copy test results
+ if: success() || failure()
+ run: |
+ shopt -s globstar
+ mkdir unit
+ cp --parents ./**/target/*-reports/*.xml unit/
+
+ - name: Upload test results
+ uses: actions/upload-artifact@v2
+ if: success() || failure()
+ with:
+ name: test-results
+ path: "*/**/target/*-reports/*.xml"
+
+ setup-integration-tests:
+ name: Setup ITs
+ runs-on: ubuntu-latest
+ timeout-minutes: 2
+
+ steps:
+ - name: Checkout source
+ uses: actions/checkout@v2
+
+ - name: Setup Python 3
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.x'
+
+ - name: Fetch Scylla and Cassandra versions
+ id: fetch-versions
+ run: |
+ pip3 install -r ci/requirements.txt
+        echo "scylla-integration-tests-versions=$(python3 ci/version_fetch.py scylla-oss-stable:2 scylla-oss-rc scylla-enterprise-stable:2 scylla-enterprise-rc)" >> "$GITHUB_OUTPUT"
+        echo "cassandra-integration-tests-versions=$(python3 ci/version_fetch.py cassandra3-stable:1)" >> "$GITHUB_OUTPUT"
+
+ outputs:
+ scylla-integration-tests-versions: ${{ steps.fetch-versions.outputs.scylla-integration-tests-versions }}
+ cassandra-integration-tests-versions: ${{ steps.fetch-versions.outputs.cassandra-integration-tests-versions }}
+
+ cassandra-integration-tests:
+ name: Cassandra ITs
+ runs-on: ubuntu-latest
+ needs: [setup-integration-tests]
+ timeout-minutes: 90
+
+ strategy:
+ matrix:
+ cassandra-version: ${{ fromJson(needs.setup-integration-tests.outputs.cassandra-integration-tests-versions) }}
+ fail-fast: false
+
+ steps:
+ - name: Checkout source
+ uses: actions/checkout@v2
+
+ - name: Set up JDK 8
+ uses: actions/setup-java@v2
+ with:
+ java-version: '8'
+ distribution: 'adopt'
+
+ - name: Setup Python 3
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.x'
+
+ - name: Setup environment
+ run: |
+ sudo sh -c "echo 'deb http://security.ubuntu.com/ubuntu xenial-security main' >> /etc/apt/sources.list"
+ sudo apt-get update
+ sudo apt-get install libssl1.0.0
+ pip3 install https://github.com/scylladb/scylla-ccm/archive/master.zip
+
+ - name: Run integration tests on Cassandra (${{ matrix.cassandra-version }})
+ run: mvn -B verify -Pshort -Dcassandra.version=${{ matrix.cassandra-version }} -Dfmt.skip=true -Dclirr.skip=true -Danimal.sniffer.skip=true
+
+ - name: Copy test results
+ if: success() || failure()
+ run: |
+ shopt -s globstar
+ mkdir cassandra-${{ matrix.cassandra-version }}
+ cp --parents ./**/target/*-reports/*.xml cassandra-${{ matrix.cassandra-version }}/
+
+ - name: Upload test results
+ uses: actions/upload-artifact@v2
+ if: success() || failure()
+ with:
+ name: test-results
+ path: "*/**/target/*-reports/*.xml"
+
+ - name: Upload CCM logs
+ uses: actions/upload-artifact@v2
+ if: ${{ failure() }}
+ with:
+ name: ccm-logs-cassandra-${{ matrix.cassandra-version }}
+ path: /tmp/*-0/ccm*/node*/logs/*
+
+ scylla-integration-tests:
+ name: Scylla ITs
+ runs-on: ubuntu-latest
+ needs: [setup-integration-tests]
+ timeout-minutes: 90
+
+ strategy:
+ matrix:
+ scylla-version: ${{ fromJson(needs.setup-integration-tests.outputs.scylla-integration-tests-versions) }}
+ fail-fast: false
+
+ steps:
+ - name: Checkout source
+ uses: actions/checkout@v2
+
+ - name: Set up JDK 8
+ uses: actions/setup-java@v2
+ with:
+ java-version: '8'
+ distribution: 'adopt'
+
+ - name: Setup Python 3
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.x'
+
+ - name: Setup environment
+ run: |
+ sudo sh -c "echo 'deb http://security.ubuntu.com/ubuntu xenial-security main' >> /etc/apt/sources.list"
+ sudo apt-get update
+ sudo apt-get install libssl1.0.0
+ pip3 install https://github.com/scylladb/scylla-ccm/archive/master.zip
+ sudo sh -c "echo 2097152 >> /proc/sys/fs/aio-max-nr"
+
+ - name: Run integration tests on Scylla (${{ matrix.scylla-version }})
+ run: mvn -B verify -Pshort -Dscylla.version=${{ matrix.scylla-version }} -Dfmt.skip=true -Dclirr.skip=true -Danimal.sniffer.skip=true
+
+ - name: Copy test results
+ if: success() || failure()
+ run: |
+ shopt -s globstar
+ mkdir scylla-${{ matrix.scylla-version }}
+ cp --parents ./**/target/*-reports/*.xml scylla-${{ matrix.scylla-version }}/
+
+ - name: Upload test results
+ uses: actions/upload-artifact@v2
+ if: success() || failure()
+ with:
+ name: test-results
+ path: "*/**/target/*-reports/*.xml"
+
+ - name: Upload CCM logs
+ uses: actions/upload-artifact@v2
+ if: ${{ failure() }}
+ with:
+ name: ccm-logs-scylla-${{ matrix.scylla-version }}
+ path: /tmp/*-0/ccm*/node*/logs/*
diff --git a/.gitorderfile b/.gitorderfile
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index c5450ce3cd4..be63f984c2b 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -22,10 +22,12 @@ imports in ASCII sort order. In addition, please avoid using wildcard imports.
## Working on an issue
-Before starting to work on something, please comment in JIRA or ask on the mailing list
-to make sure nobody else is working on it.
+We use [GitHub issues](https://github.com/scylladb/java-driver/issues) to track ongoing issues.
+Before starting to work on something, please check the issues list to make sure nobody else is working on it.
+It's also a good idea to get in contact through [ScyllaDB-Users Slack](https://scylladb-users.slack.com/)
+to make your intentions known and clear.
-If your fix applies to multiple branches, base your work on the lowest active branch. Since version 3 of the driver,
+If your fix applies to multiple branches, base your work on the lowest active branch. Most of the time if you want to implement a feature for driver version 3, then you'll base your work on `scylla-3.x` (and `scylla-4.x` for version 4). Since version 3 of the driver,
we've adopted [semantic versioning](http://semver.org/) and our branches use the following scheme:
```
@@ -59,15 +61,13 @@ Before you send your pull request, make sure that:
- you have a unit test that failed before the fix and succeeds after.
- the fix is mentioned in `changelog/README.md`.
-- the commit message include the reference of the JIRA ticket for automatic linking
- (example: `JAVA-503: Fix NPE when a connection fails during pool construction.`).
+- the commit message includes a reference to the GitHub issue for
+ automatic linking
+ (example: `Fixes #1234`).
As long as your pull request is not merged, it's OK to rebase your branch and push with
-`--force`.
-
-If you want to contribute but don't have a specific issue in mind, the [lhf](https://datastax-oss.atlassian.net/secure/IssueNavigator.jspa?reset=true&mode=hide&jqlQuery=project%20%3D%20JAVA%20AND%20status%20in%20(Open%2C%20Reopened)%20AND%20labels%20%3D%20lhf)
-label in JIRA is a good place to start: it marks "low hanging fruits" that don't require
-in-depth knowledge of the codebase.
+`--force`. Commit history should be as flat as reasonably possible. Multiple commits where each one represents a single logical piece of pull request are fine.
+If you want to contribute but don't have a specific issue in mind, it's best to reach out through the ScyllaDB-Users Slack.
## Editor configuration
@@ -89,9 +89,17 @@ The Maven build uses profiles named after the categories to choose which tests t
mvn test -Pshort
```
-The default is "unit". Each profile runs the ones before it ("short" runs unit, etc.)
+The default is "unit". Each profile runs only their own category ("short" will *not* run "unit").
+
+Integration tests use [CCM](https://github.com/pcmanus/ccm) to bootstrap Cassandra instances. It is recommended to
+set up [Scylla CCM](https://github.com/scylladb/scylla-ccm) in its place:
+```
+pip3 install https://github.com/scylladb/scylla-ccm/archive/master.zip
+```
+
+The SSL tests use `libssl.so.1.0.0`. Before starting the tests, make sure it is installed on your system
+(`compat-openssl10` on Fedora and `libssl1.0.0` on Ubuntu, `xenial-security` repository source).
-Integration tests use [CCM](https://github.com/pcmanus/ccm) to bootstrap Cassandra instances.
Two Maven properties control its execution:
- `cassandra.version`: the Cassandra version. This has a default value in the root POM,
@@ -99,6 +107,13 @@ Two Maven properties control its execution:
- `ipprefix`: the prefix of the IP addresses that the Cassandra instances will bind to (see
below). This defaults to `127.0.1.`.
+Additionally `-Dscylla.version=X.Y.Z` can be used instead of `cassandra.version` with Scylla CCM to test against Scylla.
+
+Examples:
+- `mvn test -Pshort -Dcassandra.version=3.11.11`
+- `mvn test -Plong -Dcassandra.version=3.11.11`
+- `mvn verify -Plong -Dscylla.version=4.3.6`
+
CCM launches multiple Cassandra instances on localhost by binding to different addresses. The
driver uses up to 10 different instances (127.0.1.1 to 127.0.1.10 with the default prefix).
diff --git a/README-dev.md b/README-dev.md
new file mode 100644
index 00000000000..5da38fd77da
--- /dev/null
+++ b/README-dev.md
@@ -0,0 +1,17 @@
+# Building the docs
+
+The docs build instructions have been tested with Sphinx 4 and Fedora 32.
+
+## Prerequisites
+
+To build and preview the docs locally, you will need to install the following software:
+
+- Git
+- Python 3.7
+- pip
+- Java JDK 8 or above
+- Maven
+
+## Commands
+
+For more information, see [Commands](https://sphinx-theme.scylladb.com/stable/commands.html).
diff --git a/README-dev.rst b/README-dev.rst
deleted file mode 100644
index 98e6f0b573c..00000000000
--- a/README-dev.rst
+++ /dev/null
@@ -1,68 +0,0 @@
-Building the Docs
-=================
-
-*Note*: The docs build instructions have been tested with Sphinx 2.4.4 and Fedora 32.
-
-To build and preview the docs locally, you will need to install the following software:
-
-- `Git `_
-- `Python 3.7 `_
-- `pip `_
-- Java JDK 6 or above
-- Maven
-
-Run the following command to build the docs.
-
-.. code:: console
-
- cd docs
- make preview
-
-Once the command completes processing, open http://127.0.0.1:5500/ with your preferred browser.
-
-Building multiple documentation versions
-========================================
-
-Build Sphinx docs for all the versions defined in ``docs/conf.py``.
-
-.. code:: console
-
- cd docs
- make multiversion
-
-Then, open ``docs/_build/dirhtml//index.html`` with your preferred browser.
-
-**NOTE:** If you only can see docs generated for the master branch, try to run ``git fetch --tags`` to download the latest tags from remote.
-
-Defining supported versions
-===========================
-
-Let's say you want to generate docs for the new version ``scylla-3.x.y``.
-
-1. The file ``.github/workflows`` defines the branch from where all the documentation versions will be build.
-
-.. code:: yaml
-
- on:
- push:
- branches:
- - scylla-3.x
-
-In our case, this branch currently is``scylla-3.x``.
-In practice, this means that the file ``docs/source/conf.py`` of ```scylla-3.x`` defines which documentation versions are supported.
-
-2. In the file ``docs/source/conf.py`` (``scylla-3.x`` branch), list the new target version support inside the ``BRANCHES`` array.
-For example, listing ``scylla-3.x.y`` should look like in your code:
-
-.. code:: python
-
- BRANCHES = ['scylla-3.x.y']
- smv_branch_whitelist = multiversion_regex_builder(BRANCHES)
-
-3. (optional) If the new version is the latest stable version, update as well the variable ``smv_latest_version`` in ``docs/source/conf.py``.
-
-.. code:: python
-
- smv_latest_version = 'scylla-3.x.y'
-
-4. Commit & push the changes to the ``scylla-3.x`` branch.
diff --git a/README.md b/README.md
index 001d420a60f..5f17898f6c5 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ not yet have been released. You can find the documentation for the latest
version through the [Java driver
docs](https://docs.scylladb.com/using-scylla/scylla-java-driver/) or via the release tags,
[e.g.
-3.10.2.0](https://github.com/scylladb/java-driver/releases/tag/3.10.2.0).*
+3.11.2.0](https://github.com/scylladb/java-driver/releases/tag/3.11.2.0).*
A modern, [feature-rich](manual/) and highly tunable Java client
library for Apache Cassandra (2.1+) and using exclusively Cassandra's binary protocol
@@ -62,7 +62,7 @@ it in your application using the following Maven dependency
com.scylladbscylla-driver-core
- 3.10.2.0
+ 3.11.2.0
```
@@ -72,7 +72,7 @@ Note that the object mapper is published as a separate artifact:
com.scylladbscylla-driver-mapping
- 3.10.2.0
+ 3.11.2.0
```
@@ -82,7 +82,7 @@ The 'extras' module is also published as a separate artifact:
com.scylladbscylla-driver-extras
- 3.10.2.0
+ 3.11.2.0
```
@@ -92,8 +92,8 @@ to avoid the explicit dependency to Netty.
## Compatibility
-The Java client driver 3.10.2.0 ([branch 3.x](https://github.com/scylladb/java-driver/tree/3.x)) is compatible with Apache
-Cassandra 2.1, 2.2 and 3.0+.
+The Java client driver 3.11.2.0 ([branch 3.x](https://github.com/scylladb/java-driver/tree/3.x)) is compatible with
+Scylla and Apache Cassandra 2.1, 2.2, 3.0+.
UDT and tuple support is available only when using Apache Cassandra 2.1 or higher.
diff --git a/changelog/README.md b/changelog/README.md
index 1ba799ce701..971460f7010 100644
--- a/changelog/README.md
+++ b/changelog/README.md
@@ -5,6 +5,24 @@
3.x versions get published.
-->
+### 3.11.2
+- [improvement] JAVA-3008: Upgrade Netty to 4.1.75, 3.x edition
+- [improvement] JAVA-2984: Upgrade Jackson to resolve high-priority CVEs
+
+
+### 3.11.1
+- [bug] JAVA-2967: Support native transport peer information for DSE 6.8.
+- [bug] JAVA-2976: Support missing protocol v5 error codes CAS_WRITE_UNKNOWN, CDC_WRITE_FAILURE.
+
+
+### 3.11.0
+
+- [improvement] JAVA-2705: Remove protocol v5 beta status, add v6-beta.
+- [bug] JAVA-2923: Detect and use Guava's new HostAndPort.getHost method.
+- [bug] JAVA-2922: Switch to modern framing format inside a channel handler.
+- [bug] JAVA-2924: Consider protocol version unsupported when server requires USE_BETA flag for it.
+
+
### 3.10.2
- [bug] JAVA-2860: Avoid NPE if channel initialization crashes.
@@ -16,8 +34,9 @@
### 3.10.0
-- [improvement] JAVA-2676: Don't reschedule flusher after empty runs
-- [new feature] JAVA-2772: Support new protocol v5 message format
+- [improvement] JAVA-2676: Don't reschedule flusher after empty runs.
+- [new feature] JAVA-2772: Support new protocol v5 message format.
+
### 3.9.0
diff --git a/ci/appveyor.ps1 b/ci/appveyor.ps1
deleted file mode 100644
index bc1d95b69f7..00000000000
--- a/ci/appveyor.ps1
+++ /dev/null
@@ -1,132 +0,0 @@
-Add-Type -AssemblyName System.IO.Compression.FileSystem
-
-$dep_dir="C:\Users\appveyor\deps"
-If (!(Test-Path $dep_dir)) {
- Write-Host "Creating $($dep_dir)"
- New-Item -Path $dep_dir -ItemType Directory -Force
-}
-
-$apr_platform = "Win32"
-$openssl_platform = "Win32"
-$vc_platform = "x86"
-$env:PYTHON="C:\Python27"
-$env:OPENSSL_PATH="C:\OpenSSL-Win32"
-If ($env:PLATFORM -eq "X64") {
- $apr_platform = "x64"
- $vc_platform = "x64"
- $env:PYTHON="C:\Python27-x64"
- $env:OPENSSL_PATH="C:\OpenSSL-Win64"
-}
-
-$env:JAVA_HOME="C:\Program Files\Java\jdk$($env:java_version)"
-# The configured java version to test with.
-$env:JAVA_PLATFORM_HOME="$($env:JAVA_HOME)"
-$env:JAVA_8_HOME="C:\Program Files\Java\jdk1.8.0"
-$env:PATH="$($env:PYTHON);$($env:PYTHON)\Scripts;$($env:JAVA_HOME)\bin;$($env:OPENSSL_PATH)\bin;$($env:PATH)"
-$env:CCM_PATH="$($dep_dir)\ccm"
-
-$apr_dist_path = "$($dep_dir)\apr"
-# Build APR if it hasn't been previously built.
-If (!(Test-Path $apr_dist_path)) {
- Write-Host "Cloning APR"
- $apr_path = "C:\Users\appveyor\apr"
- Start-Process git -ArgumentList "clone --branch=1.5.2 --depth=1 https://github.com/apache/apr.git $($apr_path)" -Wait -nnw
- Write-Host "Setting Visual Studio Environment to VS 2015"
- Push-Location "$($env:VS140COMNTOOLS)\..\..\VC"
- cmd /c "vcvarsall.bat $vc_platform & set" |
- foreach {
- if ($_ -match "=") {
- $v = $_.split("="); Set-Item -force -path "ENV:\$($v[0])" -value "$($v[1])"
- }
- }
- Pop-Location
- Write-Host "Building APR (an error may be printed, but it will still build)"
- Push-Location $($apr_path)
- cmd /c nmake -f Makefile.win ARCH="$apr_platform Release" PREFIX=$($apr_dist_path) buildall install
- Pop-Location
- Write-Host "Done Building APR"
-}
-$env:PATH="$($apr_dist_path)\bin;$($env:PATH)"
-
-# Install Ant and Maven
-$ant_base = "$($dep_dir)\ant"
-$ant_path = "$($ant_base)\apache-ant-1.9.7"
-If (!(Test-Path $ant_path)) {
- Write-Host "Installing Ant"
- $ant_url = "https://www.dropbox.com/s/lgx95x1jr6s787l/apache-ant-1.9.7-bin.zip?dl=1"
- $ant_zip = "C:\Users\appveyor\apache-ant-1.9.7-bin.zip"
- (new-object System.Net.WebClient).DownloadFile($ant_url, $ant_zip)
- [System.IO.Compression.ZipFile]::ExtractToDirectory($ant_zip, $ant_base)
-}
-$env:PATH="$($ant_path)\bin;$($env:PATH)"
-
-$maven_base = "$($dep_dir)\maven"
-$maven_path = "$($maven_base)\apache-maven-3.2.5"
-If (!(Test-Path $maven_path)) {
- Write-Host "Installing Maven"
- $maven_url = "https://www.dropbox.com/s/fh9kffmexprsmha/apache-maven-3.2.5-bin.zip?dl=1"
- $maven_zip = "C:\Users\appveyor\apache-maven-3.2.5-bin.zip"
- (new-object System.Net.WebClient).DownloadFile($maven_url, $maven_zip)
- [System.IO.Compression.ZipFile]::ExtractToDirectory($maven_zip, $maven_base)
-}
-$env:M2_HOME="$($maven_path)"
-$env:PATH="$($maven_path)\bin;$($env:PATH)"
-
-$jdks = @("1.6.0", "1.7.0", "1.8.0")
-foreach ($jdk in $jdks) {
- $java_dir = "C:\Program Files\Java\jdk$jdk"
- $jce_target = "$java_dir\jre\lib\security"
- $jce_indicator = "$jce_target\README.txt"
- # Install Java Cryptographic Extensions, needed for SSL.
- # If this file doesn't exist we know JCE hasn't been installed.
- If (!(Test-Path $jce_indicator)) {
- Write-Host "Installing JCE for $jdk"
- $zip = "$dep_dir\jce_policy-$jdk.zip"
- $url = "https://www.dropbox.com/s/po4308hlwulpvep/UnlimitedJCEPolicyJDK7.zip?dl=1"
- $extract_folder = "UnlimitedJCEPolicy"
- If ($jdk -eq "1.8.0") {
- $url = "https://www.dropbox.com/s/al1e6e92cjdv7m7/jce_policy-8.zip?dl=1"
- $extract_folder = "UnlimitedJCEPolicyJDK8"
- }
- ElseIf ($jdk -eq "1.6.0") {
- $url = "https://www.dropbox.com/s/dhrtucxcif4n11k/jce_policy-6.zip?dl=1"
- $extract_folder = "jce"
- }
- # Download zip to staging area if it doesn't exist, we do this because
- # we extract it to the directory based on the platform and we want to cache
- # this file so it can apply to all platforms.
- if(!(Test-Path $zip)) {
- (new-object System.Net.WebClient).DownloadFile($url, $zip)
- }
-
- [System.IO.Compression.ZipFile]::ExtractToDirectory($zip, $jce_target)
-
- $jcePolicyDir = "$jce_target\$extract_folder"
- Move-Item $jcePolicyDir\* $jce_target\ -force
- Remove-Item $jcePolicyDir
- }
-}
-
-# Install Python Dependencies for CCM.
-Write-Host "Installing Python Dependencies for CCM"
-Start-Process python -ArgumentList "-m pip install psutil pyYaml six" -Wait -nnw
-
-# Clone ccm from git and use master.
-If (!(Test-Path $env:CCM_PATH)) {
- Write-Host "Cloning CCM"
- Start-Process git -ArgumentList "clone https://github.com/pcmanus/ccm.git $($env:CCM_PATH)" -Wait -nnw
-}
-
-# Copy ccm -> ccm.py so windows knows to run it.
-If (!(Test-Path $env:CCM_PATH\ccm.py)) {
- Copy-Item "$env:CCM_PATH\ccm" "$env:CCM_PATH\ccm.py"
-}
-$env:PYTHONPATH="$($env:CCM_PATH);$($env:PYTHONPATH)"
-$env:PATH="$($env:CCM_PATH);$($env:PATH)"
-
-# Predownload cassandra version for CCM if it isn't already downloaded.
-If (!(Test-Path C:\Users\appveyor\.ccm\repository\$env:cassandra_version)) {
- Write-Host "Preinstalling C* $($env:cassandra_version)"
- Start-Process python -ArgumentList "$($env:CCM_PATH)\ccm.py create -v $($env:cassandra_version) -n 1 predownload" -Wait -nnw
- Start-Process python -ArgumentList "$($env:CCM_PATH)\ccm.py remove predownload" -Wait -nnw
-}
diff --git a/ci/appveyor.yml b/ci/appveyor.yml
deleted file mode 100644
index 81dd5b01958..00000000000
--- a/ci/appveyor.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-environment:
- test_profile: default
- matrix:
- - java_version: 1.6.0
- - java_version: 1.7.0
- - java_version: 1.8.0
- test_profile: short
-platform: x64
-install:
- - ps: .\ci\appveyor.ps1
-build_script:
- - "set \"JAVA_HOME=%JAVA_8_HOME%\" && mvn install -DskipTests=true -B -V"
-test_script:
- - "set \"JAVA_HOME=%JAVA_PLATFORM_HOME%\" && mvn -B -D\"ccm.java.home\"=\"%JAVA_8_HOME%\" -D\"ccm.maxNumberOfNodes\"=1 -D\"cassandra.version\"=%cassandra_version% verify -P %test_profile%"
-on_finish:
- - ps: .\ci\uploadtests.ps1
-cache:
- - C:\Users\appveyor\.m2
- - C:\Users\appveyor\.ccm\repository
- - C:\Users\appveyor\deps -> .\ci\appveyor.ps1
diff --git a/ci/requirements.txt b/ci/requirements.txt
new file mode 100644
index 00000000000..663bd1f6a2a
--- /dev/null
+++ b/ci/requirements.txt
@@ -0,0 +1 @@
+requests
\ No newline at end of file
diff --git a/ci/uploadtests.ps1 b/ci/uploadtests.ps1
deleted file mode 100644
index cf88b16229c..00000000000
--- a/ci/uploadtests.ps1
+++ /dev/null
@@ -1,17 +0,0 @@
-$testResults=Get-ChildItem TEST-TestSuite.xml -Recurse
-
-Write-Host "Uploading test results."
-
-$url = "https://ci.appveyor.com/api/testresults/junit/$($env:APPVEYOR_JOB_ID)"
-$wc = New-Object 'System.Net.WebClient'
-
-foreach ($testResult in $testResults) {
- try {
- Write-Host -ForegroundColor Green "Uploading $testResult -> $url."
- $wc.UploadFile($url, $testResult)
- } catch [Net.WebException] {
- Write-Host -ForegroundColor Red "Failed Uploading $testResult -> $url. $_"
- }
-}
-
-Write-Host "Done uploading test results."
diff --git a/ci/version_fetch.py b/ci/version_fetch.py
new file mode 100644
index 00000000000..04a690baf26
--- /dev/null
+++ b/ci/version_fetch.py
@@ -0,0 +1,208 @@
+#!/usr/bin/python3
+
+"""
+This Python script allows you to list the
+latest version numbers of Scylla and Cassandra.
+
+You can specify whether you want the
+versions of Scylla OSS or Scylla Enterprise,
+either N latest stable X.Y.latest or
+all non-obsolete RCs. You can also fetch
+the latest version of Cassandra 3.
+
+How are those versions fetched? We use Docker Hub
+tags API.
+"""
+
+import requests
+import re
+import json
+from itertools import groupby, islice
+import sys
+
+DOCKER_HUB_TAGS_ENDPOINT = 'https://registry.hub.docker.com/v1/repositories/%s/tags'
+DOCKER_HUB_SCYLLA_ORG = 'scylladb/'
+
+SCYLLA_OSS = DOCKER_HUB_SCYLLA_ORG + 'scylla'
+SCYLLA_OSS_RELEASED_VERSION_REGEX = re.compile(r'(\d+)\.(\d+)\.(\d+)')
+SCYLLA_OSS_RC_VERSION_REGEX = re.compile(r'(\d+)\.(\d+)\.rc(\d+)')
+
+SCYLLA_ENTERPRISE = DOCKER_HUB_SCYLLA_ORG + 'scylla-enterprise'
+SCYLLA_ENTERPRISE_RELEASED_VERSION_REGEX = re.compile(r'(\d{4})\.(\d+)\.(\d+)')
+SCYLLA_ENTERPRISE_RC_VERSION_REGEX = re.compile(r'(\d{4})\.(\d+)\.rc(\d+)')
+
+CASSANDRA_ENDPOINT = 'https://dlcdn.apache.org/cassandra/'
+
+CASSANDRA3_REGEX = re.compile(r'a href="(3)\.(\d+)\.(\d+)/"')
+
+COMMAND_LINE_ARGUMENT = re.compile(
+ r'((?:(scylla-oss-stable):(\d+))|(?:(scylla-enterprise-stable):(\d+))|(?:(cassandra3-stable):(\d+))|(?:(scylla-oss-rc))|(?:(scylla-enterprise-rc)))')
+
+
+def fetch_last_scylla_oss_minor_versions(count):
+ # Download Docker tags for repository
+ tags_data = requests.get(DOCKER_HUB_TAGS_ENDPOINT % (SCYLLA_OSS)).json()
+ tags_data = map(lambda e: e['name'], tags_data)
+
+ # Parse only those tags which match 'NUM.NUM.NUM'
+ # into tuple (NUM, NUM, NUM)
+ tags_data = filter(SCYLLA_OSS_RELEASED_VERSION_REGEX.fullmatch, tags_data)
+ tags_data = map(lambda e: SCYLLA_OSS_RELEASED_VERSION_REGEX.match(
+ e).groups(), tags_data)
+ tags_data = map(lambda e: tuple(map(int, e)), tags_data)
+
+ # Group by (major, minor) and select latest patch version
+ tags_data = sorted(tags_data)
+ tags_data = groupby(tags_data, key=lambda e: (e[0], e[1]))
+ tags_data = ((e[0][0], e[0][1], max(e[1])[2])
+ for e in tags_data)
+
+ # Return the latest ones
+ tags_data = list(tags_data)[-count:]
+ tags_data = [f'{e[0]}.{e[1]}.{e[2]}' for e in tags_data]
+ return tags_data
+
+
+def fetch_all_scylla_oss_rc_versions():
+ # Download Docker tags for repository
+ tags_data = requests.get(DOCKER_HUB_TAGS_ENDPOINT % (SCYLLA_OSS)).json()
+ tags_data = list(map(lambda e: e['name'], tags_data))
+
+ # Parse only those tags which match 'NUM.NUM.rcNUM'
+ # into tuple (NUM, NUM, NUM)
+ rc_tags_data = filter(SCYLLA_OSS_RC_VERSION_REGEX.fullmatch, tags_data)
+ rc_tags_data = map(lambda e: SCYLLA_OSS_RC_VERSION_REGEX.match(
+ e).groups(), rc_tags_data)
+ rc_tags_data = map(lambda e: tuple(map(int, e)), rc_tags_data)
+
+ # Parse only those tags which match 'NUM.NUM.NUM'
+ # into tuple (NUM, NUM)
+ stable_tags_data = filter(
+ SCYLLA_OSS_RELEASED_VERSION_REGEX.fullmatch, tags_data)
+ stable_tags_data = map(lambda e: SCYLLA_OSS_RELEASED_VERSION_REGEX.match(
+ e).groups(), stable_tags_data)
+ stable_tags_data = map(lambda e: tuple(map(int, e[0:2])), stable_tags_data)
+ stable_tags_data = set(stable_tags_data)
+
+ # Group by (major, minor) and select latest RC version
+ rc_tags_data = sorted(rc_tags_data)
+ rc_tags_data = groupby(rc_tags_data, key=lambda e: (e[0], e[1]))
+ rc_tags_data = ((e[0][0], e[0][1], max(e[1])[2])
+ for e in rc_tags_data)
+
+ # Filter out those RCs that are obsoleted by released stable version
+ rc_tags_data = filter(lambda e: (
+ e[0], e[1]) not in stable_tags_data, rc_tags_data)
+ rc_tags_data = [f'{e[0]}.{e[1]}.rc{e[2]}' for e in rc_tags_data]
+ return rc_tags_data
+
+
+def fetch_last_scylla_enterprise_minor_versions(count):
+ # Download Docker tags for repository
+ tags_data = requests.get(DOCKER_HUB_TAGS_ENDPOINT %
+ (SCYLLA_ENTERPRISE)).json()
+ tags_data = map(lambda e: e['name'], tags_data)
+
+ # Parse only those tags which match 'YEAR.NUM.NUM'
+ # into tuple (YEAR, NUM, NUM)
+ tags_data = filter(
+ SCYLLA_ENTERPRISE_RELEASED_VERSION_REGEX.fullmatch, tags_data)
+ tags_data = map(lambda e: SCYLLA_ENTERPRISE_RELEASED_VERSION_REGEX.match(
+ e).groups(), tags_data)
+ tags_data = map(lambda e: tuple(map(int, e)), tags_data)
+
+ # Group by (major, minor) and select latest patch version
+ tags_data = sorted(tags_data)
+ tags_data = groupby(tags_data, key=lambda e: (e[0], e[1]))
+ tags_data = ((e[0][0], e[0][1], max(e[1])[2])
+ for e in tags_data)
+
+ # Return the latest ones
+ tags_data = list(tags_data)[-count:]
+ tags_data = [f'{e[0]}.{e[1]}.{e[2]}' for e in tags_data]
+ return tags_data
+
+
+def fetch_all_scylla_enterprise_rc_versions():
+ # Download Docker tags for repository
+ tags_data = requests.get(DOCKER_HUB_TAGS_ENDPOINT %
+ (SCYLLA_ENTERPRISE)).json()
+ tags_data = list(map(lambda e: e['name'], tags_data))
+
+ # Parse only those tags which match 'YEAR.NUM.rcNUM'
+ # into tuple (YEAR, NUM, NUM)
+ rc_tags_data = filter(
+ SCYLLA_ENTERPRISE_RC_VERSION_REGEX.fullmatch, tags_data)
+ rc_tags_data = map(lambda e: SCYLLA_ENTERPRISE_RC_VERSION_REGEX.match(
+ e).groups(), rc_tags_data)
+ rc_tags_data = map(lambda e: tuple(map(int, e)), rc_tags_data)
+
+ # Parse only those tags which match 'YEAR.NUM.NUM'
+ # into tuple (YEAR, NUM)
+ stable_tags_data = filter(
+ SCYLLA_ENTERPRISE_RELEASED_VERSION_REGEX.fullmatch, tags_data)
+ stable_tags_data = map(lambda e: SCYLLA_ENTERPRISE_RELEASED_VERSION_REGEX.match(
+ e).groups(), stable_tags_data)
+ stable_tags_data = map(lambda e: tuple(map(int, e[0:2])), stable_tags_data)
+
+ # Group by (major, minor) and select latest RC version
+ rc_tags_data = sorted(rc_tags_data)
+ rc_tags_data = groupby(rc_tags_data, key=lambda e: (e[0], e[1]))
+ rc_tags_data = ((e[0][0], e[0][1], max(e[1])[2])
+ for e in rc_tags_data)
+
+ # Filter out those RCs that are obsoleted by released stable version
+ rc_tags_data = filter(lambda e: (
+ e[0], e[1]) not in stable_tags_data, rc_tags_data)
+ rc_tags_data = [f'{e[0]}.{e[1]}.rc{e[2]}' for e in rc_tags_data]
+ return rc_tags_data
+
+
+def fetch_last_cassandra3_minor_versions(count):
+ # Download folder listing for Cassandra download site
+ data = requests.get(CASSANDRA_ENDPOINT).text
+
+ # Parse only those version numbers which match '3.NUM.NUM'
+ # into tuple (3, NUM, NUM)
+ data = CASSANDRA3_REGEX.finditer(data)
+ data = map(lambda e: e.groups(), data)
+ data = map(lambda e: tuple(map(int, e)), data)
+
+ # Group by (major, minor) and select latest patch version
+ data = sorted(data)
+ data = groupby(data, key=lambda e: (e[0], e[1]))
+ data = ((e[0][0], e[0][1], max(e[1])[2])
+ for e in data)
+
+ # Return the latest ones
+ data = list(data)[-count:]
+ data = [f'{e[0]}.{e[1]}.{e[2]}' for e in data]
+ return data
+
+
+if __name__ == '__main__':
+ names = set()
+
+ for arg in sys.argv[1:]:
+ if not COMMAND_LINE_ARGUMENT.fullmatch(arg):
+ print("Usage:", sys.argv[0], "[scylla-oss-stable:COUNT] [scylla-oss-rc] [scylla-enterprise-stable:COUNT] [scylla-enterprise-rc] [cassandra3-stable:COUNT]...", file=sys.stderr)
+ sys.exit(1)
+
+ groups = COMMAND_LINE_ARGUMENT.match(arg).groups()
+ groups = [g for g in groups if g][1:]
+
+ mode_name = groups[0]
+ if mode_name == 'scylla-oss-stable':
+ names.update(fetch_last_scylla_oss_minor_versions(int(groups[1])))
+ elif mode_name == 'scylla-enterprise-stable':
+ names.update(
+ fetch_last_scylla_enterprise_minor_versions(int(groups[1])))
+ elif mode_name == 'cassandra3-stable':
+ names.update(
+ fetch_last_cassandra3_minor_versions(int(groups[1])))
+ elif mode_name == 'scylla-oss-rc':
+ names.update(fetch_all_scylla_oss_rc_versions())
+ elif mode_name == 'scylla-enterprise-rc':
+ names.update(fetch_all_scylla_enterprise_rc_versions())
+
+ print(json.dumps(list(names)))
diff --git a/docs.yaml b/docs.yaml
deleted file mode 100644
index b38b86397d7..00000000000
--- a/docs.yaml
+++ /dev/null
@@ -1,67 +0,0 @@
-title: Java Driver for Apache Cassandra
-summary: High performance Java client for Apache Cassandra
-homepage: http://datastax.github.io/java-driver/
-theme: datastax
-sections:
- - title: Manual
- prefix: /manual
- sources:
- - type: markdown
- files: 'manual/**/*.md'
- # The 'manual' section was called 'features' in older releases. Leave both
- # definitions and Documentor will pick up whichever exists and ignore the
- # other.
- - title: Features
- prefix: /features
- sources:
- - type: markdown
- files: 'features/**/*.md'
- - title: Changelog
- prefix: /changelog
- sources:
- - type: markdown
- files: 'changelog/**/*.md'
- - title: Upgrading
- prefix: /upgrade_guide
- sources:
- - type: markdown
- files: 'upgrade_guide/**/*.md'
- - title: FAQ
- prefix: /faq
- sources:
- - type: markdown
- files: 'faq/**/*.md'
-links:
- - title: Code
- href: https://github.com/datastax/java-driver/
- - title: Docs
- href: http://docs.datastax.com/en/developer/java-driver/
- - title: Issues
- href: https://datastax-oss.atlassian.net/browse/JAVA/
- - title: Mailing List
- href: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user
- - title: Releases
- href: http://downloads.datastax.com/java-driver/
-api_docs:
- 3.3: http://docs.datastax.com/en/drivers/java/3.3
- 4.0-alpha: http://docs.datastax.com/en/drivers/java/4.0
- 3.2: http://docs.datastax.com/en/drivers/java/3.2
- 3.1: http://docs.datastax.com/en/drivers/java/3.1
- 3.0: http://docs.datastax.com/en/drivers/java/3.0
- 2.1: http://docs.datastax.com/en/drivers/java/2.1
- 2.0: http://docs.datastax.com/en/drivers/java/2.0
-versions:
- - name: '3.3'
- ref: '3.3.0'
- - name: '4.0-alpha'
- ref: '9f0edeb'
- - name: '3.2'
- ref: '3.2_docfixes'
- - name: '3.1'
- ref: '3.1_docfixes'
- - name: '3.0'
- ref: '3.0_docfixes'
- - name: '2.1'
- ref: '2.1.10.3'
- - name: '2.0'
- ref: '2.0.12.3'
diff --git a/docs/Makefile b/docs/Makefile
index 79bbe97a0dc..9b2c899ab50 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -1,81 +1,111 @@
# You can set these variables from the command line.
-POETRY = $(HOME)/.poetry/bin/poetry
+POETRY = poetry
SPHINXOPTS =
SPHINXBUILD = $(POETRY) run sphinx-build
PAPER =
BUILDDIR = _build
-SOURCE_DIR = _source
+SOURCEDIR = _source
-# Internal variables.
+# Internal variables
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SOURCE_DIR)
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SOURCEDIR)
+TESTSPHINXOPTS = $(ALLSPHINXOPTS) -W --keep-going
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SOURCEDIR)
+
+# Windows variables
+ifeq ($(OS),Windows_NT)
+ POETRY = $(APPDATA)\Python\Scripts\poetry
+endif
+
+define javadoc
+ cd .. && ./docs/_utils/javadoc.sh
+endef
.PHONY: all
all: dirhtml
+# Setup commands
+.PHONY: setupenv
+setupenv:
+ pip install -q poetry
+
+.PHONY: setup
+setup:
+ $(POETRY) install
+ $(POETRY) update
+ cp -TLr source $(SOURCEDIR)
+ cd $(SOURCEDIR) && find . -name README.md -execdir mv '{}' index.md ';'
+
+# Clean commands
.PHONY: pristine
pristine: clean
git clean -dfX
-.PHONY: setup
-setup:
- ./_utils/setup.sh
- cp -TLr source $(SOURCE_DIR)
- cd $(SOURCE_DIR) && find . -name README.md -execdir mv '{}' index.md ';'
.PHONY: clean
clean:
rm -rf $(BUILDDIR)/*
- rm -rf $(SOURCE_DIR)/*
-
-.PHONY: preview
-preview: setup
- cd .. && ./docs/_utils/javadoc.sh
- $(POETRY) run sphinx-autobuild -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml --port 5500
+ rm -rf $(SOURCEDIR)/*
+# Generate output commands
.PHONY: dirhtml
dirhtml: setup
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-.PHONY: singlehtml
-singlehtml: setup
- $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+.PHONY: javadoc
+javadoc: setup
+ @$(javadoc)
@echo
- @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+.PHONY: multiversion
+multiversion: setup
+ $(POETRY) run ./_utils/multiversion.sh
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
.PHONY: epub
-epub: setup
+epub: setup
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
.PHONY: epub3
-epub3: setup
+epub3: setup
$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
@echo
@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
.PHONY: dummy
-dummy: setup
+dummy: setup
$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
@echo
@echo "Build finished. Dummy builder generates no files."
-.PHONY: linkcheck
-linkcheck: setup
- $(SPHINXBUILD) -b linkcheck $(SOURCE_DIR) $(BUILDDIR)/linkcheck
+# Preview commands
+.PHONY: preview
+preview: setup
+ $(POETRY) run sphinx-autobuild -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml --port 5500 --re-ignore 'api/*'
-.PHONY: multiversion
-multiversion: setup
- @mkdir -p $(HOME)/.cache/pypoetry/virtualenvs
- $(POETRY) run ./_utils/multiversion.sh
+.PHONY: multiversionpreview
+multiversionpreview: multiversion
+ $(POETRY) run python -m http.server 5500 --directory $(BUILDDIR)/dirhtml
+
+.PHONY: redirects
+redirects: setup
+ $(POETRY) run redirects-cli fromfile --yaml-file _utils/redirects.yaml --output-dir $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-.PHONY: multiversionpreview
-multiversionpreview: multiversion
- $(POETRY) run python3 -m http.server 5500 --directory $(BUILDDIR)/dirhtml
+# Test commands
+.PHONY: test
+test: setup
+ $(SPHINXBUILD) -b dirhtml $(TESTSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+.PHONY: linkcheck
+linkcheck: setup
+ $(SPHINXBUILD) -b linkcheck $(SOURCEDIR) $(BUILDDIR)/linkcheck
\ No newline at end of file
diff --git a/docs/_utils/javadoc.sh b/docs/_utils/javadoc.sh
index 5ec6ab16dc0..5fadf3954d4 100755
--- a/docs/_utils/javadoc.sh
+++ b/docs/_utils/javadoc.sh
@@ -1,7 +1,7 @@
#!/bin/bash
# Install dependencies
-mvn install -DskipTests
+mvn install -DskipTests -T 1C
# Define output folder
OUTPUT_DIR="docs/_build/dirhtml/api"
@@ -11,7 +11,7 @@ if [[ "$SPHINX_MULTIVERSION_OUTPUTDIR" != "" ]]; then
fi
# Generate javadoc
-mvn javadoc:javadoc
+mvn javadoc:javadoc -T 1C
[ -d $OUTPUT_DIR ] && rm -r $OUTPUT_DIR
mkdir -p "$OUTPUT_DIR"
mv -f driver-core/target/site/apidocs/* $OUTPUT_DIR
diff --git a/docs/_utils/multiversion.sh b/docs/_utils/multiversion.sh
index 19270f32932..89895a896c6 100755
--- a/docs/_utils/multiversion.sh
+++ b/docs/_utils/multiversion.sh
@@ -1,5 +1,5 @@
#! /bin/bash
cd .. && sphinx-multiversion docs/source docs/_build/dirhtml \
- --pre-build './docs/_utils/javadoc.sh' \
- --pre-build "find . -mindepth 2 -name README.md -execdir mv '{}' index.md ';'"
+ --pre-build "find . -mindepth 2 -name README.md -execdir mv '{}' index.md ';'" \
+ --post-build './docs/_utils/javadoc.sh'
diff --git a/docs/_utils/redirections.yaml b/docs/_utils/redirections.yaml
deleted file mode 100644
index 0e5f1ff9e07..00000000000
--- a/docs/_utils/redirections.yaml
+++ /dev/null
@@ -1 +0,0 @@
-api: /api/overview-summary.html
diff --git a/docs/_utils/redirects.yaml b/docs/_utils/redirects.yaml
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/docs/_utils/setup.sh b/docs/_utils/setup.sh
deleted file mode 100755
index b8f50243e4f..00000000000
--- a/docs/_utils/setup.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#! /bin/bash
-
-if pwd | egrep -q '\s'; then
- echo "Working directory name contains one or more spaces."
- exit 1
-fi
-
-which python3 || { echo "Failed to find python3. Try installing Python for your operative system: https://www.python.org/downloads/" && exit 1; }
-which poetry || curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/1.1.3/get-poetry.py | python3 - && source ${HOME}/.poetry/env
-poetry install
-poetry update
diff --git a/docs/pyproject.toml b/docs/pyproject.toml
index bb5778d26da..580711c9f9b 100644
--- a/docs/pyproject.toml
+++ b/docs/pyproject.toml
@@ -6,17 +6,15 @@ authors = ["Java Driver Contributors"]
[tool.poetry.dependencies]
python = "^3.7"
-pyyaml = "5.3"
+pyyaml = "6.0"
pygments = "2.2.0"
-recommonmark = "0.5.0"
-sphinx-scylladb-theme = "~0.1.12"
+recommonmark = "0.7.1"
+redirects_cli ="~0.1.2"
+sphinx-scylladb-theme = "~1.3.1"
sphinx-sitemap = "2.1.0"
-sphinx-autobuild = "0.7.1"
-Sphinx = "2.4.4"
-sphinx-multiversion-scylla = "~0.2.6"
-
-[tool.poetry.dev-dependencies]
-pytest = "5.2"
+sphinx-autobuild = "2021.3.14"
+Sphinx = "4.3.2"
+sphinx-multiversion-scylla = "~0.2.12"
[build-system]
requires = ["poetry>=0.12"]
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 2a1ca68daea..056562c84a5 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -1,21 +1,27 @@
# -*- coding: utf-8 -*-
import os
-import sys
from datetime import date
-import yaml
import re
from docutils import nodes
-from sphinx.util import logging
from recommonmark.transform import AutoStructify
from recommonmark.parser import CommonMarkParser, splitext, urlparse
from sphinx_scylladb_theme.utils import multiversion_regex_builder
-logger = logging.getLogger(__name__)
# -- General configuration ------------------------------------------------
-# Add any Sphinx extension'¡' module names here, as strings. They can be
+# Build documentation for the following tags and branches
+TAGS = []
+BRANCHES = ['scylla-3.7.2.x', 'scylla-3.10.2.x', 'scylla-3.11.0.x', 'scylla-3.11.2.x', 'scylla-4.7.2.x', 'scylla-4.10.0.x', 'scylla-4.11.1.x', 'scylla-4.12.0.x', 'scylla-4.13.0.x']
+# Set the latest version.
+LATEST_VERSION = 'scylla-4.13.0.x'
+# Set which versions are not released yet.
+UNSTABLE_VERSIONS = []
+# Set which versions are deprecated
+DEPRECATED_VERSIONS = []
+
+# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
@@ -23,6 +29,7 @@
'sphinx.ext.mathjax',
'sphinx.ext.githubpages',
'sphinx.ext.extlinks',
+ 'sphinx_sitemap',
'sphinx.ext.autosectionlabel',
'sphinx_scylladb_theme',
'sphinx_multiversion',
@@ -77,7 +84,8 @@ def setup(app):
app.add_transform(AutoStructify)
# Replace DataStax links
- replacements = {r'https://docs.datastax.com/en/drivers/java\/(.*?)\/': "https://java-driver.docs.scylladb.com/stable/api/"}
+ current_slug = os.getenv("SPHINX_MULTIVERSION_NAME", "stable")
+ replacements = {r'docs.datastax.com/en/drivers/java\/(.*?)\/': "java-driver.docs.scylladb.com/" + current_slug + "/api/"}
app.add_config_value('replacements', replacements, True)
app.connect('source-read', replace_relative_links)
@@ -89,23 +97,10 @@ def setup(app):
copyright = str(date.today().year) + ', ScyllaDB. All rights reserved.'
author = u'Scylla Project Contributors'
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = u'3.7.1'
-# The full version, including alpha/beta/rc tags.
-release = u'3.7.1'
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-language = None
-
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'lib', 'lib64','**/_common/*', 'README.md', '.git', '.github', '_utils', '_templates', 'rst_include']
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '_utils']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
@@ -113,21 +108,34 @@ def setup(app):
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
-# Custom lexer
-from pygments.lexers.shell import BashLexer
-from sphinx.highlighting import lexers
+# -- Options for not found extension -------------------------------------------
-class DitaaLexer(BashLexer):
- pass
-lexers['ditaa'] = DitaaLexer(startinline=True)
+# Template used to render the 404.html generated by this extension.
+notfound_template = '404.html'
+
+# Prefix added to all the URLs generated in the 404 page.
+notfound_urls_prefix = ''
+
+# -- Options for multiversion extension ----------------------------------
+
+# Whitelist pattern for tags
+smv_tag_whitelist = multiversion_regex_builder(TAGS)
+# Whitelist pattern for branches
+smv_branch_whitelist = multiversion_regex_builder(BRANCHES)
+# Defines which version is considered to be the latest stable version.
+# Must be listed in smv_tag_whitelist or smv_branch_whitelist.
+smv_latest_version = LATEST_VERSION
+smv_rename_latest_version = 'stable'
+# Whitelist pattern for remotes (set to None to use local branches only)
+smv_remote_whitelist = r'^origin$'
+# Pattern for released versions
+smv_released_pattern = r'^tags/.*$'
+# Format for versioned output directories inside the build directory
+smv_outputdir_format = '{ref.name}'
-# Adds version variables for monitoring and manager versions when used in inline text
+# -- Options for sitemap extension ---------------------------------------
-rst_epilog = """
-.. |mon_version| replace:: 3.1
-.. |man_version| replace:: 2.0
-.. |mon_root| replace:: :doc:`Scylla Monitoring Stack `
-"""
+sitemap_url_scheme = 'stable/{link}'
# -- Options for HTML output ----------------------------------------------
@@ -135,33 +143,22 @@ class DitaaLexer(BashLexer):
# a list of builtin themes.
#
html_theme = 'sphinx_scylladb_theme'
-# html_theme_path = ["../.."]
-
-html_style = ''
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
+ 'conf_py_path': 'docs/source/',
'branch_substring_removed': 'scylla-',
- 'header_links': [
- ('Scylla Java Driver', 'https://java-driver.docs.scylladb.com/'),
- ('Scylla Cloud', 'https://docs.scylladb.com/scylla-cloud/'),
- ('Scylla University', 'https://university.scylladb.com/'),
- ('ScyllaDB Home', 'https://www.scylladb.com/')],
+ 'github_repository': 'scylladb/java-driver',
'github_issues_repository': 'scylladb/java-driver',
- 'show_sidebar_index': True,
+ 'hide_edit_this_page_button': 'false',
+ 'versions_unstable': UNSTABLE_VERSIONS,
+ 'versions_deprecated': DEPRECATED_VERSIONS,
'hide_version_dropdown': ['scylla-3.x'],
}
-extlinks = {
- 'manager': ('/operating-scylla/manager/%s/',''),
- 'manager_lst': ('/operating-scylla/manager/2.0/%s/',''),
- 'monitor': ('/operating-scylla/monitoring/%s/',''),
- 'monitor_lst': ('/operating-scylla/monitoring/3.1/%s/','')
-}
-
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
@@ -180,94 +177,3 @@ class DitaaLexer(BashLexer):
# Dictionary of values to pass into the template engine’s context for all pages
html_context = {'html_baseurl': html_baseurl}
-
-# -- Options for not found extension -------------------------------------------
-
-# Template used to render the 404.html generated by this extension.
-notfound_template = '404.html'
-
-# Prefix added to all the URLs generated in the 404 page.
-notfound_urls_prefix = ''
-
-# -- Options for redirect extension ---------------------------------------
-
-# Read a YAML dictionary of redirections and generate an HTML file for each
-redirects_file = "_utils/redirections.yaml"
-
-# -- Options for multiversion extension ----------------------------------
-
-# Whitelist pattern for tags (set to None to ignore all tags)
-TAGS = []
-smv_tag_whitelist = multiversion_regex_builder(TAGS)
-# Whitelist pattern for branches (set to None to ignore all branches)
-BRANCHES = ['scylla-3.x', 'scylla-3.7.2.x', 'scylla-3.10.2.x']
-smv_branch_whitelist = multiversion_regex_builder(BRANCHES)
-# Defines which version is considered to be the latest stable version.
-# Must be listed in smv_tag_whitelist or smv_branch_whitelist.
-smv_latest_version = 'scylla-3.10.2.x'
-smv_rename_latest_version = 'stable'
-# Whitelist pattern for remotes (set to None to use local branches only)
-smv_remote_whitelist = r"^origin$"
-# Pattern for released versions
-smv_released_pattern = r'^tags/.*$'
-# Format for versioned output directories inside the build directory
-smv_outputdir_format = '{ref.name}'
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
- # The paper size ('letterpaper' or 'a4paper').
- #
- # 'papersize': 'letterpaper',
-
- # The font size ('10pt', '11pt' or '12pt').
- #
- # 'pointsize': '10pt',
-
- # Additional stuff for the LaTeX preamble.
- #
- # 'preamble': '',
-
- # Latex figure (float) alignment
- #
- # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- (master_doc, 'ScyllaDocumentation.tex', u'Scylla Documentation Documentation',
- u'Scylla Project Contributors', 'manual'),
-]
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- (master_doc, 'scylladocumentation', u'Scylla Documentation Documentation',
- [author], 1)
-]
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- (master_doc, 'ScyllaDocumentation', u'Scylla Documentation Documentation',
- author, 'ScyllaDocumentation', 'One line description of project.',
- 'Miscellaneous'),
-]
-
-# -- Options for Epub output ----------------------------------------------
-
-# Bibliographic Dublin Core info.
-epub_title = project
-epub_author = author
-epub_publisher = author
-epub_copyright = copyright
-
-# A list of files that should not be packed into the epub file.
-epub_exclude_files = ['search.html']
diff --git a/driver-core/pom.xml b/driver-core/pom.xml
index ed6d9f6fac8..5fe304881b3 100644
--- a/driver-core/pom.xml
+++ b/driver-core/pom.xml
@@ -26,7 +26,7 @@
com.scylladbscylla-driver-parent
- 3.10.2.0-SNAPSHOT
+ 3.11.2.1-SNAPSHOTscylla-driver-core
diff --git a/driver-core/src/main/java/com/datastax/driver/core/BatchStatement.java b/driver-core/src/main/java/com/datastax/driver/core/BatchStatement.java
index e5fc175fe62..e2b7a805483 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/BatchStatement.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/BatchStatement.java
@@ -220,6 +220,7 @@ public int requestSizeInBytes(ProtocolVersion protocolVersion, CodecRegistry cod
case V3:
case V4:
case V5:
+ case V6:
size += CBUtil.sizeOfConsistencyLevel(getConsistencyLevel());
size += QueryFlag.serializedSize(protocolVersion);
// Serial CL and default timestamp also depend on session-level defaults (QueryOptions).
diff --git a/driver-core/src/main/java/com/datastax/driver/core/BoundStatement.java b/driver-core/src/main/java/com/datastax/driver/core/BoundStatement.java
index dad4ada8cb4..9317bd0a58b 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/BoundStatement.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/BoundStatement.java
@@ -337,6 +337,7 @@ public int requestSizeInBytes(ProtocolVersion protocolVersion, CodecRegistry cod
case V3:
case V4:
case V5:
+ case V6:
size += CBUtil.sizeOfConsistencyLevel(getConsistencyLevel());
size += QueryFlag.serializedSize(protocolVersion);
if (wrapper.values.length > 0) {
diff --git a/driver-core/src/main/java/com/datastax/driver/core/CloudConfigFactory.java b/driver-core/src/main/java/com/datastax/driver/core/CloudConfigFactory.java
index 98faf16c956..f11b1fcecf4 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/CloudConfigFactory.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/CloudConfigFactory.java
@@ -228,8 +228,8 @@ protected InetSocketAddress getSniProxyAddress(JsonNode proxyMetadata) {
throw new IllegalStateException(
"Invalid proxy metadata: missing port from field sni_proxy_address");
}
- return InetSocketAddress.createUnresolved(
- sniProxyHostAndPort.getHostText(), sniProxyHostAndPort.getPort());
+ String host = GuavaCompatibility.INSTANCE.getHost(sniProxyHostAndPort);
+ return InetSocketAddress.createUnresolved(host, sniProxyHostAndPort.getPort());
} else {
throw new IllegalStateException("Invalid proxy metadata: missing field sni_proxy_address");
}
diff --git a/driver-core/src/main/java/com/datastax/driver/core/Cluster.java b/driver-core/src/main/java/com/datastax/driver/core/Cluster.java
index 6c6b2c00552..cdac5cff4d0 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/Cluster.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/Cluster.java
@@ -133,6 +133,10 @@ public class Cluster implements Closeable {
static final int NEW_NODE_DELAY_SECONDS =
SystemProperties.getInt("com.datastax.driver.NEW_NODE_DELAY_SECONDS", 1);
+ // Used in integration tests to force the driver to negotiate the protocol
+ // version even if it was explicitly set.
+ @VisibleForTesting static boolean shouldAlwaysNegotiateProtocolVersion = false;
+
// Some per-JVM number that allows to generate unique cluster names when
// multiple Cluster instance are created in the same JVM.
private static final AtomicInteger CLUSTER_ID = new AtomicInteger(0);
@@ -748,6 +752,10 @@ public static class Builder implements Initializer {
private boolean allowBetaProtocolVersion = false;
private boolean noCompact = false;
private boolean isCloud = false;
+ private boolean useAdvancedShardAwareness = true;
+ private boolean schemaQueriesPaged = true;
+ private int localPortLow = ProtocolOptions.DEFAULT_LOCAL_PORT_LOW;
+ private int localPortHigh = ProtocolOptions.DEFAULT_LOCAL_PORT_HIGH;
private Collection listeners;
@@ -863,11 +871,11 @@ public Builder withMaxSchemaAgreementWaitSeconds(int maxSchemaAgreementWaitSecon
*
*
By default, the driver will "auto-detect" which protocol version it can use when
* connecting to the first node. More precisely, it will try first with {@link
- * ProtocolVersion#NEWEST_SUPPORTED}, and if not supported fallback to the highest version
- * supported by the first node it connects to. Please note that once the version is
- * "auto-detected", it won't change: if the first node the driver connects to is a Cassandra 1.2
- * node and auto-detection is used (the default), then the native protocol version 1 will be use
- * for the lifetime of the Cluster instance.
+ * ProtocolVersion#DEFAULT}, and if not supported fall back to the highest version supported by
+ * the first node it connects to. Please note that once the version is "auto-detected", it won't
+ * change: if the first node the driver connects to is a Cassandra 1.2 node and auto-detection
+ * is used (the default), then the native protocol version 1 will be used for the lifetime of the
+ * Cluster instance.
*
*
By using {@link Builder#allowBetaProtocolVersion()}, it is possible to force driver to
* connect to Cassandra node that supports the latest protocol beta version. Leaving this flag
@@ -1473,6 +1481,57 @@ public Builder withCloudSecureConnectBundle(InputStream cloudConfigInputStream)
return addCloudConfigToBuilder(cloudConfig);
}
+ /**
+ * Disables advanced shard awareness. By default, this driver chooses local port while making a
+ * connection to node, to signal which shard it wants to connect to. This allows driver to
+ * establish connection pool faster, especially when there are multiple clients connecting
+ * concurrently. If this causes any issues, you can disable it using this method. The most
+ * common issues are the NAT between client and node (which messes up client port numbers) and
+ * shard aware port (default: 19042) blocked by firewall.
+ *
+ * @return this builder.
+ */
+ public Builder withoutAdvancedShardAwareness() {
+ this.useAdvancedShardAwareness = false;
+ return this;
+ }
+
+ /**
+ * Disables paging in schema queries. By default, queries that fetch schema from the cluster are
+ * paged. This option causes the least impact on the cluster latencies when a new client
+ * connects. Turning off paging may result in faster driver initialisation at the expense of
+ * higher cluster latencies.
+ *
+ * @return this builder.
+ */
+ public Builder withoutPagingInSchemaQueries() {
+ this.schemaQueriesPaged = false;
+ return this;
+ }
+
+ /**
+ * Sets local port range for use by advanced shard awareness. Driver will use ports from this
+ * range as local ports when connecting to cluster. If {@link #withoutAdvancedShardAwareness()}
+ * was called, then setting this range does not affect anything.
+ *
+ * @param low Lower bound of range, inclusive.
+ * @param high Upper bound of range, inclusive.
+ * @return this builder.
+ */
+ public Builder withLocalPortRange(int low, int high) {
+ if (low < 1 || 65535 < low || high < 1 || 65535 < high) {
+ throw new IllegalArgumentException("Port numbers must be between 1 and 65535");
+ }
+
+ if (high - low < 1000) {
+ throw new IllegalArgumentException("Port range should be sufficiently large");
+ }
+
+ this.localPortLow = low;
+ this.localPortHigh = high;
+ return this;
+ }
+
private Builder addCloudConfigToBuilder(CloudConfig cloudConfig) {
Builder builder =
withEndPointFactory(new SniEndPointFactory(cloudConfig.getProxyAddress()))
@@ -1519,15 +1578,26 @@ public Configuration getConfiguration() {
maxSchemaAgreementWaitSeconds,
sslOptions,
authProvider,
- noCompact)
+ noCompact,
+ useAdvancedShardAwareness,
+ localPortLow,
+ localPortHigh)
.setCompression(compression);
MetricsOptions metricsOptions = new MetricsOptions(metricsEnabled, jmxEnabled);
+ QueryOptions queryOptions = configurationBuilder.getQueryOptions();
+ if (queryOptions == null) {
+ queryOptions = new QueryOptions();
+ }
+
+ queryOptions.setSchemaQueriesPaged(schemaQueriesPaged);
+
return configurationBuilder
.withProtocolOptions(protocolOptions)
.withMetricsOptions(metricsOptions)
.withPolicies(policiesBuilder.build())
+ .withQueryOptions(queryOptions)
.build();
}
@@ -1843,7 +1913,9 @@ long delayMs() {
}
private void negotiateProtocolVersionAndConnect() {
- boolean shouldNegotiate = (configuration.getProtocolOptions().initialProtocolVersion == null);
+ boolean shouldNegotiate =
+ (configuration.getProtocolOptions().initialProtocolVersion == null
+ || shouldAlwaysNegotiateProtocolVersion);
while (true) {
try {
controlConnection.connect();
diff --git a/driver-core/src/main/java/com/datastax/driver/core/CodecUtils.java b/driver-core/src/main/java/com/datastax/driver/core/CodecUtils.java
index 94295a0bb14..afaa7176bca 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/CodecUtils.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/CodecUtils.java
@@ -64,6 +64,7 @@ public static int readSize(ByteBuffer input, ProtocolVersion version) {
case V3:
case V4:
case V5:
+ case V6:
return input.getInt();
default:
throw version.unsupported();
@@ -92,6 +93,7 @@ public static void writeSize(ByteBuffer output, int size, ProtocolVersion versio
case V3:
case V4:
case V5:
+ case V6:
output.putInt(size);
break;
default:
@@ -131,6 +133,7 @@ public static void writeValue(ByteBuffer output, ByteBuffer value, ProtocolVersi
case V3:
case V4:
case V5:
+ case V6:
if (value == null) {
output.putInt(-1);
} else {
@@ -217,6 +220,7 @@ private static int sizeOfCollectionSize(ProtocolVersion version) {
case V3:
case V4:
case V5:
+ case V6:
return 4;
default:
throw version.unsupported();
@@ -237,6 +241,7 @@ private static int sizeOfValue(ByteBuffer value, ProtocolVersion version) {
case V3:
case V4:
case V5:
+ case V6:
return value == null ? 4 : 4 + value.remaining();
default:
throw version.unsupported();
diff --git a/driver-core/src/main/java/com/datastax/driver/core/Configuration.java b/driver-core/src/main/java/com/datastax/driver/core/Configuration.java
index 3ef6922df1b..714d173bb81 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/Configuration.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/Configuration.java
@@ -348,6 +348,10 @@ public Builder withDefaultKeyspace(String keyspace) {
return this;
}
+ public QueryOptions getQueryOptions() {
+ return queryOptions;
+ }
+
/**
* Builds the final object from this builder.
*
diff --git a/driver-core/src/main/java/com/datastax/driver/core/Connection.java b/driver-core/src/main/java/com/datastax/driver/core/Connection.java
index 5b0fb2cac3c..346b63c32ab 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/Connection.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/Connection.java
@@ -72,7 +72,13 @@
import io.netty.util.Timer;
import io.netty.util.TimerTask;
import io.netty.util.concurrent.GlobalEventExecutor;
+import java.io.IOException;
import java.lang.ref.WeakReference;
+import java.net.BindException;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.security.InvalidParameterException;
+import java.text.MessageFormat;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@@ -106,6 +112,10 @@ class Connection {
private static final int FLUSHER_SCHEDULE_PERIOD_NS =
SystemProperties.getInt("com.datastax.driver.FLUSHER_SCHEDULE_PERIOD_NS", 10000);
+ private static final long ADV_SHARD_AWARENESS_BLOCK_ON_NAT = 1000000L * 60L * 1000L;
+
+ private static final long ADV_SHARD_AWARENESS_BLOCK_ON_ERROR = 5 * 60 * 1000;
+
enum State {
OPEN,
TRASHED,
@@ -121,6 +131,8 @@ enum State {
private final String name;
private volatile Integer shardId = null;
+ private int requestedShardId = -1;
+
@VisibleForTesting volatile Channel channel;
private final Factory factory;
@@ -169,44 +181,111 @@ protected Connection(String name, EndPoint endPoint, Factory factory, Owner owne
}
ListenableFuture initAsync() {
+ return initAsync(-1, 0);
+ }
+
+ ListenableFuture initAsync(final int shardId, int serverPort) {
if (factory.isShutdown)
return Futures.immediateFailedFuture(
new ConnectionException(endPoint, "Connection factory is shut down"));
- ProtocolVersion protocolVersion =
- factory.protocolVersion == null
- ? ProtocolVersion.NEWEST_SUPPORTED
- : factory.protocolVersion;
+ this.requestedShardId = shardId;
+
+ final ProtocolVersion protocolVersion =
+ factory.protocolVersion == null ? ProtocolVersion.DEFAULT : factory.protocolVersion;
final SettableFuture channelReadyFuture = SettableFuture.create();
try {
- Bootstrap bootstrap = factory.newBootstrap();
- ProtocolOptions protocolOptions = factory.configuration.getProtocolOptions();
- bootstrap.handler(
- new Initializer(
- this,
- protocolVersion,
- protocolOptions.getCompression().compressor(),
- protocolOptions.getSSLOptions(),
- factory.configuration.getPoolingOptions().getHeartbeatIntervalSeconds(),
- factory.configuration.getNettyOptions(),
- factory.configuration.getCodecRegistry(),
- factory.configuration.getMetricsOptions().isEnabled()
- ? factory.manager.metrics
- : null));
-
- ChannelFuture future = bootstrap.connect(endPoint.resolve());
+ final ProtocolOptions protocolOptions = factory.configuration.getProtocolOptions();
+ final Bootstrap bootstrap = factory.newBootstrap();
+ prepareBootstrap(bootstrap, protocolVersion, protocolOptions);
+
+ final InetSocketAddress serverAddress =
+ (serverPort == 0)
+ ? endPoint.resolve()
+ : new InetSocketAddress(endPoint.resolve().getAddress(), serverPort);
+
+ final Owner owner = ownerRef.get();
+ final HostConnectionPool pool =
+ owner instanceof HostConnectionPool ? (HostConnectionPool) owner : null;
+ final ShardingInfo shardingInfo = pool == null ? null : pool.host.getShardingInfo();
+ if ((shardingInfo == null) && shardId != -1) {
+ throw new InvalidParameterException(
+ MessageFormat.format(
+ "Requested connection to shard {0} of host {1}:{2}, but sharding info or pool is absent",
+ shardId, serverAddress.getAddress().getHostAddress(), serverPort));
+ }
- writer.incrementAndGet();
- future.addListener(
+ ChannelFuture future;
+ final int lowPort, highPort;
+ if (pool != null) {
+ lowPort = pool.manager.configuration().getProtocolOptions().getLowLocalPort();
+ highPort = pool.manager.configuration().getProtocolOptions().getHighLocalPort();
+ } else {
+ lowPort = highPort = -1;
+ }
+
+ if (shardId == -1) {
+ future = bootstrap.connect(serverAddress);
+ } else {
+ int localPort =
+ PortAllocator.getNextAvailablePort(
+ shardingInfo.getShardsCount(), shardId, lowPort, highPort);
+ if (localPort == -1) {
+ throw new RuntimeException("Can't find free local port to use");
+ }
+
+ future = bootstrap.connect(serverAddress, new InetSocketAddress(localPort));
+ logger.debug(
+ "Connecting to shard {} using local port {} (shardCount: {})\n",
+ shardId,
+ localPort,
+ shardingInfo.getShardsCount());
+ }
+
+ final ChannelFutureListener channelListener =
new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
- writer.decrementAndGet();
if (future.cause() != null) {
+ // Local port busy, let's try another one
+ if (shardId != -1 && future.cause().getCause() instanceof BindException) {
+ int localPort =
+ PortAllocator.getNextAvailablePort(
+ shardingInfo.getShardsCount(), shardId, lowPort, highPort);
+ if (localPort != -1) {
+ if (future.channel() != null) {
+ future
+ .channel()
+ .close()
+ .addListener(
+ new ChannelFutureListener() {
+ @Override
+ public void operationComplete(ChannelFuture future)
+ throws Exception {
+ if (future.cause() != null) {
+ logger.warn("Error while closing old channel", future.cause());
+ }
+ }
+ });
+ }
+ prepareBootstrap(bootstrap, protocolVersion, protocolOptions);
+ ChannelFuture newFuture =
+ bootstrap.connect(serverAddress, new InetSocketAddress(localPort));
+ newFuture.addListener(this);
+ logger.debug(
+ "Retrying connecting to shard {} using local port {} (shardCount: {})\n",
+ shardId,
+ localPort,
+ shardingInfo.getShardsCount());
+ return;
+ }
+ }
logger.warn("Error creating netty channel to " + endPoint, future.cause());
}
+ writer.decrementAndGet();
+
// Note: future.channel() can be null in some error cases, so we need to guard against
// it in the rest of the code below.
channel = future.channel();
@@ -228,16 +307,22 @@ public void operationComplete(ChannelFuture future) throws Exception {
Connection.this.factory.allChannels.add(channel);
}
if (!future.isSuccess()) {
- if (logger.isDebugEnabled())
+ if (logger.isDebugEnabled()) {
logger.debug(
String.format(
"%s Error connecting to %s%s",
Connection.this,
Connection.this.endPoint,
extractMessage(future.cause())));
+ }
channelReadyFuture.setException(
new TransportException(
Connection.this.endPoint, "Cannot connect", future.cause()));
+ if (shardId != -1) {
+ // We are using advanced shard awareness, so pool must be non-null.
+ pool.tempBlockAdvShardAwareness(ADV_SHARD_AWARENESS_BLOCK_ON_ERROR);
+ }
+
} else {
assert channel != null;
logger.debug(
@@ -247,7 +332,10 @@ public void operationComplete(ChannelFuture future) throws Exception {
}
}
}
- });
+ };
+
+ writer.incrementAndGet();
+ future.addListener(channelListener);
} catch (RuntimeException e) {
closeAsync().force();
throw e;
@@ -314,6 +402,23 @@ public void onFailure(Throwable t) {
return initFuture;
}
+ private Bootstrap prepareBootstrap(
+ Bootstrap bootstrap, ProtocolVersion protocolVersion, ProtocolOptions protocolOptions) {
+ bootstrap.handler(
+ new Initializer(
+ this,
+ protocolVersion,
+ protocolOptions.getCompression().compressor(),
+ protocolOptions.getSSLOptions(),
+ factory.configuration.getPoolingOptions().getHeartbeatIntervalSeconds(),
+ factory.configuration.getNettyOptions(),
+ factory.configuration.getCodecRegistry(),
+ factory.configuration.getMetricsOptions().isEnabled()
+ ? factory.manager.metrics
+ : null));
+ return bootstrap;
+ }
+
private static String extractMessage(Throwable t) {
if (t == null) return "";
String msg = t.getMessage() == null || t.getMessage().isEmpty() ? t.toString() : t.getMessage();
@@ -333,12 +438,13 @@ private AsyncFunction onChannelReady(
public ListenableFuture apply(Void input) throws Exception {
Future startupOptionsFuture = write(new Requests.Options());
return GuavaCompatibility.INSTANCE.transformAsync(
- startupOptionsFuture, onOptionsResponse(initExecutor), initExecutor);
+ startupOptionsFuture, onOptionsResponse(protocolVersion, initExecutor), initExecutor);
}
};
}
- private AsyncFunction onOptionsResponse(final Executor initExecutor) {
+ private AsyncFunction onOptionsResponse(
+ final ProtocolVersion protocolVersion, final Executor initExecutor) {
return new AsyncFunction() {
@Override
public ListenableFuture apply(Message.Response response) throws Exception {
@@ -350,6 +456,16 @@ public ListenableFuture apply(Message.Response response) throws Exception
if (sharding != null) {
getHost().setShardingInfo(sharding.shardingInfo);
Connection.this.shardId = sharding.shardId;
+ if (Connection.this.requestedShardId != -1
+ && Connection.this.requestedShardId != sharding.shardId) {
+ logger.warn(
+ "Advanced shard awareness: requested connection to shard {}, but connected to {}. Is there a NAT between client and server?",
+ Connection.this.requestedShardId,
+ sharding.shardId);
+ // Owner is a HostConnectionPool if we are using adv. shard awareness
+ ((HostConnectionPool) Connection.this.ownerRef.get())
+ .tempBlockAdvShardAwareness(ADV_SHARD_AWARENESS_BLOCK_ON_NAT);
+ }
} else {
getHost().setShardingInfo(null);
Connection.this.shardId = 0;
@@ -361,6 +477,9 @@ public ListenableFuture apply(Message.Response response) throws Exception
return MoreFutures.VOID_SUCCESS;
case ERROR:
Responses.Error error = (Responses.Error) response;
+ if (isUnsupportedProtocolVersion(error))
+ throw unsupportedProtocolVersionException(
+ protocolVersion, error.serverProtocolVersion);
throw new TransportException(
endPoint,
String.format(
@@ -425,11 +544,6 @@ private AsyncFunction onStartupResponse(
return new AsyncFunction() {
@Override
public ListenableFuture apply(Message.Response response) throws Exception {
-
- if (protocolVersion.compareTo(ProtocolVersion.V5) >= 0 && response.type != ERROR) {
- switchToV5Framing();
- }
-
switch (response.type) {
case READY:
return checkClusterName(protocolVersion, initExecutor);
@@ -468,6 +582,7 @@ public ListenableFuture apply(Message.Response response) throws Exception
case V3:
case V4:
case V5:
+ case V6:
return authenticateV2(authenticator, protocolVersion, initExecutor);
default:
throw defunct(protocolVersion.unsupported());
@@ -650,7 +765,9 @@ private boolean isUnsupportedProtocolVersion(Responses.Error error) {
// Testing for a specific string is a tad fragile but well, we don't have much choice
// C* 2.1 reports a server error instead of protocol error, see CASSANDRA-9451
return (error.code == ExceptionCode.PROTOCOL_ERROR || error.code == ExceptionCode.SERVER_ERROR)
- && error.message.contains("Invalid or unsupported protocol version");
+ && (error.message.contains("Invalid or unsupported protocol version")
+ // JAVA-2924: server is behind driver and considers the proposed version as beta
+ || error.message.contains("Beta version of the protocol used"));
}
private UnsupportedProtocolVersionException unsupportedProtocolVersionException(
@@ -1039,6 +1156,72 @@ public String toString() {
"Connection[%s, inFlight=%d, closed=%b]", name, inFlight.get(), isClosed());
}
+ static class PortAllocator {
+ private static final AtomicInteger lastPort = new AtomicInteger(-1);
+
+ public static int getNextAvailablePort(int shardCount, int shardId, int lowPort, int highPort) {
+ int lastPortValue, foundPort = -1;
+ do {
+ lastPortValue = lastPort.get();
+
+ // We will scan from lastPortValue
+ // (or lowPort if there was no lastPort or lastPort is too low)
+ int scanStart = lastPortValue == -1 ? lowPort : lastPortValue;
+ if (scanStart < lowPort) {
+ scanStart = lowPort;
+ }
+
+ // Round it up to "% shardCount == shardId"
+ scanStart += (shardCount - scanStart % shardCount) + shardId;
+
+ // Scan from scanStart upwards to highPort.
+ for (int port = scanStart; port <= highPort; port += shardCount) {
+ if (isTcpPortAvailable(port)) {
+ foundPort = port;
+ break;
+ }
+ }
+
+ // If we started scanning from a high scanStart port
+ // there might have been not enough ports left that are
+ // smaller than highPort. Scan from the beginning
+ // from the lowPort.
+ if (foundPort == -1) {
+ scanStart = lowPort + (shardCount - lowPort % shardCount) + shardId;
+
+ for (int port = scanStart; port <= highPort; port += shardCount) {
+ if (isTcpPortAvailable(port)) {
+ foundPort = port;
+ break;
+ }
+ }
+ }
+
+ // No luck! All ports taken!
+ if (foundPort == -1) {
+ return -1;
+ }
+ } while (!lastPort.compareAndSet(lastPortValue, foundPort));
+
+ return foundPort;
+ }
+
+ public static boolean isTcpPortAvailable(int port) {
+ try {
+ ServerSocket serverSocket = new ServerSocket();
+ try {
+ serverSocket.setReuseAddress(false);
+ serverSocket.bind(new InetSocketAddress(port), 1);
+ return true;
+ } finally {
+ serverSocket.close();
+ }
+ } catch (IOException ex) {
+ return false;
+ }
+ }
+ }
+
static class Factory {
final Timer timer;
@@ -1116,11 +1299,17 @@ Connection open(Host host)
Connection open(HostConnectionPool pool)
throws ConnectionException, InterruptedException, UnsupportedProtocolVersionException,
ClusterNameMismatchException {
+ return open(pool, -1, 0);
+ }
+
+ Connection open(HostConnectionPool pool, int shardId, int serverPort)
+ throws ConnectionException, InterruptedException, UnsupportedProtocolVersionException,
+ ClusterNameMismatchException {
pool.host.convictionPolicy.signalConnectionsOpening(1);
Connection connection =
new Connection(buildConnectionName(pool.host), pool.host.getEndPoint(), this, pool);
try {
- connection.initAsync().get();
+ connection.initAsync(shardId, serverPort).get();
return connection;
} catch (ExecutionException e) {
throw launderAsyncInitException(e);
@@ -1285,6 +1474,7 @@ private void flush(FlushItem item) {
flusher.start();
}
+ @ChannelHandler.Sharable
class Dispatcher extends SimpleChannelInboundHandler {
final StreamIdGenerator streamIdHandler;
@@ -1756,6 +1946,8 @@ private static class Initializer extends ChannelInitializer {
new Message.ProtocolEncoder(ProtocolVersion.V4);
private static final Message.ProtocolEncoder messageEncoderV5 =
new Message.ProtocolEncoder(ProtocolVersion.V5);
+ private static final Message.ProtocolEncoder messageEncoderV6 =
+ new Message.ProtocolEncoder(ProtocolVersion.V6);
private static final Frame.Encoder frameEncoder = new Frame.Encoder();
private final ProtocolVersion protocolVersion;
@@ -1822,6 +2014,8 @@ protected void initChannel(SocketChannel channel) throws Exception {
pipeline.addLast("frameDecoder", new Frame.Decoder());
pipeline.addLast("frameEncoder", frameEncoder);
+ pipeline.addLast("framingFormatHandler", new FramingFormatHandler(connection.factory));
+
if (compressor != null
// Frame-level compression is only done in legacy protocol versions. In V5 and above, it
// happens at a higher level ("segment" that groups multiple frames), so never install
@@ -1853,45 +2047,14 @@ private Message.ProtocolEncoder messageEncoderFor(ProtocolVersion version) {
return messageEncoderV4;
case V5:
return messageEncoderV5;
+ case V6:
+ return messageEncoderV6;
default:
throw new DriverInternalError("Unsupported protocol version " + protocolVersion);
}
}
}
- /**
- * Rearranges the pipeline to deal with the new framing structure in protocol v5 and above. This
- * has to be done manually, because it only happens once we've confirmed that the server supports
- * v5.
- */
- void switchToV5Framing() {
- assert factory.protocolVersion.compareTo(ProtocolVersion.V5) >= 0;
-
- // We want to do this on the event loop, to make sure it doesn't race with incoming requests
- assert channel.eventLoop().inEventLoop();
-
- ChannelPipeline pipeline = channel.pipeline();
- SegmentCodec segmentCodec =
- new SegmentCodec(
- channel.alloc(), factory.configuration.getProtocolOptions().getCompression());
-
- // Outbound: "message -> segment -> bytes" instead of "message -> frame -> bytes"
- Message.ProtocolEncoder requestEncoder =
- (Message.ProtocolEncoder) pipeline.get("messageEncoder");
- pipeline.replace(
- "messageEncoder",
- "messageToSegmentEncoder",
- new MessageToSegmentEncoder(channel.alloc(), requestEncoder));
- pipeline.replace(
- "frameEncoder", "segmentToBytesEncoder", new SegmentToBytesEncoder(segmentCodec));
-
- // Inbound: "frame <- segment <- bytes" instead of "frame <- bytes"
- pipeline.replace(
- "frameDecoder", "bytesToSegmentDecoder", new BytesToSegmentDecoder(segmentCodec));
- pipeline.addAfter(
- "bytesToSegmentDecoder", "segmentToFrameDecoder", new SegmentToFrameDecoder());
- }
-
/** A component that "owns" a connection, and should be notified when it dies. */
interface Owner {
void onConnectionDefunct(Connection connection);
diff --git a/driver-core/src/main/java/com/datastax/driver/core/ControlConnection.java b/driver-core/src/main/java/com/datastax/driver/core/ControlConnection.java
index 3558f006d9d..da52ffaba4a 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/ControlConnection.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/ControlConnection.java
@@ -301,7 +301,7 @@ private Connection tryConnect(Host host, boolean isInitialConnection)
// If no protocol version was specified, set the default as soon as a connection succeeds (it's
// needed to parse UDTs in refreshSchema)
if (cluster.connectionFactory.protocolVersion == null)
- cluster.connectionFactory.protocolVersion = ProtocolVersion.NEWEST_SUPPORTED;
+ cluster.connectionFactory.protocolVersion = ProtocolVersion.DEFAULT;
try {
logger.trace("[Control connection] Registering for events");
@@ -611,6 +611,17 @@ private static void updateInfo(
InetAddress nativeAddress = row.getInet("native_address");
int nativePort = row.getInt("native_port");
broadcastRpcAddress = new InetSocketAddress(nativeAddress, nativePort);
+ } else if (row.getColumnDefinitions().contains("native_transport_address")) {
+ // DSE 6.8 introduced native_transport_address and native_transport_port for the
+ // listen address. Also included is native_transport_port_ssl (in case users
+ // want to setup a different port for SSL and non-SSL conns).
+ InetAddress nativeAddress = row.getInet("native_transport_address");
+ int nativePort = row.getInt("native_transport_port");
+ if (cluster.getCluster().getConfiguration().getProtocolOptions().getSSLOptions() != null
+ && !row.isNull("native_transport_port_ssl")) {
+ nativePort = row.getInt("native_transport_port_ssl");
+ }
+ broadcastRpcAddress = new InetSocketAddress(nativeAddress, nativePort);
} else if (row.getColumnDefinitions().contains("rpc_address")) {
InetAddress rpcAddress = row.getInet("rpc_address");
broadcastRpcAddress = new InetSocketAddress(rpcAddress, cluster.connectionFactory.getPort());
@@ -831,6 +842,14 @@ private void refreshNodeListAndTokenMap(
InetAddress nativeAddress = row.getInet("native_address");
int nativePort = row.getInt("native_port");
broadcastRpcAddress = new InetSocketAddress(nativeAddress, nativePort);
+ } else if (row.getColumnDefinitions().contains("native_transport_address")) {
+ InetAddress nativeAddress = row.getInet("native_transport_address");
+ int nativePort = row.getInt("native_transport_port");
+ if (cluster.getCluster().getConfiguration().getProtocolOptions().getSSLOptions() != null
+ && !row.isNull("native_transport_port_ssl")) {
+ nativePort = row.getInt("native_transport_port_ssl");
+ }
+ broadcastRpcAddress = new InetSocketAddress(nativeAddress, nativePort);
} else {
InetAddress rpcAddress = row.getInet("rpc_address");
broadcastRpcAddress =
@@ -956,7 +975,11 @@ private boolean isValidPeer(Row peerRow, boolean logIfInvalid) {
&& !peerRow.isNull("native_port");
} else {
isValid &=
- peerRow.getColumnDefinitions().contains("rpc_address") && !peerRow.isNull("rpc_address");
+ (peerRow.getColumnDefinitions().contains("rpc_address") && !peerRow.isNull("rpc_address"))
+ || (peerRow.getColumnDefinitions().contains("native_transport_address")
+ && peerRow.getColumnDefinitions().contains("native_transport_port")
+ && !peerRow.isNull("native_transport_address")
+ && !peerRow.isNull("native_transport_port"));
}
if (EXTENDED_PEER_CHECK) {
@@ -984,6 +1007,9 @@ private String formatInvalidPeer(Row peerRow) {
formatMissingOrNullColumn(peerRow, "native_address", sb);
formatMissingOrNullColumn(peerRow, "native_port", sb);
} else {
+ formatMissingOrNullColumn(peerRow, "native_transport_address", sb);
+ formatMissingOrNullColumn(peerRow, "native_transport_port", sb);
+ formatMissingOrNullColumn(peerRow, "native_transport_port_ssl", sb);
formatMissingOrNullColumn(peerRow, "rpc_address", sb);
}
if (EXTENDED_PEER_CHECK) {
diff --git a/driver-core/src/main/java/com/datastax/driver/core/DataTypeClassNameParser.java b/driver-core/src/main/java/com/datastax/driver/core/DataTypeClassNameParser.java
index ef840356f3c..b4617b14f4e 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/DataTypeClassNameParser.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/DataTypeClassNameParser.java
@@ -123,7 +123,7 @@ static DataType parseOne(
TypeCodec.varchar()
.deserialize(Bytes.fromHexString("0x" + parser.readOne()), protocolVersion);
parser.skipBlankAndComma();
- Map rawFields = parser.getNameAndTypeParameters();
+ Map rawFields = parser.getNameAndTypeParameters(protocolVersion);
List fields = new ArrayList(rawFields.size());
for (Map.Entry entry : rawFields.entrySet())
fields.add(
@@ -199,7 +199,7 @@ static ParseResult parseWithComposite(
count--;
Parser collectionParser = new Parser(last, 0);
collectionParser.parseNextName(); // skips columnToCollectionType
- Map params = collectionParser.getCollectionsParameters();
+ Map params = collectionParser.getCollectionsParameters(protocolVersion);
for (Map.Entry entry : params.entrySet())
collections.put(entry.getKey(), parseOne(entry.getValue(), protocolVersion, codecRegistry));
}
@@ -320,18 +320,18 @@ public List getTypeParameters() {
"Syntax error parsing '%s' at char %d: unexpected end of string", str, idx));
}
- public Map getCollectionsParameters() {
+ public Map getCollectionsParameters(ProtocolVersion protocolVersion) {
if (isEOS()) return Collections.emptyMap();
if (str.charAt(idx) != '(') throw new IllegalStateException();
++idx; // skipping '('
- return getNameAndTypeParameters();
+ return getNameAndTypeParameters(protocolVersion);
}
// Must be at the start of the first parameter to read
- public Map getNameAndTypeParameters() {
+ public Map getNameAndTypeParameters(ProtocolVersion protocolVersion) {
// The order of the hashmap matters for UDT
Map map = new LinkedHashMap();
@@ -345,8 +345,7 @@ public Map getNameAndTypeParameters() {
String name = null;
try {
name =
- TypeCodec.varchar()
- .deserialize(Bytes.fromHexString("0x" + bbHex), ProtocolVersion.NEWEST_SUPPORTED);
+ TypeCodec.varchar().deserialize(Bytes.fromHexString("0x" + bbHex), protocolVersion);
} catch (NumberFormatException e) {
throwSyntaxError(e.getMessage());
}
diff --git a/driver-core/src/main/java/com/datastax/driver/core/DefaultEndPointFactory.java b/driver-core/src/main/java/com/datastax/driver/core/DefaultEndPointFactory.java
index c1378abf27d..b9caa3ea1a4 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/DefaultEndPointFactory.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/DefaultEndPointFactory.java
@@ -49,6 +49,16 @@ public EndPoint create(Row peersRow) {
InetSocketAddress translateAddress =
cluster.manager.translateAddress(new InetSocketAddress(nativeAddress, nativePort));
return new TranslatedAddressEndPoint(translateAddress);
+ } else if (peersRow.getColumnDefinitions().contains("native_transport_address")) {
+ InetAddress nativeAddress = peersRow.getInet("native_transport_address");
+ int nativePort = peersRow.getInt("native_transport_port");
+ if (cluster.getConfiguration().getProtocolOptions().getSSLOptions() != null
+ && !peersRow.isNull("native_transport_port_ssl")) {
+ nativePort = peersRow.getInt("native_transport_port_ssl");
+ }
+ InetSocketAddress translateAddress =
+ cluster.manager.translateAddress(new InetSocketAddress(nativeAddress, nativePort));
+ return new TranslatedAddressEndPoint(translateAddress);
} else {
InetAddress broadcastAddress = peersRow.getInet("peer");
InetAddress rpcAddress = peersRow.getInet("rpc_address");
diff --git a/driver-core/src/main/java/com/datastax/driver/core/ExceptionCode.java b/driver-core/src/main/java/com/datastax/driver/core/ExceptionCode.java
index b6c6d0af50f..4a745da5ff0 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/ExceptionCode.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/ExceptionCode.java
@@ -36,6 +36,8 @@ enum ExceptionCode {
READ_FAILURE(0x1300),
FUNCTION_FAILURE(0x1400),
WRITE_FAILURE(0x1500),
+ CDC_WRITE_FAILURE(0x1600),
+ CAS_WRITE_UNKNOWN(0x1700),
// 2xx: problem validating the request
SYNTAX_ERROR(0x2000),
diff --git a/driver-core/src/main/java/com/datastax/driver/core/Frame.java b/driver-core/src/main/java/com/datastax/driver/core/Frame.java
index 60b8846e551..ac5ee54727f 100644
--- a/driver-core/src/main/java/com/datastax/driver/core/Frame.java
+++ b/driver-core/src/main/java/com/datastax/driver/core/Frame.java
@@ -112,6 +112,7 @@ private static int readStreamId(ByteBuf fullFrame, ProtocolVersion version) {
case V3:
case V4:
case V5:
+ case V6:
return fullFrame.readShort();
default:
throw version.unsupported();
@@ -162,6 +163,7 @@ static int lengthFor(ProtocolVersion version) {
case V3:
case V4:
case V5:
+ case V6:
return 9;
default:
throw version.unsupported();
@@ -180,6 +182,7 @@ public void encodeInto(ByteBuf destination) {
case V3:
case V4:
case V5:
+ case V6:
destination.writeShort(streamId);
break;
default:
diff --git a/driver-core/src/main/java/com/datastax/driver/core/FramingFormatHandler.java b/driver-core/src/main/java/com/datastax/driver/core/FramingFormatHandler.java
new file mode 100644
index 00000000000..91459a1ab34
--- /dev/null
+++ b/driver-core/src/main/java/com/datastax/driver/core/FramingFormatHandler.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright DataStax, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.datastax.driver.core;
+
+import com.datastax.driver.core.Message.Response.Type;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.ChannelPipeline;
+import io.netty.handler.codec.MessageToMessageDecoder;
+import java.util.List;
+
+/**
+ * A handler to deal with different protocol framing formats.
+ *
+ *
This handler detects when a handshake is successful; then, if necessary, adapts the pipeline
+ * to the modern framing format introduced in protocol v5.
+ */
+public class FramingFormatHandler extends MessageToMessageDecoder {
+
+ private final Connection.Factory factory;
+
+ FramingFormatHandler(Connection.Factory factory) {
+ this.factory = factory;
+ }
+
+ @Override
+ protected void decode(ChannelHandlerContext ctx, Frame frame, List