diff --git a/.github/workflows/docker_build_and_test.yml b/.github/workflows/docker_build_and_test.yml
index 695c08672f..1d30aa85ea 100644
--- a/.github/workflows/docker_build_and_test.yml
+++ b/.github/workflows/docker_build_and_test.yml
@@ -23,6 +23,7 @@ on:
description: Docker image type to build and test
options:
- "jvm"
+ - "native"
kafka_url:
description: Kafka url to be used to build the docker image
required: true
diff --git a/.github/workflows/docker_official_image_build_and_test.yml b/.github/workflows/docker_official_image_build_and_test.yml
new file mode 100644
index 0000000000..a315cd0e0d
--- /dev/null
+++ b/.github/workflows/docker_official_image_build_and_test.yml
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: Docker Official Image Build and Test
+
+on:
+ workflow_dispatch:
+ inputs:
+ image_type:
+ type: choice
+ description: Docker image type to build and test
+ options:
+ - "jvm"
+ kafka_version:
+ description: Kafka version for the docker official image. This should be >=3.7.0
+ required: true
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v3
+ with:
+ python-version: "3.10"
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install -r docker/requirements.txt
+ - name: Build image and run tests
+ working-directory: ./docker
+ run: |
+ python docker_official_image_build_test.py kafka/test -tag=test -type=${{ github.event.inputs.image_type }} -v=${{ github.event.inputs.kafka_version }}
+ - name: Run CVE scan
+ uses: aquasecurity/trivy-action@master
+ with:
+ image-ref: 'kafka/test:test'
+ format: 'table'
+ severity: 'CRITICAL,HIGH'
+ output: scan_report_${{ github.event.inputs.image_type }}.txt
+ exit-code: '1'
+ - name: Upload test report
+ if: always()
+ uses: actions/upload-artifact@v3
+ with:
+ name: report_${{ github.event.inputs.image_type }}.html
+ path: docker/test/report_${{ github.event.inputs.image_type }}.html
+ - name: Upload CVE scan report
+ if: always()
+ uses: actions/upload-artifact@v3
+ with:
+ name: scan_report_${{ github.event.inputs.image_type }}.txt
+ path: scan_report_${{ github.event.inputs.image_type }}.txt
diff --git a/.github/workflows/docker_promote.yml b/.github/workflows/docker_promote.yml
index 3449265877..04872f9d59 100644
--- a/.github/workflows/docker_promote.yml
+++ b/.github/workflows/docker_promote.yml
@@ -19,10 +19,10 @@ on:
workflow_dispatch:
inputs:
rc_docker_image:
- description: RC docker image that needs to be promoted (Example:- apache/kafka:3.6.0-rc0)
+ description: RC docker image that needs to be promoted (Example: apache/kafka:3.8.0-rc0 or apache/kafka-native:3.8.0-rc0)
required: true
promoted_docker_image:
- description: Docker image name of the promoted image (Example:- apache/kafka:3.6.0)
+ description: Docker image name of the promoted image (Example: apache/kafka:3.8.0 or apache/kafka-native:3.8.0)
required: true
jobs:
diff --git a/.github/workflows/docker_rc_release.yml b/.github/workflows/docker_rc_release.yml
index c7082dcac9..22dd924b51 100644
--- a/.github/workflows/docker_rc_release.yml
+++ b/.github/workflows/docker_rc_release.yml
@@ -23,8 +23,9 @@ on:
description: Docker image type to be built and pushed
options:
- "jvm"
+ - "native"
rc_docker_image:
- description: RC docker image that needs to be built and pushed to Dockerhub (Example:- apache/kafka:3.6.0-rc0)
+ description: RC docker image that needs to be built and pushed to Dockerhub (Example: apache/kafka:3.8.0-rc0 or apache/kafka-native:3.8.0-rc0)
required: true
kafka_url:
description: Kafka url to be used to build the docker image
diff --git a/.github/workflows/docker_scan.yml b/.github/workflows/docker_scan.yml
index 7d9ecfe619..bdc8eafbe6 100644
--- a/.github/workflows/docker_scan.yml
+++ b/.github/workflows/docker_scan.yml
@@ -21,6 +21,7 @@ on:
workflow_dispatch:
jobs:
scan_jvm:
+ if: github.repository == 'apache/kafka'
runs-on: ubuntu-latest
strategy:
matrix:
diff --git a/.github/workflows/prepare_docker_official_image_source.yml b/.github/workflows/prepare_docker_official_image_source.yml
new file mode 100644
index 0000000000..4549104583
--- /dev/null
+++ b/.github/workflows/prepare_docker_official_image_source.yml
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: Prepare Docker Official Image Source
+
+on:
+ workflow_dispatch:
+ inputs:
+ image_type:
+ type: choice
 description: Docker image type for which to prepare the Docker official image source
+ options:
+ - "jvm"
+ kafka_version:
+ description: Kafka version for the docker official image. This should be >=3.7.0
+ required: true
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v3
+ with:
+ python-version: "3.10"
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install -r docker/requirements.txt
+ - name: Build Docker Official Image Artifact
+ working-directory: ./docker
+ run: |
+ python prepare_docker_official_image_source.py -type=${{ github.event.inputs.image_type }} -v=${{ github.event.inputs.kafka_version }}
+ - name: Upload Docker Official Image Artifact
+ if: success()
+ uses: actions/upload-artifact@v4
+ with:
+ name: ${{ github.event.inputs.kafka_version }}
+ path: docker/docker_official_images/${{ github.event.inputs.kafka_version }}
diff --git a/.gitignore b/.gitignore
index 376c781ad3..015df8ead8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,6 +34,8 @@ Vagrantfile.local
config/server-*
config/zookeeper-*
+gradle/wrapper/*.jar
+gradlew.bat
results
tests/results
@@ -58,4 +60,5 @@ jmh-benchmarks/src/main/generated
storage/kafka-tiered-storage/
docker/test/report_*.html
+kafka.Kafka
__pycache__
diff --git a/Jenkinsfile b/Jenkinsfile
index 50b7f6a298..0a795637ff 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -36,6 +36,12 @@ def doTest(env, target = "test") {
junit '**/build/test-results/**/TEST-*.xml'
}
+def runTestOnDevBranch(env) {
+ if (!isChangeRequest(env)) {
+ doTest(env)
+ }
+}
+
def doStreamsArchetype() {
echo 'Verify that Kafka Streams archetype compiles'
@@ -132,7 +138,7 @@ pipeline {
}
steps {
doValidation()
- doTest(env)
+ runTestOnDevBranch(env)
echo 'Skipping Kafka Streams archetype test for Java 11'
}
}
@@ -151,7 +157,7 @@ pipeline {
}
steps {
doValidation()
- doTest(env)
+ runTestOnDevBranch(env)
echo 'Skipping Kafka Streams archetype test for Java 17'
}
}
diff --git a/LICENSE-binary b/LICENSE-binary
index a395f15e42..01a2b03025 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -60,12 +60,12 @@ reload4j-1.2.25
rocksdbjni-7.9.2
scala-collection-compat_2.12-2.10.0
scala-collection-compat_2.13-2.10.0
-scala-library-2.12.18
-scala-library-2.13.12
+scala-library-2.12.19
+scala-library-2.13.14
scala-logging_2.12-3.9.4
scala-logging_2.13-3.9.4
-scala-reflect-2.12.18
-scala-reflect-2.13.12
+scala-reflect-2.12.19
+scala-reflect-2.13.14
scala-java8-compat_2.12-1.0.2
scala-java8-compat_2.13-1.0.2
snappy-java-1.1.10.5
diff --git a/bin/kafka-run-class.sh b/bin/kafka-run-class.sh
index 351f1932b7..ef731a0615 100755
--- a/bin/kafka-run-class.sh
+++ b/bin/kafka-run-class.sh
@@ -49,7 +49,7 @@ should_include_file() {
base_dir=$(dirname $0)/..
if [ -z "$SCALA_VERSION" ]; then
- SCALA_VERSION=2.13.12
+ SCALA_VERSION=2.13.14
if [[ -f "$base_dir/gradle.properties" ]]; then
SCALA_VERSION=`grep "^scalaVersion=" "$base_dir/gradle.properties" | cut -d= -f 2`
fi
@@ -208,7 +208,7 @@ fi
# JMX settings
if [ -z "$KAFKA_JMX_OPTS" ]; then
- KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
+ KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote=true -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
fi
# JMX port to use
@@ -353,9 +353,16 @@ CLASSPATH=${CLASSPATH#:}
# If Cygwin is detected, classpath is converted to Windows format.
(( WINDOWS_OS_FORMAT )) && CLASSPATH=$(cygpath --path --mixed "${CLASSPATH}")
-# Launch mode
-if [ "x$DAEMON_MODE" = "xtrue" ]; then
- nohup "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_CMD_OPTS $KAFKA_JDK_COMPATIBILITY_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null &
+# If KAFKA_MODE=native, bring up Kafka in native mode.
+# This expects the Kafka executable binary to be present at $base_dir/kafka.Kafka.
+# It is used specifically for running system tests against native Kafka.
+if [[ "x$KAFKA_MODE" == "xnative" ]] && [[ "$*" == *"kafka.Kafka"* ]]; then
+ exec $base_dir/kafka.Kafka start --config "$2" $KAFKA_LOG4J_CMD_OPTS $KAFKA_JMX_OPTS $KAFKA_OPTS
else
- exec "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_CMD_OPTS $KAFKA_JDK_COMPATIBILITY_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@"
+ # Launch mode
+ if [ "x$DAEMON_MODE" = "xtrue" ]; then
+ nohup "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_CMD_OPTS $KAFKA_JDK_COMPATIBILITY_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null &
+ else
+ exec "$JAVA" $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_CMD_OPTS $KAFKA_JDK_COMPATIBILITY_OPTS -cp "$CLASSPATH" $KAFKA_OPTS "$@"
+ fi
fi
diff --git a/bin/windows/kafka-run-class.bat b/bin/windows/kafka-run-class.bat
index 616dabd70f..e42245efc0 100755
--- a/bin/windows/kafka-run-class.bat
+++ b/bin/windows/kafka-run-class.bat
@@ -27,7 +27,7 @@ set BASE_DIR=%CD%
popd
IF ["%SCALA_VERSION%"] EQU [""] (
- set SCALA_VERSION=2.13.12
+ set SCALA_VERSION=2.13.14
)
IF ["%SCALA_BINARY_VERSION%"] EQU [""] (
diff --git a/build.gradle b/build.gradle
index 030b4f1f2a..72622a100e 100644
--- a/build.gradle
+++ b/build.gradle
@@ -114,6 +114,13 @@ ext {
repo = file("$rootDir/.git").isDirectory() ? Grgit.open(currentDir: project.getRootDir()) : null
commitId = determineCommitId()
+
+ addParametersForTests = { name, options ->
+ // -parameters makes javac record method parameter names, so TestInfo#getDisplayName can render arguments as "name=value" in parameterized tests.
+ // ref: https://github.com/junit-team/junit5/blob/4c0dddad1b96d4a20e92a2cd583954643ac56ac0/junit-jupiter-params/src/main/java/org/junit/jupiter/params/ParameterizedTest.java#L161-L164
+ if (name == "compileTestJava" || name == "compileTestScala")
+ options.compilerArgs << "-parameters"
+ }
}
allprojects {
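
For context, a minimal JUnit 5 sketch (not part of this patch; class, method, and parameter names are illustrative) of what `-parameters` buys for test compilation:

```java
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

// Illustrative only: JUnit 5's default display name for parameterized tests
// includes {argumentsWithNames}. Compiled with -parameters, an invocation
// renders as "[1] quorum=kraft"; without the flag the parameter name is not
// recorded in the class file, so only the bare value can be shown.
class DisplayNameSketch {
    @ParameterizedTest
    @ValueSource(strings = {"zk", "kraft"})
    void startsBroker(String quorum) {
        // test body elided; only the rendered display name matters here
    }
}
```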
@@ -161,6 +168,21 @@ allprojects {
}
}
task printAllDependencies(type: DependencyReportTask) {}
+
+ tasks.withType(Javadoc) {
+ options.charSet = 'UTF-8'
+ options.docEncoding = 'UTF-8'
+ options.encoding = 'UTF-8'
+ options.memberLevel = JavadocMemberLevel.PUBLIC // Document only public members/API
+ // Turn off doclint for now, see https://blog.joda.org/2014/02/turning-off-doclint-in-jdk-8-javadoc.html for rationale
+ options.addStringOption('Xdoclint:none', '-quiet')
+
+ // The URL structure was changed to include the locale after Java 8
+ if (JavaVersion.current().isJava11Compatible())
+ options.links "https://docs.oracle.com/en/java/javase/${JavaVersion.current().majorVersion}/docs/api/"
+ else
+ options.links "https://docs.oracle.com/javase/8/docs/api/"
+ }
}
def determineCommitId() {
@@ -225,7 +247,7 @@ subprojects {
options.encoding = 'UTF-8'
options.compilerArgs << "-Xlint:all"
// temporary exclusions until all the warnings are fixed
- if (!project.path.startsWith(":connect"))
+ if (!project.path.startsWith(":connect") && !project.path.startsWith(":storage"))
options.compilerArgs << "-Xlint:-rawtypes"
options.compilerArgs << "-Xlint:-serial"
options.compilerArgs << "-Xlint:-try"
@@ -238,12 +260,16 @@ subprojects {
// --source/--target 8 is deprecated in Java 20, suppress warning until Java 8 support is dropped in Kafka 4.0
if (JavaVersion.current().isCompatibleWith(JavaVersion.VERSION_20))
options.compilerArgs << "-Xlint:-options"
+
+ addParametersForTests(name, options)
}
- // We should only set this if Java version is < 9 (--release is recommended for >= 9), but the Scala plugin for IntelliJ sets
- // `-target` incorrectly if this is unset
- sourceCompatibility = minJavaVersion
- targetCompatibility = minJavaVersion
+ java {
+ // We should only set this if Java version is < 9 (--release is recommended for >= 9), but the Scala plugin for IntelliJ sets
+ // `-target` incorrectly if this is unset
+ sourceCompatibility = minJavaVersion
+ targetCompatibility = minJavaVersion
+ }
if (shouldPublish) {
@@ -265,6 +291,24 @@ subprojects {
} else {
apply plugin: 'com.github.johnrengelman.shadow'
project.shadow.component(mavenJava)
+
+ // Fix for avoiding inclusion of runtime dependencies marked as 'shadow' in MANIFEST Class-Path.
+ // https://github.com/johnrengelman/shadow/issues/324
+ afterEvaluate {
+ pom.withXml { xml ->
+ if (xml.asNode().get('dependencies') == null) {
+ xml.asNode().appendNode('dependencies')
+ }
+ def dependenciesNode = xml.asNode().get('dependencies').get(0)
+ project.configurations.shadowed.allDependencies.each {
+ def dependencyNode = dependenciesNode.appendNode('dependency')
+ dependencyNode.appendNode('groupId', it.group)
+ dependencyNode.appendNode('artifactId', it.name)
+ dependencyNode.appendNode('version', it.version)
+ dependencyNode.appendNode('scope', 'runtime')
+ }
+ }
+ }
}
afterEvaluate {
@@ -274,7 +318,7 @@ subprojects {
artifact task
}
- artifactId = archivesBaseName
+ artifactId = base.archivesName.get()
pom {
name = 'Apache Kafka'
url = 'https://kafka.apache.org'
@@ -562,21 +606,6 @@ subprojects {
task docsJar(dependsOn: javadocJar)
- javadoc {
- options.charSet = 'UTF-8'
- options.docEncoding = 'UTF-8'
- options.encoding = 'UTF-8'
- options.memberLevel = JavadocMemberLevel.PUBLIC // Document only public members/API
- // Turn off doclint for now, see https://blog.joda.org/2014/02/turning-off-doclint-in-jdk-8-javadoc.html for rationale
- options.addStringOption('Xdoclint:none', '-quiet')
-
- // The URL structure was changed to include the locale after Java 8
- if (JavaVersion.current().isJava11Compatible())
- options.links "https://docs.oracle.com/en/java/javase/${JavaVersion.current().majorVersion}/docs/api/"
- else
- options.links "https://docs.oracle.com/javase/8/docs/api/"
- }
-
task systemTestLibs(dependsOn: jar)
if (!sourceSets.test.allSource.isEmpty()) {
@@ -619,7 +648,7 @@ subprojects {
scalaCompileOptions.keepAliveMode = userKeepAliveMode
scalaCompileOptions.additionalParameters = [
- "-deprecation",
+ "-deprecation:false",
"-unchecked",
"-encoding", "utf8",
"-Xlog-reflective-calls",
@@ -676,6 +705,8 @@ subprojects {
if (versions.baseScala == "2.13" || JavaVersion.current().isJava9Compatible())
scalaCompileOptions.additionalParameters += ["-release", String.valueOf(minJavaVersion)]
+ addParametersForTests(name, options)
+
configure(scalaCompileOptions.forkOptions) {
memoryMaximumSize = defaultMaxHeapSize
jvmArgs = defaultJvmArgs
@@ -707,8 +738,8 @@ subprojects {
}
test.dependsOn('spotbugsMain')
- tasks.withType(com.github.spotbugs.snom.SpotBugsTask) {
- reports {
+ tasks.withType(com.github.spotbugs.snom.SpotBugsTask).configureEach {
+ reports.configure {
// Continue supporting `xmlFindBugsReport` for compatibility
xml.enabled(project.hasProperty('xmlSpotBugsReport') || project.hasProperty('xmlFindBugsReport'))
html.enabled(!project.hasProperty('xmlSpotBugsReport') && !project.hasProperty('xmlFindBugsReport'))
@@ -743,13 +774,14 @@ subprojects {
}
if (userEnableTestCoverage) {
- def coverageGen = it.path == ':core' ? 'reportScoverage' : 'jacocoTestReport'
+ def coverageGen = it.path == ':core' ? 'reportTestScoverage' : 'jacocoTestReport'
tasks.register('reportCoverage').configure { dependsOn(coverageGen) }
}
dependencyCheck {
suppressionFile = "$rootDir/gradle/resources/dependencycheck-suppressions.xml"
skipProjects = [ ":jmh-benchmarks", ":trogdor" ]
+ skipConfigurations = [ "zinc" ]
}
}
@@ -815,7 +847,9 @@ tasks.create(name: "jarConnect", dependsOn: connectPkgs.collect { it + ":jar" })
tasks.create(name: "testConnect", dependsOn: connectPkgs.collect { it + ":test" }) {}
project(':server') {
- archivesBaseName = "kafka-server"
+ base {
+ archivesName = "kafka-server"
+ }
dependencies {
implementation project(':clients')
@@ -824,6 +858,7 @@ project(':server') {
implementation project(':transaction-coordinator')
implementation project(':raft')
implementation libs.metrics
+ implementation libs.jacksonDatabind
implementation libs.slf4jApi
@@ -886,7 +921,10 @@ project(':core') {
}
if (userEnableTestCoverage)
apply plugin: "org.scoverage"
- archivesBaseName = "kafka_${versions.baseScala}"
+
+ base {
+ archivesName = "kafka_${versions.baseScala}"
+ }
configurations {
generator
@@ -1250,7 +1288,9 @@ project(':core') {
}
project(':metadata') {
- archivesBaseName = "kafka-metadata"
+ base {
+ archivesName = "kafka-metadata"
+ }
configurations {
generator
@@ -1327,7 +1367,9 @@ project(':metadata') {
}
project(':group-coordinator') {
- archivesBaseName = "kafka-group-coordinator"
+ base {
+ archivesName = "kafka-group-coordinator"
+ }
configurations {
generator
@@ -1338,6 +1380,8 @@ project(':group-coordinator') {
implementation project(':clients')
implementation project(':metadata')
implementation project(':storage')
+ implementation libs.jacksonDatabind
+ implementation libs.jacksonJDK8Datatypes
implementation libs.slf4jApi
implementation libs.metrics
@@ -1368,13 +1412,17 @@ project(':group-coordinator') {
enabled = false
}
+ checkstyle {
+ configProperties = checkstyleConfigProperties("import-control-group-coordinator.xml")
+ }
+
task processMessages(type:JavaExec) {
mainClass = "org.apache.kafka.message.MessageGenerator"
classpath = configurations.generator
args = [ "-p", "org.apache.kafka.coordinator.group.generated",
"-o", "src/generated/java/org/apache/kafka/coordinator/group/generated",
"-i", "src/main/resources/common/message",
- "-m", "MessageDataGenerator"
+ "-m", "MessageDataGenerator", "JsonConverterGenerator"
]
inputs.dir("src/main/resources/common/message")
.withPropertyName("messages")
@@ -1388,7 +1436,9 @@ project(':group-coordinator') {
}
project(':transaction-coordinator') {
- archivesBaseName = "kafka-transaction-coordinator"
+ base {
+ archivesName = "kafka-transaction-coordinator"
+ }
sourceSets {
main {
@@ -1409,7 +1459,9 @@ project(':transaction-coordinator') {
}
project(':examples') {
- archivesBaseName = "kafka-examples"
+ base {
+ archivesName = "kafka-examples"
+ }
dependencies {
implementation project(':clients')
@@ -1439,10 +1491,13 @@ project(':generator') {
}
project(':clients') {
- archivesBaseName = "kafka-clients"
+ base {
+ archivesName = "kafka-clients"
+ }
configurations {
generator
+ shadowed
}
dependencies {
@@ -1453,10 +1508,10 @@ project(':clients') {
implementation libs.opentelemetryProto
// libraries which should be added as runtime dependencies in generated pom.xml should be defined here:
- shadow libs.zstd
- shadow libs.lz4
- shadow libs.snappy
- shadow libs.slf4jApi
+ shadowed libs.zstd
+ shadowed libs.lz4
+ shadowed libs.snappy
+ shadowed libs.slf4jApi
compileOnly libs.jacksonDatabind // for SASL/OAUTHBEARER bearer token parsing
compileOnly libs.jacksonJDK8Datatypes
@@ -1509,10 +1564,9 @@ project(':clients') {
// dependencies excluded from the final jar, since they are declared as runtime dependencies
dependencies {
- exclude(dependency(libs.snappy))
- exclude(dependency(libs.zstd))
- exclude(dependency(libs.lz4))
- exclude(dependency(libs.slf4jApi))
+ project.configurations.shadowed.allDependencies.each {
+ exclude(dependency(it.group + ':' + it.name))
+ }
// exclude proto files from the jar
exclude "**/opentelemetry/proto/**/*.proto"
exclude "**/google/protobuf/*.proto"
@@ -1614,7 +1668,9 @@ project(':clients') {
}
project(':raft') {
- archivesBaseName = "kafka-raft"
+ base {
+ archivesName = "kafka-raft"
+ }
configurations {
generator
@@ -1710,7 +1766,9 @@ project(':raft') {
}
project(':server-common') {
- archivesBaseName = "kafka-server-common"
+ base {
+ archivesName = "kafka-server-common"
+ }
dependencies {
api project(':clients')
@@ -1770,7 +1828,9 @@ project(':server-common') {
}
project(':storage:storage-api') {
- archivesBaseName = "kafka-storage-api"
+ base {
+ archivesName = "kafka-storage-api"
+ }
dependencies {
implementation project(':clients')
@@ -1838,7 +1898,9 @@ project(':storage:storage-api') {
}
project(':storage') {
- archivesBaseName = "kafka-storage"
+ base {
+ archivesName = "kafka-storage"
+ }
configurations {
generator
@@ -1857,6 +1919,7 @@ project(':storage') {
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':core')
testImplementation project(':core').sourceSets.test.output
+ testImplementation project(':server')
testImplementation project(':server-common')
testImplementation project(':server-common').sourceSets.test.output
testImplementation libs.hamcrest
@@ -2019,7 +2082,9 @@ project(':s3stream') {
}
project(':tools:tools-api') {
- archivesBaseName = "kafka-tools-api"
+ base {
+ archivesName = "kafka-tools-api"
+ }
dependencies {
implementation project(':clients')
@@ -2074,8 +2139,10 @@ project(':tools:tools-api') {
}
project(':tools') {
- archivesBaseName = "kafka-tools"
- // TODO: compare the kafka and fix the incorrected exclude
+ base {
+ archivesName = "kafka-tools"
+ }
+
dependencies {
implementation (project(':clients')){
exclude group: 'org.slf4j', module: '*'
@@ -2138,6 +2205,7 @@ project(':tools') {
testImplementation project(':connect:runtime')
testImplementation project(':connect:runtime').sourceSets.test.output
testImplementation project(':storage:storage-api').sourceSets.main.output
+ testImplementation project(':group-coordinator')
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.mockitoJunitJupiter // supports MockitoExtension
@@ -2170,7 +2238,9 @@ project(':tools') {
}
project(':trogdor') {
- archivesBaseName = "trogdor"
+ base {
+ archivesName = "trogdor"
+ }
dependencies {
implementation project(':clients')
@@ -2220,7 +2290,9 @@ project(':trogdor') {
}
project(':shell') {
- archivesBaseName = "kafka-shell"
+ base {
+ archivesName = "kafka-shell"
+ }
dependencies {
implementation libs.argparse4j
@@ -2272,7 +2344,10 @@ project(':shell') {
}
project(':streams') {
- archivesBaseName = "kafka-streams"
+ base {
+ archivesName = "kafka-streams"
+ }
+
ext.buildStreamsVersionFileName = "kafka-streams-version.properties"
configurations {
@@ -2292,12 +2367,16 @@ project(':streams') {
testCompileOnly project(':streams:test-utils')
testImplementation project(':clients').sourceSets.test.output
+ testImplementation project(':server')
testImplementation project(':core')
testImplementation project(':tools')
testImplementation project(':core').sourceSets.test.output
testImplementation project(':storage')
+ testImplementation project(':group-coordinator')
+ testImplementation project(':transaction-coordinator')
testImplementation project(':server-common')
testImplementation project(':server-common').sourceSets.test.output
+ testImplementation project(':server')
testImplementation libs.log4j
testImplementation libs.junitJupiter
testImplementation libs.junitVintageEngine
@@ -2429,7 +2508,11 @@ project(':streams') {
project(':streams:streams-scala') {
apply plugin: 'scala'
- archivesBaseName = "kafka-streams-scala_${versions.baseScala}"
+
+ base {
+ archivesName = "kafka-streams-scala_${versions.baseScala}"
+ }
+
dependencies {
api project(':streams')
@@ -2442,6 +2525,7 @@ project(':streams:streams-scala') {
// So we make sure to not include it in the dependencies.
api libs.scalaCollectionCompat
}
+ testImplementation project(':group-coordinator')
testImplementation project(':core')
testImplementation project(':core').sourceSets.test.output
testImplementation project(':server-common').sourceSets.test.output
@@ -2490,7 +2574,9 @@ project(':streams:streams-scala') {
}
project(':streams:test-utils') {
- archivesBaseName = "kafka-streams-test-utils"
+ base {
+ archivesName = "kafka-streams-test-utils"
+ }
dependencies {
api project(':streams')
@@ -2525,7 +2611,9 @@ project(':streams:test-utils') {
}
project(':streams:examples') {
- archivesBaseName = "kafka-streams-examples"
+ base {
+ archivesName = "kafka-streams-examples"
+ }
dependencies {
// this dependency should be removed after we unify data API
@@ -2562,7 +2650,9 @@ project(':streams:examples') {
}
project(':streams:upgrade-system-tests-0100') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-0100"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-0100"
+ }
dependencies {
testImplementation(libs.kafkaStreams_0100) {
@@ -2578,7 +2668,9 @@ project(':streams:upgrade-system-tests-0100') {
}
project(':streams:upgrade-system-tests-0101') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-0101"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-0101"
+ }
dependencies {
testImplementation(libs.kafkaStreams_0101) {
@@ -2594,7 +2686,9 @@ project(':streams:upgrade-system-tests-0101') {
}
project(':streams:upgrade-system-tests-0102') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-0102"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-0102"
+ }
dependencies {
testImplementation libs.kafkaStreams_0102
@@ -2607,7 +2701,9 @@ project(':streams:upgrade-system-tests-0102') {
}
project(':streams:upgrade-system-tests-0110') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-0110"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-0110"
+ }
dependencies {
testImplementation libs.kafkaStreams_0110
@@ -2620,7 +2716,9 @@ project(':streams:upgrade-system-tests-0110') {
}
project(':streams:upgrade-system-tests-10') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-10"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-10"
+ }
dependencies {
testImplementation libs.kafkaStreams_10
@@ -2633,7 +2731,9 @@ project(':streams:upgrade-system-tests-10') {
}
project(':streams:upgrade-system-tests-11') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-11"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-11"
+ }
dependencies {
testImplementation libs.kafkaStreams_11
@@ -2646,7 +2746,9 @@ project(':streams:upgrade-system-tests-11') {
}
project(':streams:upgrade-system-tests-20') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-20"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-20"
+ }
dependencies {
testImplementation libs.kafkaStreams_20
@@ -2659,7 +2761,9 @@ project(':streams:upgrade-system-tests-20') {
}
project(':streams:upgrade-system-tests-21') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-21"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-21"
+ }
dependencies {
testImplementation libs.kafkaStreams_21
@@ -2672,7 +2776,9 @@ project(':streams:upgrade-system-tests-21') {
}
project(':streams:upgrade-system-tests-22') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-22"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-22"
+ }
dependencies {
testImplementation libs.kafkaStreams_22
@@ -2685,7 +2791,9 @@ project(':streams:upgrade-system-tests-22') {
}
project(':streams:upgrade-system-tests-23') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-23"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-23"
+ }
dependencies {
testImplementation libs.kafkaStreams_23
@@ -2698,7 +2806,9 @@ project(':streams:upgrade-system-tests-23') {
}
project(':streams:upgrade-system-tests-24') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-24"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-24"
+ }
dependencies {
testImplementation libs.kafkaStreams_24
@@ -2711,7 +2821,9 @@ project(':streams:upgrade-system-tests-24') {
}
project(':streams:upgrade-system-tests-25') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-25"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-25"
+ }
dependencies {
testImplementation libs.kafkaStreams_25
@@ -2724,7 +2836,9 @@ project(':streams:upgrade-system-tests-25') {
}
project(':streams:upgrade-system-tests-26') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-26"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-26"
+ }
dependencies {
testImplementation libs.kafkaStreams_26
@@ -2737,7 +2851,9 @@ project(':streams:upgrade-system-tests-26') {
}
project(':streams:upgrade-system-tests-27') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-27"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-27"
+ }
dependencies {
testImplementation libs.kafkaStreams_27
@@ -2750,7 +2866,9 @@ project(':streams:upgrade-system-tests-27') {
}
project(':streams:upgrade-system-tests-28') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-28"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-28"
+ }
dependencies {
testImplementation libs.kafkaStreams_28
@@ -2763,7 +2881,9 @@ project(':streams:upgrade-system-tests-28') {
}
project(':streams:upgrade-system-tests-30') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-30"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-30"
+ }
dependencies {
testImplementation libs.kafkaStreams_30
@@ -2776,7 +2896,9 @@ project(':streams:upgrade-system-tests-30') {
}
project(':streams:upgrade-system-tests-31') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-31"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-31"
+ }
dependencies {
testImplementation libs.kafkaStreams_31
@@ -2789,7 +2911,9 @@ project(':streams:upgrade-system-tests-31') {
}
project(':streams:upgrade-system-tests-32') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-32"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-32"
+ }
dependencies {
testImplementation libs.kafkaStreams_32
@@ -2802,7 +2926,9 @@ project(':streams:upgrade-system-tests-32') {
}
project(':streams:upgrade-system-tests-33') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-33"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-33"
+ }
dependencies {
testImplementation libs.kafkaStreams_33
@@ -2815,7 +2941,9 @@ project(':streams:upgrade-system-tests-33') {
}
project(':streams:upgrade-system-tests-34') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-34"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-34"
+ }
dependencies {
testImplementation libs.kafkaStreams_34
@@ -2828,7 +2956,9 @@ project(':streams:upgrade-system-tests-34') {
}
project(':streams:upgrade-system-tests-35') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-35"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-35"
+ }
dependencies {
testImplementation libs.kafkaStreams_35
@@ -2841,7 +2971,9 @@ project(':streams:upgrade-system-tests-35') {
}
project(':streams:upgrade-system-tests-36') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-36"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-36"
+ }
dependencies {
testImplementation libs.kafkaStreams_36
@@ -2854,7 +2986,9 @@ project(':streams:upgrade-system-tests-36') {
}
project(':streams:upgrade-system-tests-37') {
- archivesBaseName = "kafka-streams-upgrade-system-tests-37"
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-37"
+ }
dependencies {
testImplementation libs.kafkaStreams_37
@@ -2880,6 +3014,8 @@ project(':jmh-benchmarks') {
exclude group: 'net.sf.jopt-simple', module: 'jopt-simple'
}
implementation project(':server-common')
+ implementation project(':server')
+ implementation project(':raft')
implementation project(':clients')
implementation project(':group-coordinator')
implementation project(':metadata')
@@ -2937,7 +3073,9 @@ project(':jmh-benchmarks') {
}
project(':log4j-appender') {
- archivesBaseName = "kafka-log4j-appender"
+ base {
+ archivesName = "kafka-log4j-appender"
+ }
dependencies {
implementation project(':clients')
@@ -2956,7 +3094,9 @@ project(':log4j-appender') {
}
project(':connect:api') {
- archivesBaseName = "connect-api"
+ base {
+ archivesName = "connect-api"
+ }
dependencies {
api project(':clients')
@@ -2991,7 +3131,9 @@ project(':connect:api') {
}
project(':connect:transforms') {
- archivesBaseName = "connect-transforms"
+ base {
+ archivesName = "connect-transforms"
+ }
dependencies {
api project(':connect:api')
@@ -3027,7 +3169,9 @@ project(':connect:transforms') {
}
project(':connect:json') {
- archivesBaseName = "connect-json"
+ base {
+ archivesName = "connect-json"
+ }
dependencies {
api project(':connect:api')
@@ -3037,7 +3181,7 @@ project(':connect:json') {
api libs.jacksonAfterburner
implementation libs.slf4jApi
-
+
testImplementation libs.junitJupiter
testRuntimeOnly libs.slf4jlog4j
@@ -3071,7 +3215,9 @@ project(':connect:runtime') {
swagger
}
- archivesBaseName = "connect-runtime"
+ base {
+ archivesName = "connect-runtime"
+ }
dependencies {
// connect-runtime is used in tests, use `api` for modules below for backwards compatibility even though
@@ -3107,6 +3253,8 @@ project(':connect:runtime') {
testImplementation project(':metadata')
testImplementation project(':core').sourceSets.test.output
testImplementation project(':server-common')
+ testImplementation project(':server')
+ testImplementation project(':group-coordinator')
testImplementation project(':storage')
testImplementation project(':connect:test-plugins')
@@ -3209,7 +3357,9 @@ project(':connect:runtime') {
}
project(':connect:file') {
- archivesBaseName = "connect-file"
+ base {
+ archivesName = "connect-file"
+ }
dependencies {
implementation project(':connect:api')
@@ -3249,7 +3399,9 @@ project(':connect:file') {
}
project(':connect:basic-auth-extension') {
- archivesBaseName = "connect-basic-auth-extension"
+ base {
+ archivesName = "connect-basic-auth-extension"
+ }
dependencies {
implementation project(':connect:api')
@@ -3289,7 +3441,9 @@ project(':connect:basic-auth-extension') {
}
project(':connect:mirror') {
- archivesBaseName = "connect-mirror"
+ base {
+ archivesName = "connect-mirror"
+ }
dependencies {
implementation project(':connect:api')
@@ -3319,6 +3473,7 @@ project(':connect:mirror') {
testImplementation project(':connect:runtime').sourceSets.test.output
testImplementation project(':core')
testImplementation project(':core').sourceSets.test.output
+ testImplementation project(':server')
testRuntimeOnly project(':connect:runtime')
testRuntimeOnly libs.slf4jlog4j
@@ -3376,7 +3531,9 @@ project(':connect:mirror') {
}
project(':connect:mirror-client') {
- archivesBaseName = "connect-mirror-client"
+ base {
+ archivesName = "connect-mirror-client"
+ }
dependencies {
implementation project(':clients')
@@ -3411,7 +3568,9 @@ project(':connect:mirror-client') {
}
project(':connect:test-plugins') {
- archivesBaseName = "connect-test-plugins"
+ base {
+ archivesName = "connect-test-plugins"
+ }
dependencies {
api project(':connect:api')
@@ -3428,16 +3587,4 @@ task aggregatedJavadoc(type: Javadoc, dependsOn: compileJava) {
classpath = files(projectsWithJavadoc.collect { it.sourceSets.main.compileClasspath })
includes = projectsWithJavadoc.collectMany { it.javadoc.getIncludes() }
excludes = projectsWithJavadoc.collectMany { it.javadoc.getExcludes() }
-
- options.charSet = 'UTF-8'
- options.docEncoding = 'UTF-8'
- options.encoding = 'UTF-8'
- // Turn off doclint for now, see https://blog.joda.org/2014/02/turning-off-doclint-in-jdk-8-javadoc.html for rationale
- options.addStringOption('Xdoclint:none', '-quiet')
-
- // The URL structure was changed to include the locale after Java 8
- if (JavaVersion.current().isJava11Compatible())
- options.links "https://docs.oracle.com/en/java/javase/${JavaVersion.current().majorVersion}/docs/api/"
- else
- options.links "https://docs.oracle.com/javase/8/docs/api/"
}
diff --git a/checkstyle/import-control-core.xml b/checkstyle/import-control-core.xml
index 782d2fe461..ed6c53a322 100644
--- a/checkstyle/import-control-core.xml
+++ b/checkstyle/import-control-core.xml
@@ -37,7 +37,7 @@
-
+
@@ -57,6 +57,7 @@
+
@@ -106,6 +107,8 @@
+
+
@@ -115,4 +118,17 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/checkstyle/import-control-group-coordinator.xml b/checkstyle/import-control-group-coordinator.xml
new file mode 100644
index 0000000000..51a94efb7f
--- /dev/null
+++ b/checkstyle/import-control-group-coordinator.xml
@@ -0,0 +1,82 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/checkstyle/import-control-jmh-benchmarks.xml b/checkstyle/import-control-jmh-benchmarks.xml
index 1160e3f67d..ddbf22e210 100644
--- a/checkstyle/import-control-jmh-benchmarks.xml
+++ b/checkstyle/import-control-jmh-benchmarks.xml
@@ -47,6 +47,7 @@
+
@@ -55,6 +56,7 @@
+
diff --git a/checkstyle/import-control-metadata.xml b/checkstyle/import-control-metadata.xml
index 897932492b..9e549776af 100644
--- a/checkstyle/import-control-metadata.xml
+++ b/checkstyle/import-control-metadata.xml
@@ -119,6 +119,7 @@
+
@@ -172,18 +173,22 @@
-
-
-
+
+
+
+
+
+
+
diff --git a/checkstyle/import-control-server-common.xml b/checkstyle/import-control-server-common.xml
index 2c5c652e97..24a9cd3440 100644
--- a/checkstyle/import-control-server-common.xml
+++ b/checkstyle/import-control-server-common.xml
@@ -105,15 +105,19 @@
+
-
+
+
+
+
diff --git a/checkstyle/import-control-server.xml b/checkstyle/import-control-server.xml
index 0fad8c1627..02bf92088f 100644
--- a/checkstyle/import-control-server.xml
+++ b/checkstyle/import-control-server.xml
@@ -46,6 +46,8 @@
+
+
@@ -82,6 +84,9 @@
+
+
+
diff --git a/checkstyle/import-control-storage.xml b/checkstyle/import-control-storage.xml
index d8c5e287d3..623f2c6f45 100644
--- a/checkstyle/import-control-storage.xml
+++ b/checkstyle/import-control-storage.xml
@@ -53,6 +53,7 @@
+
@@ -65,6 +66,7 @@
+
@@ -73,9 +75,10 @@
+
-
+
@@ -111,6 +114,7 @@
+
diff --git a/checkstyle/import-control.xml b/checkstyle/import-control.xml
index 8bbf572821..71c8e0dd4b 100644
--- a/checkstyle/import-control.xml
+++ b/checkstyle/import-control.xml
@@ -76,8 +76,10 @@
-
+
+
+
@@ -138,6 +140,7 @@
+
@@ -166,6 +169,7 @@
+
@@ -207,6 +211,11 @@
+
+
+
+
+
@@ -234,35 +243,6 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
@@ -303,6 +283,7 @@
+
@@ -323,11 +304,13 @@
-
+
+
+
@@ -413,6 +396,9 @@
+
+
+
@@ -451,13 +437,15 @@
+
+
+
-
@@ -468,8 +456,9 @@
-
+
+
@@ -483,6 +472,9 @@
+
+
+
@@ -532,6 +524,7 @@
+
@@ -595,6 +588,7 @@
+
@@ -605,6 +599,7 @@
+
@@ -644,6 +639,7 @@
+
diff --git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml
index e1b21aa201..5d78a0e577 100644
--- a/checkstyle/suppressions.xml
+++ b/checkstyle/suppressions.xml
@@ -43,11 +43,14 @@
+
+
+ files="(Microbenchmarks|SaslServerAuthenticator).java"/>
+ files="(Utils|Topic|Lz4BlockOutputStream|AclData|JoinGroupRequest).java"/>
@@ -100,7 +103,7 @@
files="(AbstractRequest|AbstractResponse|KerberosLogin|WorkerSinkTaskTest|TransactionManagerTest|SenderTest|KafkaAdminClient|ConsumerCoordinatorTest|KafkaAdminClientTest|KafkaRaftClientTest).java"/>
+ files="(ConsumerCoordinator|BufferPool|MetricName|Node|ConfigDef|RecordBatch|SslFactory|SslTransportLayer|MetadataResponse|KerberosLogin|Selector|Sender|Serdes|TokenInformation|Agent|PluginUtils|MiniTrogdorCluster|TasksRequest|KafkaProducer|AbstractStickyAssignor|KafkaRaftClient|Authorizer|FetchSessionHandler|RecordAccumulator|Shell).java"/>
@@ -144,7 +147,7 @@
+ files="(DistributedHerder|DistributedConfig|KafkaConfigBackingStore|IncrementalCooperativeAssignor).java"/>
+ files="(JsonConverter|ConnectHeaders).java"/>
+ files="(KafkaConfigBackingStore|ConnectMetricsRegistry).java"/>
@@ -348,7 +351,7 @@
+ files="(GroupMetadataManager|GroupMetadataManagerTest).java"/>
diff --git a/clients/src/main/java/org/apache/kafka/clients/FetchSessionHandler.java b/clients/src/main/java/org/apache/kafka/clients/FetchSessionHandler.java
--- a/clients/src/main/java/org/apache/kafka/clients/FetchSessionHandler.java
+++ b/clients/src/main/java/org/apache/kafka/clients/FetchSessionHandler.java
private String partitionsToLogString(Collection<TopicPartition> partitions) {
if (!log.isTraceEnabled()) {
return String.format("%d partition(s)", partitions.size());
}
- return "(" + Utils.join(partitions, ", ") + ")";
+ return "(" + partitions.stream().map(TopicPartition::toString).collect(Collectors.joining(", ")) + ")";
}
private String topicIdPartitionsToLogString(Collection<TopicIdPartition> partitions) {
if (!log.isTraceEnabled()) {
return String.format("%d partition(s)", partitions.size());
}
- return "(" + Utils.join(partitions, ", ") + ")";
+ return "(" + partitions.stream().map(TopicIdPartition::toString).collect(Collectors.joining(", ")) + ")";
}
/**
@@ -438,16 +438,16 @@ String verifyFullFetchResponsePartitions(Set<TopicPartition> topicPartitions, Se
extraIds = findMissing(ids, sessionTopicNames.keySet());
}
if (!omitted.isEmpty()) {
- bld.append("omittedPartitions=(").append(Utils.join(omitted, ", ")).append("), ");
+ bld.append("omittedPartitions=(").append(omitted.stream().map(TopicPartition::toString).collect(Collectors.joining(", "))).append("), ");
}
if (!extra.isEmpty()) {
- bld.append("extraPartitions=(").append(Utils.join(extra, ", ")).append("), ");
+ bld.append("extraPartitions=(").append(extra.stream().map(TopicPartition::toString).collect(Collectors.joining(", "))).append("), ");
}
if (!extraIds.isEmpty()) {
- bld.append("extraIds=(").append(Utils.join(extraIds, ", ")).append("), ");
+ bld.append("extraIds=(").append(extraIds.stream().map(Uuid::toString).collect(Collectors.joining(", "))).append("), ");
}
if ((!omitted.isEmpty()) || (!extra.isEmpty()) || (!extraIds.isEmpty())) {
- bld.append("response=(").append(Utils.join(topicPartitions, ", ")).append(")");
+ bld.append("response=(").append(topicPartitions.stream().map(TopicPartition::toString).collect(Collectors.joining(", "))).append(")");
return bld.toString();
}
return null;
@@ -470,11 +470,11 @@ String verifyIncrementalFetchResponsePartitions(Set<TopicPartition> topicPartiti
findMissing(topicPartitions, sessionPartitions.keySet());
StringBuilder bld = new StringBuilder();
if (!extra.isEmpty())
- bld.append("extraPartitions=(").append(Utils.join(extra, ", ")).append("), ");
+ bld.append("extraPartitions=(").append(extra.stream().map(TopicPartition::toString).collect(Collectors.joining(", "))).append("), ");
if (!extraIds.isEmpty())
- bld.append("extraIds=(").append(Utils.join(extraIds, ", ")).append("), ");
+ bld.append("extraIds=(").append(extraIds.stream().map(Uuid::toString).collect(Collectors.joining(", "))).append("), ");
if ((!extra.isEmpty()) || (!extraIds.isEmpty())) {
- bld.append("response=(").append(Utils.join(topicPartitions, ", ")).append(")");
+ bld.append("response=(").append(topicPartitions.stream().map(TopicPartition::toString).collect(Collectors.joining(", "))).append(")");
return bld.toString();
}
return null;
@@ -499,7 +499,7 @@ private String responseDataToLogString(Set<TopicPartition> topicPartitions) {
}
StringBuilder bld = new StringBuilder();
bld.append(" with response=(").
- append(Utils.join(topicPartitions, ", ")).
+ append(topicPartitions.stream().map(TopicPartition::toString).collect(Collectors.joining(", "))).
append(")");
String prefix = ", implied=(";
String suffix = "";
@@ -599,7 +599,9 @@ public boolean handleResponse(FetchResponse response, short version) {
* The client will initiate the session close on next fetch request.
*/
public void notifyClose() {
- log.debug("Set the metadata for next fetch request to close the existing session ID={}", nextMetadata.sessionId());
+ if (log.isDebugEnabled()) {
+ log.debug("Set the metadata for next fetch request to close the existing session ID={}", nextMetadata.sessionId());
+ }
nextMetadata = nextMetadata.nextCloseExisting();
}
diff --git a/clients/src/main/java/org/apache/kafka/clients/InFlightRequests.java b/clients/src/main/java/org/apache/kafka/clients/InFlightRequests.java
index 4235be5c8d..2bd70206fb 100644
--- a/clients/src/main/java/org/apache/kafka/clients/InFlightRequests.java
+++ b/clients/src/main/java/org/apache/kafka/clients/InFlightRequests.java
@@ -44,11 +44,7 @@ public InFlightRequests(int maxInFlightRequestsPerConnection) {
*/
public void add(NetworkClient.InFlightRequest request) {
String destination = request.destination;
- Deque<NetworkClient.InFlightRequest> reqs = this.requests.get(destination);
- if (reqs == null) {
- reqs = new ArrayDeque<>();
- this.requests.put(destination, reqs);
- }
+ Deque<NetworkClient.InFlightRequest> reqs = this.requests.computeIfAbsent(destination, k -> new ArrayDeque<>());
reqs.addFirst(request);
inFlightRequestCount.incrementAndGet();
}
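
A standalone sketch of the `computeIfAbsent` idiom the hunk above adopts (illustrative types; the real map holds deques of `NetworkClient.InFlightRequest`):

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;

public class ComputeIfAbsentSketch {
    private final Map<String, Deque<String>> requests = new HashMap<>();

    void add(String destination, String request) {
        // One call replaces the get/null-check/put sequence; the deque is
        // allocated lazily, only for destinations that actually see a request.
        Deque<String> reqs = requests.computeIfAbsent(destination, k -> new ArrayDeque<>());
        reqs.addFirst(request);
    }
}
```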
diff --git a/clients/src/main/java/org/apache/kafka/clients/KafkaClient.java b/clients/src/main/java/org/apache/kafka/clients/KafkaClient.java
index 18a7eefe20..a03d57b40f 100644
--- a/clients/src/main/java/org/apache/kafka/clients/KafkaClient.java
+++ b/clients/src/main/java/org/apache/kafka/clients/KafkaClient.java
@@ -70,7 +70,7 @@ public interface KafkaClient extends Closeable {
/**
* Check if the connection of the node has failed, based on the connection state. Such connection failure are
- * usually transient and can be resumed in the next {@link #ready(org.apache.kafka.common.Node, long)} }
+ * usually transient and can be resumed in the next {@link #ready(org.apache.kafka.common.Node, long)}
* call, but there are cases where transient failures needs to be caught and re-acted upon.
*
* @param node the node to check
diff --git a/clients/src/main/java/org/apache/kafka/clients/NodeApiVersions.java b/clients/src/main/java/org/apache/kafka/clients/NodeApiVersions.java
index e1a4b87905..838718652f 100644
--- a/clients/src/main/java/org/apache/kafka/clients/NodeApiVersions.java
+++ b/clients/src/main/java/org/apache/kafka/clients/NodeApiVersions.java
@@ -185,7 +185,7 @@ public String toString(boolean lineBreaks) {
bld.append("(");
if (lineBreaks)
bld.append("\n\t");
- bld.append(Utils.join(apiKeysText.values(), separator));
+ bld.append(String.join(separator, apiKeysText.values()));
if (lineBreaks)
bld.append("\n");
bld.append(")");
diff --git a/clients/src/main/java/org/apache/kafka/clients/admin/Admin.java b/clients/src/main/java/org/apache/kafka/clients/admin/Admin.java
index dbd0124c00..0beef4edb2 100644
--- a/clients/src/main/java/org/apache/kafka/clients/admin/Admin.java
+++ b/clients/src/main/java/org/apache/kafka/clients/admin/Admin.java
@@ -764,7 +764,7 @@ default CreateDelegationTokenResult createDelegationToken() {
*
*
* @param options The options to use when creating delegation token.
- * @return The DeleteRecordsResult.
+ * @return The CreateDelegationTokenResult.
*/
CreateDelegationTokenResult createDelegationToken(CreateDelegationTokenOptions options);
diff --git a/clients/src/main/java/org/apache/kafka/clients/admin/AdminClient.java b/clients/src/main/java/org/apache/kafka/clients/admin/AdminClient.java
index 75f1c5f10d..989a6fa5d3 100644
--- a/clients/src/main/java/org/apache/kafka/clients/admin/AdminClient.java
+++ b/clients/src/main/java/org/apache/kafka/clients/admin/AdminClient.java
@@ -25,7 +25,7 @@
*
* Client code should use the newer {@link Admin} interface in preference to this class.
*
- * This class may be removed in a later release, but has not be marked as deprecated to avoid unnecessary noise.
+ * This class may be removed in a later release, but has not been marked as deprecated to avoid unnecessary noise.
*/
public abstract class AdminClient implements Admin {
diff --git a/clients/src/main/java/org/apache/kafka/clients/admin/ConsumerGroupDescription.java b/clients/src/main/java/org/apache/kafka/clients/admin/ConsumerGroupDescription.java
index a9d555cb39..13ec5965ee 100644
--- a/clients/src/main/java/org/apache/kafka/clients/admin/ConsumerGroupDescription.java
+++ b/clients/src/main/java/org/apache/kafka/clients/admin/ConsumerGroupDescription.java
@@ -21,13 +21,13 @@
import org.apache.kafka.common.GroupType;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.acl.AclOperation;
-import org.apache.kafka.common.utils.Utils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Objects;
import java.util.Set;
+import java.util.stream.Collectors;
/**
* A detailed description of a single consumer group in the cluster.
@@ -161,7 +161,7 @@ public Set<AclOperation> authorizedOperations() {
public String toString() {
return "(groupId=" + groupId +
", isSimpleConsumerGroup=" + isSimpleConsumerGroup +
- ", members=" + Utils.join(members, ",") +
+ ", members=" + members.stream().map(MemberDescription::toString).collect(Collectors.joining(",")) +
", partitionAssignor=" + partitionAssignor +
", type=" + type +
", state=" + state +
diff --git a/clients/src/main/java/org/apache/kafka/clients/admin/CreateTopicsResult.java b/clients/src/main/java/org/apache/kafka/clients/admin/CreateTopicsResult.java
index 100e996b5d..0d065d7bd5 100644
--- a/clients/src/main/java/org/apache/kafka/clients/admin/CreateTopicsResult.java
+++ b/clients/src/main/java/org/apache/kafka/clients/admin/CreateTopicsResult.java
@@ -46,7 +46,7 @@ protected CreateTopicsResult(Map<String, KafkaFuture<TopicMetadataAndConfig>> fu
*/
public Map<String, KafkaFuture<Void>> values() {
return futures.entrySet().stream()
- .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().thenApply(v -> (Void) null)));
+ .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().thenApply(v -> null)));
}
/**
diff --git a/clients/src/main/java/org/apache/kafka/clients/admin/DescribeConfigsResult.java b/clients/src/main/java/org/apache/kafka/clients/admin/DescribeConfigsResult.java
index 653c97d905..b2fc40f403 100644
--- a/clients/src/main/java/org/apache/kafka/clients/admin/DescribeConfigsResult.java
+++ b/clients/src/main/java/org/apache/kafka/clients/admin/DescribeConfigsResult.java
@@ -53,21 +53,18 @@ public Map<ConfigResource, KafkaFuture<Config>> values() {
*/
public KafkaFuture