diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 09cd85e..b20e2e0 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -25,7 +25,7 @@ jobs:
           java-version: 17
 
       - name: Setup Gradle
-        uses: gradle/gradle-build-action@v2
+        uses: gradle/actions/setup-gradle@v3
 
       - name: Compile code
         run: ./gradlew assemble
@@ -65,14 +65,14 @@ jobs:
         uses: docker/setup-qemu-action@v2
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
 
       - name: Cache layers
         id: cache-buildx
         uses: actions/cache@v3
         with:
           path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-s3-${{ hashFiles('Dockerfile.s3', 'src/**', '*.gradle') }}
+          key: ${{ runner.os }}-buildx-s3-${{ hashFiles('./docker/legacy/Dockerfile.s3', 'src/**', '*.gradle') }}
           restore-keys: |
             ${{ runner.os }}-buildx-s3-
             ${{ runner.os }}-buildx-
@@ -89,10 +89,10 @@ jobs:
           fi
 
       - name: Build backend docker
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v6
        with:
          context: .
-          file: ./Dockerfile.s3
+          file: ./docker/legacy/Dockerfile.s3
          platforms: linux/amd64,linux/arm64
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: ${{ steps.cache-parameters.outputs.cache-to }}
@@ -104,10 +104,10 @@ jobs:
 
       # will use the internal cache from the previous build step, and load it into the current memory
       - name: Build backend docker locally
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v6
        with:
          context: ./
-          file: ./Dockerfile.s3
+          file: ./docker/legacy/Dockerfile.s3
          cache-from: ${{ steps.cache-parameters.outputs.load-cache-from }}
          load: true
          tags: ${{ steps.docker_meta.outputs.tags }}
@@ -154,14 +154,14 @@ jobs:
         uses: docker/setup-qemu-action@v2
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
 
       - name: Cache frontend layers
         id: cache-buildx
         uses: actions/cache@v3
         with:
           path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-generic-${{ hashFiles('Dockerfile', 'src/**', '*.gradle') }}
+          key: ${{ runner.os }}-buildx-generic-${{ hashFiles('./docker/legacy/Dockerfile', 'src/**', '*.gradle') }}
           restore-keys: |
             ${{ runner.os }}-buildx-generic-
             ${{ runner.os }}-buildx-
@@ -178,7 +178,7 @@ jobs:
           fi
 
       - name: Build docker
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v6
         with:
           context: .
           platforms: linux/amd64,linux/arm64
@@ -191,7 +191,7 @@ jobs:
             ${{ steps.docker_meta.outputs.labels }}
 
       - name: Build docker locally
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v6
         with:
           context: .
           cache-from: ${{ steps.cache-parameters.outputs.load-cache-from }}
diff --git a/.github/workflows/release-dockerhub.yml b/.github/workflows/release-dockerhub.yml
new file mode 100644
index 0000000..589358a
--- /dev/null
+++ b/.github/workflows/release-dockerhub.yml
@@ -0,0 +1,93 @@
+# Build and push release Docker images to DockerHub
+name: Release DockerHub
+
+on:
+  release:
+    types: [published]
+
+env:
+  DOCKER_IMAGE: radarbase/kafka-connect-transform-keyvalue
+  DOCKER_IMAGE_S3: radarbase/kafka-connect-transform-s3
+
+jobs:
+
+  # Build and push tagged release docker image
+  docker:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      # Add Docker labels and tags
+      - name: Docker meta
+        id: docker_meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.DOCKER_IMAGE }}
+          tags: |
+            type=match,pattern=v(.*),group=1
+
+      # Add Docker labels and tags
+      - name: Docker meta S3
+        id: docker_meta_s3
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.DOCKER_IMAGE_S3 }}
+          tags: |
+            type=match,pattern=v(.*),group=1
+
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          file: docker/legacy/Dockerfile
+          push: true
+          tags: ${{ steps.docker_meta.outputs.tags }}
+          platforms: linux/arm64,linux/amd64
+          # Use runtime labels from docker_meta as well as fixed labels
+          labels: |
+            ${{ steps.docker_meta.outputs.labels }}
+            maintainer=Pim van Nierop
+            org.opencontainers.image.authors=Pim van Nierop, Pauline Conde
+            org.opencontainers.image.description=Key-value transformation for Kafka Connect
+            org.opencontainers.image.vendor=RADAR-base
+            org.opencontainers.image.licenses=Apache-2.0
+
+      - name: Build and push S3
+        id: docker_build_s3
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          file: docker/legacy/Dockerfile.s3
+          push: true
+          platforms: linux/arm64,linux/amd64
+          tags: ${{ steps.docker_meta_s3.outputs.tags }}
+          # Use runtime labels from docker_meta as well as fixed labels
+          labels: |
+            ${{ steps.docker_meta_s3.outputs.labels }}
+            maintainer=Pim van Nierop
+            org.opencontainers.image.authors=Pim van Nierop, Pauline Conde
+            org.opencontainers.image.description=Key-value transformation for Kafka Connect, with S3 connector loaded
+            org.opencontainers.image.vendor=RADAR-base
+            org.opencontainers.image.licenses=Apache-2.0
+
+      - name: Inspect image
+        run: |
+          docker pull ${{ env.DOCKER_IMAGE }}:${{ steps.docker_meta.outputs.version }}
+          docker image inspect ${{ env.DOCKER_IMAGE }}:${{ steps.docker_meta.outputs.version }}
+
+      - name: Inspect image S3
+        run: |
+          docker pull ${{ env.DOCKER_IMAGE_S3 }}:${{ steps.docker_meta_s3.outputs.version }}
+          docker image inspect ${{ env.DOCKER_IMAGE_S3 }}:${{ steps.docker_meta_s3.outputs.version }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 0f4bbb2..2897063 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,4 +1,4 @@
-# Create release files
+# See: https://docs.github.com/en/actions/publishing-packages/publishing-docker-images#publishing-images-to-github-packages
 name: Release
 
 on:
@@ -6,77 +6,71 @@ on:
   types: [published]
 
 env:
-  DOCKER_IMAGE: radarbase/kafka-connect-transform-keyvalue
-  DOCKER_IMAGE_S3: radarbase/kafka-connect-transform-s3
+  REGISTRY: ghcr.io
+  DOCKER_IMAGE: radarbase/kafka-connect-transform-keyvalue-strimzi
+  DOCKER_IMAGE_S3: radarbase/kafka-connect-transform-s3-strimzi
 
 jobs:
+  # Upload jars to the GitHub release
   upload:
-    # The type of runner that the job will run on
     runs-on: ubuntu-latest
 
-    # Steps represent a sequence of tasks that will be executed as part of the job
     steps:
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
       - uses: actions/checkout@v3
       - uses: actions/setup-java@v3
         with:
-          distribution: zulu
-          java-version: 11
+          distribution: temurin
+          java-version: 17
 
       - name: Setup Gradle
-        uses: gradle/gradle-build-action@v2
+        uses: gradle/actions/setup-gradle@v3
 
-      # Compile code
       - name: Compile code
         run: ./gradlew assemble
 
-      # Upload it to GitHub
       - name: Upload to GitHub
-        uses: AButler/upload-release-assets@v2.0.2
+        uses: AButler/upload-release-assets@v3.0
        with:
          files: 'build/libs/*;build/distributions/*'
          repo-token: ${{ secrets.GITHUB_TOKEN }}
 
  # Build and push tagged release docker image
  docker:
-    # The type of runner that the job will run on
    runs-on: ubuntu-latest
 
-    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      - uses: actions/checkout@v3
 
-      # Add Docker labels and tags
+      - name: Log in to the Container registry
+        uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
      - name: Docker meta
        id: docker_meta
        uses: docker/metadata-action@v4
        with:
-          images: ${{ env.DOCKER_IMAGE }}
+          images: ${{ env.REGISTRY }}/${{ env.DOCKER_IMAGE }}
          tags: |
            type=match,pattern=v(.*),group=1
 
-      # Add Docker labels and tags
      - name: Docker meta S3
        id: docker_meta_s3
        uses: docker/metadata-action@v4
        with:
-          images: ${{ env.DOCKER_IMAGE_S3 }}
+          images: ${{ env.REGISTRY }}/${{ env.DOCKER_IMAGE_S3 }}
          tags: |
            type=match,pattern=v(.*),group=1
 
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
      - name: Set up Docker Buildx
        id: buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
 
       - name: Build and push
         id: docker_build
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v6
         with:
           context: .
           push: true
@@ -85,36 +79,37 @@ jobs:
           # Use runtime labels from docker_meta as well as fixed labels
           labels: |
             ${{ steps.docker_meta.outputs.labels }}
-            maintainer=Joris Borgdorff
-            org.opencontainers.image.authors=Joris Borgdorff , Pauline Conde
-            org.opencontainers.image.description=Key-value transformation for Kafka Connect, with S3 connector loaded
+            maintainer=Pim van Nierop
+            org.opencontainers.image.authors=Pim van Nierop, Pauline Conde
+            org.opencontainers.image.description=Key-value transformation for Kafka Connect
             org.opencontainers.image.vendor=RADAR-base
             org.opencontainers.image.licenses=Apache-2.0
 
       - name: Build and push S3
         id: docker_build_s3
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v6
         with:
           context: .
           file: Dockerfile.s3
           push: true
           platforms: linux/arm64,linux/amd64
           tags: ${{ steps.docker_meta_s3.outputs.tags }}
+          # Use runtime labels from docker_meta as well as fixed labels
           labels: |
             ${{ steps.docker_meta_s3.outputs.labels }}
-            maintainer=Joris Borgdorff , Pauline Conde
-            org.opencontainers.image.authors=Joris Borgdorff , Pauline Conde
-            org.opencontainers.image.description=Key-value transformation for Kafka Connect
+            maintainer=Pim van Nierop
+            org.opencontainers.image.authors=Pim van Nierop, Pauline Conde
+            org.opencontainers.image.description=Key-value transformation for Kafka Connect, with S3 connector loaded
             org.opencontainers.image.vendor=RADAR-base
             org.opencontainers.image.licenses=Apache-2.0
 
       - name: Inspect image
         run: |
-          docker pull ${{ env.DOCKER_IMAGE }}:${{ steps.docker_meta.outputs.version }}
-          docker image inspect ${{ env.DOCKER_IMAGE }}:${{ steps.docker_meta.outputs.version }}
+          docker pull ${{ env.REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.docker_meta.outputs.version }}
+          docker image inspect ${{ env.REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.docker_meta.outputs.version }}
 
       - name: Inspect image S3
         run: |
-          docker pull ${{ env.DOCKER_IMAGE_S3 }}:${{ steps.docker_meta_s3.outputs.version }}
-          docker image inspect ${{ env.DOCKER_IMAGE_S3 }}:${{ steps.docker_meta_s3.outputs.version }}
+          docker pull ${{ env.REGISTRY }}/${{ env.DOCKER_IMAGE_S3 }}:${{ steps.docker_meta_s3.outputs.version }}
+          docker image inspect ${{ env.REGISTRY }}/${{ env.DOCKER_IMAGE_S3 }}:${{ steps.docker_meta_s3.outputs.version }}
diff --git a/Dockerfile b/Dockerfile
index 55552da..015c114 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-ARG BASE_IMAGE=confluentinc/cp-kafka-connect-base:7.8.1
+ARG BASE_IMAGE=quay.io/strimzi/kafka:0.46.0-kafka-3.9.0
 
 FROM --platform=$BUILDPLATFORM gradle:8.9-jdk17 AS builder
 
@@ -9,8 +9,12 @@ RUN gradle jar --no-watch-fs
 
 FROM ${BASE_IMAGE}
 
-ENV WAIT_FOR_KAFKA="1"
+ENV CONNECT_PLUGIN_PATH=/opt/kafka/plugins
 
-COPY --from=builder /code/build/libs/kafka-connect-transform-keyvalue*.jar /usr/share/"${COMPONENT}"/plugins/
-COPY ./src/main/docker/launch /etc/confluent/docker/launch
-COPY ./src/main/docker/kafka-wait /usr/bin/kafka-wait
+COPY --from=builder /code/build/libs/kafka-connect-transform-keyvalue*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-transform-keyvalue/
+
+USER 1001
+
+COPY --chown=1001:1001 ./docker/ensure /opt/kafka/ensure
+COPY --chown=1001:1001 ./docker/kafka_connect_run.sh /opt/kafka/kafka_connect_run.sh
+RUN chmod +x /opt/kafka/ensure /opt/kafka/kafka_connect_run.sh
diff --git a/Dockerfile.s3 b/Dockerfile.s3
index b9a6d02..f933ef2 100644
--- a/Dockerfile.s3
+++ b/Dockerfile.s3
@@ -1,4 +1,14 @@
-ARG BASE_IMAGE=confluentinc/cp-kafka-connect-base:7.8.1
+ARG BASE_IMAGE=quay.io/strimzi/kafka:0.46.0-kafka-3.9.0
+
+FROM confluentinc/cp-kafka-connect:8.0.0 AS hub
+
+ARG KAFKA_CONNECT_S3_VERSION=10.6.7
+# Version of the Confluent components used by the s3 connector plugin. When upgrading the s3-connector plugin version, this version should be updated to the corresponding version as well.
+ARG CONFLUENT_VERSION=7.7.2
+
+RUN mkdir -p /tmp/deps/kafka-connect-s3/ /tmp/deps/kafka-connect-avro-converter/
+RUN confluent-hub install --no-prompt --component-dir /tmp/deps/kafka-connect-s3/ confluentinc/kafka-connect-s3:${KAFKA_CONNECT_S3_VERSION}
+RUN confluent-hub install --no-prompt --component-dir /tmp/deps/kafka-connect-avro-converter/ confluentinc/kafka-connect-avro-converter:${CONFLUENT_VERSION}
 
 FROM --platform=$BUILDPLATFORM gradle:8.9-jdk17 AS builder
 
@@ -9,13 +19,16 @@ RUN gradle jar --no-watch-fs
 
 FROM ${BASE_IMAGE}
 
-ENV WAIT_FOR_KAFKA="1"
-ARG KAFKA_CONNECT_S3_VERSION="10.5.23"
+USER root
 
-COPY --from=builder /code/build/libs/kafka-connect-transform-keyvalue*.jar /usr/share/"${COMPONENT}"/plugins/
-COPY ./src/main/docker/launch /etc/confluent/docker/launch
-COPY ./src/main/docker/kafka-wait /usr/bin/kafka-wait
+ENV CONNECT_PLUGIN_PATH=/opt/kafka/plugins
 
-USER root
-RUN confluent-hub install --no-prompt --component-dir /usr/share/"${COMPONENT}"/plugins confluentinc/kafka-connect-s3:"${KAFKA_CONNECT_S3_VERSION}"
-USER appuser
+COPY --from=builder /code/build/libs/kafka-connect-transform-keyvalue*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-transform-keyvalue/
+COPY --from=hub /tmp/deps/* ${CONNECT_PLUGIN_PATH}/
+RUN ln -s ${CONNECT_PLUGIN_PATH}/confluentinc-kafka-connect-avro-converter/lib/kafka-schema-registry-client*.jar ${CONNECT_PLUGIN_PATH}/confluentinc-kafka-connect-s3/lib/kafka-schema-registry-client.jar
+
+USER 1001
+
+COPY --chown=1001:1001 ./docker/ensure /opt/kafka/ensure
+COPY --chown=1001:1001 ./docker/kafka_connect_run.sh /opt/kafka/kafka_connect_run.sh
+RUN chmod +x /opt/kafka/ensure /opt/kafka/kafka_connect_run.sh
diff --git a/README.md b/README.md
index d5fa2a7..f28e62b 100644
--- a/README.md
+++ b/README.md
@@ -77,3 +77,9 @@ transforms=convertTimestamp
 transforms.convertTimestamp.type=org.radarbase.kafka.connect.transforms.TimestampConverter
 transforms.convertTimestamp.fields=time,timeReceived,timeCompleted,timestamp
 ```
+
+### Kafka Connect platform
+
+At present, both the Confluent and the Strimzi Kafka Connect platform implementations are supported through separate
+Dockerfiles. Because RADAR-base intends to switch from Confluent-based to Strimzi-based deployments in the future, the
+Confluent Docker assets are considered legacy components and are located in the `docker/legacy` directory of this repo.
\ No newline at end of file
diff --git a/build.gradle b/build.gradle
index 943f8fb..a514c82 100644
--- a/build.gradle
+++ b/build.gradle
@@ -3,7 +3,7 @@ plugins {
     id 'java-library'
 }
 
-version = '7.8.1'
+version = '8.0.0'
 description = "Kafka Connect transformation used to copy the key and value to a struct in the value of the record."
 
 sourceCompatibility = JavaVersion.VERSION_17
@@ -14,7 +14,7 @@ repositories {
 }
 
 dependencies {
-    compileOnly 'org.apache.kafka:connect-transforms:3.6.2'
+    compileOnly 'org.apache.kafka:connect-transforms:4.0.0'
 }
 
 wrapper {
diff --git a/docker/ensure b/docker/ensure
new file mode 100755
index 0000000..18fed8f
--- /dev/null
+++ b/docker/ensure
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+# Get the schema registry URL from the config.
+ss_url=$(grep -E '^key.converter.schema.registry.url=' /tmp/strimzi-connect.properties | cut -d'=' -f2)
+
+# If the schema registry URL is not set, exit...
+if [ -z "$ss_url" ]; then
+  echo "Schema registry URL is not set in strimzi-connect.properties."
+  echo "We will not check for schema registry availability."
+  exit 0
+fi
+
+echo "===> Checking if Schema Registry is available ..."
+
+max_timeout=32
+tries=10
+timeout=1
+while true; do
+  if curl --head --silent --fail "${ss_url}/subjects" > /dev/null; then
+    echo "Schema registry available."
+    break
+  fi
+  tries=$((tries - 1))
+  if [ $tries -eq 0 ]; then
+    echo "FAILED TO REACH SCHEMA REGISTRY."
+    exit 6
+  fi
+  echo "Failed to reach schema registry. Retrying in ${timeout} seconds."
+  sleep ${timeout}
+  if [ ${timeout} -lt ${max_timeout} ]; then
+    timeout=$((timeout * 2))
+  fi
+done
+
+echo "Schema registry is available."
diff --git a/docker/kafka_connect_run.sh b/docker/kafka_connect_run.sh
new file mode 100644
index 0000000..25cc0b2
--- /dev/null
+++ b/docker/kafka_connect_run.sh
@@ -0,0 +1,78 @@
+#!/usr/bin/env bash
+
+# Source script: https://github.com/strimzi/strimzi-kafka-operator/blob/main/docker-images/kafka-based/kafka/scripts/kafka_connect_run.sh
+
+set -e
+set +x
+
+# Prepare hostname - for StrimziPodSets we use the Pod DNS name assigned through the headless service
+ADVERTISED_HOSTNAME=$(hostname -f | cut -d "." -f1-4)
+export ADVERTISED_HOSTNAME
+
+# Create dir where keystores and truststores will be stored
+mkdir -p /tmp/kafka
+
+# Generate and print the config file
+echo "Starting Kafka Connect with configuration:"
+tee /tmp/strimzi-connect.properties < "/opt/kafka/custom-config/kafka-connect.properties" | sed -e 's/sasl.jaas.config=.*/sasl.jaas.config=[hidden]/g'
+echo ""
+
+# Disable Kafka's GC logging (which logs to a file)...
+export GC_LOG_ENABLED="false"
+
+if [ -z "$KAFKA_LOG4J_OPTS" ]; then
+    if [[ "${KAFKA_VERSION:0:1}" == "3" ]]
+    then
+        export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$KAFKA_HOME/custom-config/log4j.properties"
+    else
+        export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$KAFKA_HOME/custom-config/log4j2.properties"
+    fi
+fi
+
+# We don't need LOG_DIR because we write no log files, but setting it to a
+# directory avoids trying to create it (and logging a permission denied error)
+export LOG_DIR="$KAFKA_HOME"
+
+# Enable Prometheus JMX Exporter as Java agent
+if [ "$KAFKA_CONNECT_METRICS_ENABLED" = "true" ]; then
+    KAFKA_OPTS="${KAFKA_OPTS} -javaagent:$(ls "$JMX_EXPORTER_HOME"/jmx_prometheus_javaagent*.jar)=9404:$KAFKA_HOME/custom-config/metrics-config.json"
+    export KAFKA_OPTS
+fi
+
+. ./set_kafka_jmx_options.sh "${STRIMZI_JMX_ENABLED}" "${STRIMZI_JMX_USERNAME}" "${STRIMZI_JMX_PASSWORD}"
+
+# Enable Tracing agent (initializes tracing) as Java agent
+if [ "$STRIMZI_TRACING" = "jaeger" ] || [ "$STRIMZI_TRACING" = "opentelemetry" ]; then
+    KAFKA_OPTS="$KAFKA_OPTS -javaagent:$(ls "$KAFKA_HOME"/libs/tracing-agent*.jar)=$STRIMZI_TRACING"
+    export KAFKA_OPTS
+    if [ "$STRIMZI_TRACING" = "opentelemetry" ] && [ -z "$OTEL_TRACES_EXPORTER" ]; then
+        # auto-set OTLP exporter
+        export OTEL_TRACES_EXPORTER="otlp"
+    fi
+fi
+
+if [ -n "$STRIMZI_JAVA_SYSTEM_PROPERTIES" ]; then
+    export KAFKA_OPTS="${KAFKA_OPTS} ${STRIMZI_JAVA_SYSTEM_PROPERTIES}"
+fi
+
+# Disable FIPS if needed
+if [ "$FIPS_MODE" = "disabled" ]; then
+    export KAFKA_OPTS="${KAFKA_OPTS} -Dcom.redhat.fips=false"
+fi
+
+# Configure heap based on the available resources if needed
+. ./dynamic_resources.sh
+
+# Configure Garbage Collection logging
+. ./set_kafka_gc_options.sh
+
+set -x
+
+### BEGIN CUSTOM RADAR KAFKA CONNECT SCRIPT ###
+# Call the ensure script to verify infrastructure, Kafka cluster, schema registry, and other components
+echo "===> Running preflight checks ... "
+"${KAFKA_HOME}/ensure"
+### END CUSTOM RADAR KAFKA CONNECT SCRIPT ###
+
+# starting Kafka server with final configuration
+exec /usr/bin/tini -w -e 143 -- "${KAFKA_HOME}/bin/connect-distributed.sh" /tmp/strimzi-connect.properties
diff --git a/docker/legacy/Dockerfile b/docker/legacy/Dockerfile
new file mode 100644
index 0000000..ce067d3
--- /dev/null
+++ b/docker/legacy/Dockerfile
@@ -0,0 +1,16 @@
+ARG BASE_IMAGE=confluentinc/cp-kafka-connect-base:7.8.1
+
+FROM --platform=$BUILDPLATFORM gradle:8.9-jdk17 AS builder
+
+COPY ./*.gradle /code/
+COPY src/main/java /code/src/main/java
+WORKDIR /code
+RUN gradle jar --no-watch-fs
+
+FROM ${BASE_IMAGE}
+
+ENV WAIT_FOR_KAFKA="1"
+
+COPY --from=builder /code/build/libs/kafka-connect-transform-keyvalue*.jar /usr/share/"${COMPONENT}"/plugins/
+COPY ./docker/legacy/launch /etc/confluent/docker/launch
+COPY ./docker/legacy/kafka-wait /usr/bin/kafka-wait
diff --git a/docker/legacy/Dockerfile.s3 b/docker/legacy/Dockerfile.s3
new file mode 100644
index 0000000..2be62a3
--- /dev/null
+++ b/docker/legacy/Dockerfile.s3
@@ -0,0 +1,21 @@
+ARG BASE_IMAGE=confluentinc/cp-kafka-connect-base:7.8.1
+
+FROM --platform=$BUILDPLATFORM gradle:8.9-jdk17 AS builder
+
+COPY ./*.gradle /code/
+COPY src/main/java /code/src/main/java
+WORKDIR /code
+RUN gradle jar --no-watch-fs
+
+FROM ${BASE_IMAGE}
+
+ENV WAIT_FOR_KAFKA="1"
+ARG KAFKA_CONNECT_S3_VERSION="10.5.23"
+
+COPY --from=builder /code/build/libs/kafka-connect-transform-keyvalue*.jar /usr/share/"${COMPONENT}"/plugins/
+COPY ./docker/legacy/launch /etc/confluent/docker/launch
+COPY ./docker/legacy/kafka-wait /usr/bin/kafka-wait
+
+USER root
+RUN confluent-hub install --no-prompt --component-dir /usr/share/"${COMPONENT}"/plugins confluentinc/kafka-connect-s3:"${KAFKA_CONNECT_S3_VERSION}"
+USER appuser
diff --git a/src/main/docker/kafka-wait b/docker/legacy/kafka-wait
similarity index 100%
rename from src/main/docker/kafka-wait
rename to docker/legacy/kafka-wait
diff --git a/src/main/docker/launch b/docker/legacy/launch
similarity index 100%
rename from src/main/docker/launch
rename to docker/legacy/launch