Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Upgrade kafka exporter dependency #5575

Merged
merged 3 commits into from Sep 22, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Expand Up @@ -88,7 +88,7 @@ public void setEnableSaramaLogging(boolean enableSaramaLogging) {
}

@Description("Only log messages with the given severity or above. " +
"Valid levels: [`debug`, `info`, `warn`, `error`, `fatal`]. " +
"Valid levels: [`info`, `debug`, `trace`]. " +
"Default log level is `info`.")
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public String getLogging() {
Expand Down
Expand Up @@ -231,7 +231,7 @@ protected List<Container> getContainers(ImagePullPolicy imagePullPolicy) {
protected List<EnvVar> getEnvVars() {
List<EnvVar> varList = new ArrayList<>();

varList.add(buildEnvVar(ENV_VAR_KAFKA_EXPORTER_LOGGING, logging));
varList.add(buildEnvVar(ENV_VAR_KAFKA_EXPORTER_LOGGING, Integer.toString(loggingMapping(logging))));
varList.add(buildEnvVar(ENV_VAR_KAFKA_EXPORTER_KAFKA_VERSION, version));
varList.add(buildEnvVar(ENV_VAR_KAFKA_EXPORTER_GROUP_REGEX, groupRegex));
varList.add(buildEnvVar(ENV_VAR_KAFKA_EXPORTER_TOPIC_REGEX, topicRegex));
Expand All @@ -246,6 +246,18 @@ protected List<EnvVar> getEnvVars() {
return varList;
}

/**
 * Maps the textual log level from the custom resource to the numeric
 * verbosity value expected by the Kafka Exporter binary's
 * {@code --verbosity} option: 0 = info, 1 = debug, 2 = trace.
 *
 * @param logLevel  Log level name (case-insensitive); may be null when not configured
 * @return          Numeric verbosity; null or unrecognised levels fall back to 0 (info)
 */
private int loggingMapping(String logLevel) {
    // Guard against an unset logging field — default to info rather than NPE
    if (logLevel == null) {
        return 0;
    } else if (logLevel.equalsIgnoreCase("debug")) {
        return 1;
    } else if (logLevel.equalsIgnoreCase("trace")) {
        return 2;
    } else {
        // "info" and anything unrecognised both map to the default level
        return 0;
    }
}

private List<Volume> getVolumes(boolean isOpenShift) {
List<Volume> volumeList = new ArrayList<>(3);

Expand Down
Expand Up @@ -131,7 +131,7 @@ private Map<String, String> expectedLabels() {

private List<EnvVar> getExpectedEnvVars() {
List<EnvVar> expected = new ArrayList<>();
expected.add(new EnvVarBuilder().withName(KafkaExporter.ENV_VAR_KAFKA_EXPORTER_LOGGING).withValue(exporterOperatorLogging).build());
expected.add(new EnvVarBuilder().withName(KafkaExporter.ENV_VAR_KAFKA_EXPORTER_LOGGING).withValue("1").build());
expected.add(new EnvVarBuilder().withName(KafkaExporter.ENV_VAR_KAFKA_EXPORTER_KAFKA_VERSION).withValue(version).build());
expected.add(new EnvVarBuilder().withName(KafkaExporter.ENV_VAR_KAFKA_EXPORTER_GROUP_REGEX).withValue(groupRegex).build());
expected.add(new EnvVarBuilder().withName(KafkaExporter.ENV_VAR_KAFKA_EXPORTER_TOPIC_REGEX).withValue(topicRegex).build());
Expand Down
14 changes: 7 additions & 7 deletions docker-images/kafka/Dockerfile
Expand Up @@ -27,28 +27,28 @@ COPY ./scripts/ $KAFKA_HOME
# Add Kafka Exporter
#####
ENV KAFKA_EXPORTER_HOME=/opt/kafka-exporter
ENV KAFKA_EXPORTER_VERSION=1.3.1-STRIMZI
ENV KAFKA_EXPORTER_CHECKSUM_AMD64="85e37fe8a7797f53dcf1ef349b3472edc6891d8bb914d1aebb33784bfb850189d47ec989be9a8c764f4fbe991576b81545b04ddbd4ff6946a677066ec0a4619d kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-amd64.tar.gz"
ENV KAFKA_EXPORTER_CHECKSUM_ARM64="a594903265f3497c003d90e211480179aa8d42fb58b43456f001d3eea064d1d571e3b5bb9666c6d45382b1611433c5d616d68b742f84045be04c0c18b9df0427 kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-arm64.tar.gz"
ENV KAFKA_EXPORTER_CHECKSUM_PPC64LE="8b72420d2c6aed25b6ddbae7df66be6a07e659fffa6b3f6cae1132de35c7f0a21bde0fcb3fa9234a8a79839589c18940ef01534551b57669dab09544b5af2883 kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-ppc64le.tar.gz"
ENV KAFKA_EXPORTER_VERSION=1.4.2
ENV KAFKA_EXPORTER_CHECKSUM_AMD64="42fcd2b303e82e3ea518cffe7c528c2c35f9ecace8427d68f556c8a91894056f9d8a84fb5bdac2c447b91870909f0dbcce5548a061149da4ffbf33e16545d488 kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-amd64.tar.gz"
ENV KAFKA_EXPORTER_CHECKSUM_ARM64="9488d558210834a6e99ab0c26513294fe2e9f6bd95257fa56cd48359fbadcb5b8aa0846d12c58dbccbfb8493f525c55004a2a0e2a539eb594371ff1990c516f0 kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-arm64.tar.gz"
ENV KAFKA_EXPORTER_CHECKSUM_PPC64LE="26648800bd2da699cc4e6bfca475b1bcfee0b2271c1c5a531941d42aea42ed55f8d8fdb103e517b7a8c504798c5b5fc6854e099a1a22b7069b319aecf5d410d2 kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-ppc64le.tar.gz"

RUN set -ex; \
if [[ ${TARGETPLATFORM} = "linux/arm64" ]]; then \
curl -LO https://github.com/alesj/kafka_exporter/releases/download/v${KAFKA_EXPORTER_VERSION}/kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-arm64.tar.gz; \
curl -LO https://github.com/danielqsj/kafka_exporter/releases/download/v${KAFKA_EXPORTER_VERSION}/kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-arm64.tar.gz; \
echo $KAFKA_EXPORTER_CHECKSUM_ARM64 > kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-arm64.tar.gz.sha512; \
sha512sum --check kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-arm64.tar.gz.sha512; \
mkdir $KAFKA_EXPORTER_HOME; \
tar xvfz kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-arm64.tar.gz -C $KAFKA_EXPORTER_HOME --strip-components=1; \
rm -f kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-arm64.tar.gz*; \
elif [[ ${TARGETPLATFORM} = "linux/ppc64le" ]]; then \
curl -LO https://github.com/alesj/kafka_exporter/releases/download/v${KAFKA_EXPORTER_VERSION}/kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-ppc64le.tar.gz; \
curl -LO https://github.com/danielqsj/kafka_exporter/releases/download/v${KAFKA_EXPORTER_VERSION}/kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-ppc64le.tar.gz; \
echo $KAFKA_EXPORTER_CHECKSUM_PPC64LE > kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-ppc64le.tar.gz.sha512; \
sha512sum --check kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-ppc64le.tar.gz.sha512; \
mkdir $KAFKA_EXPORTER_HOME; \
tar xvfz kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-ppc64le.tar.gz -C $KAFKA_EXPORTER_HOME --strip-components=1; \
rm -f kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-ppc64le.tar.gz*; \
else \
curl -LO https://github.com/alesj/kafka_exporter/releases/download/v${KAFKA_EXPORTER_VERSION}/kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-amd64.tar.gz; \
curl -LO https://github.com/danielqsj/kafka_exporter/releases/download/v${KAFKA_EXPORTER_VERSION}/kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-amd64.tar.gz; \
echo $KAFKA_EXPORTER_CHECKSUM_AMD64 > kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-amd64.tar.gz.sha512; \
sha512sum --check kafka_exporter-${KAFKA_EXPORTER_VERSION}.linux-amd64.tar.gz.sha512; \
mkdir $KAFKA_EXPORTER_HOME; \
Expand Down
4 changes: 2 additions & 2 deletions docker-images/kafka/exporter-scripts/kafka_exporter_run.sh
Expand Up @@ -21,7 +21,7 @@ if [ "$KAFKA_EXPORTER_ENABLE_SARAMA" = "true" ]; then
fi

if [ -n "$KAFKA_EXPORTER_LOGGING" ]; then
loglevel="--log.level=${KAFKA_EXPORTER_LOGGING}"
loglevel="--verbosity=${KAFKA_EXPORTER_LOGGING}"
fi

# shellcheck disable=SC2027
Expand All @@ -31,7 +31,7 @@ kafkaserver="--kafka.server="$KAFKA_EXPORTER_KAFKA_SERVER

listenaddress="--web.listen-address=:9404"

allgroups="--legacy.partitions"
allgroups="--offset.show-all"

tls="--tls.enabled --tls.ca-file=/etc/kafka-exporter/cluster-ca-certs/ca.crt --tls.cert-file=/etc/kafka-exporter/kafka-exporter-certs/kafka-exporter.crt --tls.key-file=/etc/kafka-exporter/kafka-exporter-certs/kafka-exporter.key"

Expand Down
2 changes: 1 addition & 1 deletion documentation/modules/appendix_crds.adoc
Expand Up @@ -1431,7 +1431,7 @@ Used in: xref:type-KafkaSpec-{context}[`KafkaSpec`]


|https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#resourcerequirements-v1-core[ResourceRequirements]
|logging 1.2+<.<a|Only log messages with the given severity or above. Valid levels: [`debug`, `info`, `warn`, `error`, `fatal`]. Default log level is `info`.
|logging 1.2+<.<a|Only log messages with the given severity or above. Valid levels: [`info`, `debug`, `trace`]. Default log level is `info`.
|string
|enableSaramaLogging 1.2+<.<a|Enable Sarama logging, a Go client library used by the Kafka Exporter.
|boolean
Expand Down
Expand Up @@ -68,7 +68,7 @@ spec:
<2> A regular expression to specify the consumer groups to include in the metrics.
<3> A regular expression to specify the topics to include in the metrics.
<4> link:{BookURLUsing}#con-common-configuration-resources-reference[CPU and memory resources to reserve].
<5> Logging configuration, to log messages with a given severity (debug, info, warn, error, fatal) or above.
<5> Logging configuration, to set the log verbosity level: `info` (default), `debug`, or `trace`.
<6> Boolean to enable Sarama logging, a Go client library used by Kafka Exporter.
<7> link:{BookURLUsing}#assembly-customizing-kubernetes-resources-str[Customization of deployment templates and pods].
<8> link:{BookURLUsing}#con-common-configuration-healthchecks-reference[Healthcheck readiness probes].
Expand Down
Expand Up @@ -5212,7 +5212,7 @@ spec:
description: CPU and memory resources to reserve.
logging:
type: string
description: 'Only log messages with the given severity or above. Valid levels: [`debug`, `info`, `warn`, `error`, `fatal`]. Default log level is `info`.'
description: 'Only log messages with the given severity or above. Valid levels: [`info`, `debug`, `trace`]. Default log level is `info`.'
enableSaramaLogging:
type: boolean
description: Enable Sarama logging, a Go client library used by the Kafka Exporter.
Expand Down
4 changes: 2 additions & 2 deletions packaging/install/cluster-operator/040-Crd-kafka.yaml
Expand Up @@ -6137,8 +6137,8 @@ spec:
logging:
type: string
description: 'Only log messages with the given severity or above.
Valid levels: [`debug`, `info`, `warn`, `error`, `fatal`]. Default
log level is `info`.'
Valid levels: [`info`, `debug`, `trace`]. Default log level
is `info`.'
enableSaramaLogging:
type: boolean
description: Enable Sarama logging, a Go client library used by
Expand Down