Commit

Cancel receiver pod start on invalid OIDC config only if authentication.oidc is enabled (knative-extensions#3761)

* Cancel pod start on invalid OIDC config only if authentication.oidc is enabled

* Update namespaced broker to copy features configmap too.
creydr committed Apr 2, 2024
1 parent 59a4503 commit b2ba6b4
Showing 9 changed files with 125 additions and 4 deletions.
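
In short: a failed OIDC discovery lookup at receiver startup is now fatal only when the authentication-oidc flag in config-features is enabled. A condensed sketch of the new startup logic follows (the names match the receiver startup diff below; the log text is paraphrased):

FeaturesConfig featuresConfig = new FeaturesConfig(env.getConfigFeaturesPath());

OIDCDiscoveryConfig oidcDiscoveryConfig = null;
try {
    oidcDiscoveryConfig = OIDCDiscoveryConfig.build(vertx)
            .toCompletionStage()
            .toCompletableFuture()
            .get();
} catch (ExecutionException ex) {
    if (featuresConfig.isAuthenticationOIDC()) {
        throw ex; // OIDC is required, so abort the receiver pod start
    }
    // OIDC is currently disabled: keep starting, but token verification will
    // keep failing (see the token verifier change below) until the pod is restarted.
    logger.warn("Could not load OIDC configuration");
}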
1 change: 1 addition & 0 deletions control-plane/pkg/reconciler/broker/namespaced_broker.go
@@ -385,6 +385,7 @@ func (r *NamespacedReconciler) configMapsFromSystemNamespace(broker *eventing.Br
configMaps := []string{
"config-kafka-broker-data-plane",
"config-tracing",
"config-features",
"kafka-config-logging",
"config-openshift-trusted-cabundle",
}
10 changes: 10 additions & 0 deletions control-plane/pkg/reconciler/broker/namespaced_broker_test.go
@@ -109,6 +109,7 @@ func namespacedBrokerReconciliation(t *testing.T, format string, env config.Env)
DataPlaneConfigInitialOffset(ConsumerConfigKey, sources.OffsetLatest),
),
reconcilertesting.NewConfigMap("config-tracing", SystemNamespace),
reconcilertesting.NewConfigMap("config-features", SystemNamespace),
reconcilertesting.NewConfigMap("kafka-config-logging", SystemNamespace),
NewConfigMapWithBinaryData(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, nil),
NewService(),
@@ -181,6 +182,14 @@ func namespacedBrokerReconciliation(t *testing.T, format string, env config.Env)
WithNamespacedBrokerOwnerRef,
WithNamespacedLabel,
),
ToManifestivalResource(t,
reconcilertesting.NewConfigMap(
"config-features",
BrokerNamespace,
),
WithNamespacedBrokerOwnerRef,
WithNamespacedLabel,
),
ToManifestivalResource(t,
reconcilertesting.NewConfigMap(
"kafka-config-logging",
@@ -360,6 +369,7 @@ func namespacedBrokerFinalization(t *testing.T, format string, env config.Env) {
}, env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, env.ContractConfigMapFormat),
reconcilertesting.NewConfigMap(env.DataPlaneConfigConfigMapName, SystemNamespace),
reconcilertesting.NewConfigMap("config-tracing", SystemNamespace),
reconcilertesting.NewConfigMap("config-features", SystemNamespace),
reconcilertesting.NewConfigMap("kafka-config-logging", SystemNamespace),
reconcilertesting.NewDeployment("kafka-broker-receiver", SystemNamespace),
reconcilertesting.NewDeployment("kafka-broker-dispatcher", SystemNamespace),
8 changes: 8 additions & 0 deletions data-plane/config/broker/500-receiver.yaml
@@ -76,6 +76,9 @@ spec:
- mountPath: /etc/tracing
name: config-tracing
readOnly: true
- mountPath: /etc/features
name: config-features
readOnly: true
- mountPath: /etc/receiver-tls-secret
name: broker-receiver-tls-secret
readOnly: true
@@ -120,6 +123,8 @@ spec:
value: "false"
- name: CONFIG_TRACING_PATH
value: "/etc/tracing"
- name: CONFIG_FEATURES_PATH
value: "/etc/features"
# https://github.com/fabric8io/kubernetes-client/issues/2212
- name: HTTP2_DISABLE
value: "true"
@@ -175,6 +180,9 @@ spec:
- name: config-tracing
configMap:
name: config-tracing
- name: config-features
configMap:
name: config-features
- name: broker-receiver-tls-secret
secret:
secretName: kafka-broker-ingress-server-tls
8 changes: 8 additions & 0 deletions data-plane/config/channel/500-receiver.yaml
@@ -76,6 +76,9 @@ spec:
- mountPath: /etc/tracing
name: config-tracing
readOnly: true
- mountPath: /etc/features
name: config-features
readOnly: true
- mountPath: /etc/receiver-tls-secret
name: channel-receiver-tls-secret
readOnly: true
@@ -120,6 +123,8 @@ spec:
value: "false"
- name: CONFIG_TRACING_PATH
value: "/etc/tracing"
- name: CONFIG_FEATURES_PATH
value: "/etc/features"
# https://github.com/fabric8io/kubernetes-client/issues/2212
- name: HTTP2_DISABLE
value: "true"
@@ -175,6 +180,9 @@ spec:
- name: config-tracing
configMap:
name: config-tracing
- name: config-features
configMap:
name: config-features
- name: channel-receiver-tls-secret
secret:
secretName: kafka-channel-ingress-server-tls
8 changes: 8 additions & 0 deletions data-plane/config/sink/500-receiver.yaml
@@ -76,6 +76,9 @@ spec:
- mountPath: /etc/tracing
name: config-tracing
readOnly: true
- mountPath: /etc/features
name: config-features
readOnly: true
- mountPath: /etc/receiver-tls-secret
name: sink-receiver-tls-secret
readOnly: true
@@ -120,6 +123,8 @@ spec:
value: "false"
- name: CONFIG_TRACING_PATH
value: "/etc/tracing"
- name: CONFIG_FEATURES_PATH
value: "/etc/features"
# https://github.com/fabric8io/kubernetes-client/issues/2212
- name: HTTP2_DISABLE
value: "true"
@@ -175,6 +180,9 @@ spec:
- name: config-tracing
configMap:
name: config-tracing
- name: config-features
configMap:
name: config-features
- name: sink-receiver-tls-secret
secret:
secretName: kafka-sink-ingress-server-tls
55 changes: 55 additions & 0 deletions — new file FeaturesConfig.java (package dev.knative.eventing.kafka.broker.core.features; full path not shown in this view)
@@ -0,0 +1,55 @@
/*
* Copyright © 2018 Knative Authors (knative-dev@googlegroups.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.knative.eventing.kafka.broker.core.features;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

public class FeaturesConfig {

private final String DISABLED = "disabled";
private final String ENABLED = "enabled";

public static final String KEY_AUTHENTICATION_OIDC = "authentication-oidc";

private final Map<String, String> features;

public FeaturesConfig(String path) throws IOException {
features = new HashMap<>();
String[] keys = {
KEY_AUTHENTICATION_OIDC,
};

for (String key : keys) {
Path filePath = Paths.get(path, key);
if (Files.exists(filePath)) {
features.put(key, Files.readString(filePath));
}
}
}

public boolean isAuthenticationOIDC() {
return isEnabled(KEY_AUTHENTICATION_OIDC);
}

private boolean isEnabled(String key) {
return features.getOrDefault(key, DISABLED).toLowerCase().equals(ENABLED);
}
}
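
A hedged usage sketch of the new class (the temporary directory below is illustrative only; in the receiver the path is the config-features ConfigMap mounted at /etc/features, where each key such as authentication-oidc is projected as a file containing enabled or disabled):

import dev.knative.eventing.kafka.broker.core.features.FeaturesConfig;

import java.nio.file.Files;
import java.nio.file.Path;

public class FeaturesConfigExample {
    public static void main(String[] args) throws Exception {
        // Stand-in for /etc/features: one file per feature key.
        Path dir = Files.createTempDirectory("features");
        Files.writeString(dir.resolve(FeaturesConfig.KEY_AUTHENTICATION_OIDC), "enabled");

        FeaturesConfig features = new FeaturesConfig(dir.toString());
        System.out.println(features.isAuthenticationOIDC()); // prints: true
    }
}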
Changes to the OIDC token verifier (package dev.knative.eventing.kafka.broker.core.oidc; full path not shown in this view)
@@ -15,6 +15,7 @@
*/
package dev.knative.eventing.kafka.broker.core.oidc;

import dev.knative.eventing.kafka.broker.core.features.FeaturesConfig;
import io.vertx.core.Future;
import io.vertx.core.Vertx;
import io.vertx.core.http.HttpServerRequest;
@@ -44,6 +45,13 @@ public Future<JwtClaims> verify(String token, String expectedAudience) {
promise -> {
// execute blocking, as jose .process() is blocking

if (oidcDiscoveryConfig == null) {
promise.fail(
"OIDC discovery config not initialized. This is most likely the case when the pod was started with an invalid OIDC config in place and then later the "
+ FeaturesConfig.KEY_AUTHENTICATION_OIDC
+ " flag was enabled. Restarting the pod should help.");
}

JwtConsumer jwtConsumer = new JwtConsumerBuilder()
.setVerificationKeyResolver(this.oidcDiscoveryConfig.getJwksVerificationKeyResolver())
.setExpectedAudience(expectedAudience)
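
The null check above makes verify() report a failed Future instead of hitting a NullPointerException on the uninitialized discovery config. A minimal, self-contained Vert.x sketch of that promise-failure pattern (class and variable names are hypothetical, not part of this commit):

import io.vertx.core.Future;
import io.vertx.core.Vertx;

public class FailFastSketch {
    public static void main(String[] args) {
        Vertx vertx = Vertx.vertx();
        Object oidcDiscoveryConfig = null; // simulates a pod started with a broken OIDC config

        Future<String> verification = vertx.executeBlocking(promise -> {
            if (oidcDiscoveryConfig == null) {
                promise.fail("OIDC discovery config not initialized");
                return; // do not touch the null config below
            }
            promise.complete("verified claims");
        });

        verification.onFailure(err -> {
            System.out.println("verify() fails fast: " + err.getMessage());
            vertx.close();
        });
    }
}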
Changes to BaseEnv.java (full path not shown in this view)
@@ -48,6 +48,9 @@ public class BaseEnv {
public static final String CONFIG_TRACING_PATH = "CONFIG_TRACING_PATH";
private final String configTracingPath;

public static final String CONFIG_FEATURES_PATH = "CONFIG_FEATURES_PATH";
private final String configFeaturesPath;

public static final String WAIT_STARTUP_SECONDS = "WAIT_STARTUP_SECONDS";
private final int waitStartupSeconds;

@@ -61,6 +64,7 @@ public BaseEnv(Function<String, String> envProvider) {
this.producerConfigFilePath = requireNonNull(envProvider.apply(PRODUCER_CONFIG_FILE_PATH));
this.dataPlaneConfigFilePath = requireNonNull(envProvider.apply(DATA_PLANE_CONFIG_FILE_PATH));
this.configTracingPath = requireNonNull(envProvider.apply(CONFIG_TRACING_PATH));
this.configFeaturesPath = envProvider.apply(CONFIG_FEATURES_PATH);
this.waitStartupSeconds = Integer.parseInt(envProvider.apply(WAIT_STARTUP_SECONDS));
}

@@ -100,6 +104,10 @@ public String getConfigTracingPath() {
return configTracingPath;
}

public String getConfigFeaturesPath() {
return configFeaturesPath;
}

public int getWaitStartupSeconds() {
return waitStartupSeconds;
}
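
A hedged wiring sketch for the new getter (placing the class next to BaseEnv and constructing it from System::getenv are assumptions about the surrounding wiring, not code from this commit). Note that, unlike the other paths, configFeaturesPath is not wrapped in requireNonNull, so it is null when CONFIG_FEATURES_PATH is unset:

// Assumes this class sits in the same package as BaseEnv and that the other
// required env vars (producer config path, tracing path, ...) are also set.
import dev.knative.eventing.kafka.broker.core.features.FeaturesConfig;

public class FeaturesWiringSketch {
    public static void main(String[] args) throws Exception {
        BaseEnv env = new BaseEnv(System::getenv); // CONFIG_FEATURES_PATH is /etc/features in the Deployments above
        FeaturesConfig features = new FeaturesConfig(env.getConfigFeaturesPath());
        System.out.println("authentication-oidc enabled: " + features.isAuthenticationOIDC());
    }
}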
Changes to the receiver startup code (full path not shown in this view)
@@ -20,6 +20,7 @@
import dev.knative.eventing.kafka.broker.core.ReactiveProducerFactory;
import dev.knative.eventing.kafka.broker.core.eventbus.ContractMessageCodec;
import dev.knative.eventing.kafka.broker.core.eventbus.ContractPublisher;
import dev.knative.eventing.kafka.broker.core.features.FeaturesConfig;
import dev.knative.eventing.kafka.broker.core.file.FileWatcher;
import dev.knative.eventing.kafka.broker.core.metrics.Metrics;
import dev.knative.eventing.kafka.broker.core.oidc.OIDCDiscoveryConfig;
@@ -68,6 +69,8 @@ public static void start(final String[] args, final ReactiveProducerFactory kafk
OpenTelemetrySdk openTelemetry =
TracingConfig.fromDir(env.getConfigTracingPath()).setup();

FeaturesConfig featuresConfig = new FeaturesConfig(env.getConfigFeaturesPath());

// Read producer properties and override some defaults
Properties producerConfigs = Configurations.readPropertiesSync(env.getProducerConfigFilePath());
producerConfigs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
@@ -100,10 +103,22 @@ public static void start(final String[] args, final ReactiveProducerFactory kafk
httpsServerOptions.setTracingPolicy(TracingPolicy.PROPAGATE);

// Setup OIDC discovery config
OIDCDiscoveryConfig oidcDiscoveryConfig = OIDCDiscoveryConfig.build(vertx)
.toCompletionStage()
.toCompletableFuture()
.get();
OIDCDiscoveryConfig oidcDiscoveryConfig = null;
try {
oidcDiscoveryConfig = OIDCDiscoveryConfig.build(vertx)
.toCompletionStage()
.toCompletableFuture()
.get();
} catch (ExecutionException ex) {
if (featuresConfig.isAuthenticationOIDC()) {
logger.error("Could not load OIDC config while OIDC authentication feature is enabled.");
throw ex;
} else {
logger.warn(
"Could not load OIDC configuration. This will lead to problems, when the {} flag will be enabled later",
FeaturesConfig.KEY_AUTHENTICATION_OIDC);
}
}

// Configure the verticle to deploy and the deployment options
final Supplier<Verticle> receiverVerticleFactory = new ReceiverVerticleFactory(
