DGS-9450 Add command line tool to register/auto-rotate DEKs
rayokota committed Dec 13, 2023
1 parent 41667c8 commit 0b44598
Showing 6 changed files with 186 additions and 8 deletions.
28 changes: 28 additions & 0 deletions bin/register-deks
@@ -0,0 +1,28 @@
#!/bin/bash
#
# Copyright 2023 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

base_dir=$(dirname $0)/..

# Production jars
export CLASSPATH=$CLASSPATH:$base_dir/share/java/kafka-serde-tools/*

# Development jars. `mvn package` should collect all the required dependency jars here
for dir in $base_dir/package-kafka-serde-tools/target/kafka-serde-tools-package-*-development; do
export CLASSPATH=$CLASSPATH:$dir/share/java/kafka-serde-tools/*
done

exec $(dirname $0)/schema-registry-run-class io.confluent.kafka.schemaregistry.encryption.tools.RegisterDeks "$@"
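
For reference, the Java tool launched above takes a Schema Registry base URL, a subject, and an optional schema version (defaulting to -1, the latest), plus repeatable -p/--property client settings. A hypothetical invocation (the URL, subject, and property below are placeholders, not part of this commit) would look like:

    bin/register-deks http://localhost:8081 my-subject-value --property <client.config>=<value>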
1 change: 1 addition & 0 deletions client-console-scripts/src/assembly/resources.xml
@@ -12,6 +12,7 @@
<include>kafka-avro-console-*</include>
<include>kafka-json-schema-console-*</include>
<include>kafka-protobuf-console-*</include>
<include>register-deks</include>
<include>schema-registry-run-class</include>
</includes>
</fileSet>
4 changes: 4 additions & 0 deletions client-encryption/pom.xml
@@ -47,6 +47,10 @@
<groupId>com.google.crypto.tink</groupId>
<artifactId>tink</artifactId>
</dependency>
<dependency>
<groupId>info.picocli</groupId>
<artifactId>picocli</artifactId>
</dependency>

<dependency>
<groupId>io.confluent</groupId>
io/confluent/kafka/schemaregistry/encryption/FieldEncryptionExecutor.java
@@ -142,8 +142,8 @@ public String type() {
}

@Override
public FieldTransform newTransform(RuleContext ctx) throws RuleException {
FieldTransform transform = new FieldEncryptionExecutorTransform();
public FieldEncryptionExecutorTransform newTransform(RuleContext ctx) throws RuleException {
FieldEncryptionExecutorTransform transform = new FieldEncryptionExecutorTransform();
transform.init(ctx);
return transform;
}
@@ -241,7 +241,7 @@ public void close() throws RuleException {
}
}

class FieldEncryptionExecutorTransform implements FieldTransform {
public class FieldEncryptionExecutorTransform implements FieldTransform {
private Cryptor cryptor;
private String kekName;
private KekInfo kek;
@@ -250,7 +250,7 @@ class FieldEncryptionExecutorTransform implements FieldTransform {
public void init(RuleContext ctx) throws RuleException {
cryptor = getCryptor(ctx);
kekName = getKekName(ctx);
kek = getKek(ctx, kekName);
kek = getOrCreateKek(ctx);
dekExpiryDays = getDekExpiryDays(ctx);
}

@@ -280,7 +280,7 @@ protected String getKekName(RuleContext ctx) throws RuleException {
return name;
}

protected KekInfo getKek(RuleContext ctx, String kekName) throws RuleException {
protected KekInfo getOrCreateKek(RuleContext ctx) throws RuleException {
boolean isRead = ctx.ruleMode() == RuleMode.READ;
KekId kekId = new KekId(kekName, isRead);

@@ -370,7 +370,7 @@ private KekInfo storeKekToRegistry(RuleContext ctx, KekId key, KekInfo kekInfo)
}
}

protected DekInfo getDek(RuleContext ctx, String kekName, KekInfo kek, Integer version)
public DekInfo getOrCreateDek(RuleContext ctx, Integer version)
throws RuleException, GeneralSecurityException {
boolean isRead = ctx.ruleMode() == RuleMode.READ;
DekId dekId = new DekId(kekName, ctx.subject(), version, cryptor.getDekFormat(), isRead);
@@ -506,7 +506,7 @@ public Object transform(RuleContext ctx, FieldContext fieldCtx, Object fieldValu
throw new RuleException(
"Type '" + fieldCtx.getType() + "' not supported for encryption");
}
dek = getDek(ctx, kekName, kek, isDekRotated() ? LATEST_VERSION : null);
dek = getOrCreateDek(ctx, isDekRotated() ? LATEST_VERSION : null);
ciphertext = cryptor.encrypt(dek.getRawDek(), plaintext, EMPTY_AAD);
if (isDekRotated()) {
ciphertext = prefixVersion(dek.getVersion(), ciphertext);
@@ -529,7 +529,7 @@ public Object transform(RuleContext ctx, FieldContext fieldCtx, Object fieldValu
version = kv.getKey();
ciphertext = kv.getValue();
}
dek = getDek(ctx, kekName, kek, version);
dek = getOrCreateDek(ctx, version);
plaintext = cryptor.decrypt(dek.getRawDek(), ciphertext, EMPTY_AAD);
return toObject(fieldCtx.getType(), plaintext);
default:
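
Narrowing newTransform's return type and making FieldEncryptionExecutorTransform and getOrCreateDek public lets callers outside the executor drive DEK registration directly, which is what the new CLI below does. A minimal sketch of that usage, assuming configs is a Schema Registry client config map and ctx is a WRITE-mode RuleContext built for an encryption rule, as in RegisterDeks:

    FieldEncryptionExecutor executor = new FieldEncryptionExecutor();
    executor.configure(configs);                                              // Schema Registry client settings
    FieldEncryptionExecutorTransform transform = executor.newTransform(ctx);  // concrete type, no cast needed
    transform.getOrCreateDek(ctx, transform.isDekRotated() ? -1 : null);      // register (or rotate) the DEK for the subject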
io/confluent/kafka/schemaregistry/encryption/tools/RegisterDeks.java
@@ -0,0 +1,140 @@
/*
* Copyright 2023 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.confluent.kafka.schemaregistry.encryption.tools;

import com.google.common.collect.ImmutableList;
import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.SchemaProvider;
import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClientFactory;
import io.confluent.kafka.schemaregistry.client.rest.entities.Rule;
import io.confluent.kafka.schemaregistry.client.rest.entities.RuleMode;
import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
import io.confluent.kafka.schemaregistry.encryption.FieldEncryptionExecutor;
import io.confluent.kafka.schemaregistry.encryption.FieldEncryptionExecutor.FieldEncryptionExecutorTransform;
import io.confluent.kafka.schemaregistry.rules.RuleContext;
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
import picocli.CommandLine.Parameters;

@Command(name = "register-deks", mixinStandardHelpOptions = true,
description = "Register and/or auto-rotate DEKs according to a specified data contract.",
sortOptions = false, sortSynopsis = false)
public class RegisterDeks implements Callable<Integer> {

private static final Logger LOG = LoggerFactory.getLogger(RegisterDeks.class);

@Parameters(index = "0")
private String baseUrl;
@Parameters(index = "1")
private String subject;
@Parameters(index = "2", arity = "0..1", defaultValue = "-1")
private int version;
@Option(names = {"-p", "--property"})
private Map<String, String> configs;

public RegisterDeks() {
}

@Override
public Integer call() throws Exception {
Map<String, String> configs = this.configs != null
? new HashMap<>(this.configs)
: new HashMap<>();
configs.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, baseUrl);

try (SchemaRegistryClient client = SchemaRegistryClientFactory.newClient(
Collections.singletonList(baseUrl),
1000,
ImmutableList.of(new AvroSchemaProvider()),
configs,
Collections.emptyMap()
)) {
SchemaMetadata schemaMetadata = client.getSchemaMetadata(subject, version);
Optional<ParsedSchema> schema = parseSchema(schemaMetadata);
if (!schema.isPresent()) {
LOG.error("No schema found");
return 1;
}
ParsedSchema parsedSchema = schema.get();
if (parsedSchema.ruleSet() == null || parsedSchema.ruleSet().getDomainRules() == null) {
LOG.info("No rules found");
return 0;
}
try (FieldEncryptionExecutor executor = new FieldEncryptionExecutor()) {
executor.configure(configs);
List<Rule> rules = parsedSchema.ruleSet().getDomainRules();
for (int i = 0; i < rules.size(); i++) {
Rule rule = rules.get(i);
if (rule.isDisabled() || !FieldEncryptionExecutor.TYPE.equals(rule.getType())) {
continue;
}
RuleContext ctx = new RuleContext(Collections.emptyMap(), null, parsedSchema,
subject, null, null, null, null, false, RuleMode.WRITE, rule, i, rules);
FieldEncryptionExecutorTransform transform = executor.newTransform(ctx);
transform.getOrCreateDek(ctx, transform.isDekRotated() ? -1 : null);
}
}
return 0;
}
}

private Optional<ParsedSchema> parseSchema(SchemaMetadata schemaMetadata) throws Exception {
SchemaProvider provider;
switch (schemaMetadata.getSchemaType()) {
case "AVRO":
provider = new AvroSchemaProvider();
break;
case "JSON":
provider = (SchemaProvider)
Class.forName("io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider")
.getDeclaredConstructor()
.newInstance();
break;
case "PROTOBUF":
provider = (SchemaProvider)
Class.forName("io.confluent.kafka.schemaregistry.json.JsonSchemaProvider")
.getDeclaredConstructor()
.newInstance();
break;
default:
throw new IllegalArgumentException("Unsupported schema type "
+ schemaMetadata.getSchemaType());
}
return provider.parseSchema(new Schema(null, schemaMetadata), false, false);
}

public static void main(String[] args) {
CommandLine commandLine = new CommandLine(new RegisterDeks());
commandLine.setUsageHelpLongOptionsMaxWidth(30);
int exitCode = commandLine.execute(args);
System.exit(exitCode);
}
}
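
Because mixinStandardHelpOptions is enabled on the @Command, picocli also generates -h/--help and -V/--version flags, and the Integer returned from call() (0 on success, 1 when no schema is found) is what commandLine.execute(args) passes to System.exit. For example, printing the generated usage text (illustrative invocation):

    bin/register-deks --help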

5 changes: 5 additions & 0 deletions pom.xml
@@ -314,6 +314,11 @@
<artifactId>JSONata4Java</artifactId>
<version>${jsonata-version}</version>
</dependency>
<dependency>
<groupId>info.picocli</groupId>
<artifactId>picocli</artifactId>
<version>4.7.5</version>
</dependency>
<dependency>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-stdlib</artifactId>
