Skip to content

Commit

Permalink
Configurable KafkaStreams cleanup execution
Browse files Browse the repository at this point in the history
542 few minor fixes

Docs, Javadocs
  • Loading branch information
pszymczyk authored and garyrussell committed Jan 29, 2018
1 parent 2526509 commit 63b2bcb
Show file tree
Hide file tree
Showing 4 changed files with 190 additions and 1 deletion.
@@ -0,0 +1,47 @@
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.kafka.core;

import org.apache.kafka.streams.KafkaStreams;

/**
 * Specifies at which points in the {@code KafkaStreams} lifecycle the
 * {@code KafkaStreams#cleanUp()} method should be invoked: on start, on stop,
 * both, or neither. By default, local state is cleaned up on stop only.
 *
 * @author Pawel Szymczyk
 */
public class CleanupConfig {

	private final boolean cleanUpOnStart;

	private final boolean cleanUpOnStop;

	/**
	 * Create a configuration that cleans up local state on stop only.
	 */
	public CleanupConfig() {
		this.cleanUpOnStart = false;
		this.cleanUpOnStop = true;
	}

	/**
	 * Create a configuration with explicit cleanup points.
	 * @param onStart true to clean up when the streams instance is started.
	 * @param onStop true to clean up when the streams instance is stopped.
	 */
	public CleanupConfig(boolean onStart, boolean onStop) {
		this.cleanUpOnStart = onStart;
		this.cleanUpOnStop = onStop;
	}

	/**
	 * Whether cleanup should run before the streams instance is started.
	 * @return true to clean up on start.
	 */
	public boolean cleanupOnStart() {
		return this.cleanUpOnStart;
	}

	/**
	 * Whether cleanup should run after the streams instance is stopped.
	 * @return true to clean up on stop.
	 */
	public boolean cleanupOnStop() {
		return this.cleanUpOnStop;
	}

}
Expand Up @@ -44,6 +44,7 @@ public class StreamsBuilderFactoryBean extends AbstractFactoryBean<StreamsBuilde
private static final int DEFAULT_CLOSE_TIMEOUT = 10;

private final StreamsConfig streamsConfig;
private final CleanupConfig cleanupConfig;

private KafkaStreams kafkaStreams;

Expand All @@ -61,14 +62,46 @@ public class StreamsBuilderFactoryBean extends AbstractFactoryBean<StreamsBuilde

private volatile boolean running;

/**
 * Construct an instance with the supplied streams configuration and the
 * default {@link CleanupConfig}, i.e. local state is cleaned up on stop only.
 * @param streamsConfig the streams configuration.
 */
public StreamsBuilderFactoryBean(StreamsConfig streamsConfig) {
this(streamsConfig, new CleanupConfig());
}

/**
 * Construct an instance with the supplied streams configuration and
 * clean up configuration.
 * @param streamsConfig the streams configuration.
 * @param cleanupConfig the cleanup configuration; controls whether
 * {@code KafkaStreams.cleanUp()} is invoked on start and/or stop.
 * @since 2.1.2
 */
public StreamsBuilderFactoryBean(StreamsConfig streamsConfig, CleanupConfig cleanupConfig) {
	Assert.notNull(streamsConfig, "'streamsConfig' must not be null");
	// cleanupConfig is dereferenced later in start()/stop(); fail fast here
	// instead of throwing an NPE at runtime.
	Assert.notNull(cleanupConfig, "'cleanupConfig' must not be null");
	this.streamsConfig = streamsConfig;
	this.cleanupConfig = cleanupConfig;
}

/**
 * Construct an instance with the supplied streams configuration and the
 * default {@link CleanupConfig}, i.e. local state is cleaned up on stop only.
 * @param streamsConfig the streams configuration.
 */
public StreamsBuilderFactoryBean(Map<String, Object> streamsConfig) {
this(streamsConfig, new CleanupConfig());
}

/**
 * Construct an instance with the supplied streams configuration and
 * clean up configuration.
 * @param streamsConfig the streams configuration.
 * @param cleanupConfig the cleanup configuration; controls whether
 * {@code KafkaStreams.cleanUp()} is invoked on start and/or stop.
 * @since 2.1.2
 */
public StreamsBuilderFactoryBean(Map<String, Object> streamsConfig, CleanupConfig cleanupConfig) {
	Assert.notNull(streamsConfig, "'streamsConfig' must not be null");
	// cleanupConfig is dereferenced later in start()/stop(); fail fast here
	// instead of throwing an NPE at runtime.
	Assert.notNull(cleanupConfig, "'cleanupConfig' must not be null");
	this.streamsConfig = new StreamsConfig(streamsConfig);
	this.cleanupConfig = cleanupConfig;
}

public void setClientSupplier(KafkaClientSupplier clientSupplier) {
Expand Down Expand Up @@ -133,6 +166,9 @@ public synchronized void start() {
this.kafkaStreams = new KafkaStreams(getObject().build(), this.streamsConfig, this.clientSupplier);
this.kafkaStreams.setStateListener(this.stateListener);
this.kafkaStreams.setUncaughtExceptionHandler(this.exceptionHandler);
if (this.cleanupConfig.cleanupOnStart()) {
this.kafkaStreams.cleanUp();
}
this.kafkaStreams.start();
this.running = true;
}
Expand All @@ -148,7 +184,9 @@ public synchronized void stop() {
try {
if (this.kafkaStreams != null) {
this.kafkaStreams.close(this.closeTimeout, TimeUnit.SECONDS);
this.kafkaStreams.cleanUp();
if (this.cleanupConfig.cleanupOnStop()) {
this.kafkaStreams.cleanUp();
}
this.kafkaStreams = null;
}
}
Expand Down
@@ -0,0 +1,101 @@
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.kafka.core;

import static org.assertj.core.api.Assertions.assertThat;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.streams.StreamsConfig;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;

/**
 * Verifies that {@code StreamsBuilderFactoryBean} honors its {@code CleanupConfig}
 * by removing local Kafka Streams state on both start and stop.
 *
 * @author Pawel Szymczyk
 */
@RunWith(SpringRunner.class)
@DirtiesContext
@EmbeddedKafka
public class StreamsBuilderFactoryBeanTest {

	private static final String APPLICATION_ID = "testCleanupStreams";

	private static Path stateStoreDir;

	@Autowired
	private StreamsBuilderFactoryBean streamsBuilderFactoryBean;

	@BeforeClass
	public static void setup() throws IOException {
		stateStoreDir = Files.createTempDirectory("test-state-dir");
	}

	@Test
	public void testCleanupStreams() throws IOException {
		// Stopping the factory bean must wipe the task state directory.
		Path stateStore = createTaskStateDir();
		assertThat(stateStore).exists();
		this.streamsBuilderFactoryBean.stop();
		assertThat(stateStore).doesNotExist();

		// Starting it again must wipe a freshly re-created directory as well.
		stateStore = createTaskStateDir();
		assertThat(stateStore).exists();
		this.streamsBuilderFactoryBean.start();
		assertThat(stateStore).doesNotExist();
	}

	// Creates the "0_0" task directory under <stateDir>/<applicationId>,
	// mimicking where Kafka Streams keeps task-local state.
	private static Path createTaskStateDir() throws IOException {
		return Files.createDirectory(stateStoreDir.resolve(APPLICATION_ID).resolve("0_0"));
	}

	@Configuration
	@EnableKafka
	@EnableKafkaStreams
	public static class KafkaStreamsConfiguration {

		@Value("${" + KafkaEmbedded.SPRING_EMBEDDED_KAFKA_BROKERS + "}")
		private String brokerAddresses;

		@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
		public StreamsConfig kStreamsConfigs() throws IOException {
			Map<String, Object> configs = new HashMap<>();
			configs.put(StreamsConfig.APPLICATION_ID_CONFIG, APPLICATION_ID);
			configs.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddresses);
			configs.put(StreamsConfig.STATE_DIR_CONFIG, stateStoreDir.toString());
			return new StreamsConfig(configs);
		}

		@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_BUILDER_BEAN_NAME)
		public StreamsBuilderFactoryBean defaultKafkaStreamsBuilder() throws IOException {
			// Clean up on both start and stop so each phase can be asserted.
			return new StreamsBuilderFactoryBean(kStreamsConfigs(), new CleanupConfig(true, true));
		}

	}

}
3 changes: 3 additions & 0 deletions src/reference/asciidoc/streams.adoc
Expand Up @@ -119,6 +119,9 @@ All you need to do is declare a `StreamsConfig` bean with the `defaultKafkaStreamsConfig` name.
A `StreamsBuilder` bean with the `defaultKafkaStreamsBuilder` name will be declared in the application context automatically.
Any additional `StreamsBuilderFactoryBean` beans can be declared and used as well.

By default, when the factory bean is stopped, the `KafkaStreams.cleanUp()` method is called.
Starting with _version 2.1.2_, the factory bean has additional constructors, taking a `CleanupConfig` object that has properties to allow you to control whether the `cleanUp()` method is called during `start()`, `stop()`, or neither.

==== Kafka Streams Example

Putting it all together:
Expand Down

0 comments on commit 63b2bcb

Please sign in to comment.