diff --git a/_includes/recipes/filtering/ksql/markup/try_it.html b/_includes/recipes/filtering/ksql/markup/try_it.html
index e76ec7573c..333bd816a7 100644
--- a/_includes/recipes/filtering/ksql/markup/try_it.html
+++ b/_includes/recipes/filtering/ksql/markup/try_it.html
@@ -3,7 +3,7 @@

1. Get Confluent Platform

If you haven't already, get Confluent Platform.


-{% include shared-content/docker-install.txt %}
+{% include shared-content/cp-docker-install.txt %}
diff --git a/_includes/recipes/filtering/kstreams/harness/recipe-steps/dev/console-producer.sh b/_includes/recipes/filtering/kstreams/harness/recipe-steps/dev/console-producer.sh
index fada2475a9..fe0def929b 100755
--- a/_includes/recipes/filtering/kstreams/harness/recipe-steps/dev/console-producer.sh
+++ b/_includes/recipes/filtering/kstreams/harness/recipe-steps/dev/console-producer.sh
@@ -1 +1 @@
-docker exec -i schema-registry /usr/bin/kafka-avro-console-producer --topic user-events --broker-list broker:9092 --property value.schema="$(< src/main/avro/user.avsc)"
+docker-compose exec schema-registry /usr/bin/kafka-avro-console-producer --topic user-events --broker-list kafka1:9092 --property value.schema="$(< src/main/avro/user.avsc)"
diff --git a/_includes/recipes/filtering/kstreams/markup/try_it.html b/_includes/recipes/filtering/kstreams/markup/try_it.html
index e3f3a8369f..94836284f1 100644
--- a/_includes/recipes/filtering/kstreams/markup/try_it.html
+++ b/_includes/recipes/filtering/kstreams/markup/try_it.html
@@ -1,17 +1,29 @@
-
-1. Get Confluent Platform
-
-If you haven't already, get Confluent Platform.
-
-{% include shared-content/docker-install.txt %}
+
+0. Install Docker
+
+{% include shared-content/docker-setup.html %}
-
-2. Initialize the project
-
+
+1. Initialize the project
+
To get started, make a new directory anywhere you'd like for this project:

mkdir filter-events && cd filter-events
-
-Then create the following Gradle build file, named build.gradle for the project:
-
+
+Create the following docker-compose.yaml file:
+
+{% include_raw shared-content/cp-docker-compose.txt %}
+
+Download those Docker images:
+
+docker-compose pull
+
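Pulling up front is optional (docker-compose up fetches missing images automatically), but it front-loads the slow network step. Once the images are local you can bring the stack up and confirm the containers are healthy. A minimal sketch, assuming the docker-compose.yaml above is in the current directory:

    docker-compose up -d   # start ZooKeeper, Kafka, Schema Registry, Connect, and KSQL in the background
    docker-compose ps      # each service should report State "Up"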
+
+2. Set up the build
+
+Then create the following Gradle build file, named build.gradle for the project:
+
{% include_raw recipes/filtering/kstreams/code/build.gradle %}

Next, create a directory for configuration data:

@@ -57,7 +69,7 @@

5. Compile and run the Kafka Streams program

6. Produce events to the input topic

-
-In a new terminaL terminal, run:
-
+
+In a new terminal, run:
+
{% include_raw recipes/filtering/kstreams/harness/recipe-steps/dev/console-producer.sh %}

When the console producer starts, it will log some messages and hang, waiting for your input. Type in one line at a time and press enter to send it. Each line represents an event. To send all of the events below, paste the following into the prompt and press enter:
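The sample events themselves are not shown in this diff. For a sense of the format, kafka-avro-console-producer expects one JSON document per line, each conforming to the Avro schema passed via value.schema; assuming a hypothetical user.avsc with string fields name and favorite_color, one event line might look like:

    {"name": "alice", "favorite_color": "blue"}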

diff --git a/_includes/shared-content/cp-docker-compose.txt b/_includes/shared-content/cp-docker-compose.txt
new file mode 100644
index 0000000000..16786fe1e3
--- /dev/null
+++ b/_includes/shared-content/cp-docker-compose.txt
@@ -0,0 +1,100 @@
+version: "3.0"
+
+services:
+
+  ksql-cli:
+    image: confluentinc/cp-ksql-cli:{{ site.cp_version }}
+    depends_on:
+      - ksql-server
+
+  ksql-server:
+    image: confluentinc/cp-ksql-server:{{ site.cp_version }}
+    ports:
+      - 8088:8088
+    depends_on:
+      - kafka1
+      - zookeeper
+      - schema-registry
+    environment:
+      KSQL_BOOTSTRAP_SERVERS: kafka1:9092
+      KSQL_LISTENERS: http://0.0.0.0:8088
+      KSQL_KSQL_SERVICE_ID: kafka_workshop
+      KSQL_CUB_KAFKA_TIMEOUT: 300
+      KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schema-registry:8081
+      KSQL_KSQL_COMMIT_INTERVAL_MS: 2000
+      KSQL_KSQL_CACHE_MAX_BYTES_BUFFERING: 10000000
+      KSQL_KSQL_STREAMS_AUTO_OFFSET_RESET: earliest
+
+  zookeeper:
+    image: confluentinc/cp-zookeeper:{{ site.cp_version }}
+    restart: always
+    environment:
+      ZOOKEEPER_SERVER_ID: 1
+      ZOOKEEPER_CLIENT_PORT: "2181"
+      ZOOKEEPER_TICK_TIME: "2000"
+      ZOOKEEPER_SERVERS: "zookeeper:22888:23888"
+    ports:
+      - "2181:2181"
+
+  connect:
+    image: confluentinc/cp-kafka-connect:{{ site.cp_version }}
+    depends_on:
+      - zookeeper
+      - kafka1
+      - schema-registry
+    ports:
+      - "8083:8083"
+    environment:
+      CONNECT_BOOTSTRAP_SERVERS: "kafka1:9092"
+      CONNECT_REST_PORT: 8083
+      CONNECT_GROUP_ID: compose-connect-group
+      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
+      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
+      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
+      CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
+      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
+      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
+      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
+      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
+      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
+      CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect"
+      CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
+      CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
+      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
+      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
+      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
+      CONNECT_PLUGIN_PATH: '/usr/share/java'
+
+  kafka1:
+    image: confluentinc/cp-enterprise-kafka:{{ site.cp_version }}
+    depends_on:
+      - zookeeper
+    ports:
+      - "29092:29092"
+    environment:
+      KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:9092,PLAINTEXT_HOST://localhost:29092
+      KAFKA_BROKER_ID: 1
+      KAFKA_BROKER_RACK: "r1"
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      KAFKA_DELETE_TOPIC_ENABLE: "true"
+      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
+      KAFKA_SCHEMA_REGISTRY_URL: "schema-registry:8081"
+      KAFKA_JMX_PORT: 9991
+
+  schema-registry:
+    image: confluentinc/cp-schema-registry:{{ site.cp_version }}
+    restart: always
+    depends_on:
+      - zookeeper
+    environment:
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: "zookeeper:2181"
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:8081"
+    ports:
+      - 8081:8081
diff --git a/_includes/shared-content/docker-install.txt b/_includes/shared-content/cp-docker-install.txt
similarity index 100%
rename from _includes/shared-content/docker-install.txt
rename to _includes/shared-content/cp-docker-install.txt
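One detail of the kafka1 service above worth noting: it advertises two listeners, so clients inside the Compose network reach the broker at kafka1:9092, while clients on the host use the published localhost:29092. A quick smoke test of the internal path, assuming the stack is up and the user-events topic exists:

    docker-compose exec kafka1 kafka-console-consumer --bootstrap-server kafka1:9092 --topic user-events --from-beginning --max-messages 1

From the host, the same command (given a local Kafka CLI install) would use --bootstrap-server localhost:29092 instead.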
diff --git a/_includes/shared-content/docker-setup.html b/_includes/shared-content/docker-setup.html
new file mode 100644
index 0000000000..bc300dd2a5
--- /dev/null
+++ b/_includes/shared-content/docker-setup.html
@@ -0,0 +1,5 @@
+
+Recipes on Confluent Developer typically use Docker. If you don't already have it, install a recent version:

+
diff --git a/assets/sass/main.scss b/assets/sass/main.scss
index 7c01ee85e0..0ac50fa7f2 100644
--- a/assets/sass/main.scss
+++ b/assets/sass/main.scss
@@ -3,6 +3,11 @@
@import "bulma/bulma";

+ul li {
+    list-style-type: disc;
+    margin-left: 2em;
+}
+
pre {
    background-color: transparent;
    padding: 0;
diff --git a/recipes/filtering.html b/recipes/filtering.html
index 8a84507302..0091d3dc89 100644
--- a/recipes/filtering.html
+++ b/recipes/filtering.html
@@ -3,7 +3,7 @@
permalink: /recipes/filter-a-stream-of-events
recipe_title: Filter a stream of events

-problem: you have events in a Kafka topic, and you want to filter some of them out so that only those you're interested in appear in another topic.
+problem: you have events in a topic, but need to see a filtered view of those events

ksql_try_it: recipes/filtering/ksql/markup/try_it.html
ksql_test_it: recipes/filtering/ksql/markup/test_it.html
@@ -13,5 +13,5 @@
kstreams_test_it: recipes/filtering/kstreams/markup/test_it.html
kstreams_take_it_to_prod: recipes/filtering/kstreams/markup/take_it_to_prod.html

-default_tab: ksql
+default_tab: kstreams
---