2 changes: 1 addition & 1 deletion _includes/recipes/filtering/ksql/markup/try_it.html
@@ -3,7 +3,7 @@ <h4 class="subtitle is-4">1. Get Confluent Platform</h4>
<p>If you haven't already, get Confluent Platform.</p>
<br/>

<pre><code class="shell">{% include shared-content/docker-install.txt %}</code></pre>
<pre><code class="shell">{% include shared-content/cp-docker-install.txt %}</code></pre>
</div>

<div class="recipe-try-it-step">
@@ -1 +1 @@
-docker exec -i schema-registry /usr/bin/kafka-avro-console-producer --topic user-events --broker-list broker:9092 --property value.schema="$(< src/main/avro/user.avsc)"
+docker-compose exec -i schema-registry /usr/bin/kafka-avro-console-producer --topic user-events --broker-list broker:9092 --property value.schema="$(< src/main/avro/user.avsc)"
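Once attached, this producer reads events from stdin, one JSON record per line, each validated against src/main/avro/user.avsc. That schema file is not part of this diff, so the sample input below is purely illustrative, assuming a record with two string fields, name and region:

{"name": "alice", "region": "us-west-1"}
{"name": "bob", "region": "eu-north-1"}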
26 changes: 19 additions & 7 deletions _includes/recipes/filtering/kstreams/markup/try_it.html
@@ -1,17 +1,29 @@
<div class="recipe-try-it-step">
<h4 class="subtitle is-4">1. Get Confluent Platform</h4>

<p>If you haven't already, get Confluent Platform.</p>
<pre class="snippet"><code class="shell">{% include shared-content/docker-install.txt %}</code></pre>


<div class="recipe-try-it-step">
<h4 class="subtitle is-4">0. Install Docker</h4>
{% include shared-content/docker-setup.html %}
</div>

<div class="recipe-try-it-step">
<h4 class="subtitle is-4">2. Initialize the project</h4>
<h4 class="subtitle is-4">1. Initialize the project</h4>

<p>To get started, make a new directory anywhere you'd like for this project:</p>
<pre class="snippet"><code class="shell">mkdir filter-events && cd filter-events</code></pre>

-<p>Then create the following Gradle build file, named <code>build.gradle</code> for the project:</p>
+<p>Create the following <code>docker-compose.yaml</code> file:</p>
+<pre class="snippet"><code class="yaml">{% include_raw shared-content/cp-docker-compose.txt %}</code></pre>
+
+<p>Download those Docker images:</p>
+<pre class="snippet"><code class="shell">docker-compose pull</code></pre>
+
+</div>
+
+<div class="recipe-try-it-step">
+<h4 class="subtitle is-4">2. Set up the build</h4>
+
+<p>Then create the following <a href="https://gradle.org/">Gradle</a> build file, named <code>build.gradle</code>, for the project:</p>
<pre class="snippet"><code class="groovy">{% include_raw recipes/filtering/kstreams/code/build.gradle %}</code></pre>

<p>Next, create a directory for configuration data:</p>
@@ -57,7 +69,7 @@ <h4 class="subtitle is-4">5. Compile and run the Kafka Streams program</h4>
<div class="recipe-try-it-step">
<h4 class="subtitle is-4">6. Produce events to the input topic</h4>

-<p>In a new terminaL terminal, run:</p>
+<p>In a new terminal, run:</p>
<pre class="snippet"><code class="shell">{% include_raw recipes/filtering/kstreams/harness/recipe-steps/dev/console-producer.sh %}</code></pre>

<p>When the console producer starts, it will log some messages and hang, waiting for your input. Type in one line at a time and press enter to send it. Each line represents an event. To send all of the events below, paste the following into the prompt and press enter:</p>
100 changes: 100 additions & 0 deletions _includes/shared-content/cp-docker-compose.txt
@@ -0,0 +1,100 @@
version: "3.0"

services:

ksql-cli:
image: confluentinc/cp-ksql-cli:{{ site.cp_version }}
depends_on:
- ksql-server

ksql-server:
image: confluentinc/cp-ksql-server:{{ site.cp_version }}
ports:
- 8088:8088
depends_on:
- kafka1
- zookeeper
- schema-registry
environment:
KSQL_BOOTSTRAP_SERVERS: kafka1:9092
KSQL_LISTENERS: http://0.0.0.0:8088
KSQL_KSQL_SERVICE_ID: kafka_workshop
KSQL_CUB_KAFKA_TIMEOUT: 300
KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schema-registry:8081
KSQL_KSQL_COMMIT_INTERVAL_MS: 2000
KSQL_KSQL_CACHE_MAX_BYTES_BUFFERING: 10000000
KSQL_KSQL_STREAMS_AUTO_OFFSET_RESET: earliest


zookeeper:
image: confluentinc/cp-zookeeper:{{ site.cp_version }}
restart: always
environment:
ZOOKEEPER_SERVER_ID: 1
ZOOKEEPER_CLIENT_PORT: "2181"
ZOOKEEPER_TICK_TIME: "2000"
ZOOKEEPER_SERVERS: "zookeeper:22888:23888"
ports:
- "2181:2181"

connect:
image: confluentinc/cp-kafka-connect:{{ site.cp_version }}
depends_on:
- zookeeper
- kafka1
- schema-registry
ports:
- "8083:8083"
environment:
CONNECT_BOOTSTRAP_SERVERS: "kafka1:9092"
CONNECT_REST_PORT: 8083
CONNECT_GROUP_ID: compose-connect-group
CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect"
CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
CONNECT_PLUGIN_PATH: '/usr/share/java'

kafka1:
image: confluentinc/cp-enterprise-kafka:{{ site.cp_version }}
depends_on:
- zookeeper
ports:
- "29092:29092"
environment:
KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:9092,PLAINTEXT_HOST://localhost:29092
KAFKA_BROKER_ID: 1
KAFKA_BROKER_RACK: "r1"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_DELETE_TOPIC_ENABLE: "true"
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
KAFKA_SCHEMA_REGISTRY_URL: "schema-registry:8081"
KAFKA_JMX_PORT: 9991


schema-registry:
image: confluentinc/cp-schema-registry:{{ site.cp_version }}
restart: always
depends_on:
- zookeeper
environment:
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: "zookeeper:2181"
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:8081"
ports:
- 8081:8081
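
For reference, a typical session against this stack looks like the following. These are standard docker-compose commands; the kafka-topics listing is an optional sanity check and assumes that CLI is on the broker image's PATH, as it is in Confluent's images:

docker-compose pull     # fetch every image referenced above
docker-compose up -d    # start the stack in the background
docker-compose ps       # each service should report Up
docker-compose exec kafka1 kafka-topics --zookeeper zookeeper:2181 --list
docker-compose down     # stop and remove the containers when done

Note the two advertised listeners on kafka1: clients inside the compose network bootstrap against kafka1:9092, while clients on the host machine use localhost:29092.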

5 changes: 5 additions & 0 deletions _includes/shared-content/docker-setup.html
@@ -0,0 +1,5 @@
<p>Recipes on Confluent Developer typically use Docker. Install the version for your platform:</p>
<ul>
<li><a href="https://hub.docker.com/editions/community/docker-ce-desktop-windows">Docker for Windows</a></li>
<li><a href="https://hub.docker.com/editions/community/docker-ce-desktop-mac">Docker for Mac</a></li>
</ul>
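A quick way to verify the installation before continuing (standard Docker CLI commands):

docker --version
docker-compose --version
docker run --rm hello-world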
5 changes: 5 additions & 0 deletions assets/sass/main.scss
@@ -3,6 +3,11 @@

@import "bulma/bulma";

+ul li {
+  list-style-type: disc;
+  margin-left: 2em;
+}

pre {
background-color: transparent;
padding: 0;
4 changes: 2 additions & 2 deletions recipes/filtering.html
@@ -3,7 +3,7 @@
permalink: /recipes/filter-a-stream-of-events
recipe_title: Filter a stream of events

-problem: you have events in a Kafka topic, and you want to filter some of them out so that only those you're interested in appear in another topic.
+problem: you have events in a topic, but need to see a filtered view of those events

ksql_try_it: recipes/filtering/ksql/markup/try_it.html
ksql_test_it: recipes/filtering/ksql/markup/test_it.html
@@ -13,5 +13,5 @@
kstreams_test_it: recipes/filtering/kstreams/markup/test_it.html
kstreams_take_it_to_prod: recipes/filtering/kstreams/markup/take_it_to_prod.html

-default_tab: ksql
+default_tab: kstreams
---