Skip to content
Permalink
Browse files
[FLINK-27509] update table-walkthrough playground for Flink 1.14
  • Loading branch information
shba24 committed May 23, 2022
1 parent de4d2d7 commit e891d49dc52e1019fe606b87d41573bdcd8a52df
Showing 6 changed files with 12 additions and 17 deletions.
@@ -13,7 +13,7 @@ Currently, the following playgrounds are available:
Flink job. The playground is presented in detail in
["Flink Operations Playground"](https://ci.apache.org/projects/flink/flink-docs-release-1.14/docs/try-flink/flink-operations-playground), which is part of the _Try Flink_ section of the Flink documentation.

* The **Table Walkthrough** (in the `table-walkthrough` folder) shows how to use the Table API to build an analytics pipeline that reads streaming data from Kafka and writes results to MySQL, along with a real-time dashboard in Grafana. The walkthrough is presented in detail in ["Real Time Reporting with the Table API"](https://ci.apache.org/projects/flink/flink-docs-release-1.13/docs/try-flink/table_api), which is part of the _Try Flink_ section of the Flink documentation.
* The **Table Walkthrough** (in the `table-walkthrough` folder) shows how to use the Table API to build an analytics pipeline that reads streaming data from Kafka and writes results to MySQL, along with a real-time dashboard in Grafana. The walkthrough is presented in detail in ["Real Time Reporting with the Table API"](https://ci.apache.org/projects/flink/flink-docs-release-1.14/docs/try-flink/table_api), which is part of the _Try Flink_ section of the Flink documentation.

* The **PyFlink Walkthrough** (in the `pyflink-walkthrough` folder) provides a complete example that uses the Python API, and guides you through the steps needed to run and manage PyFlink jobs. The pipeline used in this walkthrough reads data from Kafka, performs aggregations, and writes results to Elasticsearch that are visualized with Kibana. This walkthrough is presented in detail in the [pyflink-walkthrough README](pyflink-walkthrough).

@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

FROM maven:3.6-jdk-8-slim AS builder
FROM maven:3.8-jdk-8-slim AS builder

# Get data producer code and compile it
COPY ./src /opt/data-producer/src
@@ -16,18 +16,18 @@
# limitations under the License.
################################################################################

FROM maven:3.6-jdk-8-slim AS builder
FROM maven:3.8-jdk-8-slim AS builder

COPY ./pom.xml /opt/pom.xml
COPY ./src /opt/src
RUN cd /opt; mvn clean install -Dmaven.test.skip

FROM apache/flink:1.13.1-scala_2.12-java8
FROM apache/flink:1.14.4-scala_2.12-java8

# Download connector libraries
RUN wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-kafka_2.12/1.13.1/flink-sql-connector-kafka_2.12-1.13.1.jar; \
wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-connector-jdbc_2.12/1.13.1/flink-connector-jdbc_2.12-1.13.1.jar; \
wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-csv/1.13.1/flink-csv-1.13.1.jar; \
RUN wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-kafka_2.12/1.14.4/flink-sql-connector-kafka_2.12-1.14.4.jar; \
wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-connector-jdbc_2.12/1.14.4/flink-connector-jdbc_2.12-1.14.4.jar; \
wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-csv/1.14.4/flink-csv-1.14.4.jar; \
wget -P /opt/flink/lib/ https://repo.maven.apache.org/maven2/mysql/mysql-connector-java/8.0.19/mysql-connector-java-8.0.19.jar;

COPY --from=builder /opt/target/spend-report-*.jar /opt/flink/usrlib/spend-report.jar
@@ -19,7 +19,7 @@
version: '2.1'
services:
jobmanager:
image: apache/flink-table-walkthrough:1-FLINK-1.13-scala_2.12
image: apache/flink-table-walkthrough:1-FLINK-1.14-scala_2.12
build: .
hostname: "jobmanager"
expose:
@@ -33,7 +33,7 @@ services:
- kafka
- mysql
taskmanager:
image: apache/flink-table-walkthrough:1-FLINK-1.13-scala_2.12
image: apache/flink-table-walkthrough:1-FLINK-1.14-scala_2.12
build: .
expose:
- "6121"
@@ -50,7 +50,7 @@ services:
ports:
- "2181:2181"
kafka:
image: wurstmeister/kafka:2.12-2.2.1
image: wurstmeister/kafka:2.13-2.8.1
ports:
- "9092:9092"
depends_on:
@@ -30,7 +30,7 @@ under the License.

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<flink.version>1.13.1</flink.version>
<flink.version>1.14.4</flink.version>
<java.version>1.8</java.version>
<scala.binary.version>2.12</scala.binary.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
@@ -72,12 +72,6 @@ under the License.
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${scala.binary.version}</artifactId>
@@ -45,6 +45,7 @@ public static void main(String[] args) throws Exception {
" 'connector' = 'kafka',\n" +
" 'topic' = 'transactions',\n" +
" 'properties.bootstrap.servers' = 'kafka:9092',\n" +
" 'scan.startup.mode' = 'earliest-offset',\n" +
" 'format' = 'csv'\n" +
")");

0 comments on commit e891d49

Please sign in to comment.