[FLINK-8193][quickstart] Cleanup quickstart poms
This closes #5118.
zentol committed Dec 6, 2017
1 parent 612f07b commit 9ba9e80
Showing 2 changed files with 112 additions and 283 deletions.
@@ -59,38 +59,37 @@ under the License.
a) Adding new dependencies:
You can add dependencies to the list below.
Please check if the maven-shade-plugin below is filtering out your dependency
and remove the exclude from there.
b) Build a jar for running on the cluster:
There are two options for creating a jar from this project
b.1) "mvn clean package" -> this will create a fat jar which contains all
dependencies necessary for running the jar created by this pom in a cluster.
The "maven-shade-plugin" excludes everything that is provided on a running Flink cluster.
b.2) "mvn clean package -Pbuild-jar" -> This will also create a fat-jar, but with much
nicer dependency exclusion handling. This approach is preferred and leads to
much cleaner jar files.
"mvn clean package -Pbuild-jar"
This will create a fat-jar which contains all dependencies necessary for running the created jar in a cluster.
-->
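As a sketch of point a) above (not part of this commit), adding a user dependency means appending another <dependency> entry alongside the Flink ones below; anything not matched by the shade-plugin excludes further down ends up in the fat jar. The Kafka connector here is purely illustrative:

    <!-- hypothetical user dependency; it is packaged into the fat jar because
         it is not listed in the maven-shade-plugin excludes below -->
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-connector-kafka-0.10_${scala.binary.version}</artifactId>
        <version>${flink.version}</version>
    </dependency>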

<dependencies>
<!-- Apache Flink dependencies -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<artifactId>flink-core</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<!-- This dependency is required to actually execute jobs. It is currently pulled in by
flink-streaming-java, but we explicitly depend on it to safeguard against future changes. -->
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>

<!-- explicitly add a standard logging framework, as Flink does not have
a hard dependency on one specific framework by default -->
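The concrete logging entries are collapsed in this hunk. As a hedged sketch only, the Flink quickstarts of this era typically declared slf4j-log4j12 and log4j here; the version properties below are assumptions rather than values taken from this commit:

    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-log4j12</artifactId>
        <version>${slf4j.version}</version>
    </dependency>
    <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
        <version>${log4j.version}</version>
    </dependency>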
@@ -118,13 +117,13 @@ under the License.
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<artifactId>flink-core</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
@@ -134,6 +133,12 @@ under the License.
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
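The provided-scope entries above belong to a profile whose declaration is collapsed in this hunk; marking the Flink dependencies as provided keeps them out of the fat jar when the profile is active. Assuming the pre-existing build-jar profile referenced in the header comment, the enclosing structure looks roughly like this (a sketch, not the literal file contents):

    <profiles>
        <profile>
            <!-- activated with: mvn clean package -Pbuild-jar -->
            <id>build-jar</id>
            <dependencies>
                <!-- Flink dependencies redeclared with provided scope,
                     so they are not packaged into the fat jar -->
                <dependency>
                    <groupId>org.apache.flink</groupId>
                    <artifactId>flink-core</artifactId>
                    <version>${flink.version}</version>
                    <scope>provided</scope>
                </dependency>
            </dependencies>
        </profile>
    </profiles>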
@@ -150,21 +155,49 @@ under the License.

<build>
<plugins>
<!-- disable the exclusion rules -->
<!-- We use the maven-shade plugin to create a fat jar that contains all dependencies
except flink and its transitive dependencies. The resulting fat-jar can be executed
on a cluster. Change the value of Program-Class if your program entry point changes. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.1</version>
<executions>
<!-- Run shade goal on package phase -->
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<excludes combine.self="override"></excludes>
<excludes>
<exclude>org.apache.flink:force-shading</exclude>
<exclude>com.google.code.findbugs:jsr305</exclude>
<exclude>org.slf4j:slf4j-api</exclude>
</excludes>
</artifactSet>
<filters>
<filter>
<!-- Do not copy the signatures in the META-INF folder.
Otherwise, this might cause SecurityExceptions when using the JAR. -->
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<!-- If you want to use ./bin/flink run <quickstart jar> uncomment the following lines.
This will add a Main-Class entry to the manifest file -->
<!--
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>${package}.StreamingJob</mainClass>
</transformer>
</transformers>
-->
</configuration>
</execution>
</executions>
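To act on the comment above when submitting with ./bin/flink run, the commented transformer block is uncommented as-is; ${package} is the archetype placeholder filled in when the project is generated, and StreamingJob is the generated entry-point class. Uncommented, it reads:

    <transformers>
        <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
            <!-- written as Main-Class into META-INF/MANIFEST.MF of the fat jar -->
            <mainClass>${package}.StreamingJob</mainClass>
        </transformer>
    </transformers>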
@@ -176,130 +209,6 @@ under the License.

<build>
<plugins>
<!-- We use the maven-shade plugin to create a fat jar that contains all dependencies
except flink and its transitive dependencies. The resulting fat-jar can be executed
on a cluster. Change the value of Program-Class if your program entry point changes. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.1</version>
<executions>
<!-- Run shade goal on package phase -->
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<excludes>
<!-- This list contains all dependencies of flink-dist
Everything else will be packaged into the fat-jar
-->
<exclude>org.apache.flink:flink-annotations</exclude>
<exclude>org.apache.flink:flink-shaded-hadoop2</exclude>
<exclude>org.apache.flink:flink-shaded-curator</exclude>
<exclude>org.apache.flink:flink-core</exclude>
<exclude>org.apache.flink:flink-java</exclude>
<exclude>org.apache.flink:flink-scala_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-runtime_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-optimizer_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-clients_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-avro</exclude>
<exclude>org.apache.flink:flink-examples-batch_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-examples-streaming_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-streaming-java_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-streaming-scala_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-scala-shell_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-python</exclude>
<exclude>org.apache.flink:flink-metrics-core</exclude>
<exclude>org.apache.flink:flink-metrics-jmx</exclude>
<exclude>org.apache.flink:flink-statebackend-rocksdb_${scala.binary.version}</exclude>
<exclude>org.apache.flink:flink-shaded-jackson</exclude>

<!-- Also exclude very big transitive dependencies of Flink
WARNING: You have to remove these excludes if your code relies on other
versions of these dependencies.
-->

<exclude>log4j:log4j</exclude>
<exclude>org.scala-lang:scala-library</exclude>
<exclude>org.scala-lang:scala-compiler</exclude>
<exclude>org.scala-lang:scala-reflect</exclude>
<exclude>com.typesafe.akka:akka-actor_*</exclude>
<exclude>com.typesafe.akka:akka-remote_*</exclude>
<exclude>com.typesafe.akka:akka-slf4j_*</exclude>
<exclude>io.netty:netty-all</exclude>
<exclude>io.netty:netty</exclude>
<exclude>commons-fileupload:commons-fileupload</exclude>
<exclude>org.apache.avro:avro</exclude>
<exclude>commons-collections:commons-collections</exclude>
<exclude>org.codehaus.jackson:jackson-core-asl</exclude>
<exclude>org.codehaus.jackson:jackson-mapper-asl</exclude>
<exclude>com.thoughtworks.paranamer:paranamer</exclude>
<exclude>org.xerial.snappy:snappy-java</exclude>
<exclude>org.apache.commons:commons-compress</exclude>
<exclude>org.tukaani:xz</exclude>
<exclude>com.esotericsoftware.kryo:kryo</exclude>
<exclude>com.esotericsoftware.minlog:minlog</exclude>
<exclude>org.objenesis:objenesis</exclude>
<exclude>com.twitter:chill_*</exclude>
<exclude>com.twitter:chill-java</exclude>
<exclude>commons-lang:commons-lang</exclude>
<exclude>junit:junit</exclude>
<exclude>org.apache.commons:commons-lang3</exclude>
<exclude>org.slf4j:slf4j-api</exclude>
<exclude>org.slf4j:slf4j-log4j12</exclude>
<exclude>log4j:log4j</exclude>
<exclude>org.apache.commons:commons-math</exclude>
<exclude>org.apache.sling:org.apache.sling.commons.json</exclude>
<exclude>commons-logging:commons-logging</exclude>
<exclude>commons-codec:commons-codec</exclude>
<exclude>stax:stax-api</exclude>
<exclude>com.typesafe:config</exclude>
<exclude>org.uncommons.maths:uncommons-maths</exclude>
<exclude>com.github.scopt:scopt_*</exclude>
<exclude>commons-io:commons-io</exclude>
<exclude>commons-cli:commons-cli</exclude>
</excludes>
</artifactSet>
<filters>
<filter>
<artifact>org.apache.flink:*</artifact>
<excludes>
<!-- exclude shaded google but include shaded curator -->
<exclude>org/apache/flink/shaded/com/**</exclude>
<exclude>web-docs/**</exclude>
</excludes>
</filter>
<filter>
<!-- Do not copy the signatures in the META-INF folder.
Otherwise, this might cause SecurityExceptions when using the JAR. -->
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<!-- If you want to use ./bin/flink run <quickstart jar> uncomment the following lines.
This will add a Main-Class entry to the manifest file -->
<!--
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>${package}.StreamingJob</mainClass>
</transformer>
</transformers>
-->
<createDependencyReducedPom>false</createDependencyReducedPom>
</configuration>
</execution>
</executions>
</plugin>

<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
