Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bump Nussknacker to 1.15.0 #53

Merged
merged 4 commits into from
Jun 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 40 additions & 5 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,16 @@ val silencerV_2_12 = "1.6.0"
val silencerV = "1.7.0"

val flink114V = "1.14.5"
val currentFlinkV = "1.16.0"
val flink116V = "1.16.0"
val currentFlinkV = "1.18.1"
val sttpV = "3.8.11"
val kafkaV = "3.3.1"
val testContainersScalaV = "0.41.0"

ThisBuild / version := "0.1-SNAPSHOT"

val defaultNussknackerV = "1.14.0"
// todo: for now we should regularly bump the version until we start publishing a single "latest" -SNAPSHOT version
val defaultNussknackerV = "1.15.0"

val nussknackerV = {
val v = sys.env
Expand Down Expand Up @@ -145,6 +147,38 @@ lazy val flink114ManagerCompat = (project in file("flink114/manager"))
)
.dependsOn(commonTest % "test,it")

// Compatibility build of the Nussknacker model artifacts against Flink 1.16
// (defined analogously to the flink114 model project above).
lazy val flink116ModelCompat = (project in file("flink116/model"))
  .settings(commonSettings(scala212V))
  .settings(
    name := "flink116-model",
    libraryDependencies ++= deps(flink116V),
    // Pin the Kafka artifacts to a single version so transitive dependencies agree.
    dependencyOverrides ++= Seq(
      "org.apache.kafka" % "kafka-clients" % kafkaV,
      "org.apache.kafka" %% "kafka" % kafkaV,
    ),
  )
  .dependsOn(commonTest % "test,it")

// Integration tests of the Flink deployment manager running against a Flink 1.16 cluster
// (defined analogously to the flink114 manager project above).
lazy val flink116ManagerCompat = (project in file("flink116/manager"))
  .settings(commonSettings(scala212V))
  .configs(IntegrationTest)
  .settings(Defaults.itSettings)
  .settings(
    name := "flink116-manager",
    libraryDependencies ++= managerDeps(flink116V),
    dependencyOverrides ++= Seq(
      // For some strange reason, the docker client libraries conflict with the schema registry client :/
      "org.glassfish.jersey.core" % "jersey-common" % "2.22.2",
      // must be the same as used by flink - otherwise it is evicted by the version from deployment-manager-api
      "com.typesafe.akka" %% "akka-actor" % "2.6.20",
      "org.scala-lang.modules" %% "scala-java8-compat" % "1.0.2"
    ),
    // The IT suite deploys the assembled model jar, so build it before running the tests.
    IntegrationTest / Keys.test := (IntegrationTest / Keys.test)
      .dependsOn(flink116ModelCompat / Compile / assembly)
      .value,
  )
  .dependsOn(commonTest % "test,it")

def flinkExclusionsForBefore1_15 = Seq(
"org.apache.flink" % "flink-streaming-java",
"org.apache.flink" % "flink-statebackend-rocksdb",
Expand Down Expand Up @@ -175,7 +209,7 @@ def managerDeps(version: String) = Seq(
ExclusionRule("org.apache.flink", "flink-scala_2.12"),
),
"pl.touk.nussknacker" %% "nussknacker-http-utils" % nussknackerV % "provided,it,test",
"pl.touk.nussknacker" %% "nussknacker-interpreter" % nussknackerV % "provided,it,test",
"pl.touk.nussknacker" %% "nussknacker-scenario-compiler" % nussknackerV % "provided,it,test",
"pl.touk.nussknacker" %% "nussknacker-deployment-manager-api" % nussknackerV % "provided",
"org.apache.flink" %% "flink-streaming-scala" % version excludeAll (
ExclusionRule("log4j", "log4j"),
Expand All @@ -187,9 +221,10 @@ def managerDeps(version: String) = Seq(

def deps(version: String) = Seq(
"org.apache.flink" %% "flink-streaming-scala" % version % "provided",
"org.apache.flink" %% "flink-statebackend-rocksdb" % version % "provided",
"org.apache.flink" % "flink-statebackend-rocksdb" % version % "provided",
"pl.touk.nussknacker" %% "nussknacker-default-model" % nussknackerV,
"pl.touk.nussknacker" %% "nussknacker-flink-base-components" % nussknackerV,
"pl.touk.nussknacker" %% "nussknacker-flink-base-unbounded-components" % nussknackerV,
"pl.touk.nussknacker" %% "nussknacker-flink-executor" % nussknackerV,
"pl.touk.nussknacker" %% "nussknacker-flink-test-utils" % nussknackerV % "test",
"org.apache.flink" %% "flink-streaming-scala" % version % "test",
Expand All @@ -198,7 +233,7 @@ def deps(version: String) = Seq(
def flinkOverrides(version: String) = Seq(
"org.apache.flink" %% "flink-streaming-scala" % version % "provided",
"org.apache.flink" %% "flink-scala" % version % "provided",
"org.apache.flink" %% "flink-statebackend-rocksdb" % version % "provided",
"org.apache.flink" % "flink-statebackend-rocksdb" % version % "provided",
"org.apache.flink" % "flink-avro" % version,
"org.apache.flink" %% "flink-runtime" % version % "provided",
"org.apache.flink" %% "flink-connector-kafka" % version % "provided",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM flink:1.14.5-scala_${scala.major.version}-java11
FROM flink:${flink.version}-scala_${scala.major.version}-java11

COPY entrypointWithIP.sh /
COPY conf.yml /
Expand All @@ -15,4 +15,4 @@ USER flink
RUN mkdir -p /tmp/storage

USER root
ENTRYPOINT ["/entrypointWithIP.sh"]
ENTRYPOINT ["/entrypointWithIP.sh"]
3 changes: 0 additions & 3 deletions commonTest/src/main/resources/logback-test.xml
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,6 @@

<!-- we declare here loggers for libs that are not dependencies of this module, to keep the logging configuration simple -->
<!-- levels below can be sometimes duplicated in root level but let's keep them just in case if we want to change value for root logger -->
<logger name="org.apache.zookeeper.server.PrepRequestProcessor" level="WARN"/>
<logger name="com.spotify.docker.client.LoggingPullHandler" level="WARN"/>
<logger name="com.spotify.docker.client.LoggingBuildHandler" level="WARN"/>
<logger name="org.apache.flink.runtime.leaderretrieval.ZooKeeperLeaderRetrievalService" level="WARN"/>
<logger name="org.apache.flink" level="WARN"/>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,10 @@ import java.time.Duration


object FlinkContainer {
val flinkImage: ImageFromDockerfile = {
def flinkImage(flinkVersion: String): ImageFromDockerfile = {
val dockerfileWithReplacedScalaVersion = IOUtils.toString(getClass.getResourceAsStream("/docker/Dockerfile"), StandardCharsets.UTF_8)
.replace("${scala.major.version}", ScalaMajorVersionConfig.scalaMajorVersion)
.replace("${flink.version}", flinkVersion)
new ImageFromDockerfile()
.withFileFromString("Dockerfile", dockerfileWithReplacedScalaVersion)
.withFileFromClasspath("entrypointWithIP.sh", "docker/entrypointWithIP.sh")
Expand All @@ -38,10 +39,10 @@ class JobManagerContainer private(underlying: GenericContainer, network: Network
object JobManagerContainer {
val FlinkJobManagerRestPort = 8081

case class Def(savepointDir: Path, network: Network) extends GenericContainer.Def[JobManagerContainer](
case class Def(flinkVersion: String, savepointDir: Path, network: Network) extends GenericContainer.Def[JobManagerContainer](
new JobManagerContainer(
GenericContainer(
FlinkContainer.flinkImage,
FlinkContainer.flinkImage(flinkVersion),
command = List("jobmanager"),
env = Map("SAVEPOINT_DIR_NAME" -> savepointDir.getFileName.toString),
waitStrategy = new LogMessageWaitStrategy().withRegEx(".*Recover all persisted job graphs.*").withStartupTimeout(Duration.ofSeconds(250)),
Expand All @@ -60,10 +61,10 @@ class TaskManagerContainer private(underlying: GenericContainer, network: Networ
object TaskManagerContainer {
private val TaskManagerSlots = 8

case class Def(network: Network, jobmanagerRpcAddress: String) extends GenericContainer.Def[TaskManagerContainer](
case class Def(flinkVersion: String, network: Network, jobmanagerRpcAddress: String) extends GenericContainer.Def[TaskManagerContainer](
new TaskManagerContainer(
GenericContainer(
FlinkContainer.flinkImage,
FlinkContainer.flinkImage(flinkVersion),
command = List("taskmanager"),
env = Map(
"TASK_MANAGER_NUMBER_OF_TASK_SLOTS" -> TaskManagerSlots.toString,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ import org.testcontainers.containers.Network
import pl.touk.nussknacker.engine._
import pl.touk.nussknacker.engine.api.ProcessVersion
import pl.touk.nussknacker.engine.api.component.DesignerWideComponentId
import pl.touk.nussknacker.engine.api.deployment._
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
import pl.touk.nussknacker.engine.api.deployment.{DataFreshnessPolicy, DeploymentManager, ProcessingTypeDeploymentServiceStub}
import pl.touk.nussknacker.engine.api.process.ProcessName
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.deployment.{DeploymentData, User}
Expand All @@ -40,6 +40,8 @@ trait StreamingDockerTest extends TestContainersForAll

override type Containers = JobManagerContainer and TaskManagerContainer

protected val flinkVersion: String

private val userToAct: User = User("testUser", "Test User")

override implicit val patienceConfig: PatienceConfig = PatienceConfig(timeout = scaled(Span(2, Minutes)), interval = scaled(Span(100, Millis)))
Expand All @@ -54,9 +56,9 @@ trait StreamingDockerTest extends TestContainersForAll
override def startContainers(): Containers = {
val network = Network.newNetwork()
val volumeDir = prepareVolumeDir()
val jobmanager: JobManagerContainer = JobManagerContainer.Def(volumeDir, network).start()
val jobmanager: JobManagerContainer = JobManagerContainer.Def(flinkVersion, volumeDir, network).start()
val jobmanagerHostName = jobmanager.container.getContainerInfo.getConfig.getHostName
val taskmanager: TaskManagerContainer = TaskManagerContainer.Def(network, jobmanagerHostName).start()
val taskmanager: TaskManagerContainer = TaskManagerContainer.Def(flinkVersion, network, jobmanagerHostName).start()
jobmanager and taskmanager
}

Expand All @@ -77,7 +79,8 @@ trait StreamingDockerTest extends TestContainersForAll
)
}
val deploymentManagerDependencies = DeploymentManagerDependencies(
new ProcessingTypeDeploymentServiceStub(List.empty),
new ProcessingTypeDeployedScenariosProviderStub(List.empty),
new ProcessingTypeActionServiceStub(),
actorSystem.dispatcher,
actorSystem,
backend
Expand All @@ -103,12 +106,12 @@ trait StreamingDockerTest extends TestContainersForAll
}

private def deployProcess(process: CanonicalProcess, processVersion: ProcessVersion, savepointPath: Option[String] = None, deploymentManager: DeploymentManager): Assertion = {
assert(deploymentManager.deploy(processVersion, DeploymentData.empty, process, savepointPath).isReadyWithin(100 seconds))
assert(deploymentManager.processCommand(DMRunDeploymentCommand(processVersion, DeploymentData.empty, process, savepointPath)).isReadyWithin(100 seconds))
}

protected def cancelProcess(processId: String, deploymentManager: DeploymentManager): Unit = {
implicit val freshnessPolicy: DataFreshnessPolicy = DataFreshnessPolicy.Fresh
assert(deploymentManager.cancel(ProcessName(processId), user = userToAct).isReadyWithin(10 seconds))
assert(deploymentManager.processCommand(DMCancelScenarioCommand(ProcessName(processId), user = userToAct)).isReadyWithin(10 seconds))
eventually {
val runningJobs = deploymentManager
.getProcessStates(ProcessName(processId))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,12 @@ import pl.touk.nussknacker.engine.management.FlinkStreamingDeploymentManagerProv
import pl.touk.nussknacker.engine.management.common.CommonFlinkStreamingDeploymentManagerSpec
import pl.touk.nussknacker.engine.util.config.ScalaMajorVersionConfig

class FlinkStreamingDeploymentManagerSpec extends CommonFlinkStreamingDeploymentManagerSpec {
class Flink114StreamingDeploymentManagerSpec extends CommonFlinkStreamingDeploymentManagerSpec {
override protected def classPath: String =
s"./flink114/model/target/scala-${ScalaMajorVersionConfig.scalaMajorVersion}/flink114-model-assembly.jar"

override protected def deploymentManagerProvider: FlinkStreamingDeploymentManagerProvider =
new FlinkStreamingDeploymentManagerProvider()

override protected val flinkVersion: String = "1.14.5"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.client.deployment.executors;

import org.apache.flink.api.common.JobID;
import org.apache.flink.api.dag.Pipeline;
import org.apache.flink.client.FlinkPipelineTranslationUtil;
import org.apache.flink.client.cli.ExecutionConfigAccessor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.PipelineOptionsInternal;
import org.apache.flink.runtime.jobgraph.JobGraph;

import javax.annotation.Nonnull;

import java.net.MalformedURLException;

import static org.apache.flink.util.Preconditions.checkNotNull;

// This class is copied from Flink release-1.14 and patched, because MiniClusterExecutionEnvironment binds to the
// Flink 1.16+ signature getJobGraph(Pipeline, Configuration, ClassLoader), while the method in Flink 1.14 doesn't
// have the ClassLoader argument, which leads to binary incompatibility.
public class PipelineExecutorUtils {

    /**
     * Translates the given pipeline into a {@link JobGraph}, mimicking the Flink 1.16+ method signature.
     *
     * @param pipeline the pipeline to translate; must not be null
     * @param configuration carries the parallelism, optional fixed job id, jars, classpaths and savepoint settings
     * @param userClassloader unused; present only to match the Flink 1.16+ signature (see class comment)
     * @return the translated job graph with jars, classpaths and savepoint restore settings applied
     * @throws MalformedURLException if the configured jar/classpath URLs cannot be parsed
     */
    public static JobGraph getJobGraph(
            @Nonnull final Pipeline pipeline,
            @Nonnull final Configuration configuration,
            @Nonnull ClassLoader userClassloader // dummy parameter <- the only one line changed
    ) throws MalformedURLException {
        checkNotNull(pipeline);
        checkNotNull(configuration);

        final ExecutionConfigAccessor executionConfigAccessor =
                ExecutionConfigAccessor.fromConfiguration(configuration);
        final JobGraph jobGraph =
                FlinkPipelineTranslationUtil.getJobGraph(
                        pipeline, configuration, executionConfigAccessor.getParallelism());

        // Honour an externally fixed job id if one was configured.
        configuration
                .getOptional(PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID)
                .ifPresent(strJobID -> jobGraph.setJobID(JobID.fromHexString(strJobID)));

        jobGraph.addJars(executionConfigAccessor.getJars());
        jobGraph.setClasspaths(executionConfigAccessor.getClasspaths());
        jobGraph.setSavepointRestoreSettings(executionConfigAccessor.getSavepointRestoreSettings());

        return jobGraph;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@ package pl.touk.nussknacker.compatibility
import org.scalatest.funsuite.AnyFunSuite
import pl.touk.nussknacker.compatibility.common.BaseGenericITSpec

class GenericITSpec extends AnyFunSuite with BaseGenericITSpec with FlinkSpec
class Flink114GenericITSpec extends AnyFunSuite with BaseGenericITSpec with FlinkSpec
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ import pl.touk.nussknacker.engine.process.helpers.TestResultsHolder

import java.lang

class TimestampTest extends BaseTimestampTest with FlinkSpec with Matchers {
class Flink114TimestampTest extends BaseTimestampTest with FlinkSpec with Matchers {
override protected val sinkForLongsResultsHolder: () => TestResultsHolder[lang.Long] =
() => TimestampTest.sinkForLongsResultsHolder
() => Flink114TimestampTest.sinkForLongsResultsHolder
}

object TimestampTest extends Serializable {
object Flink114TimestampTest extends Serializable {
private val sinkForLongsResultsHolder = new TestResultsHolder[java.lang.Long]
}
19 changes: 19 additions & 0 deletions flink116/manager/src/it/resources/application.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@

# Configuration for the Flink 1.16 deployment-manager integration tests.
deploymentConfig {
  type: "test"
  # REST endpoint of the job manager (port published to localhost by the test containers).
  restUrl: "http://localhost:8081"
  queryableStateProxyUrl: "localhost:9069"
}

modelConfig {
  rocksDB: {
    enable: false
  }
  kafka {
    # Placeholder addresses - presumably never contacted by these tests; verify against the test setup.
    kafkaAddress: "dummy:9092"
    kafkaProperties {
      "schema.registry.url": "http://dummy:1111"
    }
  }
}

19 changes: 19 additions & 0 deletions flink116/manager/src/it/resources/logback-test.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
<!-- Logging configuration for the flink116 manager integration tests. -->
<configuration scan="true" scanPeriod="5 seconds">
    <jmxConfigurator/>

    <!-- Single console appender; all test logging goes to stdout. -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <Pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</Pattern>
        </encoder>
    </appender>

    <root level="INFO">
        <appender-ref ref="STDOUT"/>
    </root>

    <!-- Keep logback's own loggers at warn to reduce noise. -->
    <logger name="ch.qos.logback" level="warn"/>


</configuration>


Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
package pl.touk.nussknacker.engine.management.streaming

import pl.touk.nussknacker.engine.management.FlinkStreamingDeploymentManagerProvider
import pl.touk.nussknacker.engine.management.common.CommonFlinkStreamingDeploymentManagerSpec
import pl.touk.nussknacker.engine.util.config.ScalaMajorVersionConfig

/** Runs the shared streaming deployment-manager spec against the Flink 1.16 setup. */
class Flink116StreamingDeploymentManagerSpec extends CommonFlinkStreamingDeploymentManagerSpec {

  // Version of the Flink cluster the common spec starts for this suite.
  override protected val flinkVersion: String = "1.16.0"

  // Model jar produced by the flink116 model assembly.
  override protected def classPath: String = {
    val scalaMajor = ScalaMajorVersionConfig.scalaMajorVersion
    s"./flink116/model/target/scala-$scalaMajor/flink116-model-assembly.jar"
  }

  override protected def deploymentManagerProvider: FlinkStreamingDeploymentManagerProvider =
    new FlinkStreamingDeploymentManagerProvider()
}
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
pl.touk.nussknacker.engine.management.FlinkStreamingDeploymentManagerProvider
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
pl.touk.nussknacker.compatibility.common.MockKafkaComponentProvider
10 changes: 10 additions & 0 deletions flink116/model/src/test/resources/application.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Model configuration used by the flink116 model tests.
checkpointConfig {
  checkpointInterval: 10s
}
timeout: 10s
asyncExecutionConfig {
  bufferSize: 200
  workers: 8
}

# Disable the mockKafka components by default (enabled explicitly where a test needs them - TODO confirm).
components.mockKafka.disabled: true
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
package pl.touk.nussknacker.compatibility

import org.scalatest.funsuite.AnyFunSuite
import pl.touk.nussknacker.compatibility.common.BaseGenericITSpec

/** Executes the shared generic integration scenarios (BaseGenericITSpec) on the Flink environment provided by FlinkSpec. */
class Flink116GenericITSpec extends AnyFunSuite with BaseGenericITSpec with FlinkSpec
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
package pl.touk.nussknacker.compatibility

import org.scalatest.matchers.should.Matchers
import pl.touk.nussknacker.compatibility.common.BaseTimestampTest
import pl.touk.nussknacker.engine.process.helpers.TestResultsHolder

import java.lang

/** Runs the shared timestamp test scenario (BaseTimestampTest) on the Flink environment provided by FlinkSpec. */
class Flink116TimestampTest extends BaseTimestampTest with FlinkSpec with Matchers {
  // Supplies the shared holder from the companion so results survive job execution.
  override protected val sinkForLongsResultsHolder: () => TestResultsHolder[lang.Long] =
    () => Flink116TimestampTest.sinkForLongsResultsHolder
}

// NOTE(review): the holder lives in a Serializable companion - presumably so it can be referenced
// from serialized Flink job closures; verify against BaseTimestampTest usage.
object Flink116TimestampTest extends Serializable {
  private val sinkForLongsResultsHolder = new TestResultsHolder[java.lang.Long]
}
Loading
Loading