// CassandraSparkBuild.scala
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import sbt.Keys._
/**
 * sbt (0.13.x `Build`-trait style) multi-project definition for the
 * Spark Cassandra Connector.
 *
 * Project graph:
 *   root  -- aggregates -->  embedded, connector, jconnector, demos
 *   demos -- aggregates -->  simple-demos, kafka-streaming, twitter-streaming
 *
 * `Settings` (rootSettings, defaultSettings, assembledSettings, japiSettings,
 * demoSettings, kafkaDemoSettings, parentSettings) and `Versions` are defined
 * elsewhere in the build and are not visible in this file.
 */
object CassandraSparkBuild extends Build {
import Settings._
import Versions.scalaBinary
// Base name shared by the published modules and their directories.
val namespace = "spark-cassandra-connector"
// Directory that holds all demo sub-projects.
val demosPath = file(s"$namespace-demos")
// Aggregating root project: running a task on `root` runs it on every
// contained project.
lazy val root = RootProject(
name = "root",
dir = file("."),
settings = rootSettings,
contains = Seq(embedded, connector, demos, jconnector)
)
// Embedded servers (Cassandra, Kafka, Spark REPL — see Dependencies.embedded)
// used by tests and demos.
lazy val embedded = CrossScalaVersionsProject(
name = s"$namespace-embedded",
conf = defaultSettings ++ Seq(libraryDependencies ++= Dependencies.embedded)
) configs IntegrationTest
// The main connector artifact. scala-reflect is a compile dependency pinned
// to the build's scalaVersion; scala-compiler is only needed in test/it.
// `.copy` is used because `Project(...)` here is the local template below,
// which does not take a dependencies argument with scope mappings.
lazy val connector = CrossScalaVersionsProject(
name = namespace,
conf = assembledSettings ++ Seq(libraryDependencies ++= Dependencies.connector ++ Seq(
"org.scala-lang" % "scala-reflect" % scalaVersion.value,
"org.scala-lang" % "scala-compiler" % scalaVersion.value % "test,it"))
).copy(dependencies = Seq(embedded % "test->test;it->it,test;")
) configs IntegrationTest
// Java API wrapper around `connector`; reuses the connector's settings and
// maps every configuration onto the corresponding connector configuration.
lazy val jconnector = Project(
id = s"$namespace-java",
base = file(s"$namespace-java"),
settings = japiSettings ++ connector.settings,
dependencies = Seq(connector % "compile;runtime->runtime;test->test;it->it,test;provided->provided")
) configs IntegrationTest
// Aggregator for the demo applications below.
lazy val demos = RootProject(
name = "demos",
dir = demosPath,
contains = Seq(simpleDemos, kafkaStreaming, twitterStreaming)
)
lazy val simpleDemos = Project(
id = "simple-demos",
base = demosPath / "simple-demos",
settings = japiSettings ++ demoSettings,
dependencies = Seq(connector, jconnector, embedded)
)
// Kafka demo: the Kafka dependency is only added when building for
// Scala < 2.11 — NOTE(review): presumably because the pinned Kafka demo
// artifact is not published for 2.11; confirm against Versions.Kafka.
lazy val kafkaStreaming = CrossScalaVersionsProject(
name = "kafka-streaming",
conf = demoSettings ++ kafkaDemoSettings ++ Seq(
libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, minor)) if minor < 11 => Dependencies.kafka
case _ => Seq.empty
}))).copy(base = demosPath / "kafka-streaming", dependencies = Seq(connector, embedded))
lazy val twitterStreaming = Project(
id = "twitter-streaming",
base = demosPath / "twitter-streaming",
settings = demoSettings ++ Seq(libraryDependencies ++= Dependencies.twitter),
dependencies = Seq(connector)
)
// Per-Scala-binary-version source directory, e.g. <base>/scala-2.10/src.
def crossBuildPath(base: sbt.File, v: String): sbt.File = base / s"scala-$v" / "src"
/* templates */
// Template for projects that, in addition to the standard layout, keep
// version-specific sources under scala-<binaryVersion>/src. The extra
// directory is registered for compilation, packaging, and scaladoc.
def CrossScalaVersionsProject(name: String,
conf: Seq[Def.Setting[_]],
reliesOn: Seq[ClasspathDep[ProjectReference]] = Seq.empty) =
Project(id = name, base = file(name), dependencies = reliesOn, settings = conf ++ Seq(
unmanagedSourceDirectories in (Compile, packageBin) +=
crossBuildPath(baseDirectory.value, scalaBinaryVersion.value),
unmanagedSourceDirectories in (Compile, doc) +=
crossBuildPath(baseDirectory.value, scalaBinaryVersion.value),
unmanagedSourceDirectories in Compile +=
crossBuildPath(baseDirectory.value, scalaBinaryVersion.value)
))
// Template for pure aggregator projects (no sources of their own); always
// applies parentSettings before any caller-supplied settings.
def RootProject(name: String, dir: sbt.File, settings: => scala.Seq[sbt.Def.Setting[_]] = Seq.empty, contains: Seq[ProjectReference]): Project =
Project(id = name, base = dir, settings = parentSettings ++ settings, aggregate = contains)
}
/**
 * Dependency declarations for every module of the build. All version
 * numbers come from the `Versions` object defined elsewhere in the build.
 */
object Dependencies {
import Versions._
// Pimps `ModuleID` with named exclusion sets so conflicting transitive
// jars (guava, logback, spark artifacts, old JMX jars) stay off the
// classpath. Used infix below, e.g. `... % Cassandra guavaExclude`.
implicit class Exclude(module: ModuleID) {
def guavaExclude: ModuleID =
module exclude("com.google.guava", "guava")
// guava plus the cross-versioned spark-core artifact (spark modules
// below already depend on spark-core via the Spark `%%` dependency).
def sparkExclusions: ModuleID = module.guavaExclude
.exclude("org.apache.spark", s"spark-core_$scalaBinary")
def logbackExclude: ModuleID = module
.exclude("ch.qos.logback", "logback-classic")
.exclude("ch.qos.logback", "logback-core")
def replExclusions: ModuleID = module.guavaExclude
.exclude("org.apache.spark", s"spark-bagel_$scalaBinary")
.exclude("org.apache.spark", s"spark-mllib_$scalaBinary")
.exclude("org.scala-lang", "scala-compiler")
def kafkaExclusions: ModuleID = module
.exclude("org.slf4j", "slf4j-simple")
.exclude("com.sun.jmx", "jmxri")
.exclude("com.sun.jdmk", "jmxtools")
.exclude("net.sf.jopt-simple", "jopt-simple")
}
// Dependencies of the main connector module. Artifacts marked "provided"
// are expected on the runtime classpath of the user's Spark deployment.
object Compile {
val akkaActor = "com.typesafe.akka" %% "akka-actor" % Akka % "provided" // ApacheV2
val akkaRemote = "com.typesafe.akka" %% "akka-remote" % Akka % "provided" // ApacheV2
val akkaSlf4j = "com.typesafe.akka" %% "akka-slf4j" % Akka % "provided" // ApacheV2
val cassandraClient = "org.apache.cassandra" % "cassandra-clientutil" % Cassandra guavaExclude // ApacheV2
val cassandraDriver = "com.datastax.cassandra" % "cassandra-driver-core" % CassandraDriver guavaExclude // ApacheV2
val commonsLang3 = "org.apache.commons" % "commons-lang3" % CommonsLang3 // ApacheV2
val config = "com.typesafe" % "config" % Config % "provided" // ApacheV2
val guava = "com.google.guava" % "guava" % Guava
val jodaC = "org.joda" % "joda-convert" % JodaC
val jodaT = "joda-time" % "joda-time" % JodaT
val lzf = "com.ning" % "compress-lzf" % Lzf % "provided"
val slf4jApi = "org.slf4j" % "slf4j-api" % Slf4j % "provided" // MIT
val jsr166e = "com.twitter" % "jsr166e" % JSR166e // Creative Commons
/* To allow spark artifact inclusion in the demos at runtime, we set 'provided' below. */
val sparkCore = "org.apache.spark" %% "spark-core" % Spark guavaExclude // ApacheV2
val sparkUnsafe = "org.apache.spark" %% "spark-unsafe" % Spark guavaExclude // ApacheV2
val sparkStreaming = "org.apache.spark" %% "spark-streaming" % Spark guavaExclude // ApacheV2
val sparkSql = "org.apache.spark" %% "spark-sql" % Spark sparkExclusions // ApacheV2
val sparkCatalyst = "org.apache.spark" %% "spark-catalyst" % Spark sparkExclusions // ApacheV2
val sparkHive = "org.apache.spark" %% "spark-hive" % Spark sparkExclusions // ApacheV2
// Codahale metrics exposed by Spark's metrics system; provided by Spark.
object Metrics {
val metricsCore = "com.codahale.metrics" % "metrics-core" % CodaHaleMetrics % "provided"
val metricsJson = "com.codahale.metrics" % "metrics-json" % CodaHaleMetrics % "provided"
}
// Jetty at the version Spark ships with; provided by the Spark runtime.
object Jetty {
val jettyServer = "org.eclipse.jetty" % "jetty-server" % SparkJetty % "provided"
val jettyServlet = "org.eclipse.jetty" % "jetty-servlet" % SparkJetty % "provided"
}
// Dependencies of the spark-cassandra-connector-embedded module
// (in-process Cassandra/Kafka servers for tests and demos).
object Embedded {
val akkaCluster = "com.typesafe.akka" %% "akka-cluster" % Akka // ApacheV2
val cassandraServer = "org.apache.cassandra" % "cassandra-all" % Cassandra logbackExclude // ApacheV2
val jopt = "net.sf.jopt-simple" % "jopt-simple" % JOpt
val kafka = "org.apache.kafka" %% "kafka" % Kafka kafkaExclusions // ApacheV2
val sparkRepl = "org.apache.spark" %% "spark-repl" % Spark % "provided" replExclusions // ApacheV2
// NOTE(review): snappy version is hard-coded here instead of coming
// from Versions like everything else — confirm this is intentional.
val snappy = "org.xerial.snappy" % "snappy-java" % "1.1.1.7"
}
// Dependencies of the demo sub-projects.
// NOTE(review): these two kafka artifacts pin the _2.10 suffix instead of
// using %% — verify this is intentional for cross-built demos (the build
// only adds Demos.kafka for Scala < 2.11, see kafkaStreaming).
object Demos {
val kafka = "org.apache.kafka" % "kafka_2.10" % Kafka kafkaExclusions // ApacheV2
val kafkaStreaming = "org.apache.spark" % "spark-streaming-kafka_2.10" % Spark % "provided" sparkExclusions // ApacheV2
val twitterStreaming = "org.apache.spark" %% "spark-streaming-twitter" % Spark % "provided" sparkExclusions // ApacheV2
}
// Test-only dependencies, scoped to both the test and it configurations.
object Test {
val akkaTestKit = "com.typesafe.akka" %% "akka-testkit" % Akka % "test,it" // ApacheV2
val commonsIO = "commons-io" % "commons-io" % CommonsIO % "test,it" // ApacheV2
val scalaMock = "org.scalamock" %% "scalamock-scalatest-support" % ScalaMock % "test,it" // BSD
val scalaTest = "org.scalatest" %% "scalatest" % ScalaTest % "test,it" // ApacheV2
val scalactic = "org.scalactic" %% "scalactic" % Scalactic % "test,it" // ApacheV2
val mockito = "org.mockito" % "mockito-all" % "1.10.19" % "test,it" // MIT
val junit = "junit" % "junit" % "4.11" % "test,it"
val junitInterface = "com.novocode" % "junit-interface" % "0.10" % "test,it"
val powerMock = "org.powermock" % "powermock-module-junit4" % "1.6.2" % "test,it" // ApacheV2
val powerMockMockito = "org.powermock" % "powermock-api-mockito" % "1.6.2" % "test,it" // ApacheV2
}
}
import Compile._
import BuildUtil._
// Aggregated lists consumed by CassandraSparkBuild's libraryDependencies.
val logging = Seq(slf4jApi)
val metrics = Seq(Metrics.metricsCore, Metrics.metricsJson)
val jetty = Seq(Jetty.jettyServer, Jetty.jettyServlet)
val testKit = Seq(
Test.akkaTestKit,
Test.commonsIO,
Test.junit,
Test.junitInterface,
Test.scalaMock,
Test.scalaTest,
Test.scalactic,
Test.mockito,
Test.powerMock,
Test.powerMockMockito
)
val akka = Seq(akkaActor, akkaRemote, akkaSlf4j)
val cassandra = Seq(cassandraClient, cassandraDriver)
val spark = Seq(sparkCore, sparkStreaming, sparkSql, sparkCatalyst, sparkHive, sparkUnsafe)
// Connector dependency list: spark is re-scoped to "provided" so the
// published connector does not drag Spark onto users' classpaths.
val connector = testKit ++ metrics ++ jetty ++ logging ++ akka ++ cassandra ++ spark.map(_ % "provided") ++ Seq(
commonsLang3, config, guava, jodaC, jodaT, lzf, jsr166e)
// Embedded module keeps spark at compile scope (it runs the servers itself).
val embedded = logging ++ spark ++ cassandra ++ Seq(
Embedded.cassandraServer, Embedded.jopt, Embedded.sparkRepl, Embedded.kafka, Embedded.snappy)
val kafka = Seq(Demos.kafka, Demos.kafkaStreaming)
val twitter = Seq(sparkStreaming, Demos.twitterStreaming)
// Maps dependency artifacts to the external scaladoc sites used when
// generating cross-linked API documentation.
val documentationMappings = Seq(
DocumentationMapping(url(s"http://spark.apache.org/docs/${Versions.Spark}/api/scala/"),
sparkCore, sparkStreaming, sparkSql, sparkCatalyst, sparkHive
),
DocumentationMapping(url(s"http://doc.akka.io/api/akka/${Versions.Akka}/"),
akkaActor, akkaRemote, akkaSlf4j
)
)
}