Skip to content

Commit b27a786

Browse files
committed
Switch to Spark 0.8.0-incubating
1 parent 35f4866 commit b27a786

16 files changed

+38
-28
lines changed

README.md

+1
Original file line numberDiff line numberDiff line change
@@ -15,5 +15,6 @@ Branches
1515
Changelog
1616
---------
1717

18+
- v0.2.0: Switch to Spark 0.8.0-incubating
1819
- v0.1.0: Implement ball decomposition (deterministic and randomized)
1920
and the distributed version of HyperANF

build.sbt

+8-3
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import AssemblyKeys._
22

33
name := "spark-graph"
44

5-
version := "0.1.0"
5+
version := "0.2.0"
66

77
scalaVersion := "2.9.3"
88

@@ -12,9 +12,12 @@ scalacOptions += "-optimise"
1212
// multiple SparkContexts
1313
parallelExecution in Test := false
1414

15-
libraryDependencies += "org.apache.hadoop" % "hadoop-client" % "1.2.1"
15+
// The transitive dependency on "asm" is excluded since hadoop depends
16+
// on asm-3.1 and everything else on asm-4.0 and they are incompatible.
17+
// This exclusion fixes the problem.
18+
libraryDependencies += "org.apache.hadoop" % "hadoop-client" % "1.2.1" exclude("asm","asm")
1619

17-
libraryDependencies += "org.spark-project" % "spark-core_2.9.3" % "0.7.3" excludeAll(
20+
libraryDependencies += "org.apache.spark" % "spark-core_2.9.3" % "0.8.0-incubating" excludeAll(
1821
ExclusionRule("ch.qos.logback"),
1922
ExclusionRule("org.apache.hadoop")
2023
)
@@ -30,6 +33,8 @@ resolvers ++= Seq(
3033
"Akka Repository" at "http://repo.akka.io/releases/",
3134
"Spray Repository" at "http://repo.spray.cc/")
3235

36+
// dependency graph
37+
net.virtualvoid.sbt.graph.Plugin.graphSettings
3338

3439
// sbt-assembly configuration
3540

project/plugins.sbt

+2
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,4 @@
11
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.9.2")
22

3+
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")
4+

src/main/scala/it/unipd/dei/graph/MatToAdjConverter.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@
1717

1818
package it.unipd.dei.graph
1919

20-
import spark.RDD
21-
import spark.SparkContext._
20+
import org.apache.spark.rdd.RDD
21+
import org.apache.spark.SparkContext._
2222

2323
/**
2424
* Converts a dataset in sparse matrix form to a dataset in adjacency list form

src/main/scala/it/unipd/dei/graph/Tool.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import it.unipd.dei.graph.decompositions.BallDecomposition._
2222
import it.unipd.dei.graph.decompositions.FloodBallDecomposition._
2323
import it.unipd.dei.graph.diameter.hyperAnf.HyperAnf._
2424
import it.unipd.dei.graph.serialization.KryoSerialization
25-
import spark.SparkContext
25+
import org.apache.spark.SparkContext
2626
import org.slf4j.LoggerFactory
2727
import it.unipd.dei.graph.decompositions.RandomizedBallDecomposition._
2828
import it.unipd.dei.graph.decompositions.SimpleRandomizedBallDecomposition._

src/main/scala/it/unipd/dei/graph/decompositions/ArcRelabeler.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@
1717

1818
package it.unipd.dei.graph.decompositions
1919

20-
import spark.RDD
21-
import spark.SparkContext._
20+
import org.apache.spark.rdd.RDD
21+
import org.apache.spark.SparkContext._
2222
import it.unipd.dei.graph._
2323

2424
/**

src/main/scala/it/unipd/dei/graph/decompositions/BallComputer.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,8 @@
1818
package it.unipd.dei.graph.decompositions
1919

2020
import it.unipd.dei.graph._
21-
import spark.RDD
22-
import spark.SparkContext._
21+
import org.apache.spark.rdd.RDD
22+
import org.apache.spark.SparkContext._
2323

2424
/**
2525
* Trait for classes that can compute balls

src/main/scala/it/unipd/dei/graph/decompositions/BallDecomposition.scala

+2-2
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@
1717

1818
package it.unipd.dei.graph.decompositions
1919

20-
import spark.SparkContext._
21-
import spark.RDD
20+
import org.apache.spark.SparkContext._
21+
import org.apache.spark.rdd.RDD
2222
import it.unipd.dei.graph._
2323
import org.slf4j.LoggerFactory
2424

src/main/scala/it/unipd/dei/graph/decompositions/FloodBallDecomposition.scala

+3-3
Original file line numberDiff line numberDiff line change
@@ -18,12 +18,12 @@
1818
package it.unipd.dei.graph.decompositions
1919

2020
import it.unipd.dei.graph._
21-
import spark.SparkContext._
22-
import spark.RDD
21+
import org.apache.spark.SparkContext._
22+
import org.apache.spark.rdd.RDD
2323
import scala.util.Random
2424
import org.slf4j.LoggerFactory
2525
import scala.Left
26-
import spark.broadcast.Broadcast
26+
import org.apache.spark.broadcast.Broadcast
2727

2828
object FloodBallDecomposition extends ArcRelabeler with Timed {
2929

src/main/scala/it/unipd/dei/graph/decompositions/RandomizedBallDecomposition.scala

+3-3
Original file line numberDiff line numberDiff line change
@@ -19,9 +19,9 @@ package it.unipd.dei.graph.decompositions
1919

2020
import it.unipd.dei.graph._
2121
import it.unipd.dei.graph.decompositions._
22-
import spark.SparkContext._
23-
import spark.RDD
24-
import spark.broadcast.Broadcast
22+
import org.apache.spark.SparkContext._
23+
import org.apache.spark.rdd.RDD
24+
import org.apache.spark.broadcast.Broadcast
2525
import scala.util.Random
2626
import org.slf4j.LoggerFactory
2727

src/main/scala/it/unipd/dei/graph/decompositions/SimpleRandomizedBallDecomposition.scala

+3-3
Original file line numberDiff line numberDiff line change
@@ -19,10 +19,10 @@ package it.unipd.dei.graph.decompositions
1919

2020
import org.slf4j.LoggerFactory
2121
import it.unipd.dei.graph._
22-
import spark.RDD
23-
import spark.SparkContext._
22+
import org.apache.spark.rdd.RDD
23+
import org.apache.spark.SparkContext._
2424
import scala.Left
25-
import spark.broadcast.Broadcast
25+
import org.apache.spark.broadcast.Broadcast
2626
import scala.util.Random
2727

2828
object SimpleRandomizedBallDecomposition extends BallComputer

src/main/scala/it/unipd/dei/graph/diameter/hyperAnf/HyperAnf.scala

+3-2
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,14 @@
1717

1818
package it.unipd.dei.graph.diameter.hyperAnf
1919

20-
import spark.{Accumulator, RDD, SparkContext}
21-
import spark.SparkContext._
20+
import org.apache.spark.{Accumulator, SparkContext}
21+
import org.apache.spark.SparkContext._
2222
import it.unipd.dei.graph.{Timed, TextInputConverter, NodeId, Neighbourhood}
2323
import scala.collection.mutable
2424
import it.unipd.dei.graph.diameter.{Confidence,EffectiveDiameter}
2525
import org.slf4j.LoggerFactory
2626
import java.io.File
27+
import org.apache.spark.rdd.RDD
2728

2829
/**
2930
* Implementation of HyperANF with spark

src/main/scala/it/unipd/dei/graph/serialization/KryoSerialization.scala

+3-2
Original file line numberDiff line numberDiff line change
@@ -20,19 +20,20 @@ package it.unipd.dei.graph.serialization
2020
import com.esotericsoftware.kryo.Kryo
2121
import it.unipd.dei.graph.diameter.hyperAnf.HyperLogLogCounter
2222
import org.slf4j.LoggerFactory
23+
import org.apache.spark.serializer.KryoRegistrator
2324

2425
/**
2526
* Trait that enables kryo serialization and registers some classes
2627
*/
2728
trait KryoSerialization {
2829

29-
System.setProperty("spark.serializer", "spark.KryoSerializer")
30+
System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
3031
System.setProperty("spark.kryo.registrator",
3132
"it.unipd.dei.graph.serialization.GraphKryoRegistrator")
3233

3334
}
3435

35-
class GraphKryoRegistrator extends spark.KryoRegistrator {
36+
class GraphKryoRegistrator extends KryoRegistrator {
3637

3738
private val log = LoggerFactory.getLogger("KryoRegistrator")
3839

src/test/scala/it/unipd/dei/graph/LocalSparkContext.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package it.unipd.dei.graph
1919

2020
import org.scalatest.{BeforeAndAfterEach, Suite}
21-
import spark.SparkContext
21+
import org.apache.spark.SparkContext
2222

2323
/**
2424
* Provides a SparkContext to each test. The SparkContext is initialized and

src/test/scala/it/unipd/dei/graph/decompositions/BigGraphBallDecompositionSpec.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package it.unipd.dei.graph.decompositions
1919

2020
import org.scalatest._
2121
import BallDecomposition._
22-
import spark.RDD
22+
import org.apache.spark.rdd.RDD
2323
import scala.collection.mutable
2424
import it.unipd.dei.graph._
2525
import it.unipd.dei.graph.LocalSparkContext

src/test/scala/it/unipd/dei/graph/diameter/hyperAnf/HyperAnfSparkSpec.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package it.unipd.dei.graph.diameter.hyperAnf
2020
import org.scalatest.FlatSpec
2121
import it.unipd.dei.graph.LocalSparkContext
2222
import HyperAnf._
23-
import spark.SparkContext
23+
import org.apache.spark.SparkContext
2424

2525
class HyperAnfSparkSpec extends FlatSpec with LocalSparkContext {
2626

0 commit comments

Comments (0)