Skip to content

Commit

Permalink
Fix compilation errors
Browse files Browse the repository at this point in the history
  • Loading branch information
pomadchin authored and echeipesh committed Jan 15, 2018
1 parent 7ebbf42 commit 8ba122f
Show file tree
Hide file tree
Showing 38 changed files with 465 additions and 179 deletions.
5 changes: 5 additions & 0 deletions spark-pipeline/build.sbt
Expand Up @@ -4,6 +4,11 @@ name := "geotrellis-spark-pipeline"
libraryDependencies ++= Seq(
// JSON schema validation for the pipeline's JSON definitions.
"com.github.fge" % "json-schema-validator" % "2.2.6",
"com.chuusai" %% "shapeless" % "2.3.2",
// Runtime classpath scanning (ClassFinder) used by the pipeline Main;
// classutil depends on grizzled-scala and on ASM for bytecode inspection,
// so the ASM artifacts below are pinned alongside it.
"org.clapper" %% "classutil" % "1.1.2",
"org.clapper" %% "grizzled-scala" % "4.2.0",
"org.ow2.asm" % "asm" % "5.1",
"org.ow2.asm" % "asm-commons" % "5.1",
"org.ow2.asm" % "asm-util" % "5.1",
sparkCore % "provided",
scalatest % "test")

Expand Down
69 changes: 69 additions & 0 deletions spark-pipeline/src/main/scala/geotrellis/spark/pipeline/Main.scala
@@ -0,0 +1,69 @@
package geotrellis.spark.pipeline

import java.io.File

import org.clapper.classutil._
import grizzled.file.Implicits._
import grizzled.file.{util => fileutil}

object Main {
/** All compiled `.class` files under the SBT target directories, together
  * with a [[ClassFinder]] over the same directories.
  *
  * Fails with a descriptive error if the expected SBT output layout
  * (`spark-pipeline/target/scala-<version>/{classes,test-classes}`)
  * is not present — run `sbt compile` / `sbt test:compile` first.
  */
private val (runtimeClassFiles, runtimeClassFinder) = {
import scala.util.Properties

// `releaseVersion` is None on non-release (snapshot/dev) Scala builds, so
// fall back to `versionNumberString` instead of calling `.get`.
val version = Properties.releaseVersion.getOrElse(Properties.versionNumberString)
// SBT names target dirs after the binary version, e.g. "scala-2.11".
val shortVersion = version.split("""\.""").take(2).mkString(".")

val targetDirectory: File = Array(
fileutil.joinPath("spark-pipeline/target", s"scala-$version"),
fileutil.joinPath("spark-pipeline/target", s"scala-$shortVersion")
)
.map(new File(_))
.find(_.exists)
.getOrElse(sys.error(
s"No spark-pipeline/target/scala-($version|$shortVersion) directory found; run `sbt compile` first."))

// SBT-dependent paths.
val classDir = new File(fileutil.joinPath(targetDirectory.getPath, "classes"))
val testClassDir = new File(fileutil.joinPath(targetDirectory.getPath, "test-classes"))

// Recursively collect the compiled class files under a directory.
def classFilesUnder(dir: File): Vector[File] =
dir.listRecursively()
.filter(_.getName.endsWith(".class"))
.toVector

val allClassFiles = classFilesUnder(classDir) ++ classFilesUnder(testClassDir)

// The number of returned ClassInfo objects should match the number of
// class files found on disk; a mismatch means the finder missed some.
val finder = ClassFinder(Seq(classDir, testClassDir))
assert(finder.getClasses().size == allClassFiles.length)
(allClassFiles, finder)
}

/** Entry point.  Currently a placeholder: the intent (per the original
  * scratch code) is to discover pipeline AST classes via
  * `runtimeClassFinder` — TODO implement.
  */
def main(args: Array[String]): Unit = {
// Touch the lazily-useful finder so classpath problems surface early.
val _ = (runtimeClassFiles, runtimeClassFinder)
()
}

}

This file was deleted.

This file was deleted.

@@ -0,0 +1,9 @@
package geotrellis.spark.pipeline.ast

/** A node in the pipeline AST; evaluating it with `get` yields a `T`. */
trait Node[T] {
def get: T
}

/** Source node: produces a value of type `T`. */
trait Read[T] extends Node[T]
/** Transformation node: conceptually consumes an upstream `F` and produces a `T`. */
trait Transform[F, T] extends Node[T]
/** Sink node: writes a value of type `T` (and exposes it via `get`). */
trait Write[T] extends Node[T]
@@ -0,0 +1,15 @@
package geotrellis.spark.pipeline.ast.multiband.spatial

import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json
import geotrellis.vector._
import org.apache.spark.rdd.RDD

/** AST transform: buffered reprojection of a multiband spatial layer,
  * parameterized by the JSON `reproject` instruction.
  * NOTE(review): stub — `get` is unimplemented (`???` throws NotImplementedError).
  */
case class BufferedReproject(
node: Node[RDD[(ProjectedExtent, MultibandTile)]],
reproject: json.TransformBufferedReproject
) extends Transform[RDD[(ProjectedExtent, MultibandTile)], MultibandTileLayerRDD[SpatialKey]] {
def get: MultibandTileLayerRDD[SpatialKey] = ???
}
@@ -0,0 +1,11 @@
package geotrellis.spark.pipeline.ast.multiband.spatial

import geotrellis.raster._
import geotrellis.spark.pipeline.ast.Read
import geotrellis.spark.pipeline.json
import geotrellis.vector._
import org.apache.spark.rdd.RDD

/** AST source: reads multiband spatial tiles per the JSON `read` instruction.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class FileRead(read: json.ReadFile) extends Read[RDD[(ProjectedExtent, MultibandTile)]] {
def get: RDD[(ProjectedExtent, MultibandTile)] = ???
}
@@ -0,0 +1,12 @@
package geotrellis.spark.pipeline.ast.multiband.spatial

import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json

/** AST sink: writes a multiband spatial layer per the JSON `write` instruction.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class FileWrite(
node: Node[MultibandTileLayerRDD[SpatialKey]],
write: json.WriteFile
) extends Write[MultibandTileLayerRDD[SpatialKey]] {
def get: MultibandTileLayerRDD[SpatialKey] = ???
}

This file was deleted.

@@ -0,0 +1,12 @@
package geotrellis.spark.pipeline.ast.multiband.spatial

import geotrellis.raster._
import geotrellis.spark.pipeline.ast.Read
import geotrellis.spark.pipeline.json
import geotrellis.vector._

import org.apache.spark.rdd.RDD

/** AST source: Hadoop-backed read of multiband spatial tiles per `read`.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class HadoopRead(read: json.ReadHadoop) extends Read[RDD[(ProjectedExtent, MultibandTile)]] {
def get: RDD[(ProjectedExtent, MultibandTile)] = ???
}
@@ -0,0 +1,12 @@
package geotrellis.spark.pipeline.ast.multiband.spatial

import geotrellis.spark.pipeline.ast._
import geotrellis.spark._
import geotrellis.spark.pipeline.json

/** AST sink: Hadoop-backed write of a multiband spatial layer per `write`.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class HadoopWrite(
node: Node[MultibandTileLayerRDD[SpatialKey]],
write: json.WriteHadoop
) extends Write[MultibandTileLayerRDD[SpatialKey]] {
def get: MultibandTileLayerRDD[SpatialKey] = ???
}
@@ -0,0 +1,18 @@
package geotrellis.spark.pipeline.ast.multiband.spatial

import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json
import geotrellis.vector._

import org.apache.spark.rdd.RDD

/** AST transform: per-tile reprojection of a multiband spatial layer.
  * The only implemented transform in this set: it evaluates the upstream
  * node and delegates to the JSON instruction's `eval`.
  */
case class PerTileReproject(
node: Node[RDD[(ProjectedExtent, MultibandTile)]],
reproject: json.TransformPerTileReproject
) extends Transform[RDD[(ProjectedExtent, MultibandTile)], MultibandTileLayerRDD[SpatialKey]] {
def get: MultibandTileLayerRDD[SpatialKey] = {
reproject.eval(node.get)
}
}
@@ -0,0 +1,15 @@
package geotrellis.spark.pipeline.ast.multiband.spatial

import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json
import geotrellis.vector._
import org.apache.spark.rdd.RDD

/** AST transform: tiles an un-keyed multiband RDD into a layout-keyed layer.
  * NOTE(review): stub — `get` is unimplemented.  The parameter is named
  * `reproject` but holds a `json.TransformTile`; consider renaming (would be
  * an interface change for callers using named arguments — deferred).
  */
case class TileToLayout(
node: Node[RDD[(ProjectedExtent, MultibandTile)]],
reproject: json.TransformTile
) extends Transform[RDD[(ProjectedExtent, MultibandTile)], MultibandTileLayerRDD[SpatialKey]] {
def get: MultibandTileLayerRDD[SpatialKey] = ???
}
@@ -0,0 +1,16 @@
package geotrellis.spark.pipeline.ast.multiband.temporal

import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json
import geotrellis.vector._

import org.apache.spark.rdd.RDD

/** AST transform: buffered reprojection of a multiband temporal layer.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class BufferedReproject(
node: Node[RDD[(TemporalProjectedExtent, MultibandTile)]],
reproject: json.TransformBufferedReproject
) extends Transform[RDD[(TemporalProjectedExtent, MultibandTile)], MultibandTileLayerRDD[SpaceTimeKey]] {
def get: MultibandTileLayerRDD[SpaceTimeKey] = ???
}
@@ -0,0 +1,13 @@
package geotrellis.spark.pipeline.ast.multiband.temporal

import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.pipeline.ast.Read
import geotrellis.spark.pipeline.json
import geotrellis.vector._

import org.apache.spark.rdd.RDD

/** AST source: reads multiband temporal tiles per the JSON `read` instruction.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class FileRead(read: json.ReadFile) extends Read[RDD[(TemporalProjectedExtent, MultibandTile)]] {
def get: RDD[(TemporalProjectedExtent, MultibandTile)] = ???
}
@@ -0,0 +1,12 @@
package geotrellis.spark.pipeline.ast.multiband.temporal

import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json

/** AST sink: writes a multiband temporal layer per the JSON `write` instruction.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class FileWrite(
node: Node[MultibandTileLayerRDD[SpaceTimeKey]],
write: json.WriteFile
) extends Write[MultibandTileLayerRDD[SpaceTimeKey]] {
def get: MultibandTileLayerRDD[SpaceTimeKey] = ???
}
@@ -0,0 +1,13 @@
package geotrellis.spark.pipeline.ast.multiband.temporal

import geotrellis.raster._
import geotrellis.spark.TemporalProjectedExtent
import geotrellis.spark.pipeline.ast.Read
import geotrellis.spark.pipeline.json
import geotrellis.vector._

import org.apache.spark.rdd.RDD

/** AST source: Hadoop-backed read of multiband temporal tiles per `read`.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class HadoopRead(read: json.ReadHadoop) extends Read[RDD[(TemporalProjectedExtent, MultibandTile)]] {
def get: RDD[(TemporalProjectedExtent, MultibandTile)] = ???
}
@@ -0,0 +1,12 @@
package geotrellis.spark.pipeline.ast.multiband.temporal

import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json

/** AST sink: Hadoop-backed write of a multiband temporal layer per `write`.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class HadoopWrite(
node: Node[MultibandTileLayerRDD[SpaceTimeKey]],
write: json.WriteHadoop
) extends Write[MultibandTileLayerRDD[SpaceTimeKey]] {
def get: MultibandTileLayerRDD[SpaceTimeKey] = ???
}
@@ -0,0 +1,16 @@
package geotrellis.spark.pipeline.ast.multiband.temporal

import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json
import geotrellis.vector._

import org.apache.spark.rdd.RDD

/** AST transform: per-tile reprojection of a multiband temporal layer.
  * NOTE(review): stub — `get` is unimplemented (unlike the spatial
  * counterpart, which delegates to `reproject.eval`).
  */
case class PerTileReproject(
node: Node[RDD[(TemporalProjectedExtent, MultibandTile)]],
reproject: json.TransformPerTileReproject
) extends Transform[RDD[(TemporalProjectedExtent, MultibandTile)], MultibandTileLayerRDD[SpaceTimeKey]] {
def get: MultibandTileLayerRDD[SpaceTimeKey] = ???
}
@@ -0,0 +1,15 @@
package geotrellis.spark.pipeline.ast.multiband.temporal

import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json
import geotrellis.vector._
import org.apache.spark.rdd.RDD

/** AST transform: tiles an un-keyed multiband temporal RDD into a layer.
  * NOTE(review): stub — `get` is unimplemented; parameter `reproject` holds a
  * `json.TransformTile` (misleading name, kept for interface compatibility).
  */
case class TileToLayout(
node: Node[RDD[(TemporalProjectedExtent, MultibandTile)]],
reproject: json.TransformTile
) extends Transform[RDD[(TemporalProjectedExtent, MultibandTile)], MultibandTileLayerRDD[SpaceTimeKey]] {
def get: MultibandTileLayerRDD[SpaceTimeKey] = ???
}
@@ -0,0 +1,15 @@
package geotrellis.spark.pipeline.ast.singleband.spatial

import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json
import geotrellis.vector._
import org.apache.spark.rdd.RDD

/** AST transform: buffered reprojection of a singleband spatial layer.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class BufferedReproject(
node: Node[RDD[(ProjectedExtent, Tile)]],
reproject: json.TransformBufferedReproject
) extends Transform[RDD[(ProjectedExtent, Tile)], TileLayerRDD[SpatialKey]] {
def get: TileLayerRDD[SpatialKey] = ???
}
@@ -0,0 +1,11 @@
package geotrellis.spark.pipeline.ast.singleband.spatial

import geotrellis.raster._
import geotrellis.spark.pipeline.ast.Read
import geotrellis.spark.pipeline.json
import geotrellis.vector._
import org.apache.spark.rdd.RDD

/** AST source: reads singleband spatial tiles per the JSON `read` instruction.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class FileRead(read: json.ReadFile) extends Read[RDD[(ProjectedExtent, Tile)]] {
def get: RDD[(ProjectedExtent, Tile)] = ???
}
@@ -0,0 +1,12 @@
package geotrellis.spark.pipeline.ast.singleband.spatial

import geotrellis.spark._
import geotrellis.spark.pipeline.ast._
import geotrellis.spark.pipeline.json

/** AST sink: writes a singleband spatial layer per the JSON `write` instruction.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class FileWrite(
node: Node[TileLayerRDD[SpatialKey]],
write: json.WriteFile
) extends Write[TileLayerRDD[SpatialKey]] {
def get: TileLayerRDD[SpatialKey] = ???
}
@@ -0,0 +1,12 @@
package geotrellis.spark.pipeline.ast.singleband.spatial

import geotrellis.raster._
import geotrellis.spark.pipeline.ast.Read
import geotrellis.spark.pipeline.json
import geotrellis.vector._

import org.apache.spark.rdd.RDD

/** AST source: Hadoop-backed read of singleband spatial tiles per `read`.
  * NOTE(review): stub — `get` is unimplemented.
  */
case class HadoopRead(read: json.ReadHadoop) extends Read[RDD[(ProjectedExtent, Tile)]] {
def get: RDD[(ProjectedExtent, Tile)] = ???
}

0 comments on commit 8ba122f

Please sign in to comment.