Skip to content

Commit

Permalink
move S3Mock to s3 testkit
Browse files · Browse the repository at this point in the history
  • Loading branch information
pomadchin committed Oct 27, 2016
1 parent cd1ca27 commit 3dc9a54
Show file tree
Hide file tree
Showing 27 changed files with 75 additions and 15 deletions.
10 changes: 9 additions & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,15 @@ lazy val sparkTestkit: Project = Project("spark-testkit", file("spark-testkit"))
settings(commonSettings: _*)

lazy val s3 = Project("s3", file("s3")).
dependsOn(sparkTestkit % "test->test", spark % "provided;test->test").
dependsOn(spark % "provided;test->test").
settings(commonSettings: _*)

lazy val s3Test = Project("s3-test", file("s3-test")).
dependsOn(s3 % "provided", s3Testkit, sparkTestkit, spark % "provided;test->test").
settings(commonSettings: _*)

lazy val s3Testkit = Project("s3-testkit", file("s3-testkit")).
dependsOn(s3 % "provided", spark % "provided;test->test").
settings(commonSettings: _*)

lazy val accumulo = Project("accumulo", file("accumulo")).
Expand Down
11 changes: 11 additions & 0 deletions s3-test/build.sbt
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import Dependencies._

name := "geotrellis-s3-test"
libraryDependencies ++= Seq(
sparkCore % "provided",
awsSdkS3,
spire,
scalatest % "test")

fork in Test := false
parallelExecution in Test := false
Original file line number Diff line number Diff line change
@@ -1,15 +1,17 @@
package geotrellis.spark.io.s3

import java.nio.file.{ Paths, Files }

import com.amazonaws.auth.AWSCredentials
import geotrellis.raster._
import geotrellis.vector._
import geotrellis.spark._
import geotrellis.spark.io.hadoop._
import geotrellis.spark.io.s3.testkit._

import com.amazonaws.auth.AWSCredentials
import org.apache.hadoop.mapreduce.{ TaskAttemptContext, InputSplit }
import org.scalatest._

import java.nio.file.{ Paths, Files }

class MockGeoTiffS3InputFormat extends GeoTiffS3InputFormat {
override def getS3Client(credentials: AWSCredentials): S3Client = new MockS3Client
override def createRecordReader(split: InputSplit, context: TaskAttemptContext) =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package geotrellis.spark.io.s3

import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.s3.testkit._

class S3AttributeStoreSpec extends AttributeStoreSpec {
val bucket = "attribute-store-test-mock-bucket"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package geotrellis.spark.io.s3.util

import geotrellis.util._
import geotrellis.spark.io.s3._
import geotrellis.spark.io.s3.testkit._

import java.nio.{ByteBuffer, ByteOrder}
import scala.collection.mutable._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import java.nio.file.{Files, Paths}
import geotrellis.util._
import geotrellis.vector.Extent
import geotrellis.spark.io.s3._
import geotrellis.spark.io.s3.testkit._
import geotrellis.raster.testkit._
import geotrellis.raster.io.geotiff._
import geotrellis.raster.io.geotiff.reader._
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
package geotrellis.spark.io.s3

import com.amazonaws.auth.AWSCredentials
import geotrellis.spark.io.s3.testkit._
import geotrellis.proj4.LatLng
import geotrellis.raster.Tile
import geotrellis.spark.TestEnvironment
import geotrellis.vector.{ Extent, ProjectedExtent }

import com.amazonaws.auth.AWSCredentials
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce.{ Job, RecordReader, TaskAttemptContext, InputSplit }
import org.scalatest._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import geotrellis.spark.TestEnvironment
import geotrellis.spark.render._
import geotrellis.spark.testfiles.TestFiles
import geotrellis.spark.io.s3._
import geotrellis.spark.io.s3.testkit._
import geotrellis.spark.io.s3.SaveToS3

import org.scalatest._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package geotrellis.spark.io.s3

import geotrellis.raster.Tile
import geotrellis.spark.io._
import geotrellis.spark.io.s3.testkit._
import geotrellis.spark.io.index._
import geotrellis.spark.testfiles.TestFiles
import geotrellis.spark._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package geotrellis.spark.io.s3

import geotrellis.raster.Tile
import geotrellis.spark.io._
import geotrellis.spark.io.s3.testkit._
import geotrellis.spark.io.index._
import geotrellis.spark.testfiles.TestFiles
import geotrellis.spark._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import java.nio.file.{ Paths, Files }
import java.nio.ByteBuffer
import geotrellis.util._
import geotrellis.spark.io.s3._
import geotrellis.spark.io.s3.testkit._
import spire.syntax.cfor._

import com.amazonaws.services.s3.model._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package geotrellis.spark.io.s3

import geotrellis.raster.{Tile, TileFeature}
import geotrellis.spark.io._
import geotrellis.spark.io.s3.testkit._
import geotrellis.spark.io.index._
import geotrellis.spark.testfiles.TestTileFeatureFiles
import geotrellis.spark._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package geotrellis.spark.io.s3
import geotrellis.raster.{Tile, TileFeature}
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.s3.testkit._
import geotrellis.spark.io.index._
import geotrellis.spark.testfiles.TestTileFeatureFiles

Expand Down
Original file line number Diff line number Diff line change
@@ -1,17 +1,20 @@
package geotrellis.spark.io.s3

import com.amazonaws.auth.AWSCredentials
import geotrellis.proj4.LatLng
import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.tiling._
import geotrellis.spark.io.hadoop._
import geotrellis.spark.ingest._
import geotrellis.util.Filesystem
import geotrellis.spark.io.json._
import geotrellis.spark.io.s3.testkit._

import com.amazonaws.auth.AWSCredentials
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce._
import org.apache.hadoop.mapreduce.task._
import geotrellis.spark.io.json._

import java.time.{ZoneOffset, ZonedDateTime}
import java.nio.file.{Files, Path, Paths}
import java.time.format.DateTimeFormatter
Expand Down
10 changes: 10 additions & 0 deletions s3-testkit/build.sbt
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import Dependencies._

name := "geotrellis-s3-testkit"
libraryDependencies ++= Seq(
sparkCore % "provided",
awsSdkS3,
spire,
logging,
scalatest
)
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package geotrellis.spark.io.s3.util
package geotrellis.spark.io.s3.testkit

import geotrellis.util._
import geotrellis.spark.io.s3._
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package geotrellis.spark.io.s3
package geotrellis.spark.io.s3.testkit

import geotrellis.spark.io.s3._
import java.io.ByteArrayInputStream
import com.amazonaws.services.s3.model._
import java.util.concurrent.ConcurrentHashMap
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package geotrellis.spark.io.s3
package geotrellis.spark.io.s3.testkit

import geotrellis.spark.io._
import geotrellis.spark.io.s3._
import org.apache.spark._

class MockS3LayerCollectionReader(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
package geotrellis.spark.io.s3
package geotrellis.spark.io.s3.testkit

import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.s3._
import geotrellis.spark.io.json._

import org.apache.spark._
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package geotrellis.spark.io.s3
package geotrellis.spark.io.s3.testkit

import geotrellis.spark.io._
import geotrellis.spark.io.s3._
import geotrellis.spark.io.json._

import spray.json._
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package geotrellis.spark.io.s3.util
package geotrellis.spark.io.s3.testkit

import geotrellis.util._
import geotrellis.spark.io.s3._
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
package geotrellis.spark.io.s3
package geotrellis.spark.io.s3.testkit

import geotrellis.spark.io.s3._

import org.scalatest._
import scala.io.Source
Expand Down
4 changes: 3 additions & 1 deletion scripts/buildall.sh
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,9 @@ HOSTALIASES=/tmp/hostaliases ./sbt -J-Xmx2G "project geowave" test || { exit 1;
./sbt -J-Xmx2G "project proj4" test || { exit 1; }
./sbt -J-Xmx2G "project raster-test" test || { exit 1; }
./sbt -J-Xmx2G "project raster-testkit" compile || { exit 1; }
./sbt -J-Xmx2G "project s3" test || { exit 1; }
./sbt -J-Xmx2G "project s3" compile || { exit 1; }
./sbt -J-Xmx2G "project s3-test" test || { exit 1; }
./sbt -J-Xmx2G "project s3-testkit" test || { exit 1; }
./sbt -J-Xmx2G "project shapefile" compile || { exit 1; }
./sbt -J-Xmx2G "project slick" test:compile || { exit 1; }
./sbt -J-Xmx2G "project spark" test || { exit 1; }
Expand Down
4 changes: 4 additions & 0 deletions scripts/cleanall.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
./sbt -J-Xmx2G "project geowave" clean || { exit 1; }
./sbt -J-Xmx2G "project hbase" clean || { exit 1; }
./sbt -J-Xmx2G "project proj4" clean || { exit 1; }
./sbt -J-Xmx2G "project s3" clean || { exit 1; }
./sbt -J-Xmx2G "project s3-test" clean || { exit 1; }
./sbt -J-Xmx2G "project raster-test" clean || { exit 1; }
./sbt -J-Xmx2G "project shapefile" clean || { exit 1; }
./sbt -J-Xmx2G "project slick" clean || { exit 1; }
Expand All @@ -27,6 +29,8 @@ rm -r raster-test/target
rm -r raster-testkit/target
rm -r raster/target
rm -r s3/target
rm -r s3-test/target
rm -r s3-testkit/target
rm -r shapefile/target
rm -r slick/target
rm -r spark-testkit/target
Expand Down
1 change: 1 addition & 0 deletions scripts/publish-local.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
./sbt "project raster" publish-local && \
./sbt "project raster-testkit" publish-local && \
./sbt "project s3" publish-local && \
./sbt "project s3-testkit" publish-local && \
./sbt "project geowave" publish-local && \
./sbt "project shapefile" publish-local && \
./sbt "project slick" publish-local && \
Expand Down
1 change: 1 addition & 0 deletions scripts/publish-m2.sh
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
"project raster" +publish-m2 \
"project raster-testkit" +publish-m2 \
"project s3" +publish-m2 \
"project s3-testkit" +publish-m2 \
"project shapefile" +publish-m2 \
"project slick" +publish-m2 \
"project spark" +publish-m2 \
Expand Down
1 change: 1 addition & 0 deletions scripts/publish-snapshot.sh
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
"project raster" publish \
"project raster-testkit" publish \
"project s3" publish \
"project s3-testkit" publish \
"project geowave" publish \
"project accumulo" publish \
"project cassandra" publish \
Expand Down

0 comments on commit 3dc9a54

Please sign in to comment.