Skip to content

Commit

Permalink
added de.javakaffee into geomesa subproject; required UnmodifiableCollection kryo serializer
Browse files Browse the repository at this point in the history
  • Loading branch information
pomadchin committed Oct 6, 2016
1 parent 5c5ec22 commit 8f962f3
Show file tree
Hide file tree
Showing 8 changed files with 51 additions and 7 deletions.
2 changes: 2 additions & 0 deletions geomesa/build.sbt
Expand Up @@ -5,6 +5,8 @@ libraryDependencies ++= Seq(
"org.locationtech.geomesa" % "geomesa-jobs" % Version.geomesa,
"org.locationtech.geomesa" % "geomesa-accumulo-datastore" % Version.geomesa,
"org.locationtech.geomesa" % "geomesa-utils" % Version.geomesa,
"de.javakaffee" % "kryo-serializers" % "0.38" exclude("com.esotericsoftware", "kryo"),
"com.esotericsoftware" % "kryo-shaded" % "3.0.3",
"org.apache.spark" %% "spark-core" % Version.spark % "provided",
spire,
scalatest % "test")
Expand Down
Expand Up @@ -15,7 +15,7 @@ object GeometryToGeoMesaSimpleFeature {
val whenField = "when"
val whereField = "where"

@transient lazy val featureTypeCache =
lazy val featureTypeCache =
new LRUCache[String, SimpleFeatureType](
maxSize = ConfigFactory.load().getInt("geotrellis.geomesa.featureTypeCacheSize"),
sizeOf = {x => 1l}
Expand Down
Expand Up @@ -24,7 +24,7 @@ class GeoMesaFeatureReader(val instance: GeoMesaInstance)(implicit sc: SparkCont
numPartitions: Option[Int] = None
): RDD[SimpleFeature] = {
val dataStore = instance.accumuloDataStore
dataStore.createSchema(simpleFeatureType)
if(!dataStore.getTypeNames().contains(simpleFeatureType.getTypeName)) dataStore.createSchema(simpleFeatureType)
dataStore.dispose()

val job = Job.getInstance(sc.hadoopConfiguration)
Expand Down
Expand Up @@ -9,13 +9,12 @@ import org.apache.spark.rdd.RDD
import org.geotools.data.Transaction
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}

import scala.collection.JavaConversions._

class GeoMesaFeatureWriter(val instance: GeoMesaInstance)(implicit sc: SparkContext) extends Serializable {
def write[G <: Geometry, D: ? => Seq[(String, Any)]]
(layerId: LayerId, rdd: RDD[Feature[G, D]])
(implicit ev: Feature[G, D] => FeatureToGeoMesaSimpleFeatureMethods[G, D]): Unit = {

// SimpleFeatureType requires valid UnmodifiableCollection kryo serializer
rdd
.map { f => val sf = f.toSimpleFeature(layerId.name); sf.getFeatureType -> sf }.groupByKey
.foreachPartition { (partition: Iterator[(SimpleFeatureType, Iterable[SimpleFeature])]) =>
Expand Down
@@ -0,0 +1,14 @@
package geotrellis.spark.io.geomesa.kryo

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.{KryoRegistrator => SparkKryoRegistrator}
import de.javakaffee.kryoserializers._

/**
 * Spark Kryo registrator for the geomesa subproject.
 *
 * Delegates to the core geotrellis registrator and additionally registers
 * serializers for `java.util.Collections.unmodifiable*` wrapper collections,
 * which `SimpleFeatureType` serialization requires (see the de.javakaffee
 * kryo-serializers dependency added in this commit).
 *
 * NOTE(review): Kryo assigns registration IDs in call order, so the
 * delegated registrations must stay before the extra one — do not reorder.
 */
class KryoRegistrator extends SparkKryoRegistrator {
override def registerClasses(kryo: Kryo): Unit = {
// Reuse every registration from the core geotrellis Kryo registrator first.
new geotrellis.spark.io.kryo.KryoRegistrator().registerClasses(kryo)

// SimpleFeatureType requires proper UnmodifiableCollection serializer
UnmodifiableCollectionsSerializer.registerSerializers(kryo)
}
}
28 changes: 28 additions & 0 deletions geomesa/src/test/scala/geotrellis/GeoMesaTestEnvironment.scala
@@ -0,0 +1,28 @@
/*
* Copyright (c) 2014 DigitalGlobe.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package geotrellis

import geotrellis.spark.TestEnvironment

import org.apache.spark.SparkConf
import org.scalatest._

/**
 * Test-environment mixin that swaps in the GeoMesa-aware Kryo registrator.
 *
 * Overrides the base [[TestEnvironment]] hook so that test Spark contexts
 * use `geotrellis.spark.io.geomesa.kryo.KryoRegistrator` and do not require
 * every serialized class to be pre-registered.
 */
trait GeoMesaTestEnvironment extends TestEnvironment { self: Suite =>
  override def setKryoRegistrator(conf: SparkConf) = {
    // Fully-qualified name of the GeoMesa-specific registrator.
    val registratorName = classOf[geotrellis.spark.io.geomesa.kryo.KryoRegistrator].getName
    val withRegistrator = conf.set("spark.kryo.registrator", registratorName)
    // Unregistered classes may still appear; don't fail on them.
    withRegistrator.set("spark.kryo.registrationRequired", "false")
  }
}
Expand Up @@ -17,7 +17,7 @@ object GeoMesaSimpleFeatureType {
val whenField = GeometryToGeoMesaSimpleFeature.whenField
val whereField = GeometryToGeoMesaSimpleFeature.whereField

@transient lazy val featureTypeCache =
lazy val featureTypeCache =
new LRUCache[String, SimpleFeatureType](
maxSize = ConfigFactory.load().getInt("geotrellis.geomesa.featureTypeCacheSize"),
sizeOf = {x => 1l}
Expand Down
Expand Up @@ -3,7 +3,6 @@ package geotrellis.spark.io.geomesa
import geotrellis.geomesa.geotools.{GeoMesaSimpleFeatureType, GeometryToGeoMesaSimpleFeature}
import geotrellis.spark.{LayerId, TestEnvironment}
import geotrellis.vector._

import org.opengis.filter.Filter
import org.apache.spark.rdd.RDD
import org.geotools.data.Query
Expand All @@ -12,7 +11,9 @@ import org.scalatest.{BeforeAndAfterAll, FunSpec, Matchers, Suite}
import java.text.SimpleDateFormat
import java.util.TimeZone

class GeoMesaPersistenceSpec extends FunSpec with Suite with BeforeAndAfterAll with Matchers with TestEnvironment {
import geotrellis.GeoMesaTestEnvironment

class GeoMesaPersistenceSpec extends FunSpec with Suite with BeforeAndAfterAll with Matchers with GeoMesaTestEnvironment {

describe("GeoMesa Features Spec") {
val featuresInstance = GeoMesaInstance(
Expand Down

0 comments on commit 8f962f3

Please sign in to comment.