Skip to content

Commit

Permalink
Some utils for working with the new "enum" format
Browse files Browse the repository at this point in the history
  • Loading branch information
squito committed Mar 20, 2015
1 parent dbfc7bf commit fef6605
Show file tree
Hide file tree
Showing 3 changed files with 169 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,11 @@ import javax.ws.rs.core.MediaType
import javax.ws.rs.ext.{ContextResolver, Provider}

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.databind.{JsonSerializer, ObjectMapper, SerializationFeature, SerializerProvider}
import com.fasterxml.jackson.databind.module.SimpleModule

import org.apache.spark.util.SparkEnum

@Provider
@Produces(Array(MediaType.APPLICATION_JSON))
Expand All @@ -38,10 +42,13 @@ class CustomObjectMapper extends ContextResolver[ObjectMapper]{
// Omit null fields from serialized JSON and render dates in ISO-8601 format.
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
mapper.setDateFormat(CustomObjectMapper.makeISODateFormat)

// Register a Jackson module so that every SparkEnum value is serialized as the
// string produced by its toString (see SparkEnumSerializer below).
val sparkEnumModule = new SimpleModule()
sparkEnumModule.addSerializer(classOf[SparkEnum], new SparkEnumSerializer)
mapper.registerModule(sparkEnumModule)

// JAX-RS ContextResolver hook: hand back the single pre-configured ObjectMapper
// regardless of the requested type.
override def getContext(tpe: Class[_]): ObjectMapper = {
mapper
}

}

object CustomObjectMapper {
Expand All @@ -51,5 +58,10 @@ object CustomObjectMapper {
iso8601.setCalendar(cal);
iso8601;
}
}

/**
 * Jackson serializer that writes a [[SparkEnum]] value as a bare JSON string,
 * using the name produced by its `toString`.
 */
class SparkEnumSerializer extends JsonSerializer[SparkEnum] {
  override def serialize(se: SparkEnum, jgen: JsonGenerator, provider: SerializerProvider): Unit =
    jgen.writeString(se.toString)
}
69 changes: 69 additions & 0 deletions core/src/main/scala/org/apache/spark/util/SparkEnum.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util

import org.apache.spark.SparkException

/**
 * Util for the "enum" pattern we have adopted: constants are `case object`s, so their
 * runtime class's simple name carries a trailing '$' (e.g. "Foo$").  `toString` strips
 * everything from the first '$' onward to recover the bare constant name.
 */
private[spark] trait SparkEnum {
  override def toString: String = {
    val simpleName = getClass().getSimpleName()
    val dollar = simpleName.indexOf('$')
    // Fix: indexOf returns -1 when the name has no '$' (e.g. a plain named class mixing
    // in this trait), and substring(0, -1) would throw StringIndexOutOfBoundsException —
    // which the companion's enumNames does NOT catch (it only catches InternalError).
    // Fall back to the unmodified simple name in that case.
    if (dollar == -1) simpleName else simpleName.substring(0, dollar)
  }

}

/**
 * Util for the "enum" pattern we've adopted. It adds methods to parse the enum from a String.
 * Note that you must still manually keep {{values}} in sync with the values you create.
 */
private[spark] trait SparkEnumCompanion[T <: SparkEnum] {

  /** All constants of the enum.  Must be maintained by hand (see class doc). */
  val values: Seq[T]

  /**
   * Name -> constant lookup table, built lazily on first use.
   *
   * Throws a SparkException when two constants render to the same name (the map would
   * silently drop one, detected via the size check), or when computing a constant's
   * class name fails with an InternalError ("Malformed class name" on some JVMs),
   * which indicates the enum does not follow the expected nesting conventions.
   */
  lazy val enumNames: Map[String, T] = {
    try {
      val tmpMap = values.map { t =>
        t.toString -> t
      }.toMap
      // A size mismatch means at least two constants mapped to the same name.
      if (tmpMap.size != values.size) {
        throw new SparkException("It appears you have multiple constants with the same" +
          " name. Perhaps your naming scheme is incompatible with SparkEnum. found names: " +
          tmpMap.keys)
      }
      tmpMap
    } catch {
      case ie: InternalError =>
        throw new SparkException("It appears you are using SparkEnum in a class which does not " +
          "follow the naming conventions")
    }
  }

  /** Exact-match parse: returns the constant named `s`, or None if there is no such constant. */
  def parse(s: String): Option[T] = {
    enumNames.get(s)
  }

  /**
   * Case-insensitive parse.  Uses String.equalsIgnoreCase, which is locale-independent,
   * instead of comparing toLowerCase results (toLowerCase is locale-sensitive — e.g. the
   * Turkish dotless-i — and was being recomputed for `s` on every entry).
   */
  def parseIgnoreCase(s: String): Option[T] = {
    enumNames.collectFirst {
      case (k, v) if k.equalsIgnoreCase(s) => v
    }
  }

}
86 changes: 86 additions & 0 deletions core/src/test/scala/org/apache/spark/util/SparkEnumTest.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util

import org.scalatest.{Matchers, FunSuite}

import org.apache.spark.SparkException

// Tests for the SparkEnum / SparkEnumCompanion "enum" pattern, using the DummyEnum
// and BadEnum fixtures defined below in this file.
class SparkEnumTest extends FunSuite with Matchers {

test("toString") {
// Constant names are recovered from the case object's runtime class name.
DummyEnum.Foo.toString should be ("Foo")
DummyEnum.Bar.toString should be ("Bar")
}

test("parse") {
DummyEnum.parse("Foo") should be (Some(DummyEnum.Foo))
DummyEnum.parse("Bar") should be (Some(DummyEnum.Bar))

// parse is exact-match: unknown names and wrong case yield None.
DummyEnum.parse("") should be (None)
DummyEnum.parse("foo") should be (None)
DummyEnum.parse("bar") should be (None)
}


test("bad enums") {
// BadEnum defines two constants that both render as "Bippy"; building the
// name table must fail.
val ex = intercept[SparkException](BadEnum.enumNames)
// Which of the two failure modes occurs varies between runs (presumably
// JVM/classloader-dependent: either the duplicate-name check fires, or
// getSimpleName throws InternalError first — TODO confirm). Either is fine.
ex.getMessage should (be ("It appears you have multiple constants with the same name. " +
"Perhaps your naming scheme is incompatible with SparkEnum. found names: Set(Bippy)") or
be ("It appears you are using SparkEnum in a class which does not follow the naming" +
" conventions"))
}

test("parseIgnoreCase") {
DummyEnum.parseIgnoreCase("Foo") should be (Some(DummyEnum.Foo))
DummyEnum.parseIgnoreCase("Bar") should be (Some(DummyEnum.Bar))

// Unlike parse, case differences are accepted; the empty string still misses.
DummyEnum.parseIgnoreCase("") should be (None)
DummyEnum.parseIgnoreCase("foo") should be (Some(DummyEnum.Foo))
DummyEnum.parseIgnoreCase("bar") should be (Some(DummyEnum.Bar))
}
}


// Well-formed example enum following the SparkEnum conventions.
sealed abstract class DummyEnum extends SparkEnum

object DummyEnum extends SparkEnumCompanion[DummyEnum] {
// Each constant is a case object wrapped in a block and exposed via a final val —
// presumably so the case object's runtime class gets a simple name ("Foo$") that
// SparkEnum.toString can strip to the bare constant name, while the public member
// stays a stable field of the companion. TODO confirm the rationale.
final val Foo = {
case object Foo extends DummyEnum
Foo
}
final val Bar = {
case object Bar extends DummyEnum
Bar
}
// Per SparkEnumCompanion's contract, values must be kept in sync by hand.
val values = Seq(
Foo,
Bar
)
}

// Deliberately ill-formed enum for the "bad enums" test: two distinct case objects
// (one nested inside an inner object) both render as "Bippy", so building the
// name -> constant table must fail with a SparkException.
sealed abstract class BadEnum extends SparkEnum

object BadEnum extends SparkEnumCompanion[BadEnum] {
case object Bippy extends BadEnum
object Blah {
// Same constant name as the one above — the duplicate that triggers the failure.
case object Bippy extends BadEnum
}

val values = Seq(Bippy, Blah.Bippy)
}

0 comments on commit fef6605

Please sign in to comment.