From fef66058612ebf225e58dd5f5fea6bae1afd5b31 Mon Sep 17 00:00:00 2001
From: Imran Rashid
Date: Fri, 20 Mar 2015 16:25:42 -0500
Subject: [PATCH] some utils for working w/ new "enum" format

---
 .../status/api/v1/CustomObjectMapper.scala    | 16 +++-
 .../org/apache/spark/util/SparkEnum.scala     | 69 +++++++++++++++
 .../org/apache/spark/util/SparkEnumTest.scala | 86 +++++++++++++++++++
 3 files changed, 169 insertions(+), 2 deletions(-)
 create mode 100644 core/src/main/scala/org/apache/spark/util/SparkEnum.scala
 create mode 100644 core/src/test/scala/org/apache/spark/util/SparkEnumTest.scala

diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/CustomObjectMapper.scala b/core/src/main/scala/org/apache/spark/status/api/v1/CustomObjectMapper.scala
index aad906699b549..6c4b70777fec0 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/CustomObjectMapper.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/CustomObjectMapper.scala
@@ -23,7 +23,11 @@ import javax.ws.rs.core.MediaType
 import javax.ws.rs.ext.{ContextResolver, Provider}
 
 import com.fasterxml.jackson.annotation.JsonInclude
-import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
+import com.fasterxml.jackson.core.JsonGenerator
+import com.fasterxml.jackson.databind.{JsonSerializer, ObjectMapper, SerializationFeature, SerializerProvider}
+import com.fasterxml.jackson.databind.module.SimpleModule
+
+import org.apache.spark.util.SparkEnum
 
 @Provider
 @Produces(Array(MediaType.APPLICATION_JSON))
@@ -38,10 +42,13 @@ class CustomObjectMapper extends ContextResolver[ObjectMapper]{
   mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
   mapper.setDateFormat(CustomObjectMapper.makeISODateFormat)
 
+  val sparkEnumModule = new SimpleModule()
+  sparkEnumModule.addSerializer(classOf[SparkEnum], new SparkEnumSerializer)
+  mapper.registerModule(sparkEnumModule)
+
   override def getContext(tpe: Class[_]): ObjectMapper = {
     mapper
   }
-
 }
 
 object CustomObjectMapper {
@@ -51,5 +58,10 @@ object CustomObjectMapper {
     iso8601.setCalendar(cal);
     iso8601;
   }
+}
 
+class SparkEnumSerializer extends JsonSerializer[SparkEnum] {
+  def serialize(se: SparkEnum, jgen: JsonGenerator, provider: SerializerProvider): Unit = {
+    jgen.writeString(se.toString)
+  }
 }
diff --git a/core/src/main/scala/org/apache/spark/util/SparkEnum.scala b/core/src/main/scala/org/apache/spark/util/SparkEnum.scala
new file mode 100644
index 0000000000000..a84f7120578b8
--- /dev/null
+++ b/core/src/main/scala/org/apache/spark/util/SparkEnum.scala
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.util
+
+import org.apache.spark.SparkException
+
+/**
+ * Util for the "enum" pattern we have adopted.
+ */
+private[spark] trait SparkEnum {
+  override def toString: String = {
+    val simpleName = getClass().getSimpleName()
+    val a = simpleName.indexOf('$')
+    simpleName.substring(0, a)
+  }
+
+}
+
+/**
+ * Util for the "enum" pattern we've adopted. It adds methods to parse the enum from a String.
+ * Note that you must still manually keep `values` in sync with the values you create.
+ */
+private[spark] trait SparkEnumCompanion[T <: SparkEnum] {
+  val values: Seq[T]
+
+  lazy val enumNames: Map[String, T] = {
+    try {
+      val tmpMap = values.map { t =>
+        t.toString -> t
+      }.toMap
+      if (tmpMap.size != values.size) {
+        throw new SparkException("It appears you have multiple constants with the same" +
+          " name. Perhaps your naming scheme is incompatible with SparkEnum. found names: " +
+          tmpMap.keys)
+      }
+      tmpMap
+    } catch {
+      case ie: InternalError =>
+        throw new SparkException("It appears you are using SparkEnum in a class which does not " +
+          "follow the naming conventions")
+    }
+  }
+
+
+  def parse(s: String): Option[T] = {
+    enumNames.get(s)
+  }
+
+  def parseIgnoreCase(s: String): Option[T] = {
+    enumNames.find { case (k, v) =>
+      k.toLowerCase() == s.toLowerCase()
+    }.map { _._2 }
+  }
+
+}
diff --git a/core/src/test/scala/org/apache/spark/util/SparkEnumTest.scala b/core/src/test/scala/org/apache/spark/util/SparkEnumTest.scala
new file mode 100644
index 0000000000000..ae6abad196148
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/util/SparkEnumTest.scala
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.util
+
+import org.scalatest.{FunSuite, Matchers}
+
+import org.apache.spark.SparkException
+
+class SparkEnumTest extends FunSuite with Matchers {
+
+  test("toString") {
+    DummyEnum.Foo.toString should be ("Foo")
+    DummyEnum.Bar.toString should be ("Bar")
+  }
+
+  test("parse") {
+    DummyEnum.parse("Foo") should be (Some(DummyEnum.Foo))
+    DummyEnum.parse("Bar") should be (Some(DummyEnum.Bar))
+
+    DummyEnum.parse("") should be (None)
+    DummyEnum.parse("foo") should be (None)
+    DummyEnum.parse("bar") should be (None)
+  }
+
+
+  test("bad enums") {
+    val ex = intercept[SparkException](BadEnum.enumNames)
+    // I get different errors on each run, not sure why, but either is fine.
+    ex.getMessage should (be ("It appears you have multiple constants with the same name. " +
+      "Perhaps your naming scheme is incompatible with SparkEnum. found names: Set(Bippy)") or
+      be ("It appears you are using SparkEnum in a class which does not follow the naming" +
+        " conventions"))
+  }
+
+  test("parseIgnoreCase") {
+    DummyEnum.parseIgnoreCase("Foo") should be (Some(DummyEnum.Foo))
+    DummyEnum.parseIgnoreCase("Bar") should be (Some(DummyEnum.Bar))
+
+    DummyEnum.parseIgnoreCase("") should be (None)
+    DummyEnum.parseIgnoreCase("foo") should be (Some(DummyEnum.Foo))
+    DummyEnum.parseIgnoreCase("bar") should be (Some(DummyEnum.Bar))
+  }
+}
+
+
+sealed abstract class DummyEnum extends SparkEnum
+
+object DummyEnum extends SparkEnumCompanion[DummyEnum] {
+  final val Foo = {
+    case object Foo extends DummyEnum
+    Foo
+  }
+  final val Bar = {
+    case object Bar extends DummyEnum
+    Bar
+  }
+  val values = Seq(
+    Foo,
+    Bar
+  )
+}
+
+sealed abstract class BadEnum extends SparkEnum
+
+object BadEnum extends SparkEnumCompanion[BadEnum] {
+  case object Bippy extends BadEnum
+  object Blah {
+    case object Bippy extends BadEnum
+  }
+
+  val values = Seq(Bippy, Blah.Bippy)
+}