Commit
refactor/simplify delimited/json encoders/decoders
Showing 28 changed files with 336 additions and 381 deletions.
121 changes: 121 additions & 0 deletions
src/main/scala/io/epiphanous/flinkrunner/serde/Codec.scala
@@ -0,0 +1,121 @@
package io.epiphanous.flinkrunner.serde

import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.json.JsonMapper
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.dataformat.csv.{
  CsvGenerator,
  CsvMapper,
  CsvParser,
  CsvSchema
}
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.avro.generic.GenericRecord

import java.io.OutputStream
import java.nio.charset.StandardCharsets
import scala.collection.JavaConverters._

case class Codec[E](
    typeClass: Class[E],
    jsonConfig: JsonConfig = JsonConfig(),
    delimitedConfig: DelimitedConfig = DelimitedConfig.CSV) {

  lazy val isAvro: Boolean =
    classOf[GenericRecord].isAssignableFrom(typeClass)

  lazy val avroModule: SimpleModule =
    new SimpleModule().addSerializer(new AvroJsonSerializer)

  lazy val jsonMapper: JsonMapper = {
    val mapper = JsonMapper
      .builder()
      .addModule(DefaultScalaModule)
      .addModule(new JavaTimeModule)
      .configure(
        MapperFeature.SORT_PROPERTIES_ALPHABETICALLY,
        jsonConfig.sortKeys
      )
      .configure(
        SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS,
        jsonConfig.sortKeys
      )
      .configure(SerializationFeature.INDENT_OUTPUT, jsonConfig.pretty)
      .configure(
        DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE,
        false
      )
    (if (isAvro) mapper.addModule(avroModule) else mapper).build()
  }

  lazy val jsonWriter: ObjectWriter = jsonMapper.writerFor(typeClass)

  lazy val jsonReader: ObjectReader = jsonMapper.readerFor(typeClass)

  lazy val csvMapper: CsvMapper = {
    val builder = CsvMapper
      .builder()
      .addModule(DefaultScalaModule)
      .addModule(new JavaTimeModule)
      .configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, false)
      .configure(CsvGenerator.Feature.ALWAYS_QUOTE_STRINGS, false)
      .configure(CsvParser.Feature.TRIM_SPACES, true)
      .configure(CsvParser.Feature.SKIP_EMPTY_LINES, true)
      .configure(CsvParser.Feature.ALLOW_COMMENTS, true)
      .configure(CsvParser.Feature.EMPTY_STRING_AS_NULL, true)
      .configure(
        DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE,
        false
      )
    (if (isAvro) builder.addModule(avroModule) else builder).build()
  }

  lazy val csvSchema: CsvSchema = {
    val start = csvMapper.schemaFor(typeClass)
    val updatedWithConfig = (if (isAvro) {
                               val columns = start
                                 .iterator()
                                 .asScala
                                 .toList
                                 .filterNot(c =>
                                   c.hasName("schema") || c.hasName(
                                     "specificData"
                                   )
                                 )
                                 .asJava
                               start
                                 .withoutColumns()
                                 .rebuild()
                                 .addColumns(columns)
                                 .build()
                             } else start)
      .withColumnSeparator(delimitedConfig.columnSeparator)
      .withLineSeparator(delimitedConfig.lineSeparator)
      .withEscapeChar(delimitedConfig.escapeChar)
      .withUseHeader(false) // delimited header use handled in encoder
    if (delimitedConfig.useQuotes)
      updatedWithConfig.withQuoteChar(delimitedConfig.quoteCharacter)
    else updatedWithConfig.withoutQuoteChar()
  }

  lazy val csvHeader: Array[Byte] = csvSchema
    .iterator()
    .asScala
    .map(_.getName)
    .toList
    .mkString(
      "",
      delimitedConfig.columnSeparator.toString,
      delimitedConfig.lineSeparator
    )
    .getBytes(StandardCharsets.UTF_8)

  def maybeWriteHeader(stream: OutputStream): Unit =
    if (delimitedConfig.useHeader) stream.write(csvHeader)

  lazy val csvWriter: ObjectWriter = csvMapper.writer(csvSchema)

  lazy val csvReader: ObjectReader =
    csvMapper.readerFor(typeClass).`with`(csvSchema)
}
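For orientation, here is a minimal usage sketch of the new Codec class. The Foo case class and its values are hypothetical and not part of this commit; the sketch assumes JsonConfig() and DelimitedConfig.CSV behave as the defaults in the signature above, and that Jackson's Scala module can derive a CSV schema for a plain case class.

import io.epiphanous.flinkrunner.serde.{Codec, DelimitedConfig, JsonConfig}

// Hypothetical record type, used only for illustration.
case class Foo(id: String, count: Int)

object CodecSketch {
  def main(args: Array[String]): Unit = {
    // One Codec instance lazily builds both the JSON and CSV mappers for Foo.
    val codec = Codec(
      classOf[Foo],
      jsonConfig = JsonConfig(),
      delimitedConfig = DelimitedConfig.CSV
    )

    val foo = Foo("a1", 42)

    // JSON round trip through the JsonMapper configured above.
    val json     = codec.jsonWriter.writeValueAsString(foo)
    val fromJson = codec.jsonReader.readValue[Foo](json)

    // Delimited round trip through the CsvMapper and the schema derived from Foo.
    val csv     = codec.csvWriter.writeValueAsString(foo)
    val fromCsv = codec.csvReader.readValue[Foo](csv)

    println(s"json=$json csv=$csv roundTrip=${fromJson == foo && fromCsv == foo}")
  }
}

Note that for Avro types (anything assignable to GenericRecord), csvSchema also filters out the schema and specificData columns, so only data fields appear in the delimited output.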
75 changes: 0 additions & 75 deletions
src/main/scala/io/epiphanous/flinkrunner/serde/DelimitedCodec.scala
This file was deleted.
20 changes: 14 additions & 6 deletions
src/main/scala/io/epiphanous/flinkrunner/serde/DelimitedRowDecoder.scala
@@ -1,20 +1,28 @@
 package io.epiphanous.flinkrunner.serde

-import com.fasterxml.jackson.databind.ObjectReader
 import org.apache.flink.api.common.typeinfo.TypeInformation

 import scala.util.Try

+/** Decode a delimited text line into an instance of the requested class.
+  *
+  * @param delimitedConfig
+  *   a delimited codec config (defaults to csv)
+  * @tparam E
+  *   the type to decode into
+  */
 class DelimitedRowDecoder[E: TypeInformation](
     delimitedConfig: DelimitedConfig =
       DelimitedConfig.CSV.copy(useHeader = false))
-    extends RowDecoder[E]
-    with DelimitedCodec {
+    extends RowDecoder[E] {

   @transient
-  lazy val reader: ObjectReader =
-    getReader(delimitedConfig, implicitly[TypeInformation[E]].getTypeClass)
+  lazy val codec: Codec[E] = Codec(
+    implicitly[TypeInformation[E]].getTypeClass,
+    delimitedConfig = delimitedConfig
+  )

   override def decode(line: String): Try[E] =
-    Try(reader.readValue[E](line))
+    Try(codec.csvReader.readValue[E](line))

 }
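A corresponding sketch of the refactored decoder in use. Foo is again hypothetical, the implicit TypeInformation is assumed to come from Flink's Scala API (createTypeInformation), and the decode contract is taken from the signature in the diff above.

import io.epiphanous.flinkrunner.serde.{DelimitedConfig, DelimitedRowDecoder}
import org.apache.flink.api.scala.createTypeInformation

import scala.util.{Failure, Success}

// Hypothetical record type, used only for illustration.
case class Foo(id: String, count: Int)

object DecoderSketch {
  def main(args: Array[String]): Unit = {
    // TypeInformation[Foo] is supplied implicitly by createTypeInformation.
    val decoder = new DelimitedRowDecoder[Foo](
      DelimitedConfig.CSV.copy(useHeader = false)
    )

    // decode wraps parsing in a Try, so a malformed line surfaces as a
    // Failure instead of throwing inside a Flink operator.
    decoder.decode("a1,42") match {
      case Success(foo) => println(s"decoded: $foo")
      case Failure(err) => println(s"bad line: $err")
    }
  }
}

Since the decoder now delegates to Codec, delimited decoding shares its Jackson configuration with the JSON readers and writers defined in Codec.scala above.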