Move to com.networknt Json Schema validator
Signed-off-by: Grisha Pomadchin <gr.pomadchin@gmail.com>
pomadchin committed May 31, 2017
1 parent 3bd4abf commit a9ab63d
Showing 5 changed files with 99 additions and 85 deletions.
2 changes: 2 additions & 0 deletions project/Dependencies.scala
@@ -48,4 +48,6 @@ object Dependencies {
val slickPG = "com.github.tminglei" %% "slick-pg" % "0.14.6"

val parserCombinators = "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.5"
+
+val jsonSchemaValidator = "com.networknt" % "json-schema-validator" % "0.1.7"
}
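Note the single % in the new coordinate: the networknt validator is a plain Java artifact, so it is not cross-built per Scala version the way %% dependencies such as slick-pg above are.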
2 changes: 1 addition & 1 deletion spark-etl/build.sbt
@@ -2,7 +2,7 @@ import Dependencies._

name := "geotrellis-spark-etl"
libraryDependencies ++= Seq(
"com.github.fge" % "json-schema-validator" % "2.2.6",
jsonSchemaValidator,
sparkCore % "provided",
scalatest % "test")

127 changes: 65 additions & 62 deletions spark-etl/src/main/resources/input-schema.json
@@ -1,71 +1,74 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "type": "array",
-  "properties": {
-    "format": {
-      "type": "string"
-    },
-    "name": {
-      "type": "string"
-    },
-    "cache": {
-      "type": "string"
-    },
-    "crs": {
-      "type": "string"
-    },
-    "noData": {
-      "type": "integer"
-    },
-    "maxTileSize": {
-      "type": "integer"
-    },
-    "numPartitions": {
-      "type": "integer"
-    },
-    "clip": {
-      "type": "object",
-      "properties": {
-        "xmin": {
-          "type": "number"
-        },
-        "ymin": {
-          "type": "number"
-        },
-        "xmax": {
-          "type": "number"
-        },
-        "ymax": {
-          "type": "number"
-        }
-      },
-      "required": [
-        "xmin",
-        "ymin",
-        "xmax",
-        "ymax"
-      ]
-    },
-    "backend": {
-      "type": "object",
-      "properties": {
-        "type": {
-          "type": "string"
-        },
-        "path": {
-          "type": "string"
-        }
-      },
-      "required": [
-        "type",
-        "path"
-      ]
-    }
-  },
-  "required": [
-    "format",
-    "name",
-    "backend"
-  ],
+  "items": {
+    "type": "object",
+    "properties": {
+      "format": {
+        "type": "string"
+      },
+      "name": {
+        "type": "string"
+      },
+      "cache": {
+        "type": "string"
+      },
+      "crs": {
+        "type": "string"
+      },
+      "noData": {
+        "type": "integer"
+      },
+      "maxTileSize": {
+        "type": "integer"
+      },
+      "numPartitions": {
+        "type": "integer"
+      },
+      "clip": {
+        "type": "object",
+        "properties": {
+          "xmin": {
+            "type": "number"
+          },
+          "ymin": {
+            "type": "number"
+          },
+          "xmax": {
+            "type": "number"
+          },
+          "ymax": {
+            "type": "number"
+          }
+        },
+        "required": [
+          "xmin",
+          "ymin",
+          "xmax",
+          "ymax"
+        ]
+      },
+      "backend": {
+        "type": "object",
+        "properties": {
+          "type": {
+            "type": "string"
+          },
+          "path": {
+            "type": "string"
+          }
+        },
+        "required": [
+          "type",
+          "path"
+        ]
+      }
+    },
+    "required": [
+      "format",
+      "name",
+      "backend"
+    ]
+  },
  "additionalProperties": false
}
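The property definitions previously sat directly on the top-level array schema, where draft-04 ignores "properties" and "required" for non-object instances, so array elements went unchecked; nesting them under "items" makes each entry of the input list validate. For orientation, an input document shaped as follows would satisfy the reworked schema (the field values here are illustrative, not taken from the commit):

[
  {
    "format": "geotiff",
    "name": "example-layer",
    "crs": "EPSG:3857",
    "backend": {
      "type": "hadoop",
      "path": "hdfs:///data/input"
    }
  }
]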
spark-etl/src/main/scala/geotrellis/spark/etl/config/BaseEtlConf.scala
@@ -17,12 +17,17 @@
package geotrellis.spark.etl.config

import geotrellis.spark.etl.config.json._
+import geotrellis.util.LazyLogging

import org.apache.spark.SparkContext
-import com.github.fge.jackson.JsonLoader
import spray.json._

-trait BaseEtlConf extends ConfigParse {
+import scala.collection.JavaConverters._
+
+trait BaseEtlConf extends ConfigParse with LazyLogging {
+private def colorString(str: String, color: String = Console.RED) = s"${color}${str}${Console.RESET}"
+private def loggerError(str: String, color: String = Console.RED) = logger.error(colorString(str, color))

val help = """
|geotrellis-etl
|
@@ -40,9 +45,9 @@ trait BaseEtlConf extends ConfigParse {

val requiredFields = Set('input, 'output, 'backendProfiles)

-val backendProfilesSchema = schemaFactory.getJsonSchema(JsonLoader.fromResource("/backend-profiles-schema.json"))
-val inputSchema = schemaFactory.getJsonSchema(JsonLoader.fromResource("/input-schema.json"))
-val outputSchema = schemaFactory.getJsonSchema(JsonLoader.fromResource("/output-schema.json"))
+val backendProfilesSchema = schemaFactory.getSchema(getClass.getResourceAsStream("/backend-profiles-schema.json"))
+val inputSchema = schemaFactory.getSchema(getClass.getResourceAsStream("/input-schema.json"))
+val outputSchema = schemaFactory.getSchema(getClass.getResourceAsStream("/output-schema.json"))

def nextOption(map: Map[Symbol, String], list: Seq[String]): Map[Symbol, String] =
list.toList match {
@@ -58,7 +63,7 @@ trait BaseEtlConf extends ConfigParse {
sys.exit(1)
}
case option :: tail => {
println(s"Unknown option ${option}")
println(colorString(s"Unknown option ${option}"))
println(help)
sys.exit(1)
}
@@ -68,28 +73,28 @@
val m = parse(args)

if(m.keySet != requiredFields) {
println(s"missing required field(s): ${(requiredFields -- m.keySet).mkString(", ")}, use --help command to get additional information about input options.")
loggerError(s"missing required field(s): ${(requiredFields -- m.keySet).mkString(", ")}, use --help command to get additional information about input options.")
sys.exit(1)
}

val(backendProfiles, input, output) = (m('backendProfiles), m('input), m('output))

-val inputValidation = inputSchema.validate(JsonLoader.fromString(input), true)
-val backendProfilesValidation = backendProfilesSchema.validate(JsonLoader.fromString(backendProfiles), true)
-val outputValidation = outputSchema.validate(JsonLoader.fromString(output), true)
+val inputValidation = inputSchema.validate(jsonNodeFromString(input))
+val backendProfilesValidation = backendProfilesSchema.validate(jsonNodeFromString(backendProfiles))
+val outputValidation = outputSchema.validate(jsonNodeFromString(output))

-if(!inputValidation.isSuccess || !backendProfilesValidation.isSuccess || !outputValidation.isSuccess) {
-if(!inputValidation.isSuccess) {
-println("input validation error:")
-println(inputValidation)
+if(!inputValidation.isEmpty || !backendProfilesValidation.isEmpty || !outputValidation.isEmpty) {
+if(!inputValidation.isEmpty) {
+loggerError(s"input validation errors:")
+inputValidation.asScala.foreach(msg => loggerError(s" - ${msg.getMessage}"))
}
-if(!backendProfilesValidation.isSuccess) {
-println("backendProfiles validation error:")
-println(backendProfilesValidation)
+if(!backendProfilesValidation.isEmpty) {
+loggerError(s"backendProfiles validation error:")
+backendProfilesValidation.asScala.foreach(msg => loggerError(s" - ${msg.getMessage}"))
}
-if(!outputValidation.isSuccess) {
-println("output validation error:")
-println(outputValidation)
+if(!outputValidation.isEmpty) {
+loggerError(s"output validation error:")
+outputValidation.asScala.foreach(msg => loggerError(s" - ${msg.getMessage}"))
}
sys.exit(1)
}
spark-etl/src/main/scala/geotrellis/spark/etl/config/ConfigParse.scala
@@ -16,7 +16,9 @@

package geotrellis.spark.etl.config

-import com.github.fge.jsonschema.main.JsonSchemaFactory
+import com.networknt.schema.JsonSchemaFactory
+import com.fasterxml.jackson.databind.JsonNode
+import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
@@ -25,7 +27,7 @@ trait ConfigParse {
val help: String
val requiredFields: Set[Symbol]

-val schemaFactory = JsonSchemaFactory.byDefault()
+val schemaFactory = new JsonSchemaFactory()

def getJson(filePath: String, conf: Configuration): String = {
val path = new Path(filePath)
@@ -35,6 +37,8 @@
is.close(); fs.close(); json
}

+def jsonNodeFromString(content: String): JsonNode = new ObjectMapper().readTree(content)
+
def nextOption(map: Map[Symbol, String], list: Seq[String]): Map[Symbol, String]

def parse(args: Seq[String])(implicit sc: SparkContext) =
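Taken together, the two traits above reduce schema validation to four steps: construct a com.networknt.schema.JsonSchemaFactory, compile a schema from a classpath resource, parse the candidate document into a Jackson JsonNode, and check whether the returned set of validation messages is empty. Below is a minimal self-contained sketch of that flow; the object name, the sample JSON, and the assumption that input-schema.json is on the classpath are illustrative, not code from the commit.

import com.networknt.schema.JsonSchemaFactory
import com.fasterxml.jackson.databind.ObjectMapper

import scala.collection.JavaConverters._

object ValidationSketch {
  def main(args: Array[String]): Unit = {
    // Build the factory and compile the input schema, as ConfigParse does.
    val factory = new JsonSchemaFactory()
    val schema = factory.getSchema(getClass.getResourceAsStream("/input-schema.json"))

    // Parse the candidate document, mirroring jsonNodeFromString.
    val input = """[{"format": "geotiff", "name": "example-layer", "backend": {"type": "hadoop", "path": "hdfs:///data/input"}}]"""
    val node = new ObjectMapper().readTree(input)

    // validate returns an empty java.util.Set of ValidationMessages when the
    // document conforms: the same isEmpty test BaseEtlConf relies on.
    val messages = schema.validate(node)
    if (messages.isEmpty) println("input is valid")
    else messages.asScala.foreach(msg => println(s" - ${msg.getMessage}"))
  }
}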
