Add HdfsRepository support (snowplow#54)
Exclude the hadoop/hdfs jars from the packaged artifact (marked "provided"); add resolver tests.
liningalex committed May 31, 2016
1 parent 0e03982 commit ae3f7d5
Showing 3 changed files with 49 additions and 5 deletions.
6 changes: 4 additions & 2 deletions project/Dependencies.scala
@@ -44,8 +44,10 @@ object Dependencies {
   val scalaz7 = "org.scalaz" %% "scalaz-core" % V.scalaz7
   val collUtils = "com.twitter" %% "util-collection" % V.collUtils
   // hadoop hdfs
-  val hadoopCommon = "org.apache.hadoop" % "hadoop-common" % V.hadoop
-  val hadoopHdfs = "org.apache.hadoop" % "hadoop-hdfs" % V.hadoop
+  val hadoopCommon = "org.apache.hadoop" % "hadoop-common" % V.hadoop % "provided"
+  val hadoopHdfs = "org.apache.hadoop" % "hadoop-hdfs" % V.hadoop % "provided"
+  val hadoopCommonTest = "org.apache.hadoop" % "hadoop-common" % V.hadoop % "test"
+  val hadoopHdfsTest = "org.apache.hadoop" % "hadoop-hdfs" % V.hadoop % "test"
   // Scala (test only)
   val specs2 = "org.specs2" %% "specs2" % V.specs2 % "test"
   val scalazSpecs2 = "org.typelevel" %% "scalaz-specs2" % V.scalazSpecs2 % "test"
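The "provided" scope keeps hadoop-common and hadoop-hdfs on the compile classpath but out of the artifact that downstream projects pull in, while the parallel "test" entries keep the same jars available to the test suite. A rough sketch of what a consuming project would then have to declare itself if it actually uses the HDFS repository (version strings are placeholders, not taken from this commit):

// build.sbt of a downstream application (illustrative only): since the client no
// longer ships the Hadoop jars, an application using the HDFS repository must add them.
libraryDependencies ++= Seq(
  "org.apache.hadoop" % "hadoop-common" % "2.7.2",  // placeholder version; match your cluster
  "org.apache.hadoop" % "hadoop-hdfs"   % "2.7.2"   // placeholder version; match your cluster
)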
Resolver.scala
@@ -38,7 +38,8 @@ import org.json4s.scalaz.JsonScalaz._
 import repositories.{
   RepositoryRef,
   EmbeddedRepositoryRef,
-  HttpRepositoryRef
+  HttpRepositoryRef,
+  HdfsRepositoryRef
 }
 import validation.SchemaValidation.{ isValid, getErrors }
 import validation.ValidatableJsonMethods
@@ -51,7 +52,7 @@ import ProcessingMessageMethods._
   */
 object Resolver {
 
-  private val ConfigurationSchema = SchemaCriterion("com.snowplowanalytics.iglu", "resolver-config", "jsonschema", 1, 0, 1)
+  private val ConfigurationSchema = SchemaCriterion("com.snowplowanalytics.iglu", "resolver-config", "jsonschema", 1, 1, 0)
 
   /**
    * Helper class responsible for aggregating repository lookup errors
@@ -160,6 +161,8 @@ object Resolver {
         EmbeddedRepositoryRef.parse(rc)
       } else if (HttpRepositoryRef.isHttp(rc)) {
         HttpRepositoryRef.parse(rc)
+      } else if (HdfsRepositoryRef.isHdfs(rc)) {
+        HdfsRepositoryRef.parse(rc)
       } else {
         s"Configuration unrecognizable as either embedded or HTTP repository".fail.toProcessingMessageNel
       }
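The new branch above calls HdfsRepositoryRef.isHdfs and HdfsRepositoryRef.parse, neither of which is part of this diff (the class comes from the parent commit). A minimal sketch of what the detection side could look like, assuming json4s (already imported by the Resolver) and the connection/hdfs/path layout exercised in ResolverSpec below; the real HdfsRepositoryRef may differ:

// Sketch only: the member names mirror the call site and the test JSON,
// but the bodies are assumptions, not the code shipped in this repository.
import org.json4s._

object HdfsRepositoryRefSketch {
  // A repository entry counts as HDFS-backed when its connection block has an "hdfs" object.
  def isHdfs(config: JValue): Boolean =
    (config \ "connection" \ "hdfs") != JNothing

  // Pull the path out of connection.hdfs.path; the real parse would also build the
  // RepositoryRefConfig and surface failures as ProcessingMessages.
  def extractPath(config: JValue): Option[String] =
    (config \ "connection" \ "hdfs" \ "path") match {
      case JString(path) => Some(path)
      case _             => None
    }
}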
ResolverSpec.scala
@@ -33,6 +33,7 @@ import com.fasterxml.jackson.databind.JsonNode
 import repositories.{
   EmbeddedRepositoryRef,
   HttpRepositoryRef,
+  HdfsRepositoryRef,
   RepositoryRefConfig
 }
 import validation.ProcessingMessageMethods._
@@ -49,10 +50,12 @@ object ResolverSpec {
 
     private val embedRef: (String, Int) => EmbeddedRepositoryRef = (prefix, priority) =>
       EmbeddedRepositoryRef(RepositoryRefConfig("An embedded repo", priority, List(prefix)), "/embed-path")
 
+    private val hdfsRef: (String, Int) => HdfsRepositoryRef = (prefix, priority) =>
+      HdfsRepositoryRef(RepositoryRefConfig("An hdfs repo", priority, List(prefix)), "/hdfs-path")
     val one = embedRef("com.acme", 0)
     val two = embedRef("de.acompany.snowplow", 40)
     val three = embedRef("de.acompany.snowplow", 100)
+    val four = hdfsRef("de.acompany.snowplow", 200)
   }
 
   def notFoundError(schemaKey: String, repos: List[String]): String =
@@ -75,6 +78,7 @@ class ResolverSpec extends Specification with DataTables with ValidationMatchers
   a Resolver should retry after non-404 errors $e6
   a Resolver should give up after 3rd retry $e7
   a Resolver should accumulate errors from all repositories $e8
+  a Resolver should parse a resolver configuration containing an HDFS repository $e9
   """
 
   import ResolverSpec._
@@ -268,4 +272,39 @@ class ResolverSpec extends Specification with DataTables with ValidationMatchers
       )
     } and(there was 2.times(httpRep1).lookupSchema(schemaKey))
   }
+
+  def e9 = {
+
+    val config =
+      s"""|{
+          |"schema": "iglu:com.snowplowanalytics.iglu/resolver-config/jsonschema/1-1-0",
+          |"data": {
+          |"cacheSize": 500,
+          |"repositories": [
+          |{
+          |"name": "Iglu Central",
+          |"priority": 0,
+          |"vendorPrefixes": [ "com.snowplowanalytics" ],
+          |"connection": {
+          |"http": {
+          |"uri": "http://iglucentral.com"
+          |}
+          |}
+          |}, {
+          |"name": "An hdfs repo",
+          |"priority": 200,
+          |"vendorPrefixes": [ "de.acompany.snowplow" ],
+          |"connection": {
+          |"hdfs": {
+          |"path": "/hdfs-path"
+          |}
+          |}
+          |}
+          |]
+          |}
+          |}""".stripMargin.replaceAll("[\n\r]","")
+
+    val expected = Resolver(cacheSize = 500, SpecHelpers.IgluCentral, Repos.four)
+    Resolver.parse(SpecHelpers.asJsonNode(config)) must beSuccessful(expected)
+  }
 }
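e9 collapses the configuration to a single line so it can go through SpecHelpers.asJsonNode; an application would feed the same 1-1-0 config (now allowed to carry an hdfs connection next to http) straight to Resolver.parse. A rough usage sketch under assumed shapes — configString is a placeholder for the JSON shown in e9, and the match assumes Resolver.parse returns a scalaz Validation, as the beSuccessful matcher above suggests:

// Illustrative wiring only; not code from this commit.
import com.fasterxml.jackson.databind.ObjectMapper
import scalaz.{Success, Failure}

val mapper = new ObjectMapper()
val configNode = mapper.readTree(configString)

Resolver.parse(configNode) match {
  case Success(resolver) =>
    // schemas whose vendor matches "de.acompany.snowplow" can now resolve from /hdfs-path
    ()
  case Failure(messages) =>
    // the configuration did not validate against resolver-config 1-1-0
    ()
}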
