FormatOps: ignore line breaks in infix expressions
Albert Meltzer committed Apr 12, 2020
1 parent 8b81fe6 commit 1a055b5
Showing 5,502 changed files with 63,569 additions and 68,199 deletions.
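Every hunk below shows the same effect of this change: a line break that the source already contains after an infix operator (++, text, theSameElementsAs, should, shouldBe, +) no longer forces the formatter to keep breaking at that point, so the right-hand operand is either pulled up onto the operator's line or the whole expression is rewrapped before the operator. The following is a rough before/after sketch, using invented names and shapes borrowed from the first hunk, not an excerpt from the repository's own tests:

// A minimal, self-contained sketch; object and value names are hypothetical.
object InfixBreakSketch {
  val engineParamsScores = Seq("a" -> 1.0, "b" -> 2.0)

  // Old output: the source-level newline after "++" was honored, so the
  // right-hand operand started on its own line.
  val before: Seq[String] =
    Seq(
      "MetricEvaluatorResult:",
      s"  # engine params evaluated: ${engineParamsScores.size}") ++
      Seq(
        "Optimal Engine Params:")

  // New output: the newline inside the infix expression is ignored when
  // choosing splits, so the operand joins the operator's line.
  val after: Seq[String] =
    Seq(
      "MetricEvaluatorResult:",
      s"  # engine params evaluated: ${engineParamsScores.size}") ++ Seq(
      "Optimal Engine Params:")
}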
@@ -88,16 +88,14 @@ case class MetricEvaluatorResult[R](

val strings = Seq(
"MetricEvaluatorResult:",
s" # engine params evaluated: ${engineParamsScores.size}") ++
Seq(
"Optimal Engine Params:",
s" $bestEPStr",
"Metrics:",
s" $metricHeader: ${bestScore.score}") ++
s" # engine params evaluated: ${engineParamsScores.size}") ++ Seq(
"Optimal Engine Params:",
s" $bestEPStr",
"Metrics:",
s" $metricHeader: ${bestScore.score}") ++
otherMetricHeaders.zip(bestScore.otherScores).map {
case (h, s) => s" $h: $s"
} ++
outputPath.toSeq.map { p =>
} ++ outputPath.toSeq.map { p =>
s"The best variant params can be found in $p"
}


Large diffs are not rendered by default.

@@ -73,9 +73,9 @@ object CreateWorkflow extends Logging {

val parser = new scopt.OptionParser[WorkflowConfig]("CreateWorkflow") {
override def errorOnUnknownArgument: Boolean = false
opt[String]("batch") action { (x, c) => c.copy(batch = x) } text (
"Batch label of the workflow run."
)
opt[String]("batch") action { (x, c) =>
c.copy(batch = x)
} text ("Batch label of the workflow run.")
opt[String]("engine-id") required () action { (x, c) =>
c.copy(engineId = x)
} text ("Engine's ID.")
@@ -91,16 +91,17 @@ object CreateWorkflow extends Logging {
opt[String]("engine-params-generator-class") action { (x, c) =>
c.copy(engineParamsGeneratorClass = Some(x))
} text ("Path to evaluator parameters")
opt[String]("env") action { (x, c) => c.copy(env = Some(x)) } text (
"Comma-separated list of environmental variables (in 'FOO=BAR' " +
"format) to pass to the Spark execution environment."
)
opt[Unit]("verbose") action { (x, c) => c.copy(verbose = true) } text (
"Enable verbose output."
)
opt[Unit]("debug") action { (x, c) => c.copy(debug = true) } text (
"Enable debug output."
)
opt[String]("env") action { (x, c) =>
c.copy(env = Some(x))
} text
("Comma-separated list of environmental variables (in 'FOO=BAR' " +
"format) to pass to the Spark execution environment.")
opt[Unit]("verbose") action { (x, c) =>
c.copy(verbose = true)
} text ("Enable verbose output.")
opt[Unit]("debug") action { (x, c) =>
c.copy(debug = true)
} text ("Enable debug output.")
opt[Unit]("skip-sanity-check") action { (x, c) =>
c.copy(skipSanityCheck = true)
}
@@ -170,8 +170,8 @@ object WorkflowUtils extends Logging {
case JField(f, _) => f == field
case _ => false
} map { jv =>
implicit lazy val formats = Utils
.json4sDefaultFormats + new NameParamsSerializer
implicit lazy val formats = Utils.json4sDefaultFormats +
new NameParamsSerializer
val np: NameParams =
try { jv._2.extract[NameParams] }
catch {
@@ -149,13 +149,8 @@ class EngineSuite extends FunSuite with Inside with SharedSparkContext {
val pModel22 = PersistentModelManifest(model22.getClass.getName)
val pModel23 = PersistentModelManifest(model23.getClass.getName)

models should contain theSameElementsAs Seq(
Unit,
pModel21,
pModel22,
pModel23,
model24,
model25)
models should contain theSameElementsAs
Seq(Unit, pModel21, pModel22, pModel23, model24, model25)
}

test("Engine.eval") {
@@ -207,8 +202,8 @@ class EngineSuite extends FunSuite with Inside with SharedSparkContext {
pQ shouldBe q
pModels shouldBe None
pPs should have size algoCount
pPs shouldBe Seq(
Prediction(id = 2, q = q, models = Some(model0)))
pPs shouldBe
Seq(Prediction(id = 2, q = q, models = Some(model0)))
}
}
}
@@ -266,13 +261,8 @@ class EngineSuite extends FunSuite with Inside with SharedSparkContext {
persistedModels,
params = WorkflowParams())

deployableModels should contain theSameElementsAs Seq(
model20,
model21,
model22,
model23,
model24,
model25)
deployableModels should contain theSameElementsAs
Seq(model20, model21, model22, model23, model24, model25)
}
}

@@ -290,10 +280,8 @@ class EngineTrainSuite extends FunSuite with SharedSparkContext {

val pd = ProcessedData(1, TrainingData(0))

models should contain theSameElementsAs Seq(
PAlgo0.Model(2, pd),
PAlgo1.Model(3, pd),
PAlgo0.Model(4, pd))
models should contain theSameElementsAs
Seq(PAlgo0.Model(2, pd), PAlgo1.Model(3, pd), PAlgo0.Model(4, pd))
}

test("Local DS/P/Algos") {
@@ -329,10 +317,8 @@ class EngineTrainSuite extends FunSuite with SharedSparkContext {

val pd = ProcessedData(1, TrainingData(0))

models should contain theSameElementsAs Seq(
NAlgo0.Model(2, pd),
NAlgo1.Model(3, pd),
NAlgo0.Model(4, pd))
models should contain theSameElementsAs
Seq(NAlgo0.Model(2, pd), NAlgo1.Model(3, pd), NAlgo0.Model(4, pd))
}

test("Parallel DS/P/Algos Stop-After-Read") {
@@ -380,10 +366,8 @@ class EngineTrainSuite extends FunSuite with SharedSparkContext {

val pd = ProcessedData(1, TrainingData(0, error = true))

models should contain theSameElementsAs Seq(
PAlgo0.Model(2, pd),
PAlgo1.Model(3, pd),
PAlgo0.Model(4, pd))
models should contain theSameElementsAs
Seq(PAlgo0.Model(2, pd), PAlgo1.Model(3, pd), PAlgo0.Model(4, pd))
}
}

@@ -429,8 +413,8 @@ class EngineEvalSuite extends FunSuite with Inside with SharedSparkContext {
pQ shouldBe q
pModels shouldBe None
pPs should have size 1
pPs shouldBe Seq(
Prediction(id = 3, q = q, models = Some(model0)))
pPs shouldBe
Seq(Prediction(id = 3, q = q, models = Some(model0)))
}
}
}
@@ -207,9 +207,8 @@ class JsonExtractorSuite extends FunSuite with Matchers {
val query = new ScalaQuery("query string", Some("option"))
val jValue = JsonExtractor.toJValue(JsonExtractorOption.Both, query)

compact(render(jValue)) should
be(
"""{"string":"query string","optional":"option","default":"default"}""")
compact(render(jValue)) should be(
"""{"string":"query string","optional":"option","default":"default"}""")
}

test("Scala object to JValue using option Gson does not serialize optional") {
@@ -224,9 +223,8 @@ class JsonExtractorSuite extends FunSuite with Matchers {
val query = new ScalaQuery("query string", Some("option"))
val jValue = JsonExtractor.toJValue(JsonExtractorOption.Json4sNative, query)

compact(render(jValue)) should
be(
"""{"string":"query string","optional":"option","default":"default"}""")
compact(render(jValue)) should be(
"""{"string":"query string","optional":"option","default":"default"}""")
}

test(
@@ -237,9 +235,8 @@ class JsonExtractorSuite extends FunSuite with Matchers {
query,
Utils.json4sDefaultFormats + new UpperCaseFormat)

compact(render(jValue)) should
be(
"""{"string":"QUERY STRING","optional":"OPTION","default":"DEFAULT"}""")
compact(render(jValue)) should be(
"""{"string":"QUERY STRING","optional":"OPTION","default":"DEFAULT"}""")
}

test("Java object to JValue using option Gson with custom serializer") {
@@ -287,8 +284,8 @@ class JsonExtractorSuite extends FunSuite with Matchers {
("algo2", new JavaParams("parameter2")))
val json = JsonExtractor.paramsToJson(JsonExtractorOption.Both, params)

json should be(
"""[{"algo":{"p":"parameter"}},{"algo2":{"p":"parameter2"}}]""")
json should
be("""[{"algo":{"p":"parameter"}},{"algo2":{"p":"parameter2"}}]""")
}

test("Java Params to Json using option Gson") {
@@ -297,8 +294,8 @@ class JsonExtractorSuite extends FunSuite with Matchers {
("algo2", new JavaParams("parameter2")))
val json = JsonExtractor.paramsToJson(JsonExtractorOption.Gson, params)

json should be(
"""[{"algo":{"p":"parameter"}},{"algo2":{"p":"parameter2"}}]""")
json should
be("""[{"algo":{"p":"parameter"}},{"algo2":{"p":"parameter2"}}]""")
}

test("Scala Params to Json using option Both") {
@@ -328,8 +325,8 @@ class JsonExtractorSuite extends FunSuite with Matchers {
("java", new JavaParams("parameter2")))
val json = JsonExtractor.paramsToJson(JsonExtractorOption.Both, params)

json should be(
"""[{"scala":{"a":"parameter"}},{"java":{"p":"parameter2"}}]""")
json should
be("""[{"scala":{"a":"parameter"}},{"java":{"p":"parameter2"}}]""")
}

test("Serializing Scala EngineParams works using option Json4sNative") {
@@ -390,8 +387,7 @@ private class UpperCaseFormat
case JObject(
JField("string", JString(string)) ::
JField("optional", JString(optional)) ::
JField("default", JString(default)) ::
Nil) => ScalaQuery(
JField("default", JString(default)) :: Nil) => ScalaQuery(
string.toUpperCase,
Some(optional.toUpperCase),
default.toUpperCase)
