From 48c9026d004d4ea34387536cc65b6a4e307feebe Mon Sep 17 00:00:00 2001 From: Erica Chiu Date: Mon, 19 Aug 2019 17:03:07 -0700 Subject: [PATCH 1/5] Make model selector metadata to metric more robust --- .../impl/selector/ModelSelectorSummary.scala | 21 ++++++---- .../com/salesforce/op/ModelInsightsTest.scala | 41 +++++++++++++++++++ 2 files changed, 55 insertions(+), 7 deletions(-) diff --git a/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala b/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala index 72262b999f..d4d4a8ee32 100644 --- a/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala +++ b/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala @@ -240,19 +240,26 @@ case object ModelSelectorSummary { ReflectionUtils.classForName(className) match { case n if n == classOf[MultiMetrics] => JsonUtils.fromString[Map[String, Map[String, Any]]](json).map{ d => - val asMetrics = d.flatMap{ case (_, values) => values.map{ + val asMetrics = d.flatMap{ case (_, values) => + values.map{ case (nm: String, mp: Map[String, Any]@unchecked) => val valsJson = JsonUtils.toJsonString(mp) // TODO: gross but it works. try to find a better way - nm match { - case OpEvaluatorNames.Binary.humanFriendlyName => + + val binary = classOf[BinaryClassificationMetrics].getDeclaredFields.map(f => f.getName).toSet + val multi = classOf[MultiClassificationMetrics].getDeclaredFields.map(f => f.getName).toSet + val binscore = classOf[BinaryClassificationBinMetrics].getDeclaredFields.map(f => f.getName).toSet + val regression = classOf[RegressionMetrics].getDeclaredFields.map(f => f.getName).toSet + mp.keys match { + case `binary` => nm -> JsonUtils.fromString[BinaryClassificationMetrics](valsJson).get - case OpEvaluatorNames.BinScore.humanFriendlyName => + case `binscore` => nm -> JsonUtils.fromString[BinaryClassificationBinMetrics](valsJson).get - case OpEvaluatorNames.Multi.humanFriendlyName => + case `multi` => nm -> JsonUtils.fromString[MultiClassificationMetrics](valsJson).get - case OpEvaluatorNames.Regression.humanFriendlyName => + case `regression` => nm -> JsonUtils.fromString[RegressionMetrics](valsJson).get - case _ => nm -> JsonUtils.fromString[SingleMetric](valsJson).get + case _ => + nm -> JsonUtils.fromString[SingleMetric](valsJson).get }} } MultiMetrics(asMetrics) diff --git a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala index b2c10a91f4..cd02e002c7 100644 --- a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala +++ b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala @@ -30,6 +30,7 @@ package com.salesforce.op +import com.salesforce.op.evaluators.{MultiClassificationMetrics, MultiMetrics, OpEvaluatorNames, OpMultiClassificationEvaluator, OpRegressionEvaluator, RegressionMetrics} import com.salesforce.op.features.types._ import com.salesforce.op.features.{Feature, FeatureDistributionType, FeatureLike} import com.salesforce.op.filters._ @@ -798,4 +799,44 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou } } } + + it should "return both metrics when having multiple multi-class classification metrics in model insights" in { + val prediction = MultiClassificationModelSelector + .withCrossValidation(seed = 42, + trainTestEvaluators = Seq(new OpMultiClassificationEvaluator( + name = OpEvaluatorNames.Custom("second", "second") + )), + splitter = 
Option(DataCutter(seed = 42, reserveTestFraction = 0.1)),
+        modelsAndParameters = models)
+      .setInput(label, checked)
+      .getOutput()
+    val workflow = new OpWorkflow().setResultFeatures(prediction).setParameters(params).setReader(dataReader)
+    val workflowModel = workflow.train()
+    val insights = workflowModel.modelInsights(prediction)
+    val trainEval = insights.selectedModelInfo.get.trainEvaluation
+    trainEval shouldBe a[MultiMetrics]
+    val trainMetric = trainEval.asInstanceOf[MultiMetrics].metrics
+    trainMetric.size shouldEqual 2
+    trainMetric.foreach( metric => metric._2 shouldBe a[MultiClassificationMetrics])
+  }
+
+  it should "return both metrics when having multiple regression metrics in model insights" in {
+    val prediction = RegressionModelSelector
+      .withCrossValidation(seed = 42,
+        trainTestEvaluators = Seq(new OpRegressionEvaluator(
+          name = OpEvaluatorNames.Custom("second", "second")
+        )),
+        dataSplitter = Option(DataSplitter(seed = 42, reserveTestFraction = 0.1)),
+        modelsAndParameters = models)
+      .setInput(label, features)
+      .getOutput()
+    val workflow = new OpWorkflow().setResultFeatures(prediction).setParameters(params).setReader(dataReader)
+    val workflowModel = workflow.train()
+    val insights = workflowModel.modelInsights(prediction)
+    val trainEval = insights.selectedModelInfo.get.trainEvaluation
+    trainEval shouldBe a[MultiMetrics]
+    val trainMetric = trainEval.asInstanceOf[MultiMetrics].metrics
+    trainMetric.size shouldEqual 2
+    trainMetric.foreach( metric => metric._2 shouldBe a[RegressionMetrics])
+  }
 }

From 35f47eba9e0d4fc7802983fbd2dbaa3c08d8ad45 Mon Sep 17 00:00:00 2001
From: Erica Chiu
Date: Tue, 20 Aug 2019 09:23:54 -0700
Subject: [PATCH 2/5] Add binary classification test

---
 .../com/salesforce/op/ModelInsightsTest.scala | 25 ++++++++++++++++++-
 1 file changed, 24 insertions(+), 1 deletion(-)

diff --git a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala
index cd02e002c7..d74e452f38 100644
--- a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala
+++ b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala
@@ -30,7 +30,7 @@
 
 package com.salesforce.op
 
-import com.salesforce.op.evaluators.{MultiClassificationMetrics, MultiMetrics, OpEvaluatorNames, OpMultiClassificationEvaluator, OpRegressionEvaluator, RegressionMetrics}
+import com.salesforce.op.evaluators._
 import com.salesforce.op.features.types._
 import com.salesforce.op.features.{Feature, FeatureDistributionType, FeatureLike}
 import com.salesforce.op.filters._
@@ -800,6 +800,29 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou
     }
   }
 
+  it should "return both metrics when having multiple binary classification metrics in model insights" in {
+    val prediction = BinaryClassificationModelSelector
+      .withCrossValidation(seed = 42,
+        trainTestEvaluators = Seq(new OpBinaryClassificationEvaluator(
+          name = OpEvaluatorNames.Custom("second", "second")
+        )),
+        splitter = Option(DataSplitter(seed = 42, reserveTestFraction = 0.1)),
+        modelsAndParameters = models)
+      .setInput(label, checked)
+      .getOutput()
+    val workflow = new OpWorkflow().setResultFeatures(prediction).setParameters(params).setReader(dataReader)
+    val workflowModel = workflow.train()
+    val insights = workflowModel.modelInsights(prediction)
+    val trainEval = insights.selectedModelInfo.get.trainEvaluation
+    trainEval shouldBe a[MultiMetrics]
+    val trainMetric = trainEval.asInstanceOf[MultiMetrics].metrics
+    trainMetric.size shouldEqual 4
+    
trainMetric.map( metric => metric._2.isInstanceOf[BinaryClassificationMetrics]).toArray should + contain theSameElementsAs Array(true, true, false, false) + trainMetric.map( metric => metric._2.isInstanceOf[BinaryClassificationBinMetrics]).toArray should + contain theSameElementsAs Array(true, true, false, false) + } + it should "return both metrics when having multiple multi-class classification metrics in model insights" in { val prediction = MultiClassificationModelSelector .withCrossValidation(seed = 42, From 79c90839177a9403d410b3801a6cf3c754de406e Mon Sep 17 00:00:00 2001 From: Erica Chiu Date: Tue, 20 Aug 2019 09:57:29 -0700 Subject: [PATCH 3/5] Fix technicalities in test --- .../test/scala/com/salesforce/op/ModelInsightsTest.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala index d74e452f38..9ea777c2fa 100644 --- a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala +++ b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala @@ -816,11 +816,11 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou val trainEval = insights.selectedModelInfo.get.trainEvaluation trainEval shouldBe a[MultiMetrics] val trainMetric = trainEval.asInstanceOf[MultiMetrics].metrics - trainMetric.size shouldEqual 4 + trainMetric.size shouldEqual 3 trainMetric.map( metric => metric._2.isInstanceOf[BinaryClassificationMetrics]).toArray should - contain theSameElementsAs Array(true, true, false, false) + contain theSameElementsAs Array(true, true, false) trainMetric.map( metric => metric._2.isInstanceOf[BinaryClassificationBinMetrics]).toArray should - contain theSameElementsAs Array(true, true, false, false) + contain theSameElementsAs Array(true, false, false) } it should "return both metrics when having multiple multi-class classification metrics in model insights" in { From 7f2d6c83962cc2d793314213f3b64faf8d24b233 Mon Sep 17 00:00:00 2001 From: Matthew Tovbin Date: Tue, 20 Aug 2019 13:30:28 -0700 Subject: [PATCH 4/5] fixed the test --- .../op/evaluators/EvaluationMetrics.scala | 4 +- .../impl/selector/ModelSelectorSummary.scala | 38 +++++++---------- .../com/salesforce/op/ModelInsightsTest.scala | 41 +++++++++++-------- 3 files changed, 41 insertions(+), 42 deletions(-) diff --git a/core/src/main/scala/com/salesforce/op/evaluators/EvaluationMetrics.scala b/core/src/main/scala/com/salesforce/op/evaluators/EvaluationMetrics.scala index 56b63577ff..27c3cc1aeb 100644 --- a/core/src/main/scala/com/salesforce/op/evaluators/EvaluationMetrics.scala +++ b/core/src/main/scala/com/salesforce/op/evaluators/EvaluationMetrics.scala @@ -213,10 +213,12 @@ object OpEvaluatorNames extends Enum[OpEvaluatorNames] { case object BinScore extends OpEvaluatorNames("binScoreEval", "bin score evaluation metrics") case object Multi extends OpEvaluatorNames("multiEval", "multiclass evaluation metrics") case object Regression extends OpEvaluatorNames("regEval", "regression evaluation metrics") - case object Forecast extends OpEvaluatorNames("regForecast", "regression evaluation metrics") + case object Forecast extends OpEvaluatorNames("regForecast", "forecast evaluation metrics") case class Custom(name: String, humanName: String) extends OpEvaluatorNames(name, humanName) { override def entryName: String = name.toLowerCase } + def withFriendlyNameInsensitive(name: String): Option[OpEvaluatorNames] = + values.collectFirst { case n if 
n.humanFriendlyName.equalsIgnoreCase(name) => n } override def withName(name: String): OpEvaluatorNames = Try(super.withName(name)).getOrElse(Custom(name, name)) override def withNameInsensitive(name: String): OpEvaluatorNames = super.withNameInsensitiveOption(name) .getOrElse(Custom(name, name)) diff --git a/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala b/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala index d4d4a8ee32..d3c7c4ec93 100644 --- a/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala +++ b/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala @@ -239,33 +239,25 @@ case object ModelSelectorSummary { ReflectionUtils.classForName(className) match { case n if n == classOf[MultiMetrics] => - JsonUtils.fromString[Map[String, Map[String, Any]]](json).map{ d => - val asMetrics = d.flatMap{ case (_, values) => - values.map{ - case (nm: String, mp: Map[String, Any]@unchecked) => + JsonUtils.fromString[Map[String, Map[String, Any]]](json).map { d => + val asMetrics = d.flatMap { case (_, values) => + values.collect { case (nm: String, mp: Map[String, Any]@unchecked) => val valsJson = JsonUtils.toJsonString(mp) // TODO: gross but it works. try to find a better way - - val binary = classOf[BinaryClassificationMetrics].getDeclaredFields.map(f => f.getName).toSet - val multi = classOf[MultiClassificationMetrics].getDeclaredFields.map(f => f.getName).toSet - val binscore = classOf[BinaryClassificationBinMetrics].getDeclaredFields.map(f => f.getName).toSet - val regression = classOf[RegressionMetrics].getDeclaredFields.map(f => f.getName).toSet - mp.keys match { - case `binary` => - nm -> JsonUtils.fromString[BinaryClassificationMetrics](valsJson).get - case `binscore` => - nm -> JsonUtils.fromString[BinaryClassificationBinMetrics](valsJson).get - case `multi` => - nm -> JsonUtils.fromString[MultiClassificationMetrics](valsJson).get - case `regression` => - nm -> JsonUtils.fromString[RegressionMetrics](valsJson).get - case _ => - nm -> JsonUtils.fromString[SingleMetric](valsJson).get - }} + nm -> (OpEvaluatorNames.withFriendlyNameInsensitive(nm) match { + case Some(OpEvaluatorNames.Binary) => JsonUtils.fromString[BinaryClassificationMetrics](valsJson) + case Some(OpEvaluatorNames.BinScore) => JsonUtils.fromString[BinaryClassificationBinMetrics](valsJson) + case Some(OpEvaluatorNames.Multi) => JsonUtils.fromString[MultiClassificationMetrics](valsJson) + case Some(OpEvaluatorNames.Regression) => JsonUtils.fromString[RegressionMetrics](valsJson) + case Some(OpEvaluatorNames.Forecast) => JsonUtils.fromString[ForecastMetrics](valsJson) + case _ => JsonUtils.fromString[SingleMetric](valsJson) + }).get + } } MultiMetrics(asMetrics) }.recoverWith { case t: Throwable => error(n, t) } - case n => JsonUtils.fromString(json)(ClassTag(n)) - .recoverWith { case t: Throwable => error(n, t) } + case n => + JsonUtils.fromString(json)(ClassTag(n)) + .recoverWith { case t: Throwable => error(n, t) } } } } diff --git a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala index 9ea777c2fa..181cdf02d8 100644 --- a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala +++ b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala @@ -800,11 +800,11 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou } } - it should "return both metrics when having multiple 
binary classification metrics in model insights" in { + it should "return default & custom metrics when having multiple binary classification metrics in model insights" in { val prediction = BinaryClassificationModelSelector .withCrossValidation(seed = 42, - trainTestEvaluators = Seq(new OpBinaryClassificationEvaluator( - name = OpEvaluatorNames.Custom("second", "second") + trainTestEvaluators = Seq(Evaluators.BinaryClassification.custom( + metricName = "second", evaluateFn = _ => 0.0 )), splitter = Option(DataSplitter(seed = 42, reserveTestFraction = 0.1)), modelsAndParameters = models) @@ -816,18 +816,19 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou val trainEval = insights.selectedModelInfo.get.trainEvaluation trainEval shouldBe a[MultiMetrics] val trainMetric = trainEval.asInstanceOf[MultiMetrics].metrics - trainMetric.size shouldEqual 3 - trainMetric.map( metric => metric._2.isInstanceOf[BinaryClassificationMetrics]).toArray should - contain theSameElementsAs Array(true, true, false) - trainMetric.map( metric => metric._2.isInstanceOf[BinaryClassificationBinMetrics]).toArray should - contain theSameElementsAs Array(true, false, false) + trainMetric.map { case (metricName, metric) => metricName -> metric.getClass } should contain theSameElementsAs Seq( + OpEvaluatorNames.Binary.humanFriendlyName -> classOf[BinaryClassificationMetrics], + OpEvaluatorNames.BinScore.humanFriendlyName -> classOf[BinaryClassificationBinMetrics], + "second" -> classOf[SingleMetric] + ) } - it should "return both metrics when having multiple multi-class classification metrics in model insights" in { + it should + "return default & custom metrics when having multiple multi-class classification metrics in model insights" in { val prediction = MultiClassificationModelSelector .withCrossValidation(seed = 42, - trainTestEvaluators = Seq(new OpMultiClassificationEvaluator( - name = OpEvaluatorNames.Custom("second", "second") + trainTestEvaluators = Seq(Evaluators.MultiClassification.custom( + metricName = "second", evaluateFn = _ => 0.0 )), splitter = Option(DataCutter(seed = 42, reserveTestFraction = 0.1)), modelsAndParameters = models) @@ -839,15 +840,17 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou val trainEval = insights.selectedModelInfo.get.trainEvaluation trainEval shouldBe a[MultiMetrics] val trainMetric = trainEval.asInstanceOf[MultiMetrics].metrics - trainMetric.size shouldEqual 2 - trainMetric.foreach( metric => metric._2 shouldBe a[MultiClassificationMetrics]) + trainMetric.map { case (metricName, metric) => metricName -> metric.getClass } should contain theSameElementsAs Seq( + OpEvaluatorNames.Multi.humanFriendlyName -> classOf[MultiClassificationMetrics], + "second" -> classOf[SingleMetric] + ) } - it should "return both metrics when having multiple regression metrics in model insights" in { + it should "return default & custom metrics when having multiple regression metrics in model insights" in { val prediction = RegressionModelSelector .withCrossValidation(seed = 42, - trainTestEvaluators = Seq(new OpRegressionEvaluator( - name = OpEvaluatorNames.Custom("second", "second") + trainTestEvaluators = Seq(Evaluators.Regression.custom( + metricName = "second", evaluateFn = _ => 0.0 )), dataSplitter = Option(DataSplitter(seed = 42, reserveTestFraction = 0.1)), modelsAndParameters = models) @@ -859,7 +862,9 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou val trainEval = 
insights.selectedModelInfo.get.trainEvaluation trainEval shouldBe a[MultiMetrics] val trainMetric = trainEval.asInstanceOf[MultiMetrics].metrics - trainMetric.size shouldEqual 2 - trainMetric.foreach( metric => metric._2 shouldBe a[RegressionMetrics]) + trainMetric.map { case (metricName, metric) => metricName -> metric.getClass } should contain theSameElementsAs Seq( + OpEvaluatorNames.Regression.humanFriendlyName -> classOf[RegressionMetrics], + "second" -> classOf[SingleMetric] + ) } } From bd6178ae685ee8bfdf2959a1a43992e00cc7329e Mon Sep 17 00:00:00 2001 From: Matthew Tovbin Date: Tue, 20 Aug 2019 13:57:54 -0700 Subject: [PATCH 5/5] more fixes --- .../impl/selector/ModelSelectorSummary.scala | 3 ++- .../com/salesforce/op/ModelInsightsTest.scala | 20 +++++++++---------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala b/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala index d3c7c4ec93..efeee59b03 100644 --- a/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala +++ b/core/src/main/scala/com/salesforce/op/stages/impl/selector/ModelSelectorSummary.scala @@ -249,7 +249,8 @@ case object ModelSelectorSummary { case Some(OpEvaluatorNames.Multi) => JsonUtils.fromString[MultiClassificationMetrics](valsJson) case Some(OpEvaluatorNames.Regression) => JsonUtils.fromString[RegressionMetrics](valsJson) case Some(OpEvaluatorNames.Forecast) => JsonUtils.fromString[ForecastMetrics](valsJson) - case _ => JsonUtils.fromString[SingleMetric](valsJson) + case _ => // assume a custom metric here, hence trying to parse as single metric value + JsonUtils.fromString[SingleMetric](valsJson) }).get } } diff --git a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala index 181cdf02d8..26b2d5c391 100644 --- a/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala +++ b/core/src/test/scala/com/salesforce/op/ModelInsightsTest.scala @@ -51,7 +51,7 @@ import org.junit.runner.RunWith import com.salesforce.op.features.types.Real import com.salesforce.op.stages.impl.feature.TextStats import com.twitter.algebird.Moments -import org.apache.spark.sql.DataFrame +import org.apache.spark.sql.{DataFrame, Dataset} import org.scalatest.FlatSpec import org.scalatest.junit.JUnitRunner import org.apache.spark.sql.functions._ @@ -803,9 +803,10 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou it should "return default & custom metrics when having multiple binary classification metrics in model insights" in { val prediction = BinaryClassificationModelSelector .withCrossValidation(seed = 42, - trainTestEvaluators = Seq(Evaluators.BinaryClassification.custom( - metricName = "second", evaluateFn = _ => 0.0 - )), + trainTestEvaluators = Seq( + Evaluators.BinaryClassification.custom(metricName = "second", evaluateFn = _ => 0.0), + Evaluators.BinaryClassification.custom(metricName = "third", evaluateFn = _ => 1.0) + ), splitter = Option(DataSplitter(seed = 42, reserveTestFraction = 0.1)), modelsAndParameters = models) .setInput(label, checked) @@ -819,7 +820,8 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou trainMetric.map { case (metricName, metric) => metricName -> metric.getClass } should contain theSameElementsAs Seq( OpEvaluatorNames.Binary.humanFriendlyName -> classOf[BinaryClassificationMetrics], 
OpEvaluatorNames.BinScore.humanFriendlyName -> classOf[BinaryClassificationBinMetrics], - "second" -> classOf[SingleMetric] + "second" -> classOf[SingleMetric], + "third" -> classOf[SingleMetric] ) } @@ -827,9 +829,7 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou "return default & custom metrics when having multiple multi-class classification metrics in model insights" in { val prediction = MultiClassificationModelSelector .withCrossValidation(seed = 42, - trainTestEvaluators = Seq(Evaluators.MultiClassification.custom( - metricName = "second", evaluateFn = _ => 0.0 - )), + trainTestEvaluators = Seq(Evaluators.MultiClassification.custom(metricName = "second", evaluateFn = _ => 0.0)), splitter = Option(DataCutter(seed = 42, reserveTestFraction = 0.1)), modelsAndParameters = models) .setInput(label, checked) @@ -849,9 +849,7 @@ class ModelInsightsTest extends FlatSpec with PassengerSparkFixtureTest with Dou it should "return default & custom metrics when having multiple regression metrics in model insights" in { val prediction = RegressionModelSelector .withCrossValidation(seed = 42, - trainTestEvaluators = Seq(Evaluators.Regression.custom( - metricName = "second", evaluateFn = _ => 0.0 - )), + trainTestEvaluators = Seq(Evaluators.Regression.custom(metricName = "second", evaluateFn = _ => 0.0)), dataSplitter = Option(DataSplitter(seed = 42, reserveTestFraction = 0.1)), modelsAndParameters = models) .setInput(label, features)