Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Specify categorical variables in metadata #120

Merged
merged 16 commits into from
Sep 11, 2018
Merged
Show file tree
Hide file tree
Changes from 11 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
/*
* Copyright (c) 2017, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package com.salesforce.op.stages.impl.feature

import org.apache.spark.ml.attribute.AttributeGroup
import org.apache.spark.sql.types.StructField
import org.junit.runner.RunWith
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner

object AttributeTestUtils extends Matchers{
Copy link
Collaborator

@tovbinm tovbinm Sep 7, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

  1. better use a trait - it will be cleaner and simpler to use.
  2. make return type the Assertion
    here it is:
trait AttributeAsserts {
   self: Matchers =>
   final def assertNominal(schema: StructField, expectedNominal: Array[Boolean]): Assertion = ???
}

then mixin it like this:

@RunWith(classOf[JUnitRunner])
class BinaryVectorizerTest extends OpTransformerSpec[OPVector, BinaryVectorizer] with AttributeAsserts { ... }


/**
 * Asserts that the vector attributes recorded in a schema field's ML attribute group
 * match the expected nominal/non-nominal flags, one flag per vector slot.
 *
 * @param schema          the output vector's StructField, whose metadata must contain a
 *                        resolved Spark ML [[AttributeGroup]] with per-slot attributes
 * @param expectedNominal expected flags, one per vector slot: true if the slot is nominal
 *                        (categorical), false if it is numeric/continuous
 */
final def assertNominal(schema: StructField, expectedNominal: Array[Boolean]): Unit = {
  // Fail with an explicit message instead of the opaque NoSuchElementException that
  // Option.get would throw when the field carries no per-slot attribute metadata.
  val attributes = AttributeGroup.fromStructField(schema).attributes.getOrElse(
    fail(s"Field '${schema.name}' does not contain per-slot ML attribute metadata")
  )
  attributes.map(_.isNominal) shouldBe expectedNominal
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,8 @@ class Base64VectorizerTest extends FlatSpec with TestSparkContext with Base64Tes
def assertVectorizer(vec: FeatureLike[OPVector], expected: Seq[Text]): Unit = {
val result = new OpWorkflow().setResultFeatures(vec).transform(realData)
val vectors = result.collect(vec)
val schema = result.schema(vec.name)
AttributeTestUtils.assertNominal(schema, Array.fill(vectors.head.value.size)(true))

vectors.length shouldBe expected.length
// TODO add a more robust check
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ class BinaryMapVectorizerTest

transformed.collect(vector) shouldBe expectedResult
val field = transformed.schema(estimator.getOutputFeatureName)
AttributeTestUtils.assertNominal(field, Array.fill(expectedResult.head.value.size)(true))
OpVectorMetadata(field) shouldEqual expectedMeta
val vectorMetadata = estimator.getMetadata()
OpVectorMetadata(field.copy(metadata = vectorMetadata)) shouldEqual expectedMeta
Expand Down Expand Up @@ -100,6 +101,7 @@ class BinaryMapVectorizerTest

transformed.collect(vector) shouldBe expected
val field = transformed.schema(estimator.getOutputFeatureName)
AttributeTestUtils.assertNominal(field, Array.fill(expected.head.value.size)(true))
OpVectorMetadata(field) shouldEqual expectedMeta
val vectorMetadata = estimator.getMetadata()
OpVectorMetadata(field.copy(metadata = vectorMetadata)) shouldEqual expectedMeta
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,8 @@ class BinaryVectorizerTest extends OpTransformerSpec[OPVector, BinaryVectorizer]
f1 -> List(RootCol, IndCol(Some(TransmogrifierDefaults.NullString))),
f2 -> List(RootCol, IndCol(Some(TransmogrifierDefaults.NullString)))
)
val field = transformed.schema(vector.name)
AttributeTestUtils.assertNominal(field, Array.fill(expected.head.value.size)(true))
}

it should "transform the data correctly [trackNulls=true,fillValue=true]" in {
Expand All @@ -117,6 +119,8 @@ class BinaryVectorizerTest extends OpTransformerSpec[OPVector, BinaryVectorizer]
f1 -> List(RootCol, IndCol(Some(TransmogrifierDefaults.NullString))),
f2 -> List(RootCol, IndCol(Some(TransmogrifierDefaults.NullString)))
)
val field = transformed.schema(vector.name)
AttributeTestUtils.assertNominal(field, Array.fill(expected.head.value.size)(true))
}

it should "transform the data correctly [trackNulls=false,fillValue=false]" in {
Expand All @@ -141,6 +145,8 @@ class BinaryVectorizerTest extends OpTransformerSpec[OPVector, BinaryVectorizer]
f1 -> List(RootCol),
f2 -> List(RootCol)
)
val field = transformed.schema(vector.name)
AttributeTestUtils.assertNominal(field, Array.fill(expected.head.value.size)(true))
}

it should "transform the data correctly [trackNulls=false,fillValue=true]" in {
Expand All @@ -165,5 +171,7 @@ class BinaryVectorizerTest extends OpTransformerSpec[OPVector, BinaryVectorizer]
f1 -> List(RootCol),
f2 -> List(RootCol)
)
val field = transformed.schema(vector.name)
AttributeTestUtils.assertNominal(field, Array.fill(expected.head.value.size)(true))
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ import com.salesforce.op.test.{OpTransformerSpec, TestFeatureBuilder, TestOpVect
import com.salesforce.op.utils.date.DateTimeUtils
import com.salesforce.op.utils.spark.OpVectorMetadata
import com.salesforce.op.utils.spark.RichDataset._
import org.apache.spark.ml.attribute.AttributeGroup
import org.apache.spark.ml.linalg.Vectors
import org.joda.time.{DateTime, DateTimeConstants}
import org.junit.runner.RunWith
Expand All @@ -51,7 +52,9 @@ class DateListVectorizerTest extends OpTransformerSpec[OPVector, DateListVectori
val now = TransmogrifierDefaults.ReferenceDate.minusMillis(1).getMillis // make date time be in the past

private def daysToMilliseconds(n: Int): Long = n * DateTimeConstants.MILLIS_PER_DAY

private def monthsToMilliseconds(n: Int): Long = n * 2628000000L

private def hoursToMilliseconds(n: Int): Long = n * DateTimeConstants.MILLIS_PER_HOUR

val (testData, clicks, opens, purchases) = TestFeatureBuilder("clicks", "opens", "purchases",
Expand Down Expand Up @@ -122,7 +125,9 @@ class DateListVectorizerTest extends OpTransformerSpec[OPVector, DateListVectori
Vectors.dense(2.0, 1.0, -1.0).toOPVector
)

val fieldMetadata = transformed.schema(output.name).metadata
val schema = transformed.schema(output.name)
val fieldMetadata = schema.metadata
AttributeTestUtils.assertNominal(schema, Array.fill(testModelTimeSinceFirst.getInputFeatures().size)(false))
testModelTimeSinceFirst.getMetadata() shouldEqual fieldMetadata
}

Expand All @@ -148,7 +153,10 @@ class DateListVectorizerTest extends OpTransformerSpec[OPVector, DateListVectori
Vectors.dense(2.0, 0.0, 1.0, 0.0, -1.0, 0.0).toOPVector
)

val fieldMetadata = transformed.schema(output.name).metadata
val schema = transformed.schema(output.name)
val fieldMetadata = schema.metadata
AttributeTestUtils.assertNominal(schema, Array.fill(testModelTimeSinceFirst.getInputFeatures().size)
(Seq(false, true)).flatten)
testModelTimeSinceFirst.getMetadata() shouldEqual fieldMetadata
}

Expand All @@ -174,7 +182,9 @@ class DateListVectorizerTest extends OpTransformerSpec[OPVector, DateListVectori
Vectors.dense(-28.0, -29.0, -31.0).toOPVector
)

val fieldMetadata = transformed.schema(output.name).metadata
val schema = transformed.schema(output.name)
val fieldMetadata = schema.metadata
AttributeTestUtils.assertNominal(schema, Array.fill(testModelTimeSinceFirst.getInputFeatures().size)(false))
testModelTimeSinceFirst.getMetadata() shouldEqual fieldMetadata
}

Expand All @@ -196,7 +206,9 @@ class DateListVectorizerTest extends OpTransformerSpec[OPVector, DateListVectori
Vectors.sparse(21, Array(), Array()).toOPVector
)

val fieldMetadata = transformed.schema(output.name).metadata
val schema = transformed.schema(output.name)
val fieldMetadata = schema.metadata
AttributeTestUtils.assertNominal(schema, Array.fill(testModelModeDay.getInputFeatures().size * 7)(true))
testModelModeDay.getMetadata() shouldEqual fieldMetadata

val daysOfWeek = List("Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday").map(s =>
Expand Down Expand Up @@ -225,7 +237,9 @@ class DateListVectorizerTest extends OpTransformerSpec[OPVector, DateListVectori
Vectors.sparse(24, Array(7, 15, 23), Array(1.0, 1.0, 1.0)).toOPVector
)

val fieldMetadata = transformed.schema(output.name).metadata
val schema = transformed.schema(output.name)
val fieldMetadata = schema.metadata
AttributeTestUtils.assertNominal(schema, Array.fill(testModelModeDay.getInputFeatures().size * 8)(true))
testModelModeDay.getMetadata() shouldEqual fieldMetadata

val daysOfWeek = List("Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday",
Expand Down Expand Up @@ -253,7 +267,9 @@ class DateListVectorizerTest extends OpTransformerSpec[OPVector, DateListVectori
Vectors.sparse(36, Array(), Array()).toOPVector
)

val fieldMetadata = transformed.schema(output.name).metadata
val schema = transformed.schema(output.name)
val fieldMetadata = schema.metadata
AttributeTestUtils.assertNominal(schema, Array.fill(testModelModeMonth.getInputFeatures().size * 12)(true))
testModelModeMonth.getMetadata() shouldEqual fieldMetadata

val months = List(
Expand Down Expand Up @@ -283,7 +299,9 @@ class DateListVectorizerTest extends OpTransformerSpec[OPVector, DateListVectori
Vectors.sparse(72, Array(), Array()).toOPVector
)

val fieldMetadata = transformed.schema(output.name).metadata
val schema = transformed.schema(output.name)
val fieldMetadata = schema.metadata
AttributeTestUtils.assertNominal(schema, Array.fill(testModelModeHour.getInputFeatures().size * 24)(true))
testModelModeHour.getMetadata() shouldEqual fieldMetadata

val hours = (0 until 24).map(i => IndCol(Some(s"$i:00"))).toList
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,9 @@ class DateMapToUnitCircleVectorizerTest extends OpEstimatorSpec[OPVector, Sequen
val output = f1.toUnitCircle(TimePeriod.HourOfDay)
val transformed = output.originStage.asInstanceOf[DateMapToUnitCircleVectorizer[DateMap]]
.fit(inputData).transform(inputData)
val field = transformed.schema(output.name)
val actual = transformed.collect(output)
AttributeTestUtils.assertNominal(field, Array.fill(actual.head.value.size)(false))
all (actual.zip(expectedResult).map(g => Vectors.sqdist(g._1.value, g._2.value))) should be < eps
}

Expand All @@ -88,7 +90,9 @@ class DateMapToUnitCircleVectorizerTest extends OpEstimatorSpec[OPVector, Sequen
val output = f1DT.toUnitCircle(TimePeriod.HourOfDay)
val transformed = output.originStage.asInstanceOf[DateMapToUnitCircleVectorizer[DateMap]]
.fit(inputData).transform(inputData)
val field = transformed.schema(output.name)
val actual = transformed.collect(output)
AttributeTestUtils.assertNominal(field, Array.fill(actual.head.value.size)(false))
all (actual.zip(expectedResult).map(g => Vectors.sqdist(g._1.value, g._2.value))) should be < eps
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,8 @@ class DateMapVectorizerTest extends FlatSpec with TestSparkContext {
val meta = OpVectorMetadata(vector.name, transformed.schema(vector.name).metadata)
meta.columns.length shouldBe 3
meta.columns.map(_.grouping) should contain theSameElementsAs Array(Option("a"), Option("b"), Option("c"))
val field = transformed.schema(vector.name)
AttributeTestUtils.assertNominal(field, Array.fill(expected(moment).head.value.size)(false))

val vector2 = f1.vectorize(defaultValue = 0, referenceDate = moment, trackNulls = true,
circularDateReps = Seq())
Expand All @@ -80,6 +82,8 @@ class DateMapVectorizerTest extends FlatSpec with TestSparkContext {
val meta2 = OpVectorMetadata(vector2.name, transformed2.schema(vector2.name).metadata)
meta2.columns.length shouldBe 6
meta2.history.keys.size shouldBe 1
val field2 = transformed2.schema(vector2.name)
AttributeTestUtils.assertNominal(field2, Array.fill(expected(moment).head.value.size)(Seq(false, true)).flatten)

val vector3 = f1.vectorize(defaultValue = 0)
val transformed3 = new OpWorkflow().setResultFeatures(vector3).transform(ds)
Expand All @@ -88,6 +92,9 @@ class DateMapVectorizerTest extends FlatSpec with TestSparkContext {
val meta3 = OpVectorMetadata(vector3.name, transformed3.schema(vector3.name).metadata)
meta3.columns.length shouldBe 30
meta2.history.keys.size shouldBe 1
val field3 = transformed3.schema(vector3.name)
val expectedNominal = Array.fill(24)(false) ++ Array.fill(3)(Seq(false, true)).flatten.asInstanceOf[Array[Boolean]]
AttributeTestUtils.assertNominal(field3, expectedNominal)
}

private def expected(moment: JDateTime) = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,8 @@ class DateTimeVectorizerTest extends FlatSpec with TestSparkContext {
val meta = OpVectorMetadata(vector.name, transformed.schema(vector.name).metadata)
meta.columns.length shouldBe 3
meta.history.keys.size shouldBe 3
val field = transformed.schema(vector.name)
AttributeTestUtils.assertNominal(field, Array.fill(expected(moment).head.value.size)(false))

val vector2 = f1.vectorize(
dateListPivot = TransmogrifierDefaults.DateListDefault,
Expand All @@ -105,6 +107,8 @@ class DateTimeVectorizerTest extends FlatSpec with TestSparkContext {
val meta2 = OpVectorMetadata(vector2.name, transformed2.schema(vector2.name).metadata)
meta2.columns.length shouldBe 6
meta2.history.keys.size shouldBe 3
val field2 = transformed2.schema(vector2.name)
AttributeTestUtils.assertNominal(field2, Array.fill(expected(moment).head.value.size)(Seq(false, true)).flatten)

val vector3 = f1.vectorize(
dateListPivot = TransmogrifierDefaults.DateListDefault,
Expand All @@ -117,6 +121,9 @@ class DateTimeVectorizerTest extends FlatSpec with TestSparkContext {
val meta3 = OpVectorMetadata(vector3.name, transformed3.schema(vector3.name).metadata)
meta3.columns.length shouldBe 30
meta3.history.keys.size shouldBe 6
val field3 = transformed3.schema(vector3.name)
val expectedNominal = Array.fill(24)(false) ++ Array.fill(3)(Seq(false, true)).flatten.asInstanceOf[Array[Boolean]]
AttributeTestUtils.assertNominal(field3, expectedNominal)
}

it should "vectorize dates correctly any time" in {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,10 @@ class DateToUnitCircleTransformerTest extends OpTransformerSpec[OPVector, DateTo
val vectorizer = new DateToUnitCircleTransformer().setTimePeriod(timePeriod).setInput(f)
val transformed = vectorizer.transform(ds)
val vector = vectorizer.getOutput()
transformed.collect(vector)
val actual = transformed.collect(vector)
val field = transformed.schema(vector.name)
AttributeTestUtils.assertNominal(field, Array.fill(actual.head.value.size)(false))
actual
}

def indexSeqToUnitCircle(indices: Seq[Int], numIndices: Int): Seq[OPVector] = {
Expand All @@ -81,6 +84,8 @@ class DateToUnitCircleTransformerTest extends OpTransformerSpec[OPVector, DateTo
val transformed = output.originStage.asInstanceOf[Transformer].transform(ds)
val actual = transformed.collect(output)
all (actual.zip(expectedResult).map(g => Vectors.sqdist(g._1.value, g._2.value))) should be < eps
val field = transformed.schema(output.name)
AttributeTestUtils.assertNominal(field, Array.fill(actual.head.value.size)(false))
}

it should "work with its DateTime shortcut" in {
Expand All @@ -90,6 +95,8 @@ class DateToUnitCircleTransformerTest extends OpTransformerSpec[OPVector, DateTo
val transformed = output.originStage.asInstanceOf[Transformer].transform(ds)
val actual = transformed.collect(output)
all (actual.zip(expectedResult).map(g => Vectors.sqdist(g._1.value, g._2.value))) should be < eps
val field = transformed.schema(output.name)
AttributeTestUtils.assertNominal(field, Array.fill(actual.head.value.size)(false))
}

it should "store the proper meta data" in {
Expand Down Expand Up @@ -118,6 +125,8 @@ class DateToUnitCircleTransformerTest extends OpTransformerSpec[OPVector, DateTo
Array(1.0, 0.0)
).map(Vectors.dense(_).toOPVector)
all (actual.zip(expected).map(g => Vectors.sqdist(g._1.value, g._2.value))) should be < eps
val field = transformed.schema(vector.name)
AttributeTestUtils.assertNominal(field, Array.fill(actual.head.value.size)(false))
}

it should "transform the data correctly when the timePeriod is HourOfDay" in {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,8 @@ class DateVectorizerTest extends FlatSpec with TestSparkContext {
val meta = OpVectorMetadata(vector.name, transformed.schema(vector.name).metadata)
meta.columns.length shouldBe 3
meta.history.keys.size shouldBe 3
val field = transformed.schema(vector.name)
AttributeTestUtils.assertNominal(field, Array.fill(expectedAt(moment).head.value.size)(false))

val vector2 = f1.vectorize(
dateListPivot = TransmogrifierDefaults.DateListDefault,
Expand All @@ -83,6 +85,8 @@ class DateVectorizerTest extends FlatSpec with TestSparkContext {
val meta2 = OpVectorMetadata(vector2.name, transformed2.schema(vector2.name).metadata)
meta2.columns.length shouldBe 6
meta2.history.keys.size shouldBe 3
val field2 = transformed2.schema(vector2.name)
AttributeTestUtils.assertNominal(field2, Array.fill(expectedAt(moment).head.value.size)(Seq(false, true)).flatten)

val vector3 = f1.vectorize(
dateListPivot = TransmogrifierDefaults.DateListDefault,
Expand All @@ -94,6 +98,9 @@ class DateVectorizerTest extends FlatSpec with TestSparkContext {
val meta3 = OpVectorMetadata(vector3.name, transformed3.schema(vector3.name).metadata)
meta3.columns.length shouldBe 30
meta3.history.keys.size shouldBe 6
val field3 = transformed3.schema(vector3.name)
val expectedNominal = Array.fill(24)(false) ++ Array.fill(3)(Seq(false, true)).flatten.asInstanceOf[Array[Boolean]]
AttributeTestUtils.assertNominal(field3, expectedNominal)
}

private def buildTestData(moment: DateTime) = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,8 +203,11 @@ class DecisionTreeNumericBucketizerTest extends OpEstimatorSpec[OPVector,
val splits = model.splits
assertSplits(splits = splits, expectedSplits = expectedSplits, expectedTolerance)

val res = model.transform(data).collect(out)
assertMetadata(
val transformed = model.transform(data)
val res = transformed.collect(out)
val field = transformed.schema(out.name)
AttributeTestUtils.assertNominal(field, Array.fill(res.head.value.size)(true))
assertMetadata(
shouldSplit = Array(shouldSplit),
splits = Array(splits),
trackNulls = trackNulls, trackInvalid = trackInvalid,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,8 @@ class DecisionTreeNumericMapBucketizerTest extends OpEstimatorSpec[OPVector,
)
val scored = model.setInputDataset(data).score(keepIntermediateFeatures = true)
val res = scored.collect(out)
val field = scored.schema(out.name)
AttributeTestUtils.assertNominal(field, Array.fill(res.head.value.size)(true))
assertMetadata(
shouldSplit = stage.shouldSplitByKey.toArray.sortBy(_._1).map(_._2),
splits = stage.splitsByKey.toArray.sortBy(_._1).map(_._2),
Expand Down
Loading