Skip to content

Commit

Permalink
Skip SparkTableMetricsRepositoryTest iceberg test when SupportsRowLevelOperations is not available (#536)
Browse files Browse the repository at this point in the history
  • Loading branch information
eycho-am authored and rdsharma26 committed Apr 16, 2024
1 parent ced6bd6 commit 1aa1d6e
Showing 1 changed file with 19 additions and 16 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -101,21 +101,24 @@ class SparkTableMetricsRepositoryTest extends AnyWordSpec
}

"save and load to iceberg a single metric" in withSparkSessionIcebergCatalog { spark => {
val resultKey = ResultKey(System.currentTimeMillis(), Map("tag" -> "value"))
val metric = DoubleMetric(Entity.Column, "m1", "", Try(100))
val context = AnalyzerContext(Map(analyzer -> metric))

val repository = new SparkTableMetricsRepository(spark, "local.metrics_table")
// Save the metric
repository.save(resultKey, context)

// Load the metric
val loadedContext = repository.loadByKey(resultKey)

assert(loadedContext.isDefined)
assert(loadedContext.get.metric(analyzer).contains(metric))
}

}
// The SupportsRowLevelOperations class is available from spark 3.3
// We should skip this test for lower spark versions
val className = "org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations"
if (Try(Class.forName(className)).isSuccess) {
val resultKey = ResultKey(System.currentTimeMillis(), Map("tag" -> "value"))
val metric = DoubleMetric(Entity.Column, "m1", "", Try(100))
val context = AnalyzerContext(Map(analyzer -> metric))

val repository = new SparkTableMetricsRepository(spark, "local.metrics_table")
// Save the metric
repository.save(resultKey, context)

// Load the metric
val loadedContext = repository.loadByKey(resultKey)

assert(loadedContext.isDefined)
assert(loadedContext.get.metric(analyzer).contains(metric))
}
} }
}
}

0 comments on commit 1aa1d6e

Please sign in to comment.