diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkCopyOnWriteScan.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkCopyOnWriteScan.java
index e32b68fb054f..16eb9c51df5c 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkCopyOnWriteScan.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkCopyOnWriteScan.java
@@ -73,7 +73,6 @@ class SparkCopyOnWriteScan extends SparkPartitioningAwareScan<FileScanTask>
       Schema expectedSchema,
       List<Expression> filters,
       Supplier<ScanReport> scanReportSupplier) {
-
     super(spark, table, scan, readConf, expectedSchema, filters, scanReportSupplier);
     this.snapshot = snapshot;
 
diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkPartitioningAwareScan.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkPartitioningAwareScan.java
index 0bf8c9016738..6538268697e2 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkPartitioningAwareScan.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkPartitioningAwareScan.java
@@ -78,7 +78,6 @@ abstract class SparkPartitioningAwareScan<T extends PartitionScanTask> extends SparkScan
       Schema expectedSchema,
       List<Expression> filters,
       Supplier<ScanReport> scanReportSupplier) {
-
     super(spark, table, readConf, expectedSchema, filters, scanReportSupplier);
     this.scan = scan;
 
diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkScan.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkScan.java
index f648f7e6e7ae..65c5e04f31a1 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkScan.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkScan.java
@@ -36,6 +36,7 @@
 import org.apache.iceberg.spark.SparkSchemaUtil;
 import org.apache.iceberg.spark.source.metrics.NumDeletes;
 import org.apache.iceberg.spark.source.metrics.NumSplits;
+import org.apache.iceberg.spark.source.metrics.ScannedDataFiles;
 import org.apache.iceberg.spark.source.metrics.ScannedDataManifests;
 import org.apache.iceberg.spark.source.metrics.SkippedDataFiles;
 import org.apache.iceberg.spark.source.metrics.SkippedDataManifests;
@@ -47,7 +48,6 @@
 import org.apache.iceberg.spark.source.metrics.TaskTotalPlanningDuration;
 import org.apache.iceberg.spark.source.metrics.TotalFileSize;
 import org.apache.iceberg.spark.source.metrics.TotalPlanningDuration;
-import org.apache.iceberg.spark.source.metrics.scannedDataFiles;
 import org.apache.iceberg.types.Types;
 import org.apache.iceberg.util.PropertyUtil;
 import org.apache.iceberg.util.SnapshotUtil;
@@ -192,6 +192,7 @@ public String description() {
   @Override
   public CustomTaskMetric[] reportDriverMetrics() {
     ScanReport scanReport = scanReportSupplier != null ? scanReportSupplier.get() : null;
+
     if (scanReport == null) {
       return new CustomTaskMetric[0];
     }
@@ -216,7 +217,7 @@ public CustomMetric[] supportedCustomMetrics() {
       new TotalPlanningDuration(),
       new ScannedDataManifests(),
       new SkippedDataManifests(),
-      new scannedDataFiles(),
+      new ScannedDataFiles(),
       new SkippedDataFiles()
     };
   }
diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/scannedDataFiles.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/ScannedDataFiles.java
similarity index 95%
rename from spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/scannedDataFiles.java
rename to spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/ScannedDataFiles.java
index 296ec52f86f8..f453872fdc29 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/scannedDataFiles.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/ScannedDataFiles.java
@@ -20,7 +20,7 @@
 
 import org.apache.spark.sql.connector.metric.CustomSumMetric;
 
-public class scannedDataFiles extends CustomSumMetric {
+public class ScannedDataFiles extends CustomSumMetric {
 
   static final String NAME = "scannedDataFiles";
 
diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/ScannedDataManifests.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/ScannedDataManifests.java
index 1e0061297864..a167904280e6 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/ScannedDataManifests.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/ScannedDataManifests.java
@@ -33,13 +33,4 @@ public String name() {
   public String description() {
     return "number of scanned data manifests";
   }
-
-  @Override
-  public String aggregateTaskMetrics(long[] taskMetrics) {
-    long sum = 0L;
-    for (long taskMetric : taskMetrics) {
-      sum += taskMetric;
-    }
-    return String.valueOf(sum);
-  }
 }
diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TaskScannedDataFiles.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TaskScannedDataFiles.java
index d18cc47afc6c..d9a527da08f6 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TaskScannedDataFiles.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TaskScannedDataFiles.java
@@ -31,7 +31,7 @@ private TaskScannedDataFiles(long value) {
 
   @Override
   public String name() {
-    return scannedDataFiles.NAME;
+    return ScannedDataFiles.NAME;
   }
 
   @Override
diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TaskTotalPlanningDuration.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TaskTotalPlanningDuration.java
index 17b76bc7aaf2..32ac6fde8bf3 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TaskTotalPlanningDuration.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TaskTotalPlanningDuration.java
@@ -42,7 +42,7 @@ public long value() {
 
   public static TaskTotalPlanningDuration from(ScanReport scanReport) {
     TimerResult timerResult = scanReport.scanMetrics().totalPlanningDuration();
-    long value = timerResult != null ? timerResult.count() : -1;
+    long value = timerResult != null ? timerResult.totalDuration().toMillis() : -1;
     return new TaskTotalPlanningDuration(value);
   }
 }
diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TotalFileSize.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TotalFileSize.java
index 1b82325605df..994626e54f10 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TotalFileSize.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TotalFileSize.java
@@ -31,6 +31,6 @@ public String name() {
 
   @Override
   public String description() {
-    return "total file size";
+    return "total file size (bytes)";
   }
 }
diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TotalPlanningDuration.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TotalPlanningDuration.java
index 976bea350364..8b66eeac4046 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TotalPlanningDuration.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/metrics/TotalPlanningDuration.java
@@ -31,6 +31,6 @@ public String name() {
 
   @Override
   public String description() {
-    return "total planning duration";
+    return "total planning duration (ms)";
   }
 }
diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadMetrics.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadMetrics.java
index 24f648fbad88..367e4da043fb 100644
--- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadMetrics.java
+++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadMetrics.java
@@ -58,7 +58,7 @@ public void testReadMetricsForV1Table() throws NoSuchTableException {
         JavaConverters.mapAsJavaMapConverter(sparkPlans.get(0).metrics()).asJava();
     Assertions.assertThat(metricsMap.get("skippedDataFiles").value()).isEqualTo(1);
     Assertions.assertThat(metricsMap.get("scannedDataManifests").value()).isEqualTo(2);
-    Assertions.assertThat(metricsMap.get("resultDataFiles").value()).isEqualTo(1);
+    Assertions.assertThat(metricsMap.get("scannedDataFiles").value()).isEqualTo(1);
   }
 
   @Test
@@ -79,6 +79,6 @@ public void testReadMetricsForV2Table() throws NoSuchTableException {
         JavaConverters.mapAsJavaMapConverter(sparkPlans.get(0).metrics()).asJava();
     Assertions.assertThat(metricsMap.get("skippedDataFiles").value()).isEqualTo(1);
     Assertions.assertThat(metricsMap.get("scannedDataManifests").value()).isEqualTo(2);
-    Assertions.assertThat(metricsMap.get("resultDataFiles").value()).isEqualTo(1);
+    Assertions.assertThat(metricsMap.get("scannedDataFiles").value()).isEqualTo(1);
   }
 }
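
For context, the renamed metric class only needs to declare its name and description; the per-task summation comes from Spark's CustomSumMetric base class, which is also why the hand-rolled aggregateTaskMetrics override in ScannedDataManifests could be removed. Below is a minimal sketch of the pattern the renamed ScannedDataFiles class follows, reconstructed from the hunks above; the description() wording is an assumption, since that line is not shown in the diff.

// Sketch of the metric-class pattern used in org.apache.iceberg.spark.source.metrics.
// The description string is illustrative only; it does not appear in this diff.
package org.apache.iceberg.spark.source.metrics;

import org.apache.spark.sql.connector.metric.CustomSumMetric;

public class ScannedDataFiles extends CustomSumMetric {

  // Must match the key asserted in TestSparkReadMetrics and the name reported
  // by TaskScannedDataFiles, so driver-side values roll up under one metric.
  static final String NAME = "scannedDataFiles";

  @Override
  public String name() {
    return NAME;
  }

  @Override
  public String description() {
    return "number of scanned data files"; // assumed wording
  }
}

CustomSumMetric already implements aggregateTaskMetrics(long[]) by summing the task values, so subclasses that only need a sum, like the metrics touched here, should not override it.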