diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index c649d7582c955..fe84cc09e109a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -1914,7 +1914,6 @@ class Dataset[T] private[sql](
    * {{{
    *   // Monitor the metrics using a listener.
    *   spark.streams.addListener(new StreamingQueryListener() {
-   *     override def onQueryStarted(event: QueryStartedEvent): Unit = {}
    *     override def onQueryProgress(event: QueryProgressEvent): Unit = {
    *       event.progress.observedMetrics.asScala.get("my_event").foreach { row =>
    *         // Trigger if the number of errors exceeds 5 percent
@@ -1926,7 +1925,8 @@ class Dataset[T] private[sql](
    *         }
    *       }
    *     }
-   *     override def onQueryTerminated(event: QueryTerminatedEvent): Unit = {}
+   *     def onQueryStarted(event: QueryStartedEvent): Unit = {}
+   *     def onQueryTerminated(event: QueryTerminatedEvent): Unit = {}
    *   })
    *   // Observe row count (rc) and error row count (erc) in the streaming Dataset
    *   val observed_ds = ds.observe("my_event", count(lit(1)).as("rc"), count($"error").as("erc"))
@@ -1936,7 +1936,6 @@ class Dataset[T] private[sql](
    * @group typedrel
    * @since 3.0.0
    */
-  @varargs
   def observe(name: String, expr: Column, exprs: Column*): Dataset[T] = withTypedPlan {
     CollectMetrics(name, (expr +: exprs).map(_.named), logicalPlan)
   }