Skip to content

Commit

Permalink
Revert "[MINOR][SQL][SS][DOCS] Add varargs to Dataset.observe(String, ..) with a documentation fix"
Browse files Browse the repository at this point in the history

This reverts commit 72a0562.
  • Loading branch information
HyukjinKwon committed Apr 7, 2022
1 parent 6f6eb3f commit d2afd98
Showing 1 changed file with 2 additions and 3 deletions.
5 changes: 2 additions & 3 deletions sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1914,7 +1914,6 @@ class Dataset[T] private[sql](
* {{{
* // Monitor the metrics using a listener.
* spark.streams.addListener(new StreamingQueryListener() {
* override def onQueryStarted(event: QueryStartedEvent): Unit = {}
* override def onQueryProgress(event: QueryProgressEvent): Unit = {
* event.progress.observedMetrics.asScala.get("my_event").foreach { row =>
* // Trigger if the number of errors exceeds 5 percent
Expand All @@ -1926,7 +1925,8 @@ class Dataset[T] private[sql](
* }
* }
* }
* override def onQueryTerminated(event: QueryTerminatedEvent): Unit = {}
* def onQueryStarted(event: QueryStartedEvent): Unit = {}
* def onQueryTerminated(event: QueryTerminatedEvent): Unit = {}
* })
* // Observe row count (rc) and error row count (erc) in the streaming Dataset
* val observed_ds = ds.observe("my_event", count(lit(1)).as("rc"), count($"error").as("erc"))
Expand All @@ -1936,7 +1936,6 @@ class Dataset[T] private[sql](
* @group typedrel
* @since 3.0.0
*/
@varargs
def observe(name: String, expr: Column, exprs: Column*): Dataset[T] = withTypedPlan {
  // Fold the mandatory first expression into the varargs tail, name every
  // metric column, and attach a CollectMetrics node over the current plan.
  val metrics = (expr +: exprs).map(_.named)
  CollectMetrics(name, metrics, logicalPlan)
}
Expand Down

0 comments on commit d2afd98

Please sign in to comment.