Re-word comment to fix line-length issue. #50

GitHub Actions / Report test results failed Oct 13, 2023 in 0s

40792 tests run, 910 skipped, 1 failed.

Annotations

Check failure on line 2752 in KafkaSourceStressSuite

KafkaSourceStressSuite.stress test with multiple topics and partitions

org.scalatest.exceptions.TestFailedException: 
Timed out waiting for stream: The code passed to failAfter did not complete within 30 seconds.
java.base/java.lang.Thread.getStackTrace(Thread.java:1610)
	org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:277)
	org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
	org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
	org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
	org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7(StreamTest.scala:481)
	org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7$adapted(StreamTest.scala:480)
	scala.collection.mutable.HashMap$Node.foreach(HashMap.scala:642)
	scala.collection.mutable.HashMap.foreach(HashMap.scala:504)
	org.apache.spark.sql.streaming.StreamTest.fetchStreamAnswer$1(StreamTest.scala:480)

	Caused by: 	null
	java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1764)
		org.apache.spark.sql.execution.streaming.StreamExecution.awaitOffset(StreamExecution.scala:481)
		org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$8(StreamTest.scala:482)
		scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
		org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
		org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
		org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
		org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
		org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
		org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7(StreamTest.scala:481)
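
Note: the timeout above comes from ScalaTest's TimeLimits.failAfter, which StreamTest wraps around its wait for the stream to reach the expected offset (the StreamExecution.awaitOffset call in the caused-by trace). A minimal, self-contained sketch of that pattern follows; the 30-second limit and the sleeping block are stand-ins, not the suite's actual code:

import org.scalatest.concurrent.TimeLimits._
import org.scalatest.concurrent.{Signaler, ThreadSignaler}
import org.scalatest.time.SpanSugar._

object FailAfterSketch extends App {
  // failAfter needs a Signaler telling it how to interrupt the stuck thread.
  implicit val signaler: Signaler = ThreadSignaler

  // Throws a TestFailedDueToTimeoutException ("The code passed to failAfter
  // did not complete within 30 seconds") if the block overruns the limit.
  failAfter(30.seconds) {
    Thread.sleep(1000) // stand-in for the blocking wait on awaitOffset
  }
}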


== Progress ==
   AssertOnQuery(<condition>, )
   AddKafkaData(topics = HashSet(stress4, stress2, stress1, stress5, stress3), data = Range 0 until 4, message = Add partition)
   CheckAnswer: [1],[2],[3],[4]
   AddKafkaData(topics = HashSet(stress4, stress2, stress1, stress5, stress3), data = Range 4 until 10, message = Add partition)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10]
   AddKafkaData(topics = HashSet(stress4, stress2, stress1, stress5, stress3), data = Range 10 until 11, message = )
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11]
   StopStream
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@4e60ce87,Map(),null)
   AddKafkaData(topics = HashSet(stress4, stress2, stress1, stress5, stress3), data = Range 11 until 20, message = Add partition)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20]
   AddKafkaData(topics = HashSet(stress4, stress2, stress1, stress5, stress3), data = Range 20 until 23, message = Add partition)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23]
   StopStream
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@27c8397f,Map(),null)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23]
   StopStream
   AddKafkaData(topics = HashSet(stress4, stress2, stress1, stress5, stress3), data = Range 23 until 27, message = Add partition)
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@7312e5ae,Map(),null)
   AddKafkaData(topics = HashSet(stress4, stress2, stress1, stress5, stress3), data = Range 27 until 32, message = Delete topic stress1)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32]
   AddKafkaData(topics = HashSet(stress4, stress2, stress1, stress5, stress3), data = Range 32 until 36, message = Add partition)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36]
   StopStream
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@5e6331c,Map(),null)
   AddKafkaData(topics = HashSet(stress4, stress6, stress2, stress1, stress5, stress3), data = Range 36 until 43, message = Add topic stress6)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43]
   AddKafkaData(topics = HashSet(stress4, stress6, stress2, stress1, stress5, stress7, stress3), data = Range 43 until 45, message = Add topic stress7)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45]
   AddKafkaData(topics = HashSet(stress4, stress6, stress2, stress1, stress5, stress7), data = Range 45 until 51, message = Delete topic stress3)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51]
   StopStream
   AddKafkaData(topics = HashSet(stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 51 until 57, message = Add topic stress8)
   AddKafkaData(topics = HashSet(stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 57 until 66, message = )
   AddKafkaData(topics = HashSet(stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 66 until 74, message = )
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@5256229b,Map(),null)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74]
   StopStream
   AddKafkaData(topics = HashSet(stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 74 until 82, message = )
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 82 until 89, message = Add topic stress9)
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@190bc547,Map(),null)
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 89 until 97, message = )
=> CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97]
   StopStream
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 97 until 102, message = )
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 102 until 105, message = )
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@4b917fe,Map(),null)
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 105 until 109, message = )
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109]
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 109 until 110, message = )
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110]
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress7), data = Range 110 until 112, message = )
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112]
   StopStream
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@23d99994,Map(),null)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112]
   StopStream
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress7, stress10), data = Range 112 until 113, message = Add topic stress10)
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@7158b220,Map(),null)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113]
   StopStream
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress11, stress7, stress10), data = Range 113 until 114, message = Add topic stress11)
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress2, stress8, stress1, stress5, stress11, stress7, stress10), data = Range 114 until 121, message = )
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress8, stress1, stress5, stress11, stress7, stress10, stress12, stress2), data = Range 121 until 129, message = Add topic stress12)
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress8, stress1, stress5, stress11, stress7, stress10, stress12, stress2), data = empty Range 129 until 129, message = )
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@465a1e81,Map(),null)
   AddKafkaData(topics = HashSet(stress9, stress4, stress6, stress8, stress1, stress5, stress11, stress7, stress10, stress12, stress2), data = Range 129 until 131, message = Add partition)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131]
   StopStream
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@a10f753,Map(),null)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131]
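
Note: the actions in the Progress log above are StreamTest's testStream DSL; "=>" marks the action that timed out. A hedged sketch of the DSL's shape (AddKafkaData is a helper defined in the Kafka connector test suites, so it is only indicated in a comment; the data and answers here are illustrative):

import org.apache.spark.sql.{Dataset, Encoder, Encoders}
import org.apache.spark.sql.streaming.StreamTest

abstract class ProgressSketch extends StreamTest {
  // `mapped` stands for the Kafka-backed Dataset[Int] under test.
  def sketch(mapped: Dataset[Int]): Unit = {
    implicit val intEnc: Encoder[Int] = Encoders.scalaInt
    testStream(mapped)(
      StartStream(),        // logged as StartStream(ProcessingTimeTrigger(0), ...)
      // AddKafkaData(Set("stress1"), 1, 2, 3)  -- suite helper, produces records
      CheckAnswer(1, 2, 3), // blocks (under failAfter) until the sink matches
      StopStream            // a later StartStream resumes from checkpointed offsets
    )
  }
}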

== Stream ==
Output Mode: Append
Stream state: {KafkaV2[SubscribePattern[stress.*]]: {"stress8":{"0":0,"1":0},"stress9":{"0":0,"1":0,"2":0,"3":0,"4":0},"stress4":{"0":3,"1":2,"2":0,"3":0,"4":1,"5":1,"6":0,"7":1,"8":0,"9":2,"10":2,"11":0,"12":1,"13":1,"14":0,"15":1,"16":0,"17":1,"18":0,"19":0,"20":1,"21":0,"22":0,"23":0},"stress5":{"0":8,"1":3,"2":1,"3":2,"4":1,"5":1,"6":2,"7":1,"8":1,"9":2,"10":4,"11":2,"12":2,"13":0,"14":2,"15":1,"16":2,"17":1,"18":0,"19":2,"20":1,"21":2,"22":1,"23":1,"24":1,"25":1,"26":4,"27":2},"stress6":{"0":15},"stress7":{"0":0,"1":0,"2":0},"stress1":{"0":0,"1":0,"2":1,"3":2,"4":4,"5":1,"6":1,"7":0,"8":1,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":1,"17":1,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":1,"28":1,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0},"stress2":{"0":2,"1":4,"2":1,"3":0,"4":0,"5":2,"6":1,"7":0,"8":0,"9":0,"10":1,"11":0,"12":1,"13":1,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":1},"stress3":{"0":0}}}
Thread state: alive
Thread stack trace: app//org.apache.kafka.clients.admin.KafkaAdminClient.listTopics(KafkaAdminClient.java:1867)
app//org.apache.kafka.clients.admin.Admin.listTopics(Admin.java:276)
app//org.apache.spark.sql.kafka010.SubscribePatternStrategy.assignedTopicPartitions(ConsumerStrategy.scala:138)
app//org.apache.spark.sql.kafka010.KafkaOffsetReaderAdmin.$anonfun$partitionsAssignedToAdmin$1(KafkaOffsetReaderAdmin.scala:499)
app//org.apache.spark.sql.kafka010.KafkaOffsetReaderAdmin$$Lambda$4960/0x00007f49113811f8.apply(Unknown Source)
app//org.apache.spark.sql.kafka010.KafkaOffsetReaderAdmin.withRetries(KafkaOffsetReaderAdmin.scala:518)
app//org.apache.spark.sql.kafka010.KafkaOffsetReaderAdmin.partitionsAssignedToAdmin(KafkaOffsetReaderAdmin.scala:498)
app//org.apache.spark.sql.kafka010.KafkaOffsetReaderAdmin.fetchLatestOffsets(KafkaOffsetReaderAdmin.scala:297)
app//org.apache.spark.sql.kafka010.KafkaMicroBatchStream.latestOffset(KafkaMicroBatchStream.scala:130)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$constructNextBatch$4(MicroBatchExecution.scala:531)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$5629/0x00007f491148cbb8.apply(Unknown Source)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:427)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:425)
app//org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$constructNextBatch$2(MicroBatchExecution.scala:530)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$5628/0x00007f491148c398.apply(Unknown Source)
app//scala.collection.immutable.List.map(List.scala:246)
app//scala.collection.immutable.List.map(List.scala:79)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$constructNextBatch$1(MicroBatchExecution.scala:519)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$5627/0x00007f491148c0b8.apply$mcZ$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.scala:17)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.withProgressLocked(MicroBatchExecution.scala:850)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.constructNextBatch(MicroBatchExecution.scala:515)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:308)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$5614/0x00007f49114882b8.apply$mcV$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:427)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:425)
app//org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:289)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$5611/0x00007f4911487930.apply$mcZ$sp(Unknown Source)
app//org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:67)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:279)
app//org.apache.spark.sql.execution.streaming.StreamExecution.$anonfun$runStream$1(StreamExecution.scala:311)
app//org.apache.spark.sql.execution.streaming.StreamExecution$$Lambda$5598/0x00007f4911482030.apply$mcV$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
app//org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:901)
app//org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:289)
app//org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.$anonfun$run$1(StreamExecution.scala:211)
app//org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1$$Lambda$5593/0x00007f4911480d18.apply$mcV$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
app//org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:94)
app//org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:211)
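
Note: the stream thread is alive but blocked in KafkaAdminClient.listTopics while SubscribePatternStrategy re-resolves the stress.* pattern against the broker. That pattern comes from the query's source options; a minimal reader with the same shape (the broker address is a placeholder):

import org.apache.spark.sql.SparkSession

object SubscribePatternSketch extends App {
  val spark = SparkSession.builder().appName("sketch").master("local[2]").getOrCreate()

  // subscribePattern makes the source list topics when fetching the latest
  // offsets, so newly created "stressN" topics are picked up -- the
  // listTopics call at the top of the stack above.
  val kafkaDf = spark.readStream
    .format("kafka")
    .option("kafka.bootstrap.servers", "localhost:9092") // placeholder
    .option("subscribePattern", "stress.*")
    .load()
}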


== Sink ==
0: 
1: 
2: [1] [2] [3] [4]
3: 
4: [9] [7] [6] [10] [5] [8]
5: [11]
6: 
7: [16] [12] [13] [15] [20] [14] [17] [19] [18]
8: 
9: [21] [23] [22]
10: [27] [25] [24] [26]
11: [30] [28] [32] [31] [29]
12: 
13: [34] [33] [36] [35]
14: [37] [38] [39] [40] [41] [42] [43]
15: 
16: [45] [44]
17: 
18: [48] [47] [49] [50] [51] [46]
19: [57] [53] [61] [62] [64] [60] [58] [56] [63] [65] [67] [68] [69] [70] [71] [72] [73] [74] [55] [59] [54] [52] [66]
20: [88] [84] [89] [75] [78] [81] [79] [80] [76] [82] [83] [86] [87] [77] [85]
21: 
22: [92] [91] [90]
23: [94] [97] [95] [96] [93]


== Plan ==
== Parsed Logical Plan ==
WriteToMicroBatchDataSource MemorySink, 1095eaf2-97d6-4529-b8b4-0ec67bfd1b29, Append, 23
+- SerializeFromObject [input[0, int, false] AS value#37731]
   +- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$8244/0x00007f49119cf5e0@1ca8a66, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#37730: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#37729: scala.Tuple2
         +- Project [cast(key#37705 as string) AS key#37719, cast(value#37706 as string) AS value#37720]
            +- StreamingDataSourceV2Relation [key#37705, value#37706, topic#37707, partition#37708, offset#37709L, timestamp#37710, timestampType#37711], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@6b8d0179, KafkaV2[SubscribePattern[stress.*]], {"stress8":{"0":0,"1":0},"stress9":{"0":0,"1":0,"2":0,"3":0,"4":0},"stress4":{"0":3,"1":2,"2":0,"3":0,"4":1,"5":1,"6":0,"7":1,"8":0,"9":2,"10":2,"11":0,"12":1,"13":1,"14":0,"15":1,"16":0,"17":1,"18":0,"19":0,"20":1,"21":0,"22":0,"23":0},"stress5":{"0":8,"1":3,"2":1,"3":2,"4":1,"5":1,"6":2,"7":1,"8":1,"9":2,"10":4,"11":2,"12":2,"13":0,"14":2,"15":1,"16":2,"17":1,"18":0,"19":2,"20":1,"21":2,"22":1,"23":1,"24":1,"25":1,"26":4,"27":2},"stress6":{"0":15},"stress7":{"0":0,"1":0,"2":0},"stress1":{"0":0,"1":0,"2":1,"3":2,"4":4,"5":1,"6":1,"7":0,"8":1,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":1,"17":1,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":1,"28":1,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0},"stress2":{"0":2,"1":3,"2":1,"3":0,"4":0,"5":2,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":1,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0},"stress3":{"0":0}}, {"stress8":{"0":0,"1":0},"stress9":{"0":0,"1":0,"2":0,"3":0,"4":0},"stress4":{"0":3,"1":2,"2":0,"3":0,"4":1,"5":1,"6":0,"7":1,"8":0,"9":2,"10":2,"11":0,"12":1,"13":1,"14":0,"15":1,"16":0,"17":1,"18":0,"19":0,"20":1,"21":0,"22":0,"23":0},"stress5":{"0":8,"1":3,"2":1,"3":2,"4":1,"5":1,"6":2,"7":1,"8":1,"9":2,"10":4,"11":2,"12":2,"13":0,"14":2,"15":1,"16":2,"17":1,"18":0,"19":2,"20":1,"21":2,"22":1,"23":1,"24":1,"25":1,"26":4,"27":2},"stress6":{"0":15},"stress7":{"0":0,"1":0,"2":0},"stress1":{"0":0,"1":0,"2":1,"3":2,"4":4,"5":1,"6":1,"7":0,"8":1,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":1,"17":1,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":1,"28":1,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0},"stress2":{"0":2,"1":4,"2":1,"3":0,"4":0,"5":2,"6":1,"7":0,"8":0,"9":0,"10":1,"11":0,"12":1,"13":1,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":1},"stress3":{"0":0}}

== Analyzed Logical Plan ==
WriteToMicroBatchDataSource MemorySink, 1095eaf2-97d6-4529-b8b4-0ec67bfd1b29, Append, 23
+- SerializeFromObject [input[0, int, false] AS value#37731]
   +- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$8244/0x00007f49119cf5e0@1ca8a66, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#37730: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#37729: scala.Tuple2
         +- Project [cast(key#37705 as string) AS key#37719, cast(value#37706 as string) AS value#37720]
            +- StreamingDataSourceV2Relation [key#37705, value#37706, topic#37707, partition#37708, offset#37709L, timestamp#37710, timestampType#37711], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@6b8d0179, KafkaV2[SubscribePattern[stress.*]], {"stress8":{"0":0,"1":0},"stress9":{"0":0,"1":0,"2":0,"3":0,"4":0},"stress4":{"0":3,"1":2,"2":0,"3":0,"4":1,"5":1,"6":0,"7":1,"8":0,"9":2,"10":2,"11":0,"12":1,"13":1,"14":0,"15":1,"16":0,"17":1,"18":0,"19":0,"20":1,"21":0,"22":0,"23":0},"stress5":{"0":8,"1":3,"2":1,"3":2,"4":1,"5":1,"6":2,"7":1,"8":1,"9":2,"10":4,"11":2,"12":2,"13":0,"14":2,"15":1,"16":2,"17":1,"18":0,"19":2,"20":1,"21":2,"22":1,"23":1,"24":1,"25":1,"26":4,"27":2},"stress6":{"0":15},"stress7":{"0":0,"1":0,"2":0},"stress1":{"0":0,"1":0,"2":1,"3":2,"4":4,"5":1,"6":1,"7":0,"8":1,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":1,"17":1,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":1,"28":1,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0},"stress2":{"0":2,"1":3,"2":1,"3":0,"4":0,"5":2,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":1,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0},"stress3":{"0":0}}, {"stress8":{"0":0,"1":0},"stress9":{"0":0,"1":0,"2":0,"3":0,"4":0},"stress4":{"0":3,"1":2,"2":0,"3":0,"4":1,"5":1,"6":0,"7":1,"8":0,"9":2,"10":2,"11":0,"12":1,"13":1,"14":0,"15":1,"16":0,"17":1,"18":0,"19":0,"20":1,"21":0,"22":0,"23":0},"stress5":{"0":8,"1":3,"2":1,"3":2,"4":1,"5":1,"6":2,"7":1,"8":1,"9":2,"10":4,"11":2,"12":2,"13":0,"14":2,"15":1,"16":2,"17":1,"18":0,"19":2,"20":1,"21":2,"22":1,"23":1,"24":1,"25":1,"26":4,"27":2},"stress6":{"0":15},"stress7":{"0":0,"1":0,"2":0},"stress1":{"0":0,"1":0,"2":1,"3":2,"4":4,"5":1,"6":1,"7":0,"8":1,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":1,"17":1,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":1,"28":1,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0},"stress2":{"0":2,"1":4,"2":1,"3":0,"4":0,"5":2,"6":1,"7":0,"8":0,"9":0,"10":1,"11":0,"12":1,"13":1,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":1},"stress3":{"0":0}}

== Optimized Logical Plan ==
WriteToDataSourceV2 MicroBatchWrite[epoch: 23, writer: org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@6bf88127]
+- SerializeFromObject [input[0, int, false] AS value#37731]
   +- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$8244/0x00007f49119cf5e0@1ca8a66, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#37730: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#37729: scala.Tuple2
         +- Project [cast(key#37705 as string) AS key#37719, cast(value#37706 as string) AS value#37720]
            +- StreamingDataSourceV2Relation [key#37705, value#37706, topic#37707, partition#37708, offset#37709L, timestamp#37710, timestampType#37711], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@6b8d0179, KafkaV2[SubscribePattern[stress.*]], {"stress8":{"0":0,"1":0},"stress9":{"0":0,"1":0,"2":0,"3":0,"4":0},"stress4":{"0":3,"1":2,"2":0,"3":0,"4":1,"5":1,"6":0,"7":1,"8":0,"9":2,"10":2,"11":0,"12":1,"13":1,"14":0,"15":1,"16":0,"17":1,"18":0,"19":0,"20":1,"21":0,"22":0,"23":0},"stress5":{"0":8,"1":3,"2":1,"3":2,"4":1,"5":1,"6":2,"7":1,"8":1,"9":2,"10":4,"11":2,"12":2,"13":0,"14":2,"15":1,"16":2,"17":1,"18":0,"19":2,"20":1,"21":2,"22":1,"23":1,"24":1,"25":1,"26":4,"27":2},"stress6":{"0":15},"stress7":{"0":0,"1":0,"2":0},"stress1":{"0":0,"1":0,"2":1,"3":2,"4":4,"5":1,"6":1,"7":0,"8":1,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":1,"17":1,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":1,"28":1,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0},"stress2":{"0":2,"1":3,"2":1,"3":0,"4":0,"5":2,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":1,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0},"stress3":{"0":0}}, {"stress8":{"0":0,"1":0},"stress9":{"0":0,"1":0,"2":0,"3":0,"4":0},"stress4":{"0":3,"1":2,"2":0,"3":0,"4":1,"5":1,"6":0,"7":1,"8":0,"9":2,"10":2,"11":0,"12":1,"13":1,"14":0,"15":1,"16":0,"17":1,"18":0,"19":0,"20":1,"21":0,"22":0,"23":0},"stress5":{"0":8,"1":3,"2":1,"3":2,"4":1,"5":1,"6":2,"7":1,"8":1,"9":2,"10":4,"11":2,"12":2,"13":0,"14":2,"15":1,"16":2,"17":1,"18":0,"19":2,"20":1,"21":2,"22":1,"23":1,"24":1,"25":1,"26":4,"27":2},"stress6":{"0":15},"stress7":{"0":0,"1":0,"2":0},"stress1":{"0":0,"1":0,"2":1,"3":2,"4":4,"5":1,"6":1,"7":0,"8":1,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":1,"17":1,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":1,"28":1,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0},"stress2":{"0":2,"1":4,"2":1,"3":0,"4":0,"5":2,"6":1,"7":0,"8":0,"9":0,"10":1,"11":0,"12":1,"13":1,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":1},"stress3":{"0":0}}

== Physical Plan ==
WriteToDataSourceV2 MicroBatchWrite[epoch: 23, writer: org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@6bf88127], org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy$$Lambda$5715/0x00007f49114b66d8@7e0976b5
+- *(1) SerializeFromObject [input[0, int, false] AS value#37731]
   +- *(1) MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$8244/0x00007f49119cf5e0@1ca8a66, obj#37730: int
      +- *(1) DeserializeToObject newInstance(class scala.Tuple2), obj#37729: scala.Tuple2
         +- *(1) Project [cast(key#37705 as string) AS key#37719, cast(value#37706 as string) AS value#37720]
            +- MicroBatchScan[key#37705, value#37706, topic#37707, partition#37708, offset#37709L, timestamp#37710, timestampType#37711] class org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan
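
Note: for reference, a query with the same plan shape as above: Kafka source, Project with key/value casts to string, DeserializeToObject/MapElements down to int, memory sink in append mode. The map function and query name are illustrative, and kafkaDf is a streaming frame like the one sketched under the thread stack trace; this is not the suite's actual code:

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.streaming.StreamingQuery

object PlanSketch {
  def run(spark: SparkSession, kafkaDf: DataFrame): StreamingQuery = {
    import spark.implicits._
    val mapped = kafkaDf
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)") // Project with casts
      .as[(String, String)]                                       // DeserializeToObject
      .map(_._2.toInt)                                            // MapElements, obj: int
    mapped.writeStream
      .format("memory")     // MemorySink, as in WriteToMicroBatchDataSource
      .queryName("sketch")  // placeholder name
      .outputMode("append") // matches "Output Mode: Append" above
      .start()
  }
}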

== Physical Plan ==
WriteToDataSourceV2 MicroBatchWrite[epoch: 23, writer: org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@6bf88127], org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy$$Lambda$5715/0x00007f49114b66d8@7e0976b5
+- *(1) SerializeFromObject [input[0, int, false] AS value#37731]
   +- *(1) MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$8244/0x00007f49119cf5e0@1ca8a66, obj#37730: int
      +- *(1) DeserializeToObject newInstance(class scala.Tuple2), obj#37729: scala.Tuple2
         +- *(1) Project [cast(key#37705 as string) AS key#37719, cast(value#37706 as string) AS value#37720]
            +- MicroBatchScan[key#37705, value#37706, topic#37707, partition#37708, offset#37709L, timestamp#37710, timestampType#37711] class org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan

	at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
	at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
	at org.scalatest.funsuite.AnyFunSuite.newAssertionFailedException(AnyFunSuite.scala:1564)
	at org.scalatest.Assertions.fail(Assertions.scala:933)
	at org.scalatest.Assertions.fail$(Assertions.scala:929)
	at org.scalatest.funsuite.AnyFunSuite.fail(AnyFunSuite.scala:1564)
	at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:462)
	at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:800)
	at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:776)
	at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:342)
	at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaMicroBatchSourceSuite.scala:57)
	at org.apache.spark.sql.streaming.StreamTest.runStressTest(StreamTest.scala:892)
	at org.apache.spark.sql.streaming.StreamTest.runStressTest$(StreamTest.scala:840)
	at org.apache.spark.sql.kafka010.KafkaSourceTest.runStressTest(KafkaMicroBatchSourceSuite.scala:57)
	at org.apache.spark.sql.kafka010.KafkaSourceStressSuite.$anonfun$new$225(KafkaMicroBatchSourceSuite.scala:2752)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
	at org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
	at org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
	at org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
	at org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
	at org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
	at org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
	at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
	at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
	at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:69)
	at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
	at scala.collection.immutable.List.foreach(List.scala:333)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
	at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
	at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
	at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
	at org.scalatest.Suite.run(Suite.scala:1114)
	at org.scalatest.Suite.run$(Suite.scala:1096)
	at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
	at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
	at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
	at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:69)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
	at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
	at java.base/java.lang.Thread.run(Thread.java:833)
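
For anyone trying to reproduce the failing query outside the suite: the shape of the streaming job can be read directly off the physical plan above (KafkaV2[SubscribePattern[stress.*]] → cast key/value to string → map the value to an Int → memory sink, Append mode). The following is a minimal, hypothetical Scala sketch of that shape, not the suite's actual code: the broker address, app name, and query name are placeholders, and KafkaSourceStressSuite itself drives the sink through the StreamTest harness rather than a named in-memory query.

import org.apache.spark.sql.SparkSession

object KafkaStressQuerySketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("kafka-stress-sketch") // hypothetical app name
      .master("local[2]")
      .getOrCreate()
    import spark.implicits._

    // StreamingDataSourceV2Relation with KafkaV2[SubscribePattern[stress.*]] in the plan:
    val values = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092") // hypothetical broker
      .option("subscribePattern", "stress.*")
      .load()
      // Project [cast(key ... as string), cast(value ... as string)] in the plan:
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
      .as[(String, String)]
      // MapElements ... obj: int -- the record value is parsed to an Int:
      .map(_._2.toInt)

    // WriteToMicroBatchDataSource MemorySink, Append in the plan; a named
    // in-memory query is the closest standalone equivalent to the suite's sink.
    val query = values.writeStream
      .format("memory")
      .queryName("stress_sketch") // hypothetical query name
      .outputMode("append")
      .start()

    query.awaitTermination()
  }
}

Under those assumptions, each micro-batch appended to the in-memory table corresponds to one numbered batch in the == Sink == section above.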