      org.scalatest.exceptions.TestFailedException: 
Error adding data: Timeout after waiting for 10000 ms.
org.apache.kafka.clients.producer.internals.FutureRecordMetadata.get(FutureRecordMetadata.java:78)
	org.apache.kafka.clients.producer.internals.FutureRecordMetadata.get(FutureRecordMetadata.java:30)
	org.apache.spark.sql.kafka010.KafkaTestUtils.$anonfun$sendMessages$3(KafkaTestUtils.scala:380)
	scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
	scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
	scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38)
	scala.collection.TraversableLike.map(TraversableLike.scala:238)
	scala.collection.TraversableLike.map$(TraversableLike.scala:231)
	scala.collection.AbstractTraversable.map(Traversable.scala:108)


== Progress ==
   AssertOnQuery(<condition>, )
   AssertOnQuery(<condition>, Execute)
   CheckAnswer: [-20],[-21],[-22],[0],[1],[2],[11],[12],[22]
   StopStream
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@752699f7,Map(),null)
   CheckAnswer: [-20],[-21],[-22],[0],[1],[2],[11],[12],[22]
=> AddKafkaData(topics = Set(topic-3), data = WrappedArray(30, 31, 32, 33, 34), message = )
   CheckAnswer: [-20],[-21],[-22],[0],[1],[2],[11],[12],[22],[30],[31],[32],[33],[34]
   StopStream

== Stream ==
Output Mode: Append
Stream state: {KafkaSourceV1[Assign[topic-3-4, topic-3-3, topic-3-2, topic-3-1, topic-3-0]]: {"topic-3":{"2":3,"4":3,"1":3,"3":3,"0":3}}}
Thread state: alive
Thread stack trace: java.lang.Thread.sleep(Native Method)
org.apache.spark.sql.kafka010.KafkaOffsetReader.$anonfun$withRetriesWithoutInterrupt$1(KafkaOffsetReader.scala:450)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$Lambda$3667/1298461462.apply$mcV$sp(Unknown Source)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.util.UninterruptibleThread.runUninterruptibly(UninterruptibleThread.scala:77)
org.apache.spark.sql.kafka010.KafkaOffsetReader.withRetriesWithoutInterrupt(KafkaOffsetReader.scala:443)
org.apache.spark.sql.kafka010.KafkaOffsetReader.$anonfun$partitionsAssignedToConsumer$1(KafkaOffsetReader.scala:380)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$Lambda$3665/241016743.apply(Unknown Source)
org.apache.spark.sql.kafka010.KafkaOffsetReader.runUninterruptibly(KafkaOffsetReader.scala:411)
org.apache.spark.sql.kafka010.KafkaOffsetReader.partitionsAssignedToConsumer(KafkaOffsetReader.scala:380)
org.apache.spark.sql.kafka010.KafkaOffsetReader.fetchLatestOffsets(KafkaOffsetReader.scala:296)
org.apache.spark.sql.kafka010.KafkaSource.getOffset(KafkaSource.scala:129)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$constructNextBatch$3(MicroBatchExecution.scala:361)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$6079/1769997414.apply(Unknown Source)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:328)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:326)
org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$constructNextBatch$2(MicroBatchExecution.scala:361)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$4478/111548821.apply(Unknown Source)
scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
scala.collection.TraversableLike$$Lambda$67/1843289228.apply(Unknown Source)
scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
scala.collection.TraversableLike.map(TraversableLike.scala:238)
scala.collection.TraversableLike.map$(TraversableLike.scala:231)
scala.collection.AbstractTraversable.map(Traversable.scala:108)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$constructNextBatch$1(MicroBatchExecution.scala:357)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$4477/885090544.apply$mcZ$sp(Unknown Source)
scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.withProgressLocked(MicroBatchExecution.scala:579)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.constructNextBatch(MicroBatchExecution.scala:353)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:198)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$4469/64219963.apply$mcV$sp(Unknown Source)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:328)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:326)
org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:181)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$4467/991354959.apply$mcZ$sp(Unknown Source)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:175)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:332)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)


== Sink ==
0: [-20] [-21] [-22] [11] [12] [0] [1] [2] [22]


== Plan ==
null
         
         
      at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:530)
      at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:529)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
      at org.scalatest.Assertions.fail(Assertions.scala:1091)
      at org.scalatest.Assertions.fail$(Assertions.scala:1087)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
      at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:444)
      at org.apache.spark.sql.streaming.StreamTest.executeAction$1(StreamTest.scala:716)
      at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$56(StreamTest.scala:770)
      at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$56$adapted(StreamTest.scala:757)
      at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
      at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
      at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:757)
      at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:756)
      at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:326)
      at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaMicroBatchSourceSuite.scala:53)
      at org.apache.spark.sql.kafka010.KafkaSourceSuiteBase.testFromSpecificOffsets(KafkaMicroBatchSourceSuite.scala:1500)
      at org.apache.spark.sql.kafka010.KafkaSourceSuiteBase.$anonfun$new$123(KafkaMicroBatchSourceSuite.scala:1279)
      at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
      at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
      at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
      at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
      at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
      at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
      at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
      at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
      at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
      at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
      at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
      at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
      at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
      at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
      at org.scalatest.Suite.run(Suite.scala:1124)
      at org.scalatest.Suite.run$(Suite.scala:1106)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
      at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
      at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
      at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
      at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
      at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
      at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
      at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
      at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1187)
      at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1234)
      at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
      at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
      at org.scalatest.Suite.runNestedSuites(Suite.scala:1232)
      at org.scalatest.Suite.runNestedSuites$(Suite.scala:1166)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
      at org.scalatest.Suite.run(Suite.scala:1121)
      at org.scalatest.Suite.run$(Suite.scala:1106)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1349)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1343)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1343)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1033)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1011)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1509)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1011)
      at org.scalatest.tools.Runner$.main(Runner.scala:827)
      at org.scalatest.tools.Runner.main(Runner.scala)