Timed out waiting for stream: The code passed to failAfter did not complete within 30 seconds.
java.lang.Thread.getStackTrace(Thread.java:1559)
 org.scalatest.concurrent.TimeLimits.failAfterImpl(TimeLimits.scala:234)
 org.scalatest.concurrent.TimeLimits.failAfterImpl$(TimeLimits.scala:233)
 org.apache.spark.sql.kafka010.KafkaSourceTest.failAfterImpl(KafkaMicroBatchSourceSuite.scala:49)
 org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:230)
 org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:229)
 org.apache.spark.sql.kafka010.KafkaSourceTest.failAfter(KafkaMicroBatchSourceSuite.scala:49)
 org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7(StreamTest.scala:472)
 org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7$adapted(StreamTest.scala:471)
 scala.collection.mutable.HashMap.$anonfun$foreach$1(HashMap.scala:145)

 Caused by: null
 java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2151)
 org.apache.spark.sql.execution.streaming.StreamExecution.awaitOffset(StreamExecution.scala:403)
 org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$8(StreamTest.scala:473)
 scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
 org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
 org.scalatest.concurrent.TimeLimits.failAfterImpl(TimeLimits.scala:239)
 org.scalatest.concurrent.TimeLimits.failAfterImpl$(TimeLimits.scala:233)
 org.apache.spark.sql.kafka010.KafkaSourceTest.failAfterImpl(KafkaMicroBatchSourceSuite.scala:49)
 org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:230)
 org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:229)
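
The two traces above say what stalled: the harness wraps each blocking test step in ScalaTest's failAfter, and the nested trace shows the test thread parked inside StreamExecution.awaitOffset, waiting on a lock condition for the source to reach an offset that never arrives ("Caused by: null" is just the cause exception with no message, topped by ConditionObject.await). A minimal sketch of that wait-plus-guard pattern; committed and reportProgress are invented stand-ins, not Spark's actual fields:

  import java.util.concurrent.locks.ReentrantLock
  import org.scalatest.concurrent.{Signaler, ThreadSignaler}
  import org.scalatest.concurrent.TimeLimits._
  import org.scalatest.time.SpanSugar._

  object AwaitSketch {
    private val lock = new ReentrantLock()
    private val progress = lock.newCondition()
    @volatile private var committed = 0L              // stand-in for the source's committed offset

    def reportProgress(offset: Long): Unit = {
      lock.lock()
      try { committed = offset; progress.signalAll() } finally lock.unlock()
    }

    def awaitOffset(target: Long): Unit = {
      lock.lock()
      try while (committed < target) progress.await() // parks in ConditionObject.await, as in the trace
      finally lock.unlock()
    }
  }

  implicit val signaler: Signaler = ThreadSignaler    // interrupt the parked thread on timeout
  failAfter(30.seconds) { AwaitSketch.awaitOffset(16L) }  // stalls, then fails like the log above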


== Progress ==
 AddKafkaData(topics = Set(topic-8-suffix), data = WrappedArray(4, 5, 6), message = )
 CheckAnswer: [2],[3],[4],[5],[6],[7]
 StopStream
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@19672cb8,Map(),null)
 CheckAnswer: [2],[3],[4],[5],[6],[7]
 StopStream
 AddKafkaData(topics = Set(topic-8-suffix), data = WrappedArray(7, 8), message = )
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@33e6018a,Map(),null)
 CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9]
 AssertOnQuery(<condition>, Add partitions)
 AddKafkaData(topics = Set(topic-8-suffix), data = WrappedArray(9, 10, 11, 12, 13, 14, 15, 16), message = )
=> CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17]
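
The "== Progress ==" list is the action script the suite feeds to testStream, with "=>" marking the step in flight when the timeout fired. Reconstructed as a sketch in the StreamTest DSL this log comes from; topic and the testUtils.addPartitions helper are assumed names, and mapped is the query under test (see the shape sketch after the plan section):

  testStream(mapped)(
    AddKafkaData(Set(topic), 4, 5, 6),
    CheckAnswer(2, 3, 4, 5, 6, 7),
    StopStream,
    StartStream(),                          // recover from checkpointed offsets
    CheckAnswer(2, 3, 4, 5, 6, 7),          // same answer after restart
    StopStream,
    AddKafkaData(Set(topic), 7, 8),         // produced while the stream is down
    StartStream(),
    CheckAnswer(2, 3, 4, 5, 6, 7, 8, 9),
    AssertOnQuery("Add partitions") { _ => testUtils.addPartitions(topic, 10); true },
    AddKafkaData(Set(topic), 9, 10, 11, 12, 13, 14, 15, 16),
    CheckAnswer(2 to 17: _*)                // the step that never completed
  )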

== Stream ==
Output Mode: Append
Stream state: {KafkaSourceV1[SubscribePattern[topic-8-.*]]: {"topic-8-suffix":{"2":1,"4":3,"1":1,"3":3,"0":4}}}
Thread state: alive
Thread stack trace: java.lang.Thread.sleep(Native Method)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:213)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2136/1807888670.apply$mcZ$sp(Unknown Source)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:158)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:281)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:191)
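
Reading the state line: those are the source's committed Kafka offsets per partition of topic-8-suffix, and an offset is the next record to read, so their sum is the number of records consumed so far. A quick check of why the final CheckAnswer can never pass:

  val committed = Map(0 -> 4, 1 -> 1, 2 -> 1, 3 -> 3, 4 -> 3)  // from the state line
  val consumed  = committed.values.sum    // 12 records, matching the 12 sink rows below
  val awaited   = 16                      // rows the timed-out CheckAnswer expects
  assert(consumed < awaited)              // so awaitOffset blocks until failAfter fires

Only partitions 0-4 appear in the state even though the "Add partitions" step grew the topic, so the four missing rows most likely landed in new partitions the source had not yet discovered.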


== Sink ==
0: [3] [4] [2]
1: [5]
2: [7] [6]
3: [8] [9]
4: [10] [11]
5: [15] [14]


== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#6134]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceSuiteBase$$Lambda$3272/1837195191@337edfdb, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#6133: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#6132: scala.Tuple2
      +- Project [cast(key#6108 as string) AS key#6122, cast(value#6109 as string) AS value#6123]
         +- Project [key#6271 AS key#6108, value#6272 AS value#6109, topic#6273 AS topic#6110, partition#6274 AS partition#6111, offset#6275L AS offset#6112L, timestamp#6276 AS timestamp#6113, timestampType#6277 AS timestampType#6114]
            +- LogicalRDD [key#6271, value#6272, topic#6273, partition#6274, offset#6275L, timestamp#6276, timestampType#6277], true

== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, false] AS value#6134]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceSuiteBase$$Lambda$3272/1837195191@337edfdb, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#6133: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#6132: scala.Tuple2
      +- Project [cast(key#6108 as string) AS key#6122, cast(value#6109 as string) AS value#6123]
         +- Project [key#6271 AS key#6108, value#6272 AS value#6109, topic#6273 AS topic#6110, partition#6274 AS partition#6111, offset#6275L AS offset#6112L, timestamp#6276 AS timestamp#6113, timestampType#6277 AS timestampType#6114]
            +- LogicalRDD [key#6271, value#6272, topic#6273, partition#6274, offset#6275L, timestamp#6276, timestampType#6277], true

== Optimized Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#6134]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceSuiteBase$$Lambda$3272/1837195191@337edfdb, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#6133: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#6132: scala.Tuple2
      +- Project [cast(key#6271 as string) AS key#6122, cast(value#6272 as string) AS value#6123]
         +- LogicalRDD [key#6271, value#6272, topic#6273, partition#6274, offset#6275L, timestamp#6276, timestampType#6277], true

== Physical Plan ==
*(1) SerializeFromObject [input[0, int, false] AS value#6134]
+- *(1) MapElements org.apache.spark.sql.kafka010.KafkaSourceSuiteBase$$Lambda$3272/1837195191@337edfdb, obj#6133: int
   +- *(1) DeserializeToObject newInstance(class scala.Tuple2), obj#6132: scala.Tuple2
      +- *(1) Project [cast(key#6271 as string) AS key#6122, cast(value#6272 as string) AS value#6123]
         +- *(1) Scan ExistingRDD kafka[key#6271,value#6272,topic#6273,partition#6274,offset#6275L,timestamp#6276,timestampType#6277]
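
For orientation, the four plans above correspond to a streaming query of roughly this shape; the broker address is a placeholder, and the +1 map is inferred from the progress list (produced value n shows up as n+1):

  import spark.implicits._

  val mapped = spark.readStream
    .format("kafka")
    .option("kafka.bootstrap.servers", brokerAddress)            // assumed test broker
    .option("subscribePattern", "topic-8-.*")
    .option("startingOffsets", "earliest")
    .load()
    .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")  // the Project of casts
    .as[(String, String)]                                        // DeserializeToObject Tuple2
    .map(kv => kv._2.toInt + 1)                                  // MapElements -> int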

org.scalatest.exceptions.TestFailedException:
at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
at org.scalatest.Assertions.fail(Assertions.scala:1089)
at org.scalatest.Assertions.fail$(Assertions.scala:1085)
at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:453)
at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:783)
at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:759)
at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:329)
at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaMicroBatchSourceSuite.scala:49)
at org.apache.spark.sql.kafka010.KafkaSourceSuiteBase.testFromEarliestOffsets(KafkaMicroBatchSourceSuite.scala:1398)
at org.apache.spark.sql.kafka010.KafkaSourceSuiteBase.$anonfun$new$103(KafkaMicroBatchSourceSuite.scala:1129)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:12)
at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaMicroBatchSourceSuite.scala:49)
at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaMicroBatchSourceSuite.scala:49)
at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
at scala.collection.immutable.List.foreach(List.scala:388)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
at org.scalatest.Suite.run(Suite.scala:1147)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:52)
at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:32)
at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:29)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:194)
at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
at org.scalatest.Suite.run(Suite.scala:1144)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1340)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1334)
at scala.collection.immutable.List.foreach(List.scala:388)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
at org.scalatest.tools.Runner$.main(Runner.scala:827)
at org.scalatest.tools.Runner.main(Runner.scala)