Timed out waiting for stream

== Progress ==
   AssertOnQuery(<condition>, )
   AddKafkaData(topics = Set(topic-12-seems), data = WrappedArray(1, 2, 3), message = )
   CheckAnswer: [2],[3],[4]
=> Assert(<condition>, )
   AddKafkaData(topics = Set(topic-12-bad), data = WrappedArray(4, 5, 6), message = )
   CheckAnswer: [2],[3],[4],[5],[6],[7]
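
For reference, the stack trace below points at KafkaSourceSuite.scala:274, and the progress trace corresponds to a StreamTest action sequence along these lines. This is a hedged reconstruction, not the suite's exact source: the topic names, partition count, and Kafka options are inferred from the trace (SubscribePattern[topic-12-.*]; data 1, 2, 3 checked as 2, 3, 4 implies a "+ 1" map), and the body of the Assert step is an assumption.

// Sketch (Scala), assuming the KafkaSourceTest/StreamTest helpers
// (spark, testUtils, testStream, AddKafkaData, CheckAnswer) are in scope.
import testImplicits._

val topicPrefix = "topic-12"                      // prefix taken from the trace
val topic  = topicPrefix + "-seems"
val topic2 = topicPrefix + "-bad"
testUtils.createTopic(topic, partitions = 5)      // partition count assumed

val kafka = spark.readStream
  .format("kafka")
  .option("kafka.bootstrap.servers", testUtils.brokerAddress)
  .option("subscribePattern", s"$topicPrefix-.*")
  .option("failOnDataLoss", "false")              // assumed, to survive deletion
  .load()
  .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
  .as[(String, String)]
val mapped = kafka.map(_._2.toInt + 1)            // 1,2,3 -> 2,3,4 per CheckAnswer

testStream(mapped)(
  AssertOnQuery(_ => true),                       // AssertOnQuery(<condition>, )
  AddKafkaData(Set(topic), 1, 2, 3),
  CheckAnswer(2, 3, 4),
  Assert {                                        // => the step the run hung on
    testUtils.deleteTopic(topic)                  // assumed: swap the topics
    testUtils.createTopic(topic2, partitions = 5) // matching the pattern mid-stream
    true
  },
  AddKafkaData(Set(topic2), 4, 5, 6),
  CheckAnswer(2, 3, 4, 5, 6, 7)
)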

== Stream ==
Output Mode: Append
Stream state: {KafkaSource[SubscribePattern[topic-12-.*]]: {}}
Thread state: alive


== Sink ==
0: 
1: [2]
2: [3] [4]
3: 


== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, true] AS value#3966]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#3965: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#3964: scala.Tuple2
      +- Project [cast(key#4063 as string) AS key#3955, cast(value#4064 as string) AS value#3956]
         +- LogicalRDD [key#4063, value#4064, topic#4065, partition#4066, offset#4067L, timestamp#4068L, timestampType#4069]

== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, true] AS value#3966]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#3965: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#3964: scala.Tuple2
      +- Project [cast(key#4063 as string) AS key#3955, cast(value#4064 as string) AS value#3956]
         +- LogicalRDD [key#4063, value#4064, topic#4065, partition#4066, offset#4067L, timestamp#4068L, timestampType#4069]

== Optimized Logical Plan ==
SerializeFromObject [input[0, int, true] AS value#3966]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#3965: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#3964: scala.Tuple2
      +- Project [cast(key#4063 as string) AS key#3955, cast(value#4064 as string) AS value#3956]
         +- LogicalRDD [key#4063, value#4064, topic#4065, partition#4066, offset#4067L, timestamp#4068L, timestampType#4069]

== Physical Plan ==
*SerializeFromObject [input[0, int, true] AS value#3966]
+- *MapElements <function1>, obj#3965: int
   +- *DeserializeToObject newInstance(class scala.Tuple2), obj#3964: scala.Tuple2
      +- *Project [cast(key#4063 as string) AS key#3955, cast(value#4064 as string) AS value#3956]
         +- Scan ExistingRDD[key#4063,value#4064,topic#4065,partition#4066,offset#4067L,timestamp#4068L,timestampType#4069]
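
The four dumps above are the standard Dataset.explain(extended = true) output. As a minimal sketch, assuming an active SparkSession named spark and placeholder Kafka options, a query of this shape yields the same operator tree; each comment names the plan node it produces. (The Kafka source appears as LogicalRDD / Scan ExistingRDD here presumably because the dump shows a micro-batch plan, where the source has been materialized as an RDD.)

// Sketch (Scala); broker address is a placeholder, pattern from the stream state.
import spark.implicits._

val df = spark.readStream
  .format("kafka")
  .option("kafka.bootstrap.servers", "localhost:9092")        // placeholder
  .option("subscribePattern", "topic-12-.*")
  .load()                                                     // the Kafka relation
  .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)") // Project [cast(key ...), cast(value ...)]
  .as[(String, String)]                                       // DeserializeToObject newInstance(class scala.Tuple2)
  .map(_._2.toInt + 1)                                        // MapElements + SerializeFromObject

df.explain(true)  // prints the Parsed/Analyzed/Optimized/Physical plans as above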
 

org.scalatest.exceptions.TestFailedException: Timed out waiting for stream
at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
at org.apache.spark.sql.streaming.StreamTest$class.failTest$1(StreamTest.scala:319)
at org.apache.spark.sql.streaming.StreamTest$class.testStream(StreamTest.scala:526)
at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaSourceSuite.scala:34)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$12.apply$mcV$sp(KafkaSourceSuite.scala:295)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$12.apply(KafkaSourceSuite.scala:274)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$12.apply(KafkaSourceSuite.scala:274)
at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaSourceSuite.scala:34)
at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaSourceSuite.scala:34)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
at org.scalatest.Suite$class.run(Suite.scala:1424)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
at org.scalatest.Suite$class.run(Suite.scala:1421)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
at org.scalatest.tools.Runner$.main(Runner.scala:860)
at org.scalatest.tools.Runner.main(Runner.scala)