sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException:
Timed out waiting for stream: The code passed to failAfter did not complete within 30 seconds.
org.scalatest.concurrent.Timeouts$$anonfun$failAfter$1.apply(Timeouts.scala:249)
org.scalatest.concurrent.Timeouts$$anonfun$failAfter$1.apply(Timeouts.scala:249)
org.scalatest.concurrent.Timeouts$class.timeoutAfter(Timeouts.scala:345)
org.scalatest.concurrent.Timeouts$class.failAfter(Timeouts.scala:245)
org.apache.spark.sql.kafka010.KafkaSourceTest.failAfter(KafkaSourceSuite.scala:41)
org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24.apply(StreamTest.scala:532)
org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24.apply(StreamTest.scala:531)
scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
Caused by: null
java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.reportInterruptAfterWait(AbstractQueuedSynchronizer.java:2014)
java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2173)
org.apache.spark.sql.execution.streaming.StreamExecution.awaitOffset(StreamExecution.scala:569)
org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24$$anonfun$apply$17.apply$mcV$sp(StreamTest.scala:533)
org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24$$anonfun$apply$17.apply(StreamTest.scala:533)
org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24$$anonfun$apply$17.apply(StreamTest.scala:533)
org.scalatest.concurrent.Timeouts$class.timeoutAfter(Timeouts.scala:326)
org.scalatest.concurrent.Timeouts$class.failAfter(Timeouts.scala:245)
org.apache.spark.sql.kafka010.KafkaSourceTest.failAfter(KafkaSourceSuite.scala:41)
org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24.apply(StreamTest.scala:532)
== Progress ==
AssertOnQuery(<condition>, )
AddKafkaData(topics = Set(topic-20-seems), data = WrappedArray(1, 2, 3), message = )
CheckAnswer: [2],[3],[4]
Assert(<condition>, )
AddKafkaData(topics = Set(topic-20-bad), data = WrappedArray(4, 5, 6), message = )
=> CheckAnswer: [2],[3],[4],[5],[6],[7]
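
(For context: the "=>" marker flags the CheckAnswer step that never completed within the 30-second failAfter window. A hedged reconstruction of this action sequence as StreamTest code is sketched below; helper names such as newTopic, testUtils, and the mapped dataset are assumptions drawn from KafkaSourceSuite conventions, not taken from this log.)

    // Sketch only: the logged progress replayed as a StreamTest scenario.
    // `mapped` is the streaming Dataset[Int] described under == Plan == below.
    val topicPrefix = newTopic()                       // e.g. "topic-20" (assumed helper)
    val topic = topicPrefix + "-seems"
    testUtils.createTopic(topic, partitions = 5)       // assumed KafkaTestUtils helper
    testStream(mapped)(
      AssertOnQuery(_ => true),                        // stand-in for the logged <condition>
      AddKafkaData(Set(topic), 1, 2, 3),               // the query maps each value v to v + 1
      CheckAnswer(2, 3, 4),
      Assert {                                         // stand-in for the logged <condition>
        testUtils.deleteTopic(topic)                   // delete the old topic ...
        testUtils.createTopic(topicPrefix + "-bad")    // ... and let the pattern pick up a new one
        true
      },
      AddKafkaData(Set(topicPrefix + "-bad"), 4, 5, 6),
      CheckAnswer(2, 3, 4, 5, 6, 7)                    // => the step that timed out
    )
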
== Stream ==
Output Mode: Append
Stream state: {KafkaSource[SubscribePattern[topic-20-.*]]: {}}
Thread state: alive
== Sink ==
0:
1: [2]
2: [4] [3]
3:
== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, true] AS value#4354]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#4353: int
+- DeserializeToObject newInstance(class scala.Tuple2), obj#4352: scala.Tuple2
+- Project [cast(key#4423 as string) AS key#4343, cast(value#4424 as string) AS value#4344]
+- LogicalRDD [key#4423, value#4424, topic#4425, partition#4426, offset#4427L, timestamp#4428, timestampType#4429]
== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, true] AS value#4354]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#4353: int
+- DeserializeToObject newInstance(class scala.Tuple2), obj#4352: scala.Tuple2
+- Project [cast(key#4423 as string) AS key#4343, cast(value#4424 as string) AS value#4344]
+- LogicalRDD [key#4423, value#4424, topic#4425, partition#4426, offset#4427L, timestamp#4428, timestampType#4429]
== Optimized Logical Plan ==
SerializeFromObject [input[0, int, true] AS value#4354]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#4353: int
+- DeserializeToObject newInstance(class scala.Tuple2), obj#4352: scala.Tuple2
+- Project [cast(key#4423 as string) AS key#4343, cast(value#4424 as string) AS value#4344]
+- LogicalRDD [key#4423, value#4424, topic#4425, partition#4426, offset#4427L, timestamp#4428, timestampType#4429]
== Physical Plan ==
*SerializeFromObject [input[0, int, true] AS value#4354]
+- *MapElements <function1>, obj#4353: int
+- *DeserializeToObject newInstance(class scala.Tuple2), obj#4352: scala.Tuple2
+- *Project [cast(key#4423 as string) AS key#4343, cast(value#4424 as string) AS value#4344]
+- Scan ExistingRDD[key#4423,value#4424,topic#4425,partition#4426,offset#4427L,timestamp#4428,timestampType#4429]
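
(The plans above describe a Kafka source whose key/value bytes are cast to strings and then mapped to an int. A minimal sketch of a query producing this shape follows; the broker address, the metadata-age option, and the test-implicits import are assumptions inferred from the stream-state line and common kafka-0-10-sql test setup, not confirmed by this log.)

    // Sketch only: a query matching the logged plans, under assumed test setup.
    import testImplicits._                              // assumed; enables .as[(String, String)]
    val kafka = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", testUtils.brokerAddress)  // assumed test broker
      .option("kafka.metadata.max.age.ms", "1")         // pick up topic changes quickly
      .option("subscribePattern", "topic-20-.*")        // matches the logged stream state
      .load()
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")  // the Project node above
      .as[(String, String)]                             // DeserializeToObject scala.Tuple2
    val mapped = kafka.map(kv => kv._2.toInt + 1)       // MapElements ..., obj: int
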
at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
at org.apache.spark.sql.streaming.StreamTest$class.failTest$1(StreamTest.scala:341)
at org.apache.spark.sql.streaming.StreamTest$class.liftedTree1$1(StreamTest.scala:555)
at org.apache.spark.sql.streaming.StreamTest$class.testStream(StreamTest.scala:350)
at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaSourceSuite.scala:41)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$3.apply$mcV$sp(KafkaSourceSuite.scala:335)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$3.apply(KafkaSourceSuite.scala:314)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$3.apply(KafkaSourceSuite.scala:314)
at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaSourceSuite.scala:41)
at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaSourceSuite.scala:41)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
at org.scalatest.Suite$class.run(Suite.scala:1424)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:357)
at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:502)
at sbt.ForkMain$Run$2.call(ForkMain.java:296)
at sbt.ForkMain$Run$2.call(ForkMain.java:286)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)