sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException:
Stream Thread Died: null
java.util.concurrent.locks.AbstractQueuedSynchronizer.tryAcquireSharedNanos(AbstractQueuedSynchronizer.java:1326)
scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:208)
scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:218)
scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:201)
org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92)
org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76)
org.apache.spark.sql.execution.streaming.state.StateStoreCoordinatorRef.deactivateInstances(StateStoreCoordinator.scala:108)
org.apache.spark.sql.streaming.StreamingQueryManager.notifyQueryTermination(StreamingQueryManager.scala:335)
== Progress ==
AssertOnQuery(<condition>, )
AddKafkaData(topics = Set(topic-11-suffix), data = WrappedArray(1, 2, 3), message = )
CheckAnswer: [2],[3],[4]
StopStream
StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@7b7feaa0,Map())
CheckAnswer: [2],[3],[4]
StopStream
AddKafkaData(topics = Set(topic-11-suffix), data = WrappedArray(4, 5, 6), message = )
StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@3739bdd3,Map())
CheckAnswer: [2],[3],[4],[5],[6],[7]
AddKafkaData(topics = Set(topic-11-suffix), data = WrappedArray(7, 8), message = )
CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9]
AssertOnQuery(<condition>, Add partitions)
AddKafkaData(topics = Set(topic-11-suffix), data = WrappedArray(9, 10, 11, 12, 13, 14, 15, 16), message = )
CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17]
== Stream ==
Output Mode: Append
Stream state: {KafkaSource[SubscribePattern[topic-11-.*]]: {"topic-11-suffix":{"8":0,"2":2,"5":1,"4":3,"7":1,"1":3,"9":1,"3":3,"6":1,"0":2}}}
Thread state: alive
Thread stack trace: java.lang.Thread.sleep(Native Method)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches$1.apply$mcZ$sp(StreamExecution.scala:326)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches(StreamExecution.scala:294)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:211)
java.lang.InterruptedException
at java.util.concurrent.locks.AbstractQueuedSynchronizer.tryAcquireSharedNanos(AbstractQueuedSynchronizer.java:1326)
at scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:208)
at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:218)
at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:201)
at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92)
at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76)
at org.apache.spark.sql.execution.streaming.state.StateStoreCoordinatorRef.deactivateInstances(StateStoreCoordinator.scala:108)
at org.apache.spark.sql.streaming.StreamingQueryManager.notifyQueryTermination(StreamingQueryManager.scala:335)
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches(StreamExecution.scala:375)
at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:211)
== Sink ==
0:
1: [2]
2: [3] [4]
3: [6] [5] [7]
4: [8]
5: [9]
6:
7: [12] [10] [11]
8: [17] [15] [13] [16] [14]
== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#1337]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#1336: int
+- DeserializeToObject newInstance(class scala.Tuple2), obj#1335: scala.Tuple2
+- Project [cast(key#1527 as string) AS key#1326, cast(value#1528 as string) AS value#1327]
+- LogicalRDD [key#1527, value#1528, topic#1529, partition#1530, offset#1531L, timestamp#1532, timestampType#1533]
== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, false] AS value#1337]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#1336: int
+- DeserializeToObject newInstance(class scala.Tuple2), obj#1335: scala.Tuple2
+- Project [cast(key#1527 as string) AS key#1326, cast(value#1528 as string) AS value#1327]
+- LogicalRDD [key#1527, value#1528, topic#1529, partition#1530, offset#1531L, timestamp#1532, timestampType#1533]
== Optimized Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#1337]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#1336: int
+- DeserializeToObject newInstance(class scala.Tuple2), obj#1335: scala.Tuple2
+- Project [cast(key#1527 as string) AS key#1326, cast(value#1528 as string) AS value#1327]
+- LogicalRDD [key#1527, value#1528, topic#1529, partition#1530, offset#1531L, timestamp#1532, timestampType#1533]
== Physical Plan ==
*SerializeFromObject [input[0, int, false] AS value#1337]
+- *MapElements <function1>, obj#1336: int
+- *DeserializeToObject newInstance(class scala.Tuple2), obj#1335: scala.Tuple2
+- *Project [cast(key#1527 as string) AS key#1326, cast(value#1528 as string) AS value#1327]
+- Scan ExistingRDD[key#1527,value#1528,topic#1529,partition#1530,offset#1531L,timestamp#1532,timestampType#1533]
at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
at org.apache.spark.sql.streaming.StreamTest$class.failTest$1(StreamTest.scala:344)
at org.apache.spark.sql.streaming.StreamTest$class.liftedTree1$1(StreamTest.scala:572)
at org.apache.spark.sql.streaming.StreamTest$class.testStream(StreamTest.scala:353)
at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaSourceSuite.scala:46)
at org.apache.spark.sql.kafka010.KafkaSourceSuite.org$apache$spark$sql$kafka010$KafkaSourceSuite$$testFromLatestOffsets(KafkaSourceSuite.scala:789)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$24$$anonfun$apply$8.apply$mcV$sp(KafkaSourceSuite.scala:398)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$24$$anonfun$apply$8.apply(KafkaSourceSuite.scala:395)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$24$$anonfun$apply$8.apply(KafkaSourceSuite.scala:395)
at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaSourceSuite.scala:46)
at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaSourceSuite.scala:46)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
at org.scalatest.Suite$class.run(Suite.scala:1424)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:357)
at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:502)
at sbt.ForkMain$Run$2.call(ForkMain.java:296)
at sbt.ForkMain$Run$2.call(ForkMain.java:286)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
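For reference, here is a rough sketch of the test flow that produced the progress log above, reconstructed from the == Progress == section and the KafkaSourceSuite frames in the stack trace (testFromLatestOffsets driven through testStream). The reader options, the testUtils helper calls, and the placeholder AssertOnQuery conditions are assumptions for illustration, not the suite's exact code:

// Hedged reconstruction of the failing action sequence (not the suite's exact code).
// Assumes the KafkaSourceTest harness visible in the stack trace: testStream,
// AddKafkaData, AssertOnQuery, a KafkaTestUtils instance named testUtils, and
// import testImplicits._ for the (String, String) encoder.
val topic = "topic-11-suffix"
testUtils.createTopic(topic, partitions = 5)        // assumed initial partition count

val kafka = spark.readStream
  .format("kafka")
  .option("kafka.bootstrap.servers", testUtils.brokerAddress)  // assumed helper
  .option("subscribePattern", "topic-11-.*")        // matches SubscribePattern[topic-11-.*]
  .option("startingOffsets", "latest")
  .load()
  .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
  .as[(String, String)]
val mapped = kafka.map(kv => kv._2.toInt + 1)       // accounts for the +1 shift in CheckAnswer

testStream(mapped)(
  AssertOnQuery(_ => true),                         // placeholder for AssertOnQuery(<condition>, )
  AddKafkaData(Set(topic), 1, 2, 3),
  CheckAnswer(2, 3, 4),
  StopStream,
  StartStream(ProcessingTime(0)),
  CheckAnswer(2, 3, 4),
  StopStream,
  AddKafkaData(Set(topic), 4, 5, 6),
  StartStream(ProcessingTime(0)),
  CheckAnswer(2, 3, 4, 5, 6, 7),
  AddKafkaData(Set(topic), 7, 8),
  CheckAnswer(2, 3, 4, 5, 6, 7, 8, 9),
  AssertOnQuery({ _ =>
    testUtils.addPartitions(topic, 10)              // assumed helper, per "Add partitions"
    true
  }, "Add partitions"),
  AddKafkaData(Set(topic), 9, 10, 11, 12, 13, 14, 15, 16),
  CheckAnswer(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)
)

Per the stack trace, the stream thread died on an InterruptedException while awaiting StateStoreCoordinatorRef.deactivateInstances inside StreamingQueryManager.notifyQueryTermination, i.e. during query shutdown rather than inside any of the actions above.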