      org.scalatest.exceptions.TestFailedException: 
Stream Thread Died: null
java.util.concurrent.locks.AbstractQueuedSynchronizer.tryAcquireSharedNanos(AbstractQueuedSynchronizer.java:1326)
	scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:208)
	scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:218)
	scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
	org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:201)
	org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
	org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92)
	org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76)
	org.apache.spark.sql.execution.streaming.state.StateStoreCoordinatorRef.deactivateInstances(StateStoreCoordinator.scala:108)
	org.apache.spark.sql.streaming.StreamingQueryManager.notifyQueryTermination(StreamingQueryManager.scala:335)


== Progress ==
   AssertOnQuery(<condition>, )
   AddKafkaData(topics = Set(topic-20-suffix), data = WrappedArray(1, 2, 3), message = )
   CheckAnswer: [2],[3],[4]
   StopStream
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4de93edd,Map())
   CheckAnswer: [2],[3],[4]
   StopStream
   AddKafkaData(topics = Set(topic-20-suffix), data = WrappedArray(4, 5, 6), message = )
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@395197cb,Map())
   CheckAnswer: [2],[3],[4],[5],[6],[7]
   AddKafkaData(topics = Set(topic-20-suffix), data = WrappedArray(7, 8), message = )
   CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9]
   AssertOnQuery(<condition>, Add partitions)
   AddKafkaData(topics = Set(topic-20-suffix), data = WrappedArray(9, 10, 11, 12, 13, 14, 15, 16), message = )
   CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17]

== Stream ==
Output Mode: Append
Stream state: {KafkaSource[SubscribePattern[topic-20-.*]]: {"topic-20-suffix":{"8":1,"2":2,"5":1,"4":3,"7":0,"1":3,"9":1,"3":3,"6":1,"0":2}}}
Thread state: alive
Thread stack trace: java.lang.Thread.sleep(Native Method)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches$1.apply$mcZ$sp(StreamExecution.scala:326)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches(StreamExecution.scala:294)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:211)
java.lang.InterruptedException
	at java.util.concurrent.locks.AbstractQueuedSynchronizer.tryAcquireSharedNanos(AbstractQueuedSynchronizer.java:1326)
	at scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:208)
	at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:218)
	at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
	at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:201)
	at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
	at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92)
	at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76)
	at org.apache.spark.sql.execution.streaming.state.StateStoreCoordinatorRef.deactivateInstances(StateStoreCoordinator.scala:108)
	at org.apache.spark.sql.streaming.StreamingQueryManager.notifyQueryTermination(StreamingQueryManager.scala:335)
	at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches(StreamExecution.scala:375)
	at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:211)


== Sink ==
0: 
1: [2]
2: [4] [3]
3: [5] [7] [6]
4: [8]
5: [9]
6: 
7: [11] [12] [10] [13]
8: [14] [15] [17] [16]


== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#2911]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#2910: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#2909: scala.Tuple2
      +- Project [cast(key#3101 as string) AS key#2900, cast(value#3102 as string) AS value#2901]
         +- LogicalRDD [key#3101, value#3102, topic#3103, partition#3104, offset#3105L, timestamp#3106, timestampType#3107]

== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, false] AS value#2911]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#2910: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#2909: scala.Tuple2
      +- Project [cast(key#3101 as string) AS key#2900, cast(value#3102 as string) AS value#2901]
         +- LogicalRDD [key#3101, value#3102, topic#3103, partition#3104, offset#3105L, timestamp#3106, timestampType#3107]

== Optimized Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#2911]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#2910: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#2909: scala.Tuple2
      +- Project [cast(key#3101 as string) AS key#2900, cast(value#3102 as string) AS value#2901]
         +- LogicalRDD [key#3101, value#3102, topic#3103, partition#3104, offset#3105L, timestamp#3106, timestampType#3107]

== Physical Plan ==
*SerializeFromObject [input[0, int, false] AS value#2911]
+- *MapElements <function1>, obj#2910: int
   +- *DeserializeToObject newInstance(class scala.Tuple2), obj#2909: scala.Tuple2
      +- *Project [cast(key#3101 as string) AS key#2900, cast(value#3102 as string) AS value#2901]
         +- Scan ExistingRDD[key#3101,value#3102,topic#3103,partition#3104,offset#3105L,timestamp#3106,timestampType#3107]
         
         
      at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
      at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
      at org.apache.spark.sql.streaming.StreamTest$class.failTest$1(StreamTest.scala:344)
      at org.apache.spark.sql.streaming.StreamTest$class.liftedTree1$1(StreamTest.scala:572)
      at org.apache.spark.sql.streaming.StreamTest$class.testStream(StreamTest.scala:353)
      at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaSourceSuite.scala:46)
      at org.apache.spark.sql.kafka010.KafkaSourceSuite.org$apache$spark$sql$kafka010$KafkaSourceSuite$$testFromLatestOffsets(KafkaSourceSuite.scala:789)
      at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$24$$anonfun$apply$8.apply$mcV$sp(KafkaSourceSuite.scala:398)
      at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$24$$anonfun$apply$8.apply(KafkaSourceSuite.scala:395)
      at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$24$$anonfun$apply$8.apply(KafkaSourceSuite.scala:395)
      at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
      at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
      at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
      at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
      at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaSourceSuite.scala:46)
      at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
      at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaSourceSuite.scala:46)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
      at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
      at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
      at org.scalatest.Suite$class.run(Suite.scala:1424)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
      at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
      at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
      at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
      at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
      at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
      at org.scalatest.Suite$class.run(Suite.scala:1421)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
      at org.scalatest.tools.Runner$.main(Runner.scala:860)
      at org.scalatest.tools.Runner.main(Runner.scala)
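
For reference, the "== Progress ==" actions above are Spark's StreamTest DSL. A minimal sketch of how such a scenario is driven (an approximation only, assuming a suite extending KafkaSourceTest with the usual test implicits in scope, the testStream/AddKafkaData helpers, and a hypothetical KafkaTestUtils instance named testUtils; this is not the exact test source) looks like:

    // Sketch: read the test topic via subscribePattern and map each value n to n + 1,
    // matching the answers checked in the Progress section above.
    val topic = "topic-20-suffix"                      // topic name taken from the log above
    val kafka = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", testUtils.brokerAddress)   // assumed helper
      .option("subscribePattern", "topic-20-.*")
      .option("startingOffsets", "latest")
      .load()
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
      .as[(String, String)]
    val mapped = kafka.map(kv => kv._2.toInt + 1)

    testStream(mapped)(
      AddKafkaData(Set(topic), 1, 2, 3),
      CheckAnswer(2, 3, 4),
      StopStream,
      StartStream(ProcessingTime(0)),
      CheckAnswer(2, 3, 4),                            // state recovered after restart
      StopStream,
      AddKafkaData(Set(topic), 4, 5, 6),               // data added while the stream is stopped
      StartStream(ProcessingTime(0)),
      CheckAnswer(2, 3, 4, 5, 6, 7),
      AddKafkaData(Set(topic), 7, 8),
      CheckAnswer(2, 3, 4, 5, 6, 7, 8, 9),
      AssertOnQuery("Add partitions") { _ => testUtils.addPartitions(topic, 10); true },
      AddKafkaData(Set(topic), 9, 10, 11, 12, 13, 14, 15, 16),
      CheckAnswer(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)
    )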