Stream Thread Died: null
java.util.concurrent.locks.AbstractQueuedSynchronizer.tryAcquireSharedNanos(AbstractQueuedSynchronizer.java:1326)
 scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:208)
 scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:218)
 scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
 org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:201)
 org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
 org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92)
 org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76)
 org.apache.spark.sql.execution.streaming.state.StateStoreCoordinatorRef.deactivateInstances(StateStoreCoordinator.scala:108)
 org.apache.spark.sql.streaming.StreamingQueryManager.notifyQueryTermination(StreamingQueryManager.scala:335)


== Progress ==
 AssertOnQuery(<condition>, )
 AddKafkaData(topics = Set(topic-14), data = WrappedArray(1, 2, 3), message = )
 CheckAnswer: [2],[3],[4]
 StopStream
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@79b8ecb,Map())
 CheckAnswer: [2],[3],[4]
 StopStream
 AddKafkaData(topics = Set(topic-14), data = WrappedArray(4, 5, 6), message = )
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@6ea3a513,Map())
 CheckAnswer: [2],[3],[4],[5],[6],[7]
 AddKafkaData(topics = Set(topic-14), data = WrappedArray(7, 8), message = )
 CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9]
 AssertOnQuery(<condition>, Add partitions)
 AddKafkaData(topics = Set(topic-14), data = WrappedArray(9, 10, 11, 12, 13, 14, 15, 16), message = )
 CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17]
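
For context, the action names in the progress listing above come from Spark's StreamTest DSL (AssertOnQuery, CheckAnswer, StopStream, StartStream) plus the KafkaSourceSuite helper AddKafkaData. A minimal sketch of the same scenario in that DSL follows; it assumes the Spark test harness (it is not runnable standalone), `mapped` is the query sketched after the "== Plan ==" section below, and the AssertOnQuery bodies are placeholders for conditions the log elides:

// Sketch only, assuming Spark's StreamTest / KafkaSourceSuite test harness.
testStream(mapped)(
  AssertOnQuery { q => true },                     // placeholder for the elided <condition>
  AddKafkaData(Set("topic-14"), 1, 2, 3),
  CheckAnswer(2, 3, 4),
  StopStream,
  StartStream(ProcessingTime(0)),
  CheckAnswer(2, 3, 4),
  StopStream,
  AddKafkaData(Set("topic-14"), 4, 5, 6),
  StartStream(ProcessingTime(0)),
  CheckAnswer(2, 3, 4, 5, 6, 7),
  AddKafkaData(Set("topic-14"), 7, 8),
  CheckAnswer(2, 3, 4, 5, 6, 7, 8, 9),
  AssertOnQuery("Add partitions") { q => true },   // placeholder: the real test adds partitions to topic-14
  AddKafkaData(Set("topic-14"), 9, 10, 11, 12, 13, 14, 15, 16),
  CheckAnswer(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)
)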

== Stream ==
Output Mode: Append
Stream state: {KafkaSource[Assign[topic-14-4, topic-14-3, topic-14-2, topic-14-1, topic-14-0]]: {"topic-14":{"2":4,"4":4,"1":3,"3":2,"0":4}}}
Thread state: alive
Thread stack trace: org.apache.kafka.common.protocol.types.Schema.write(Schema.java:53)
org.apache.kafka.common.protocol.types.ArrayOf.write(ArrayOf.java:58)
org.apache.kafka.common.protocol.types.Schema.write(Schema.java:54)
org.apache.kafka.common.protocol.types.Struct.writeTo(Struct.java:251)
org.apache.kafka.common.requests.RequestSend.serialize(RequestSend.java:37)
org.apache.kafka.common.requests.RequestSend.<init>(RequestSend.java:29)
org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.send(ConsumerNetworkClient.java:110)
org.apache.kafka.clients.consumer.internals.Fetcher.sendListOffsetRequest(Fetcher.java:430)
org.apache.kafka.clients.consumer.internals.Fetcher.listOffset(Fetcher.java:314)
org.apache.kafka.clients.consumer.internals.Fetcher.resetOffset(Fetcher.java:298)
org.apache.kafka.clients.consumer.internals.Fetcher.updateFetchPositions(Fetcher.java:170)
org.apache.kafka.clients.consumer.KafkaConsumer.updateFetchPositions(KafkaConsumer.java:1409)
org.apache.kafka.clients.consumer.KafkaConsumer.position(KafkaConsumer.java:1197)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$anonfun$fetchLatestOffsets$1$$anonfun$apply$9$$anonfun$4.apply(KafkaOffsetReader.scala:180)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$anonfun$fetchLatestOffsets$1$$anonfun$apply$9$$anonfun$4.apply(KafkaOffsetReader.scala:180)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.Iterator$class.foreach(Iterator.scala:893)
scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
scala.collection.AbstractIterable.foreach(Iterable.scala:54)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.mutable.AbstractSet.scala$collection$SetLike$$super$map(Set.scala:46)
scala.collection.SetLike$class.map(SetLike.scala:92)
scala.collection.mutable.AbstractSet.map(Set.scala:46)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$anonfun$fetchLatestOffsets$1$$anonfun$apply$9.apply(KafkaOffsetReader.scala:180)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$anonfun$fetchLatestOffsets$1$$anonfun$apply$9.apply(KafkaOffsetReader.scala:172)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$anonfun$org$apache$spark$sql$kafka010$KafkaOffsetReader$$withRetriesWithoutInterrupt$1.apply$mcV$sp(KafkaOffsetReader.scala:263)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$anonfun$org$apache$spark$sql$kafka010$KafkaOffsetReader$$withRetriesWithoutInterrupt$1.apply(KafkaOffsetReader.scala:262)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$anonfun$org$apache$spark$sql$kafka010$KafkaOffsetReader$$withRetriesWithoutInterrupt$1.apply(KafkaOffsetReader.scala:262)
org.apache.spark.util.UninterruptibleThread.runUninterruptibly(UninterruptibleThread.scala:85)
org.apache.spark.sql.kafka010.KafkaOffsetReader.org$apache$spark$sql$kafka010$KafkaOffsetReader$$withRetriesWithoutInterrupt(KafkaOffsetReader.scala:261)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$anonfun$fetchLatestOffsets$1.apply(KafkaOffsetReader.scala:172)
org.apache.spark.sql.kafka010.KafkaOffsetReader$$anonfun$fetchLatestOffsets$1.apply(KafkaOffsetReader.scala:172)
org.apache.spark.sql.kafka010.KafkaOffsetReader.runUninterruptibly(KafkaOffsetReader.scala:230)
org.apache.spark.sql.kafka010.KafkaOffsetReader.fetchLatestOffsets(KafkaOffsetReader.scala:171)
org.apache.spark.sql.kafka010.KafkaSource.getOffset(KafkaSource.scala:165)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$8$$anonfun$apply$6.apply(StreamExecution.scala:510)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$8$$anonfun$apply$6.apply(StreamExecution.scala:510)
org.apache.spark.sql.execution.streaming.ProgressReporter$class.reportTimeTaken(ProgressReporter.scala:279)
org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:58)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$8.apply(StreamExecution.scala:509)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$8.apply(StreamExecution.scala:507)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
scala.collection.AbstractTraversable.map(Traversable.scala:104)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$constructNextBatch(StreamExecution.scala:507)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches$1$$anonfun$apply$mcZ$sp$1.apply$mcV$sp(StreamExecution.scala:305)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches$1$$anonfun$apply$mcZ$sp$1.apply(StreamExecution.scala:298)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches$1$$anonfun$apply$mcZ$sp$1.apply(StreamExecution.scala:298)
org.apache.spark.sql.execution.streaming.ProgressReporter$class.reportTimeTaken(ProgressReporter.scala:279)
org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:58)
org.apache.spark.sql.execution.streaming.StreamExecution$$anonfun$org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches$1.apply$mcZ$sp(StreamExecution.scala:298)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches(StreamExecution.scala:294)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:211)
java.lang.InterruptedException
 at java.util.concurrent.locks.AbstractQueuedSynchronizer.tryAcquireSharedNanos(AbstractQueuedSynchronizer.java:1326)
 at scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:208)
 at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:218)
 at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
 at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:201)
 at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
 at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:92)
 at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:76)
 at org.apache.spark.sql.execution.streaming.state.StateStoreCoordinatorRef.deactivateInstances(StateStoreCoordinator.scala:108)
 at org.apache.spark.sql.streaming.StreamingQueryManager.notifyQueryTermination(StreamingQueryManager.scala:335)
 at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runBatches(StreamExecution.scala:375)
 at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:211)


== Sink ==
0: 
1: [2]
2: [4] [3]
3: [5] [7] [6]
4: [8]
5: [9]
6: [10]
7: [11]
8: [12] [14] [13]
9: [15]
10: [17] [16]


== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#1907]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#1906: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#1905: scala.Tuple2
      +- Project [cast(key#2131 as string) AS key#1896, cast(value#2132 as string) AS value#1897]
         +- LogicalRDD [key#2131, value#2132, topic#2133, partition#2134, offset#2135L, timestamp#2136, timestampType#2137]

== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, false] AS value#1907]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#1906: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#1905: scala.Tuple2
      +- Project [cast(key#2131 as string) AS key#1896, cast(value#2132 as string) AS value#1897]
         +- LogicalRDD [key#2131, value#2132, topic#2133, partition#2134, offset#2135L, timestamp#2136, timestampType#2137]

== Optimized Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#1907]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#1906: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#1905: scala.Tuple2
      +- Project [cast(key#2131 as string) AS key#1896, cast(value#2132 as string) AS value#1897]
         +- LogicalRDD [key#2131, value#2132, topic#2133, partition#2134, offset#2135L, timestamp#2136, timestampType#2137]

== Physical Plan ==
*SerializeFromObject [input[0, int, false] AS value#1907]
+- *MapElements <function1>, obj#1906: int
   +- *DeserializeToObject newInstance(class scala.Tuple2), obj#1905: scala.Tuple2
      +- *Project [cast(key#2131 as string) AS key#1896, cast(value#2132 as string) AS value#1897]
         +- Scan ExistingRDD[key#2131,value#2132,topic#2133,partition#2134,offset#2135L,timestamp#2136,timestampType#2137]
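
The plans above all describe the same simple query: take Kafka records from topic-14, cast key and value to strings, and map each value to its integer plus one (which is why input 1, 2, 3 yields answers 2, 3, 4 in the progress listing). A standalone sketch of an equivalent query using only public APIs; the broker address and query name are assumptions, not values taken from this log:

import org.apache.spark.sql.SparkSession

object KafkaPlusOneSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("kafka-plus-one-sketch")
      .master("local[2]")
      .getOrCreate()
    import spark.implicits._

    // Assign all five partitions of topic-14 explicitly, matching the
    // KafkaSource[Assign[...]] strategy shown under "== Stream ==" above;
    // "latest" matches the testFromLatestOffsets scenario in the stack trace.
    val kafka = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092")   // assumption
      .option("assign", """{"topic-14":[0,1,2,3,4]}""")
      .option("startingOffsets", "latest")
      .load()

    // Mirrors the logical plan: Project(cast key/value to string)
    // -> DeserializeToObject(Tuple2) -> MapElements(... : int)
    val mapped = kafka
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
      .as[(String, String)]
      .map(_._2.toInt + 1)

    // The test collects results in a memory sink; the query name is a placeholder.
    val query = mapped.writeStream
      .format("memory")
      .queryName("kafka_plus_one")
      .start()

    query.awaitTermination()
  }
}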
 

org.scalatest.exceptions.TestFailedException:
Stream Thread Died: null
at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
at org.apache.spark.sql.streaming.StreamTest$class.failTest$1(StreamTest.scala:344)
at org.apache.spark.sql.streaming.StreamTest$class.liftedTree1$1(StreamTest.scala:572)
at org.apache.spark.sql.streaming.StreamTest$class.testStream(StreamTest.scala:353)
at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaSourceSuite.scala:46)
at org.apache.spark.sql.kafka010.KafkaSourceSuite.org$apache$spark$sql$kafka010$KafkaSourceSuite$$testFromLatestOffsets(KafkaSourceSuite.scala:789)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$24$$anonfun$apply$2.apply$mcV$sp(KafkaSourceSuite.scala:347)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$24$$anonfun$apply$2.apply(KafkaSourceSuite.scala:345)
at org.apache.spark.sql.kafka010.KafkaSourceSuite$$anonfun$24$$anonfun$apply$2.apply(KafkaSourceSuite.scala:345)
at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaSourceSuite.scala:46)
at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaSourceSuite.scala:46)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
at org.scalatest.Suite$class.run(Suite.scala:1424)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
at org.scalatest.Suite$class.run(Suite.scala:1421)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
at org.scalatest.tools.Runner$.main(Runner.scala:860)
at org.scalatest.tools.Runner.main(Runner.scala)