sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException:
Assert on query failed: Add partitions: The code passed to eventually never returned normally. Attempted 3866 times over 1.0008564472 minutes. Last failure message: assertion failed: Partition [topic-4, 5] metadata not propagated after timeout.
org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:432)
org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
org.scalatest.concurrent.Eventually.eventually(Eventually.scala:337)
org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:336)
org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
org.apache.spark.sql.kafka010.KafkaTestUtils.waitUntilMetadataIsPropagated(KafkaTestUtils.scala:441)
org.apache.spark.sql.kafka010.KafkaTestUtils.$anonfun$addPartitions$1(KafkaTestUtils.scala:235)
scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:158)
Caused by: assertion failed: Partition [topic-4, 5] metadata not propagated after timeout
scala.Predef$.assert(Predef.scala:223)
org.apache.spark.sql.kafka010.KafkaTestUtils.$anonfun$waitUntilMetadataIsPropagated$1(KafkaTestUtils.scala:442)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.scalatest.concurrent.Eventually.makeAValiantAttempt$1(Eventually.scala:395)
org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:409)
org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
org.scalatest.concurrent.Eventually.eventually(Eventually.scala:337)
org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:336)
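For context: waitUntilMetadataIsPropagated (KafkaTestUtils.scala:441 in the trace above) wraps its assertion in ScalaTest's eventually, which re-runs the block until it passes or the patience timeout expires; the "Attempted 3866 times over 1.0008564472 minutes" line is that retry loop giving up. A minimal sketch of the pattern, with a hypothetical hasLeaderForPartition standing in for the real broker-metadata lookup:

  import org.scalatest.concurrent.Eventually._
  import org.scalatest.time.SpanSugar._

  // Hypothetical stand-in for the real check, which asks the brokers
  // whether the given topic-partition has a live leader yet.
  def hasLeaderForPartition(topic: String, partition: Int): Boolean = ???

  def waitUntilMetadataIsPropagated(topic: String, partition: Int): Unit =
    // Re-runs the assertion until it passes or a minute elapses; on expiry
    // the last assertion message is reported, as in the failure above.
    eventually(timeout(60.seconds), interval(10.milliseconds)) {
      assert(hasLeaderForPartition(topic, partition),
        s"Partition [$topic, $partition] metadata not propagated after timeout")
    }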
== Progress ==
AssertOnQuery(<condition>, )
AddKafkaData(topics = Set(topic-4), data = WrappedArray(1, 2, 3), message = )
CheckAnswer: [2],[3],[4]
StopStream
StartStream(ContinuousTrigger(1000),org.apache.spark.util.SystemClock@7bbeec6d,Map(),null)
CheckAnswer: [2],[3],[4]
StopStream
AddKafkaData(topics = Set(topic-4), data = WrappedArray(4, 5, 6), message = )
StartStream(ContinuousTrigger(1000),org.apache.spark.util.SystemClock@c3bd017,Map(),null)
CheckAnswer: [2],[3],[4],[5],[6],[7]
AddKafkaData(topics = Set(topic-4), data = WrappedArray(7, 8), message = )
CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9]
=> AssertOnQuery(<condition>, Add partitions)
AddKafkaData(topics = Set(topic-4), data = WrappedArray(9, 10, 11, 12, 13, 14, 15, 16), message = )
CheckAnswer: [2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17]
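The == Progress == list above is the StreamTest action sequence, with => marking the action that was executing when the test failed. Reconstructed as source it corresponds roughly to the following (the harness names come from Spark's StreamTest/KafkaSourceTest; the AssertOnQuery body and the new partition count are assumptions inferred from the "Add partitions" message):

  testStream(mapped)(
    AssertOnQuery { (_: StreamExecution) => true },    // the unnamed <condition>
    AddKafkaData(Set("topic-4"), 1, 2, 3),
    CheckAnswer(2, 3, 4),
    StopStream,
    StartStream(trigger = Trigger.Continuous(1000)),
    CheckAnswer(2, 3, 4),
    StopStream,
    AddKafkaData(Set("topic-4"), 4, 5, 6),
    StartStream(trigger = Trigger.Continuous(1000)),
    CheckAnswer(2, 3, 4, 5, 6, 7),
    AddKafkaData(Set("topic-4"), 7, 8),
    CheckAnswer(2, 3, 4, 5, 6, 7, 8, 9),
    // => the failing step: add partitions to topic-4, then wait for the
    // new partition metadata to propagate (where the assert fired)
    AssertOnQuery("Add partitions") { _ =>
      testUtils.addPartitions("topic-4", 10)           // assumed target count
      true
    },
    AddKafkaData(Set("topic-4"), 9, 10, 11, 12, 13, 14, 15, 16),
    CheckAnswer(2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)
  )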
== Stream ==
Output Mode: Append
Stream state: {KafkaSource[Subscribe[topic-4]]: {"topic-4":{"2":1,"4":2,"1":1,"3":1,"0":4}}}
Thread state: alive
Thread stack trace: sun.misc.Unsafe.park(Native Method)
java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)
java.util.concurrent.locks.AbstractQueuedSynchronizer.parkAndCheckInterrupt(AbstractQueuedSynchronizer.java:836)
java.util.concurrent.locks.AbstractQueuedSynchronizer.doAcquireSharedInterruptibly(AbstractQueuedSynchronizer.java:997)
java.util.concurrent.locks.AbstractQueuedSynchronizer.acquireSharedInterruptibly(AbstractQueuedSynchronizer.java:1304)
scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:242)
scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:258)
scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:187)
org.apache.spark.util.ThreadUtils$.awaitReady(ThreadUtils.scala:242)
org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:736)
org.apache.spark.SparkContext.runJob(SparkContext.scala:2013)
org.apache.spark.SparkContext.runJob(SparkContext.scala:2034)
org.apache.spark.SparkContext.runJob(SparkContext.scala:2053)
org.apache.spark.SparkContext.runJob(SparkContext.scala:2078)
org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:961)
org.apache.spark.rdd.RDD$$Lambda$3133/615638840.apply(Unknown Source)
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
org.apache.spark.rdd.RDD.withScope(RDD.scala:366)
org.apache.spark.rdd.RDD.collect(RDD.scala:960)
org.apache.spark.sql.execution.streaming.continuous.WriteToContinuousDataSourceExec.doExecute(WriteToContinuousDataSourceExec.scala:54)
org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:131)
org.apache.spark.sql.execution.SparkPlan$$Lambda$2656/1926498685.apply(Unknown Source)
org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:155)
org.apache.spark.sql.execution.SparkPlan$$Lambda$2657/1568972499.apply(Unknown Source)
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution.$anonfun$runContinuous$4(ContinuousExecution.scala:257)
org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution$$Lambda$5486/998380775.apply(Unknown Source)
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
org.apache.spark.sql.execution.SQLExecution$$$Lambda$2594/1008285636.apply(Unknown Source)
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution.$anonfun$runContinuous$3(ContinuousExecution.scala:257)
org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution$$Lambda$5484/758437333.apply(Unknown Source)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:327)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:325)
org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution.runContinuous(ContinuousExecution.scala:256)
org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution.runActivatedStream(ContinuousExecution.scala:110)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:331)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:243)
== Sink ==
0:
1: [2] [3] [4]
2:
3:
4:
5: [6] [7] [5]
6:
7: [9] [8]
8:
9:
10:
11:
12:
13:
14:
15:
16:
17:
18:
19:
20:
21:
22:
23:
24:
25:
26:
27:
28:
29:
30:
31:
32:
33:
34:
35:
36:
37:
38:
39:
40:
41:
42:
43:
44:
45:
46:
47:
48:
49:
50:
51:
52:
53:
54:
55:
56:
57:
58:
59:
60:
61:
62:
63:
64:
65:
66:
67:
68:
== Plan ==
== Parsed Logical Plan ==
WriteToContinuousDataSource org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@489d61ca
+- SerializeFromObject [input[0, int, false] AS value#16151]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceSuiteBase$$Lambda$3244/1078487718@799e81dd, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#16150: int
+- DeserializeToObject newInstance(class scala.Tuple2), obj#16149: scala.Tuple2
+- Project [cast(key#16125 as string) AS key#16139, cast(value#16126 as string) AS value#16140]
+- StreamingDataSourceV2Relation [key#16125, value#16126, topic#16127, partition#16128, offset#16129L, timestamp#16130, timestampType#16131], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@7ba79dff, KafkaSource[Subscribe[topic-4]], {"topic-4":{"2":1,"4":0,"1":1,"3":0,"0":2}}
== Analyzed Logical Plan ==
WriteToContinuousDataSource org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@489d61ca
+- SerializeFromObject [input[0, int, false] AS value#16151]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceSuiteBase$$Lambda$3244/1078487718@799e81dd, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#16150: int
+- DeserializeToObject newInstance(class scala.Tuple2), obj#16149: scala.Tuple2
+- Project [cast(key#16125 as string) AS key#16139, cast(value#16126 as string) AS value#16140]
+- StreamingDataSourceV2Relation [key#16125, value#16126, topic#16127, partition#16128, offset#16129L, timestamp#16130, timestampType#16131], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@7ba79dff, KafkaSource[Subscribe[topic-4]], {"topic-4":{"2":1,"4":0,"1":1,"3":0,"0":2}}
== Optimized Logical Plan ==
WriteToContinuousDataSource org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@489d61ca
+- SerializeFromObject [input[0, int, false] AS value#16151]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceSuiteBase$$Lambda$3244/1078487718@799e81dd, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#16150: int
+- DeserializeToObject newInstance(class scala.Tuple2), obj#16149: scala.Tuple2
+- Project [cast(key#16125 as string) AS key#16139, cast(value#16126 as string) AS value#16140]
+- StreamingDataSourceV2Relation [key#16125, value#16126, topic#16127, partition#16128, offset#16129L, timestamp#16130, timestampType#16131], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@7ba79dff, KafkaSource[Subscribe[topic-4]], {"topic-4":{"2":1,"4":0,"1":1,"3":0,"0":2}}
== Physical Plan ==
WriteToContinuousDataSource org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@489d61ca
+- *(1) SerializeFromObject [input[0, int, false] AS value#16151]
+- *(1) MapElements org.apache.spark.sql.kafka010.KafkaSourceSuiteBase$$Lambda$3244/1078487718@799e81dd, obj#16150: int
+- *(1) DeserializeToObject newInstance(class scala.Tuple2), obj#16149: scala.Tuple2
+- *(1) Project [cast(key#16125 as string) AS key#16139, cast(value#16126 as string) AS value#16140]
+- *(1) Project [key#16125, value#16126, topic#16127, partition#16128, offset#16129L, timestamp#16130, timestampType#16131]
+- *(1) ContinuousScan[key#16125, value#16126, topic#16127, partition#16128, offset#16129L, timestamp#16130, timestampType#16131] class org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan
at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
at org.scalatest.Assertions.fail(Assertions.scala:1089)
at org.scalatest.Assertions.fail$(Assertions.scala:1085)
at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:443)
at org.apache.spark.sql.streaming.StreamTest.executeAction$1(StreamTest.scala:645)
at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$56(StreamTest.scala:769)
at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$56$adapted(StreamTest.scala:756)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:756)
at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:755)
at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:325)
at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaMicroBatchSourceSuite.scala:49)
at org.apache.spark.sql.kafka010.KafkaSourceSuiteBase.testFromLatestOffsets(KafkaMicroBatchSourceSuite.scala:1474)
at org.apache.spark.sql.kafka010.KafkaSourceSuiteBase.$anonfun$new$117(KafkaMicroBatchSourceSuite.scala:1213)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
at org.scalatest.Suite.run(Suite.scala:1147)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
at sbt.ForkMain$Run$2.call(ForkMain.java:296)
at sbt.ForkMain$Run$2.call(ForkMain.java:286)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
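For reference, the plans in the report correspond to a query of roughly this shape, reconstructed from the parsed plan (Project of casted key/value, DeserializeToObject to Tuple2, MapElements to int) and from the data-to-answer offsets above (value n yields n + 1); the bootstrap address and session wiring are illustrative:

  import org.apache.spark.sql.SparkSession

  val spark: SparkSession = ???   // supplied by the test harness
  import spark.implicits._

  val df = spark.readStream
    .format("kafka")
    .option("kafka.bootstrap.servers", "localhost:9092")  // placeholder
    .option("subscribe", "topic-4")
    .option("startingOffsets", "latest")
    .load()

  // Matches the plan: cast key/value to string, build a (String, String)
  // tuple, then map each record's value to an Int plus one (value#16151).
  val mapped = df
    .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
    .as[(String, String)]
    .map(kv => kv._2.toInt + 1)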