== Results ==
!== Correct Answer - 94 ==  == Spark Answer - 90 ==
 struct<value:int>           struct<value:int>
 [10]                        [10]
 [11]                        [11]
 [12]                        [12]
 [13]                        [13]
 [14]                        [14]
 [15]                        [15]
 [16]                        [16]
 [17]                        [17]
 [18]                        [18]
 [19]                        [19]
 [1]                         [1]
 [20]                        [20]
 [21]                        [21]
 [22]                        [22]
 [23]                        [23]
 [24]                        [24]
 [25]                        [25]
 [26]                        [26]
 [27]                        [27]
 [28]                        [28]
 [29]                        [29]
 [2]                         [2]
 [30]                        [30]
 [31]                        [31]
 [32]                        [32]
 [33]                        [33]
 [34]                        [34]
 [35]                        [35]
 [36]                        [36]
 [37]                        [37]
 [38]                        [38]
 [39]                        [39]
 [3]                         [3]
 [40]                        [40]
 [41]                        [41]
 [42]                        [42]
 [43]                        [43]
 [44]                        [44]
 [45]                        [45]
 [46]                        [46]
 [47]                        [47]
 [48]                        [48]
 [49]                        [49]
 [4]                         [4]
 [50]                        [50]
 [51]                        [51]
 [52]                        [52]
 [53]                        [53]
 [54]                        [54]
 [55]                        [55]
 [56]                        [56]
 [57]                        [57]
 [58]                        [58]
 [59]                        [59]
 [5]                         [5]
 [60]                        [60]
 [61]                        [61]
 [62]                        [62]
 [63]                        [63]
 [64]                        [64]
 [65]                        [65]
 [66]                        [66]
 [67]                        [67]
 [68]                        [68]
 [69]                        [69]
 [6]                         [6]
 [70]                        [70]
 [71]                        [71]
 [72]                        [72]
![73]                        [77]
![74]                        [78]
![75]                        [79]
![76]                        [7]
![77]                        [80]
![78]                        [81]
![79]                        [82]
![7]                         [83]
![80]                        [84]
![81]                        [85]
![82]                        [86]
![83]                        [87]
![84]                        [88]
![85]                        [89]
![86]                        [8]
![87]                        [90]
![88]                        [91]
![89]                        [92]
![8]                         [93]
![90]                        [94]
![91]                        [9]
![92]
![93]
![94]
![9]
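
Reading the diff: rows prefixed with ! differ between the two columns, and the Spark answer is missing exactly four values: [73], [74], [75], [76]. In the Progress log below these correspond to data Range 72 until 76 (each answer is its datum plus one, since Range 0 until 7 yields [1]..[7]), which was written while topic stress12 existed and was immediately followed by "Delete topic stress12". The Sink section shows the same gap: batch 7 ends at [72] and batch 8 starts at [77]. The failure is consistent with records published to stress12 being lost when the topic was deleted before the next micro-batch read them.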
 

== Progress ==
 AssertOnQuery(<condition>, )
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 0 until 7, message = Delete topic stress3)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7]
 StopStream
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 7 until 11, message = )
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 11 until 18, message = )
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@a6bdcee,Map(),null)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18]
 StopStream
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 18 until 25, message = )
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 25 until 29, message = Add partition)
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 29 until 34, message = Add partition)
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@7d4c00f5,Map(),null)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34]
 StopStream
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 34 until 36, message = Add partition)
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5), data = Range 36 until 38, message = Add topic stress7)
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5), data = Range 38 until 40, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5), data = Range 40 until 41, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5), data = Range 41 until 47, message = Add topic stress9)
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5), data = Range 47 until 54, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5), data = Range 54 until 61, message = )
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@7539d715,Map(),null)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61]
 StopStream
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range 61 until 69, message = Add topic stress11)
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@3be828f6,Map(),null)
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range 69 until 72, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 72 until 76, message = Add topic stress13)
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range 76 until 85, message = Delete topic stress12)
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range 85 until 86, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range 86 until 93, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range 93 until 94, message = )
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = empty Range 94 until 94, message = Add topic stress15)
=> CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94]
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94]
 StopStream
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@35761a0f,Map(),null)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94]
 StopStream
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range 94 until 100, message = )
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range 100 until 106, message = )
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress8, stress1, stress5, stress10, stress16), data = Range 106 until 115, message = Add topic stress17)
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress8, stress1, stress5, stress10, stress16), data = Range 115 until 116, message = )
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress8, stress1, stress5, stress18, stress10, stress16), data = Range 116 until 124, message = Add topic stress19)
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress8, stress1, stress5, stress18, stress10, stress16), data = Range 124 until 133, message = )
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@7e7d3098,Map(),null)
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress8, stress1, stress5, stress18, stress10, stress16), data = empty Range 133 until 133, message = )
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress8, stress1, stress5, stress18, stress10, stress16), data = Range 133 until 134, message = )
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress20, stress8, stress1, stress5, stress18, stress10, stress16), data = Range 134 until 143, message = Add topic stress21)
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress20, stress8, stress1, stress5, stress18, stress22, stress10, stress16), data = Range 143 until 145, message = Add topic stress23)
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress2, stress20, stress8, stress1, stress5, stress22, stress10, stress16), data = Range 145 until 152, message = Delete topic stress18)
 AddKafkaData(topics = Set(stress14, stress24, stress4, stress6, stress2, stress20, stress8, stress1, stress5, stress22, stress10, stress16), data = Range 152 until 153, message = Add topic stress25)
 AddKafkaData(topics = Set(stress14, stress24, stress4, stress6, stress2, stress20, stress8, stress1, stress5, stress22, stress10, stress16), data = Range 153 until 159, message = )
 AddKafkaData(topics = Set(stress14, stress24, stress4, stress6, stress2, stress20, stress8, stress1, stress5, stress26, stress22, stress10, stress16), data = Range 159 until 165, message = Add topic stress27)
 AddKafkaData(topics = Set(stress14, stress24, stress4, stress6, stress2, stress20, stress8, stress1, stress5, stress26, stress22, stress10, stress16), data = Range 165 until 172, message = )
 AddKafkaData(topics = Set(stress14, stress24, stress4, stress28, stress6, stress2, stress20, stress8, stress1, stress5, stress26, stress22, stress10, stress16), data = Range 172 until 178, message = Add topic stress29)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137],[138],[139],[140],[141],[142],[143],[144],[145],[146],[147],[148],[149],[150],[151],[152],[153],[154],[155],[156],[157],[158],[159],[160],[161],[162],[163],[164],[165],[166],[167],[168],[169],[170],[171],[172],[173],[174],[175],[176],[177],[178]
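
For orientation, a minimal sketch of how an action sequence like the one above is driven, assuming the testStream DSL from org.apache.spark.sql.streaming.StreamTest and the AddKafkaData helper defined in KafkaMicroBatchSourceSuite.scala (both named in the stack trace at the end of this report). Here "mapped" stands for the streaming Dataset under test, sketched after the Plan section; this is a condensed, hypothetical excerpt, not the suite's exact code:

    import org.apache.spark.sql.streaming.Trigger

    testStream(mapped)(
      AssertOnQuery(_.isActive),                        // printed above as AssertOnQuery(<condition>, )
      AddKafkaData(Set("stress1", "stress2"), 0, 1, 2), // append records; the stress run also adds/deletes topics and partitions
      CheckAnswer(1, 2, 3),                             // block until the sink holds exactly these rows
      StopStream,                                       // shut down; a later StartStream resumes from checkpointed offsets
      StartStream(Trigger.ProcessingTime(0))            // restart with a 0 ms processing-time trigger, as in the log
    )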

== Stream ==
Output Mode: Append
Stream state: {KafkaV2[SubscribePattern[stress.*]]: {"stress5":{"8":0,"11":0,"2":1,"5":0,"14":1,"13":1,"4":1,"7":0,"1":1,"10":0,"9":0,"3":1,"12":0,"15":0,"6":0,"0":1},"stress8":{"2":4,"5":5,"4":3,"1":4,"3":4,"0":3},"stress10":{"2":2,"4":2,"1":1,"3":2,"0":1},"stress1":{"8":0,"11":1,"2":1,"5":1,"14":1,"13":1,"4":0,"7":1,"1":15,"10":0,"9":0,"3":0,"12":1,"15":0,"6":2,"0":14},"stress4":{"8":1,"2":4,"5":1,"4":2,"7":1,"1":2,"9":1,"3":3,"6":1,"0":1},"stress6":{"2":1,"1":0,"3":0,"0":0},"stress2":{"8":2,"2":1,"5":0,"4":0,"7":0,"10":0,"1":1,"9":1,"3":1,"6":0,"0":10},"stress14":{"0":0}}}
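(Format note: the state JSON maps topic -> partition -> offset, where each offset is the position up to which the source has consumed; e.g. "stress14":{"0":0} means nothing has yet been read from partition 0 of stress14.)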
Thread state: alive
Thread stack trace: app//org.apache.log4j.helpers.PatternConverter.format(PatternConverter.java:65)
app//org.apache.log4j.PatternLayout.format(PatternLayout.java:506)
app//org.apache.log4j.WriterAppender.subAppend(WriterAppender.java:310)
app//org.apache.log4j.WriterAppender.append(WriterAppender.java:162)
app//org.apache.log4j.AppenderSkeleton.doAppend(AppenderSkeleton.java:251)
app//org.apache.log4j.helpers.AppenderAttachableImpl.appendLoopOnAppenders(AppenderAttachableImpl.java:66)
app//org.apache.log4j.Category.callAppenders(Category.java:206)
app//org.apache.log4j.Category.forcedLog(Category.java:391)
app//org.apache.log4j.Category.log(Category.java:856)
app//org.slf4j.impl.Log4jLoggerAdapter.log(Log4jLoggerAdapter.java:581)
app//org.apache.kafka.common.utils.LogContext$LocationAwareKafkaLogger.writeLog(LogContext.java:434)
app//org.apache.kafka.common.utils.LogContext$LocationAwareKafkaLogger.info(LogContext.java:387)
app//org.apache.kafka.clients.consumer.internals.Fetcher.resetOffsetIfNeeded(Fetcher.java:584)
app//org.apache.kafka.clients.consumer.internals.Fetcher.access$2200(Fetcher.java:110)
app//org.apache.kafka.clients.consumer.internals.Fetcher$2.onSuccess(Fetcher.java:615)
app//org.apache.kafka.clients.consumer.internals.Fetcher$2.onSuccess(Fetcher.java:603)
app//org.apache.kafka.clients.consumer.internals.RequestFuture.fireSuccess(RequestFuture.java:167)
app//org.apache.kafka.clients.consumer.internals.RequestFuture.complete(RequestFuture.java:127)
app//org.apache.kafka.clients.consumer.internals.Fetcher.handleListOffsetResponse(Fetcher.java:835)
app//org.apache.kafka.clients.consumer.internals.Fetcher.access$2400(Fetcher.java:110)
app//org.apache.kafka.clients.consumer.internals.Fetcher$4.onSuccess(Fetcher.java:750)
app//org.apache.kafka.clients.consumer.internals.Fetcher$4.onSuccess(Fetcher.java:745)
app//org.apache.kafka.clients.consumer.internals.RequestFuture$1.onSuccess(RequestFuture.java:204)
app//org.apache.kafka.clients.consumer.internals.RequestFuture.fireSuccess(RequestFuture.java:167)
app//org.apache.kafka.clients.consumer.internals.RequestFuture.complete(RequestFuture.java:127)
app//org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient$RequestFutureCompletionHandler.fireCompletion(ConsumerNetworkClient.java:575)
app//org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.firePendingCompletedRequests(ConsumerNetworkClient.java:389)
app//org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:297)
app//org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:236)
app//org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(ConsumerNetworkClient.java:227)
app//org.apache.kafka.clients.consumer.KafkaConsumer.position(KafkaConsumer.java:1668)
app//org.apache.kafka.clients.consumer.KafkaConsumer.position(KafkaConsumer.java:1626)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader.$anonfun$fetchLatestOffsets$10(KafkaOffsetReader.scala:261)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader$$Lambda$2455/0x0000000800f5b040.apply(Unknown Source)
app//scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
app//scala.collection.TraversableLike$$Lambda$76/0x000000080016d840.apply(Unknown Source)
app//scala.collection.Iterator.foreach(Iterator.scala:941)
app//scala.collection.Iterator.foreach$(Iterator.scala:941)
app//scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
app//scala.collection.IterableLike.foreach(IterableLike.scala:74)
app//scala.collection.IterableLike.foreach$(IterableLike.scala:73)
app//scala.collection.AbstractIterable.foreach(Iterable.scala:56)
app//scala.collection.TraversableLike.map(TraversableLike.scala:237)
app//scala.collection.TraversableLike.map$(TraversableLike.scala:230)
app//scala.collection.mutable.AbstractSet.scala$collection$SetLike$$super$map(Set.scala:48)
app//scala.collection.SetLike.map(SetLike.scala:104)
app//scala.collection.SetLike.map$(SetLike.scala:104)
app//scala.collection.mutable.AbstractSet.map(Set.scala:48)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader.$anonfun$fetchLatestOffsets$2(KafkaOffsetReader.scala:261)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader$$Lambda$2319/0x0000000800ef4840.apply(Unknown Source)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader.$anonfun$withRetriesWithoutInterrupt$1(KafkaOffsetReader.scala:358)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader$$Lambda$2320/0x0000000800ef4c40.apply$mcV$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
app//org.apache.spark.util.UninterruptibleThread.runUninterruptibly(UninterruptibleThread.scala:77)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader.withRetriesWithoutInterrupt(KafkaOffsetReader.scala:357)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader.$anonfun$fetchLatestOffsets$1(KafkaOffsetReader.scala:215)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader$$Lambda$2318/0x0000000800ef4440.apply(Unknown Source)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader.runUninterruptibly(KafkaOffsetReader.scala:325)
app//org.apache.spark.sql.kafka010.KafkaOffsetReader.fetchLatestOffsets(KafkaOffsetReader.scala:215)
app//org.apache.spark.sql.kafka010.KafkaMicroBatchStream.latestOffset(KafkaMicroBatchStream.scala:86)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$constructNextBatch$4(MicroBatchExecution.scala:366)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2310/0x0000000800ef0440.apply(Unknown Source)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:327)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:325)
app//org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$constructNextBatch$2(MicroBatchExecution.scala:362)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2309/0x0000000800ef0840.apply(Unknown Source)
app//scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
app//scala.collection.TraversableLike$$Lambda$76/0x000000080016d840.apply(Unknown Source)
app//scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
app//scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
app//scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
app//scala.collection.TraversableLike.map(TraversableLike.scala:237)
app//scala.collection.TraversableLike.map$(TraversableLike.scala:230)
app//scala.collection.AbstractTraversable.map(Traversable.scala:108)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$constructNextBatch$1(MicroBatchExecution.scala:354)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2308/0x0000000800eef440.apply$mcZ$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.withProgressLocked(MicroBatchExecution.scala:577)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.constructNextBatch(MicroBatchExecution.scala:350)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:195)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2297/0x0000000800eeb840.apply$mcV$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:327)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:325)
app//org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:178)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2295/0x0000000800eea440.apply$mcZ$sp(Unknown Source)
app//org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:172)
app//org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:331)
app//org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:243)


== Sink ==
0: 
1: [1] [3] [5] [7] [2] [4] [6]
2: [8] [9] [10] [11] [12] [14] [16] [18] [13] [15] [17]
3: [33] [34] [30] [28] [26] [29] [31] [19] [21] [23] [25] [32] [27] [20] [22] [24]
4: [35] [58] [36] [47] [49] [59] [55] [41] [60] [44] [52] [40] [56] [38] [43] [51] [37] [57] [45] [53] [46] [48] [54] [61] [42] [50] [39]
5: [65] [66] [63] [68] [62] [67] [64] [69]
6: [71] [70]
7: [72]
8: [79] [78] [77]
9: [82] [85] [84] [80] [81] [83]
10: [86]
11: [87] [88]
12: [92] [89] [93] [91] [90]
13: [94]
14: 


== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#10142]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$5790/0x0000000800a22840@56289ad5, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#10141: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#10140: scala.Tuple2
      +- Project [cast(key#10116 as string) AS key#10130, cast(value#10117 as string) AS value#10131]
         +- StreamingDataSourceV2Relation [key#10116, value#10117, topic#10118, partition#10119, offset#10120L, timestamp#10121, timestampType#10122], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@34774427, KafkaV2[SubscribePattern[stress.*]], {"stress5":{"8":0,"11":0,"2":1,"5":0,"14":1,"13":1,"4":1,"7":0,"1":1,"10":0,"9":0,"3":1,"12":0,"15":0,"6":0,"0":1},"stress8":{"2":4,"5":5,"4":3,"1":4,"3":4,"0":3},"stress10":{"2":2,"4":2,"1":1,"3":2,"0":1},"stress1":{"8":0,"11":1,"2":1,"5":1,"14":1,"13":1,"4":0,"7":1,"1":15,"10":0,"9":0,"3":0,"12":1,"15":0,"6":2,"0":14},"stress4":{"8":1,"2":4,"5":1,"4":2,"7":1,"1":2,"9":1,"3":3,"6":1,"0":1},"stress6":{"2":1,"1":0,"3":0,"0":0},"stress2":{"8":2,"2":1,"5":0,"4":0,"7":0,"10":0,"1":1,"9":1,"3":1,"6":0,"0":10}}, {"stress5":{"8":0,"11":0,"2":1,"5":0,"14":1,"13":1,"4":1,"7":0,"1":1,"10":0,"9":0,"3":1,"12":0,"15":0,"6":0,"0":1},"stress8":{"2":4,"5":5,"4":3,"1":4,"3":4,"0":3},"stress10":{"2":2,"4":2,"1":1,"3":2,"0":1},"stress1":{"8":0,"11":1,"2":1,"5":1,"14":1,"13":1,"4":0,"7":1,"1":15,"10":0,"9":0,"3":0,"12":1,"15":0,"6":2,"0":14},"stress4":{"8":1,"2":4,"5":1,"4":2,"7":1,"1":2,"9":1,"3":3,"6":1,"0":1},"stress6":{"2":1,"1":0,"3":0,"0":0},"stress2":{"8":2,"2":1,"5":0,"4":0,"7":0,"10":0,"1":1,"9":1,"3":1,"6":0,"0":10},"stress14":{"0":0}}

== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, false] AS value#10142]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$5790/0x0000000800a22840@56289ad5, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#10141: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#10140: scala.Tuple2
      +- Project [cast(key#10116 as string) AS key#10130, cast(value#10117 as string) AS value#10131]
         +- StreamingDataSourceV2Relation [key#10116, value#10117, topic#10118, partition#10119, offset#10120L, timestamp#10121, timestampType#10122], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@34774427, KafkaV2[SubscribePattern[stress.*]], {"stress5":{"8":0,"11":0,"2":1,"5":0,"14":1,"13":1,"4":1,"7":0,"1":1,"10":0,"9":0,"3":1,"12":0,"15":0,"6":0,"0":1},"stress8":{"2":4,"5":5,"4":3,"1":4,"3":4,"0":3},"stress10":{"2":2,"4":2,"1":1,"3":2,"0":1},"stress1":{"8":0,"11":1,"2":1,"5":1,"14":1,"13":1,"4":0,"7":1,"1":15,"10":0,"9":0,"3":0,"12":1,"15":0,"6":2,"0":14},"stress4":{"8":1,"2":4,"5":1,"4":2,"7":1,"1":2,"9":1,"3":3,"6":1,"0":1},"stress6":{"2":1,"1":0,"3":0,"0":0},"stress2":{"8":2,"2":1,"5":0,"4":0,"7":0,"10":0,"1":1,"9":1,"3":1,"6":0,"0":10}}, {"stress5":{"8":0,"11":0,"2":1,"5":0,"14":1,"13":1,"4":1,"7":0,"1":1,"10":0,"9":0,"3":1,"12":0,"15":0,"6":0,"0":1},"stress8":{"2":4,"5":5,"4":3,"1":4,"3":4,"0":3},"stress10":{"2":2,"4":2,"1":1,"3":2,"0":1},"stress1":{"8":0,"11":1,"2":1,"5":1,"14":1,"13":1,"4":0,"7":1,"1":15,"10":0,"9":0,"3":0,"12":1,"15":0,"6":2,"0":14},"stress4":{"8":1,"2":4,"5":1,"4":2,"7":1,"1":2,"9":1,"3":3,"6":1,"0":1},"stress6":{"2":1,"1":0,"3":0,"0":0},"stress2":{"8":2,"2":1,"5":0,"4":0,"7":0,"10":0,"1":1,"9":1,"3":1,"6":0,"0":10},"stress14":{"0":0}}

== Optimized Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#10142]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$5790/0x0000000800a22840@56289ad5, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#10141: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#10140: scala.Tuple2
      +- Project [cast(key#10116 as string) AS key#10130, cast(value#10117 as string) AS value#10131]
         +- StreamingDataSourceV2Relation [key#10116, value#10117, topic#10118, partition#10119, offset#10120L, timestamp#10121, timestampType#10122], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@34774427, KafkaV2[SubscribePattern[stress.*]], {"stress5":{"8":0,"11":0,"2":1,"5":0,"14":1,"13":1,"4":1,"7":0,"1":1,"10":0,"9":0,"3":1,"12":0,"15":0,"6":0,"0":1},"stress8":{"2":4,"5":5,"4":3,"1":4,"3":4,"0":3},"stress10":{"2":2,"4":2,"1":1,"3":2,"0":1},"stress1":{"8":0,"11":1,"2":1,"5":1,"14":1,"13":1,"4":0,"7":1,"1":15,"10":0,"9":0,"3":0,"12":1,"15":0,"6":2,"0":14},"stress4":{"8":1,"2":4,"5":1,"4":2,"7":1,"1":2,"9":1,"3":3,"6":1,"0":1},"stress6":{"2":1,"1":0,"3":0,"0":0},"stress2":{"8":2,"2":1,"5":0,"4":0,"7":0,"10":0,"1":1,"9":1,"3":1,"6":0,"0":10}}, {"stress5":{"8":0,"11":0,"2":1,"5":0,"14":1,"13":1,"4":1,"7":0,"1":1,"10":0,"9":0,"3":1,"12":0,"15":0,"6":0,"0":1},"stress8":{"2":4,"5":5,"4":3,"1":4,"3":4,"0":3},"stress10":{"2":2,"4":2,"1":1,"3":2,"0":1},"stress1":{"8":0,"11":1,"2":1,"5":1,"14":1,"13":1,"4":0,"7":1,"1":15,"10":0,"9":0,"3":0,"12":1,"15":0,"6":2,"0":14},"stress4":{"8":1,"2":4,"5":1,"4":2,"7":1,"1":2,"9":1,"3":3,"6":1,"0":1},"stress6":{"2":1,"1":0,"3":0,"0":0},"stress2":{"8":2,"2":1,"5":0,"4":0,"7":0,"10":0,"1":1,"9":1,"3":1,"6":0,"0":10},"stress14":{"0":0}}

== Physical Plan ==
*(1) SerializeFromObject [input[0, int, false] AS value#10142]
+- *(1) MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$5790/0x0000000800a22840@56289ad5, obj#10141: int
   +- *(1) DeserializeToObject newInstance(class scala.Tuple2), obj#10140: scala.Tuple2
      +- *(1) Project [cast(key#10116 as string) AS key#10130, cast(value#10117 as string) AS value#10131]
         +- *(1) Project [key#10116, value#10117, topic#10118, partition#10119, offset#10120L, timestamp#10121, timestampType#10122]
            +- *(1) MicroBatchScan[key#10116, value#10117, topic#10118, partition#10119, offset#10120L, timestamp#10121, timestampType#10122] class org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan
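
The StreamingDataSourceV2Relation node carries two offset maps, the start and end offsets of the micro-batch being planned; only the end offsets include "stress14":{"0":0}, i.e. the newly created topic stress14 is consumed for the first time in this batch. The plan corresponds to a query of roughly the following shape (a sketch assuming the spark-sql-kafka-0-10 connector; the broker address and the exact map function are placeholders, though Range 0 until 7 yielding [1]..[7] in the Progress log indicates the map adds one to each value):

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().master("local[*]").appName("sketch").getOrCreate()
    import spark.implicits._

    val mapped = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092")        // placeholder; the suite runs an embedded broker
      .option("subscribePattern", "stress.*")                     // matches KafkaV2[SubscribePattern[stress.*]]
      .load()
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)") // the Project node in the plan
      .as[(String, String)]                                       // DeserializeToObject to scala.Tuple2
      .map(kv => kv._2.toInt + 1)                                 // MapElements ..., obj: int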

 

org.scalatest.exceptions.TestFailedException:
at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
at org.scalatest.Assertions.fail(Assertions.scala:1089)
at org.scalatest.Assertions.fail$(Assertions.scala:1085)
at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:449)
at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$53(StreamTest.scala:730)
at scala.Option.foreach(Option.scala:274)
at org.apache.spark.sql.streaming.StreamTest.executeAction$1(StreamTest.scala:730)
at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$56(StreamTest.scala:775)
at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$56$adapted(StreamTest.scala:762)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:762)
at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:761)
at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:328)
at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaMicroBatchSourceSuite.scala:49)
at org.apache.spark.sql.streaming.StreamTest.runStressTest(StreamTest.scala:873)
at org.apache.spark.sql.streaming.StreamTest.runStressTest$(StreamTest.scala:825)
at org.apache.spark.sql.kafka010.KafkaSourceTest.runStressTest(KafkaMicroBatchSourceSuite.scala:49)
at org.apache.spark.sql.kafka010.KafkaSourceStressSuite.$anonfun$new$136(KafkaMicroBatchSourceSuite.scala:1593)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:105)
at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaMicroBatchSourceSuite.scala:49)
at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaMicroBatchSourceSuite.scala:49)
at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
at org.scalatest.Suite.run(Suite.scala:1147)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:54)
at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:54)
at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
at org.scalatest.Suite.run(Suite.scala:1144)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
at org.scalatest.tools.Runner$.main(Runner.scala:827)
at org.scalatest.tools.Runner.main(Runner.scala)