== Results ==
!== Correct Answer - 123 == == Spark Answer - 119 ==
 struct<value:int> struct<value:int>
 [100] [100]
 [101] [101]
 [102] [102]
 [103] [103]
 [104] [104]
 [105] [105]
 [106] [106]
![107] [10]
![108] [111]
![109] [112]
![10] [113]
![110] [114]
![111] [115]
![112] [116]
![113] [117]
![114] [118]
![115] [119]
![116] [11]
![117] [120]
![118] [121]
![119] [122]
![11] [123]
![120] [12]
![121] [13]
![122] [14]
![123] [15]
![12] [16]
![13] [17]
![14] [18]
![15] [19]
![16] [1]
![17] [20]
![18] [21]
![19] [22]
![1] [23]
![20] [24]
![21] [25]
![22] [26]
![23] [27]
![24] [28]
![25] [29]
![26] [2]
![27] [30]
![28] [31]
![29] [32]
![2] [33]
![30] [34]
![31] [35]
![32] [36]
![33] [37]
![34] [38]
![35] [39]
![36] [3]
![37] [40]
![38] [41]
![39] [42]
![3] [43]
![40] [44]
![41] [45]
![42] [46]
![43] [47]
![44] [48]
![45] [49]
![46] [4]
![47] [50]
![48] [51]
![49] [52]
![4] [53]
![50] [54]
![51] [55]
![52] [56]
![53] [57]
![54] [58]
![55] [59]
![56] [5]
![57] [60]
![58] [61]
![59] [62]
![5] [63]
![60] [64]
![61] [65]
![62] [66]
![63] [67]
![64] [68]
![65] [69]
![66] [6]
![67] [70]
![68] [71]
![69] [72]
![6] [73]
![70] [74]
![71] [75]
![72] [76]
![73] [77]
![74] [78]
![75] [79]
![76] [7]
![77] [80]
![78] [81]
![79] [82]
![7] [83]
![80] [84]
![81] [85]
![82] [86]
![83] [87]
![84] [88]
![85] [89]
![86] [8]
![87] [90]
![88] [91]
![89] [92]
![8] [93]
![90] [94]
![91] [95]
![92] [96]
![93] [97]
![94] [98]
![95] [99]
![96] [9]
![97] 
![98] 
![99] 
![9] 
 

== Progress ==
 AssertOnQuery(<condition>, )
 CheckAnswer: 
 StopStream
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4fa9d5a9,Map(),null)
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 0 until 6, message = Delete topic stress3)
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 6 until 14, message = )
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14]
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 14 until 17, message = )
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17]
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 17 until 21, message = Add partition)
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 21 until 23, message = Add partition)
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 23 until 29, message = )
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29]
 StopStream
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 29 until 36, message = )
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 36 until 39, message = )
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4bf43578,Map(),null)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39]
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39]
 StopStream
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 39 until 42, message = Add partition)
 AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range 42 until 44, message = )
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4816fd55,Map(),null)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44]
 StopStream
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4cebf528,Map(),null)
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5), data = Range 44 until 51, message = Add topic stress7)
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5), data = Range 51 until 57, message = )
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57]
 StopStream
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5), data = empty Range 57 until 57, message = Add topic stress9)
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4ac32df7,Map(),null)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57]
 StopStream
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4926f297,Map(),null)
 AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range 57 until 64, message = Add topic stress11)
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 64 until 73, message = Add topic stress13)
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 73 until 74, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 74 until 81, message = )
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81]
 StopStream
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4d376558,Map(),null)
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 81 until 86, message = Add partition)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86]
 StopStream
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 86 until 88, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 88 until 90, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 90 until 99, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 99 until 102, message = )
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@570ddfcf,Map(),null)
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 102 until 106, message = Add partition)
 AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 106 until 110, message = Add topic stress15)
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range 110 until 116, message = Delete topic stress14)
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress8, stress1, stress5, stress10), data = Range 116 until 118, message = Delete topic stress2)
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress8, stress1, stress5, stress10), data = Range 118 until 123, message = Add partition)
=> CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123]
 StopStream
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress8, stress1, stress5, stress10), data = Range 123 until 131, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress8, stress1, stress5, stress10), data = Range 131 until 137, message = )
 AddKafkaData(topics = Set(stress4, stress6, stress12, stress8, stress1, stress5, stress10), data = Range 137 until 143, message = Add partition)
 StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4eef1763,Map(),null)
 CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137],[138],[139],[140],[141],[142],[143]
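
The action names above are the StreamTest DSL; the stress suite generates such a sequence randomly via runStressTest rather than writing it by hand. As a rough, hand-written sketch of what one short sequence looks like (AddKafkaData is the helper action from the Kafka test suites; `mapped` is the Dataset[Int] under test, reconstructed after the == Plan == section below, which maps each record to value + 1, hence CheckAnswer expecting n + 1):

```scala
// Rough sketch of the StreamTest DSL sequence this Progress log records.
// Runs inside a StreamTest-based suite; not the actual generated action list.
testStream(mapped)(
  StartStream(Trigger.ProcessingTime(0)),
  AddKafkaData(Set("stress1", "stress2"), 0, 1, 2),  // send data 0..2
  CheckAnswer(1, 2, 3),                              // query emits value + 1
  StopStream
)
```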

== Stream ==
Output Mode: Append
Stream state: {KafkaV2[SubscribePattern[stress.*]]: {"stress5":{"17":0,"8":0,"11":0,"20":0,"2":1,"5":0,"14":0,"13":0,"4":0,"22":0,"16":0,"7":1,"1":3,"10":0,"19":0,"18":0,"9":0,"21":0,"3":2,"12":0,"15":0,"6":1,"0":7},"stress8":{"8":0,"11":0,"2":3,"5":0,"4":0,"7":0,"1":2,"10":0,"9":0,"3":0,"12":0,"6":0,"0":2},"stress10":{"2":1,"4":0,"1":2,"3":1,"0":11},"stress1":{"17":0,"8":0,"11":1,"20":0,"2":1,"5":2,"14":2,"13":0,"4":1,"22":0,"16":0,"7":0,"1":1,"10":0,"19":0,"18":0,"9":2,"21":0,"3":1,"12":0,"15":0,"6":1,"0":13},"stress4":{"17":0,"8":0,"11":0,"20":0,"2":1,"5":0,"14":0,"13":0,"4":0,"16":0,"7":1,"1":2,"10":0,"19":0,"18":0,"9":0,"21":0,"3":1,"12":0,"15":0,"6":0,"0":2},"stress6":{"8":0,"2":3,"5":2,"4":4,"7":0,"10":0,"1":3,"9":0,"3":2,"6":0,"0":2},"stress12":{"8":0,"11":1,"2":2,"5":0,"4":2,"7":1,"10":0,"1":4,"9":1,"3":4,"6":2,"0":2}}}
Thread state: alive
Thread stack trace: java.lang.Thread.sleep(Native Method)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:213)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2140/9614599.apply$mcZ$sp(Unknown Source)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:158)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:281)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:191)
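
The "Stream state" line above is the serialized source offset: per topic, a JSON object mapping partition id to the next offset to consume (e.g. stress1 partition 0 is at offset 13). A hedged sketch of decoding that inner per-topic JSON, using json4s (already a Spark dependency); this is illustrative, not the actual KafkaSourceOffset / JsonUtils code:

```scala
import org.json4s.{DefaultFormats, Formats}
import org.json4s.jackson.JsonMethods.parse

// Sketch only: turn {"topic":{"partition":offset,...},...} into
// (topic, partition) -> next offset to consume.
def parsePartitionOffsets(json: String): Map[(String, Int), Long] = {
  implicit val formats: Formats = DefaultFormats
  parse(json).extract[Map[String, Map[String, Long]]].flatMap {
    case (topic, partitions) =>
      partitions.map { case (part, offset) => (topic, part.toInt) -> offset }
  }
}
```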


== Sink ==
0: 
1: [1] [2] [3] [4] [5] [6]
2: [7]
3: [13] [8] [14] [12] [9] [11] [10]
4: [15]
5: [16] [17]
6: 
7: [20] [19] [18] [21]
8: 
9: [22] [23]
10: [24] [25]
11: [26] [28] [29] [27]
12: [36] [37] [32] [38] [31] [30] [39] [35] [34] [33]
13: [40] [44] [41] [43] [42]
14: [46] [45]
15: [48] [49] [50] [47] [51]
16: [52] [57] [56] [53] [54] [55]
17: 
18: [58]
19: [59] [62] [61] [64] [60] [63]
20: 
21: [65]
22: [66] [67] [68] [69] [70] [71] [72] [73]
23: [74]
24: [75]
25: [76] [80] [79] [78] [77] [81]
26: 
27: 
28: [86] [85] [83] [82] [84]
29: [94] [87] [102] [91] [97] [93] [99] [88] [89] [100] [95] [96] [90] [101] [92] [98]
30: 
31: 
32: [103] [104] [105] [106]
33: [112] [116] [111] [115] [114] [113]
34: [118] [117]
35: 
36: [120] [122] [121] [119] [123]
37: 
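
Each numbered row above is one micro-batch in the memory sink. Note the gap between batches 32 and 33: values run up to [106] and resume at [111], so [107] through [110] never reach the sink. Those are exactly the four rows flagged in == Results ==, and they line up with the data produced while stress14 existed immediately before "Delete topic stress14" in the Progress log; with failOnDataLoss=false the source skips a lost range instead of failing. An illustrative cross-check one could run over these batches (hypothetical helper):

```scala
// Flatten the per-batch sink rows and diff them against the expected values.
// Fed the batches above with expected = 1 to 143 capped at what was delivered,
// this returns the gap 107, 108, 109, 110.
def missingValues(sinkBatches: Seq[Seq[Int]], expected: Range): Seq[Int] = {
  val delivered = sinkBatches.flatten.toSet
  expected.filterNot(delivered)
}
```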


== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#10807]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$5595/1955345580@35fe13b0, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#10806: int
 +- DeserializeToObject newInstance(class scala.Tuple2), obj#10805: scala.Tuple2
 +- Project [cast(key#10781 as string) AS key#10795, cast(value#10782 as string) AS value#10796]
 +- Project [key#11234 AS key#10781, value#11235 AS value#10782, topic#11236 AS topic#10783, partition#11237 AS partition#10784, offset#11238L AS offset#10785L, timestamp#11239 AS timestamp#10786, timestampType#11240 AS timestampType#10787]
 +- Streaming RelationV2 kafka[key#11234, value#11235, topic#11236, partition#11237, offset#11238L, timestamp#11239, timestampType#11240] (Options: [kafka.metadata.max.age.ms=1,failOnDataLoss=false,kafka.bootstrap.servers=127.0.0.1:37041,kafka.d...)

== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, false] AS value#10807]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$5595/1955345580@35fe13b0, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#10806: int
 +- DeserializeToObject newInstance(class scala.Tuple2), obj#10805: scala.Tuple2
 +- Project [cast(key#10781 as string) AS key#10795, cast(value#10782 as string) AS value#10796]
 +- Project [key#11234 AS key#10781, value#11235 AS value#10782, topic#11236 AS topic#10783, partition#11237 AS partition#10784, offset#11238L AS offset#10785L, timestamp#11239 AS timestamp#10786, timestampType#11240 AS timestampType#10787]
 +- Streaming RelationV2 kafka[key#11234, value#11235, topic#11236, partition#11237, offset#11238L, timestamp#11239, timestampType#11240] (Options: [kafka.metadata.max.age.ms=1,failOnDataLoss=false,kafka.bootstrap.servers=127.0.0.1:37041,kafka.d...)

== Optimized Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#10807]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$5595/1955345580@35fe13b0, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#10806: int
 +- DeserializeToObject newInstance(class scala.Tuple2), obj#10805: scala.Tuple2
 +- Project [cast(key#11234 as string) AS key#10795, cast(value#11235 as string) AS value#10796]
 +- Streaming RelationV2 kafka[key#11234, value#11235, topic#11236, partition#11237, offset#11238L, timestamp#11239, timestampType#11240] (Options: [kafka.metadata.max.age.ms=1,failOnDataLoss=false,kafka.bootstrap.servers=127.0.0.1:37041,kafka.d...)

== Physical Plan ==
*(1) SerializeFromObject [input[0, int, false] AS value#10807]
+- *(1) MapElements org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$Lambda$5595/1955345580@35fe13b0, obj#10806: int
 +- *(1) DeserializeToObject newInstance(class scala.Tuple2), obj#10805: scala.Tuple2
 +- *(1) Project [cast(key#11234 as string) AS key#10795, cast(value#11235 as string) AS value#10796]
 +- *(1) Project [key#11234, value#11235, topic#11236, partition#11237, offset#11238L, timestamp#11239, timestampType#11240]
 +- *(1) ScanV2 kafka[key#11234, value#11235, topic#11236, partition#11237, offset#11238L, timestamp#11239, timestampType#11240] (Options: [kafka.metadata.max.age.ms=1,failOnDataLoss=false,kafka.bootstrap.servers=127.0.0.1:37041,kafka.d...)
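
For reference, a hedged reconstruction of the streaming query these plans describe. The option values are copied from the truncated Options list above (its kafka.d... tail is elided in the log), the pattern comes from KafkaV2[SubscribePattern[stress.*]], and the + 1 is inferred from the CheckAnswer values rather than visible in the plan:

```scala
import spark.implicits._

val mapped = spark.readStream
  .format("kafka")
  .option("kafka.bootstrap.servers", "127.0.0.1:37041")
  .option("subscribePattern", "stress.*")
  .option("kafka.metadata.max.age.ms", "1")
  .option("failOnDataLoss", "false")
  .load()
  .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")  // Project node
  .as[(String, String)]   // DeserializeToObject newInstance(class scala.Tuple2)
  .map(_._2.toInt + 1)    // MapElements ... obj: int -> SerializeFromObject
```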


org.scalatest.exceptions.TestFailedException:
at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
at org.scalatest.Assertions.fail(Assertions.scala:1089)
at org.scalatest.Assertions.fail$(Assertions.scala:1085)
at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:453)
at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$53(StreamTest.scala:728)
at scala.Option.foreach(Option.scala:274)
at org.apache.spark.sql.streaming.StreamTest.executeAction$1(StreamTest.scala:728)
at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$56(StreamTest.scala:773)
at org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$56$adapted(StreamTest.scala:760)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:760)
at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:759)
at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:329)
at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaMicroBatchSourceSuite.scala:49)
at org.apache.spark.sql.streaming.StreamTest.runStressTest(StreamTest.scala:871)
at org.apache.spark.sql.streaming.StreamTest.runStressTest$(StreamTest.scala:823)
at org.apache.spark.sql.kafka010.KafkaSourceTest.runStressTest(KafkaMicroBatchSourceSuite.scala:49)
at org.apache.spark.sql.kafka010.KafkaSourceStressSuite.$anonfun$new$119(KafkaMicroBatchSourceSuite.scala:1480)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaMicroBatchSourceSuite.scala:49)
at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaMicroBatchSourceSuite.scala:49)
at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
at org.scalatest.Suite.run(Suite.scala:1147)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:52)
at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
at org.scalatest.Suite.run(Suite.scala:1144)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1340)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1334)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
at org.scalatest.tools.Runner$.main(Runner.scala:827)
at org.scalatest.tools.Runner.main(Runner.scala)