      org.scalatest.exceptions.TestFailedException: 

== Results ==
!== Correct Answer - 63 ==   == Spark Answer - 60 ==
 struct<value:int>           struct<value:int>
 [10]                        [10]
 [11]                        [11]
 [12]                        [12]
 [13]                        [13]
 [14]                        [14]
 [15]                        [15]
 [16]                        [16]
 [17]                        [17]
 [18]                        [18]
 [19]                        [19]
 [1]                         [1]
 [20]                        [20]
 [21]                        [21]
 [22]                        [22]
 [23]                        [23]
 [24]                        [24]
 [25]                        [25]
 [26]                        [26]
 [27]                        [27]
 [28]                        [28]
 [29]                        [29]
 [2]                         [2]
 [30]                        [30]
 [31]                        [31]
 [32]                        [32]
 [33]                        [33]
 [34]                        [34]
 [35]                        [35]
 [36]                        [36]
 [37]                        [37]
 [38]                        [38]
 [39]                        [39]
 [3]                         [3]
 [40]                        [40]
 [41]                        [41]
 [42]                        [42]
 [43]                        [43]
 [44]                        [44]
 [45]                        [45]
 [46]                        [46]
 [47]                        [47]
 [48]                        [48]
 [49]                        [49]
 [4]                         [4]
 [50]                        [50]
![51]                        [54]
![52]                        [55]
![53]                        [56]
![54]                        [57]
![55]                        [58]
![56]                        [59]
![57]                        [5]
![58]                        [60]
![59]                        [61]
![5]                         [62]
![60]                        [63]
![61]                        [6]
![62]                        [7]
![63]                        [8]
![6]                         [9]
![7]                         
![8]                         
![9]                         
    

== Progress ==
   AssertOnQuery(<condition>, )
   AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range(0, 1, 2), message = Delete topic stress3)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5), data = Range(3, 4, 5, 6, 7, 8), message = Add topic stress7)
   AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range(9), message = Delete topic stress6)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10]
   StopStream
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4b3f9f32,Map(),null)
   AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range(10, 11, 12, 13, 14, 15, 16, 17), message = )
   AddKafkaData(topics = Set(stress1, stress2, stress4), data = Range(18, 19, 20, 21, 22, 23, 24, 25, 26), message = Delete topic stress5)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27]
   StopStream
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4ea73ad2,Map(),null)
   AddKafkaData(topics = Set(stress1, stress2, stress4, stress8), data = Range(27, 28, 29, 30, 31, 32, 33, 34, 35), message = Add topic stress9)
   AddKafkaData(topics = Set(stress1, stress2, stress4, stress8), data = Range(36, 37, 38, 39, 40), message = )
   AddKafkaData(topics = Set(stress1, stress2, stress4), data = Range(41, 42, 43, 44, 45, 46, 47), message = Delete topic stress8)
   AddKafkaData(topics = Set(stress1, stress4), data = Range(48, 49), message = Delete topic stress2)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50]
   AddKafkaData(topics = Set(stress1, stress4, stress10), data = Range(50, 51, 52), message = Add topic stress11)
   AddKafkaData(topics = Set(stress1, stress4), data = Range(53, 54, 55, 56), message = Delete topic stress10)
   AddKafkaData(topics = Set(stress1), data = Range(57, 58, 59, 60, 61, 62), message = Delete topic stress4)
=> CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63]
   StopStream
   AddKafkaData(topics = Set(stress1), data = Range(63, 64), message = )
   AddKafkaData(topics = Set(stress1), data = Range(65, 66), message = )
   AddKafkaData(topics = Set(stress1, stress12), data = Range(67, 68, 69), message = Add topic stress13)
   AddKafkaData(topics = Set(stress1, stress12), data = Range(70, 71, 72), message = Add partition)
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@2544d798,Map(),null)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73]
   StopStream
   AddKafkaData(topics = Set(stress1, stress12), data = Range(73, 74, 75), message = )
   AddKafkaData(topics = Set(stress1, stress12), data = Range(76, 77, 78, 79, 80, 81, 82, 83), message = )
   AddKafkaData(topics = Set(stress1, stress12), data = Range(84, 85, 86, 87, 88, 89, 90, 91), message = )
   AddKafkaData(topics = Set(stress1, stress12), data = Range(92, 93, 94, 95, 96), message = )
   AddKafkaData(topics = Set(stress1, stress12), data = Range(97, 98, 99, 100), message = )
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@7e4e2b6,Map(),null)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101]
   AddKafkaData(topics = Set(stress1, stress12), data = Range(101, 102, 103, 104, 105), message = Delete topic stress1)
   AddKafkaData(topics = Set(stress1, stress12), data = Range(106, 107, 108, 109), message = )
   AddKafkaData(topics = Set(stress1, stress12), data = Range(), message = Add partition)
   AddKafkaData(topics = Set(stress1, stress12, stress14), data = Range(110, 111, 112, 113, 114, 115), message = Add topic stress15)
   AddKafkaData(topics = Set(stress1, stress12, stress14), data = Range(116, 117, 118, 119, 120), message = )
   AddKafkaData(topics = Set(stress1, stress12, stress14), data = Range(121, 122), message = Delete topic stress1)
   AddKafkaData(topics = Set(stress1, stress12, stress14), data = Range(123), message = Add partition)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124]
   StopStream
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@79273b44,Map(),null)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124]
   AddKafkaData(topics = Set(stress1, stress12, stress14), data = Range(124, 125, 126, 127, 128), message = Add partition)
   AddKafkaData(topics = Set(stress1, stress14), data = Range(129, 130, 131, 132, 133, 134, 135, 136, 137), message = Delete topic stress12)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137],[138]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137],[138]
   StopStream
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@53ade025,Map(),null)
   AddKafkaData(topics = Set(stress1, stress14, stress16), data = Range(138, 139, 140), message = Add topic stress17)
   AddKafkaData(topics = Set(stress1, stress14, stress16), data = Range(141, 142, 143, 144, 145, 146, 147), message = )
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137],[138],[139],[140],[141],[142],[143],[144],[145],[146],[147],[148]
   StopStream
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@50d605df,Map(),null)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137],[138],[139],[140],[141],[142],[143],[144],[145],[146],[147],[148]

== Stream ==
Output Mode: Append
Stream state: {KafkaV2[SubscribePattern[stress.*]]: {"stress1":{"2":4,"4":4,"1":5,"3":4,"0":3}}}
Thread state: alive
Thread stack trace: java.lang.Thread.sleep(Native Method)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1.apply$mcZ$sp(MicroBatchExecution.scala:220)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:165)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:286)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:198)


== Sink ==
0: 
1: [1] [3] [2]
2: [6] [4] [7] [5]
3: [8] [9]
4: 
5: [10]
6: [14] [13] [15] [12] [11]
7: [18] [17] [16]
8: [23] [22] [27] [19] [24] [21] [26] [20] [25]
9: [30] [29] [28]
10: [35] [34] [31] [36] [33] [32]
11: 
12: [37] [39] [38] [40]
13: [41]
14: [45] [43] [47] [42] [46] [44] [48]
15: [50] [49]
16: [55] [56] [54]
17: [57]
18: [59] [61] [60] [58] [63] [62]


== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#11567]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#11566: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#11565: scala.Tuple2
      +- Project [cast(key#11541 as string) AS key#11555, cast(value#11542 as string) AS value#11556]
         +- Project [key#11804 AS key#11541, value#11805 AS value#11542, topic#11806 AS topic#11543, partition#11807 AS partition#11544, offset#11808L AS offset#11545L, timestamp#11809 AS timestamp#11546, timestampType#11810 AS timestampType#11547]
            +- Streaming RelationV2 kafka[key#11804, value#11805, topic#11806, partition#11807, offset#11808L, timestamp#11809, timestampType#11810] (Options: [kafka.metadata.max.age.ms=1,failOnDataLoss=false,kafka.bootstrap.servers=127.0.0.1:46207,kafka.d...)

== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, false] AS value#11567]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#11566: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#11565: scala.Tuple2
      +- Project [cast(key#11541 as string) AS key#11555, cast(value#11542 as string) AS value#11556]
         +- Project [key#11804 AS key#11541, value#11805 AS value#11542, topic#11806 AS topic#11543, partition#11807 AS partition#11544, offset#11808L AS offset#11545L, timestamp#11809 AS timestamp#11546, timestampType#11810 AS timestampType#11547]
            +- Streaming RelationV2 kafka[key#11804, value#11805, topic#11806, partition#11807, offset#11808L, timestamp#11809, timestampType#11810] (Options: [kafka.metadata.max.age.ms=1,failOnDataLoss=false,kafka.bootstrap.servers=127.0.0.1:46207,kafka.d...)

== Optimized Logical Plan ==
SerializeFromObject [input[0, int, false] AS value#11567]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#11566: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#11565: scala.Tuple2
      +- Project [cast(key#11804 as string) AS key#11555, cast(value#11805 as string) AS value#11556]
         +- Streaming RelationV2 kafka[key#11804, value#11805, topic#11806, partition#11807, offset#11808L, timestamp#11809, timestampType#11810] (Options: [kafka.metadata.max.age.ms=1,failOnDataLoss=false,kafka.bootstrap.servers=127.0.0.1:46207,kafka.d...)

== Physical Plan ==
*(1) SerializeFromObject [input[0, int, false] AS value#11567]
+- *(1) MapElements <function1>, obj#11566: int
   +- *(1) DeserializeToObject newInstance(class scala.Tuple2), obj#11565: scala.Tuple2
      +- *(1) Project [cast(key#11804 as string) AS key#11555, cast(value#11805 as string) AS value#11556]
         +- *(1) Project [key#11804, value#11805, topic#11806, partition#11807, offset#11808L, timestamp#11809, timestampType#11810]
            +- *(1) ScanV2 kafka[key#11804, value#11805, topic#11806, partition#11807, offset#11808L, timestamp#11809, timestampType#11810] (Options: [kafka.metadata.max.age.ms=1,failOnDataLoss=false,kafka.bootstrap.servers=127.0.0.1:46207,kafka.d...)

         
         
      at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
      at org.scalatest.Assertions$class.fail(Assertions.scala:1089)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
      at org.apache.spark.sql.streaming.StreamTest$class.failTest$1(StreamTest.scala:453)
      at org.apache.spark.sql.streaming.StreamTest$$anonfun$executeAction$1$18.apply(StreamTest.scala:728)
      at org.apache.spark.sql.streaming.StreamTest$$anonfun$executeAction$1$18.apply(StreamTest.scala:728)
      at scala.Option.foreach(Option.scala:257)
      at org.apache.spark.sql.streaming.StreamTest$class.executeAction$1(StreamTest.scala:727)
      at org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1.apply(StreamTest.scala:773)
      at org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1.apply(StreamTest.scala:760)
      at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
      at org.apache.spark.sql.streaming.StreamTest$class.liftedTree1$1(StreamTest.scala:760)
      at org.apache.spark.sql.streaming.StreamTest$class.testStream(StreamTest.scala:759)
      at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaMicroBatchSourceSuite.scala:50)
      at org.apache.spark.sql.streaming.StreamTest$class.runStressTest(StreamTest.scala:871)
      at org.apache.spark.sql.kafka010.KafkaSourceTest.runStressTest(KafkaMicroBatchSourceSuite.scala:50)
      at org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$23.apply$mcV$sp(KafkaMicroBatchSourceSuite.scala:1528)
      at org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$23.apply(KafkaMicroBatchSourceSuite.scala:1507)
      at org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$23.apply(KafkaMicroBatchSourceSuite.scala:1507)
      at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:104)
      at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
      at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
      at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaMicroBatchSourceSuite.scala:50)
      at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:221)
      at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaMicroBatchSourceSuite.scala:50)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
      at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
      at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
      at org.scalatest.Suite$class.run(Suite.scala:1147)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
      at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:53)
      at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
      at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
      at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:53)
      at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1210)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1257)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1255)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1255)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
      at org.scalatest.Suite$class.run(Suite.scala:1144)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
      at org.scalatest.tools.Runner$.main(Runner.scala:827)
      at org.scalatest.tools.Runner.main(Runner.scala)
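
For context: the Spark answer is missing exactly [51], [52] and [53], and the Sink section shows batch 15 ending at [50] with batch 16 resuming at [54]. That is consistent with all three records of AddKafkaData(..., data = Range(50, 51, 52), message = Add topic stress11) having landed on stress10, which the very next action deletes; with failOnDataLoss=false the source skips the lost offsets rather than failing the query, so those rows never reach the sink.

A minimal sketch of the query under test, reconstructed from the logged plan and source options (the broker address, subscribe pattern and the +1 mapping are read off the log above; the rest is an assumption, not the suite's exact code):

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().appName("kafka-stress-sketch").getOrCreate()
    import spark.implicits._

    val kafka = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "127.0.0.1:46207") // from the logged options
      .option("kafka.metadata.max.age.ms", "1")             // refresh metadata aggressively so topic adds/deletes are noticed
      .option("subscribePattern", "stress.*")               // matches the stressN topics created and deleted above
      .option("failOnDataLoss", "false")                    // lost offsets are skipped with a warning, not fatal
      .load()
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
      .as[(String, String)]

    // Range(0, 1, 2) surfaces as [1],[2],[3] in CheckAnswer, so the mapped query
    // parses the value and adds one (the MapElements node in the plan above).
    val mapped = kafka.map(kv => kv._2.toInt + 1)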