sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: 
Timed out waiting for stream: The code passed to failAfter did not complete within 30 seconds.
org.scalatest.concurrent.Timeouts$$anonfun$failAfter$1.apply(Timeouts.scala:249)
	org.scalatest.concurrent.Timeouts$$anonfun$failAfter$1.apply(Timeouts.scala:249)
	org.scalatest.concurrent.Timeouts$class.timeoutAfter(Timeouts.scala:345)
	org.scalatest.concurrent.Timeouts$class.failAfter(Timeouts.scala:245)
	org.apache.spark.sql.kafka010.KafkaSourceTest.failAfter(KafkaSourceSuite.scala:41)
	org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24.apply(StreamTest.scala:532)
	org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24.apply(StreamTest.scala:531)
	scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
	scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
	scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)

	Caused by: 	null
	java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.reportInterruptAfterWait(AbstractQueuedSynchronizer.java:2014)
		java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2173)
		org.apache.spark.sql.execution.streaming.StreamExecution.awaitOffset(StreamExecution.scala:569)
		org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24$$anonfun$apply$17.apply$mcV$sp(StreamTest.scala:533)
		org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24$$anonfun$apply$17.apply(StreamTest.scala:533)
		org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24$$anonfun$apply$17.apply(StreamTest.scala:533)
		org.scalatest.concurrent.Timeouts$class.timeoutAfter(Timeouts.scala:326)
		org.scalatest.concurrent.Timeouts$class.failAfter(Timeouts.scala:245)
		org.apache.spark.sql.kafka010.KafkaSourceTest.failAfter(KafkaSourceSuite.scala:41)
		org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1$$anonfun$apply$24.apply(StreamTest.scala:532)


== Progress ==
   AssertOnQuery(<condition>, )
   AddKafkaData(topics = Set(stress1, stress2, stress4, stress5), data = Range(0, 1, 2, 3, 4, 5, 6, 7, 8), message = Delete topic stress3)
=> CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9]
   StopStream
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5), data = Range(), message = Add topic stress7)
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@5f35ad88,Map())
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5), data = Range(9, 10, 11, 12, 13), message = Add topic stress9)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5), data = Range(), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(14, 15, 16, 17, 18, 19), message = Add topic stress11)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(20, 21, 22), message = Delete topic stress1)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(23), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(24, 25, 26, 27, 28), message = Add partition)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29]
   StopStream
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@51484db1,Map())
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(29, 30, 31, 32, 33, 34), message = Add partition)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35]
   StopStream
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(35, 36), message = )
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@4666b6fb,Map())
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37]
   StopStream
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(37, 38, 39, 40, 41, 42, 43, 44), message = Add partition)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(45, 46), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(47), message = Add partition)
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@13fd5a25,Map())
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, stress5, stress10), data = Range(48, 49, 50), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range(51, 52, 53, 54, 55, 56, 57), message = Add topic stress13)
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range(58, 59, 60, 61), message = )
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62]
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress10), data = Range(62, 63, 64, 65, 66, 67, 68, 69), message = Add partition)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70]
   StopStream
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@6156ec3f,Map())
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(70, 71, 72, 73, 74, 75), message = Delete topic stress10)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76]
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76]
   StopStream
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(76, 77, 78, 79, 80, 81, 82, 83), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(84, 85, 86, 87, 88, 89, 90), message = Add partition)
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(91, 92, 93, 94, 95, 96), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(97, 98, 99, 100), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(101, 102, 103, 104, 105, 106, 107), message = )
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@72e01061,Map())
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(108, 109, 110, 111), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(112), message = Add partition)
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(), message = Add topic stress15)
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113]
   StopStream
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, stress8, stress1, stress5), data = Range(113, 114), message = )
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress16), data = Range(115, 116), message = Add topic stress17)
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@142a1a8,Map())
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117]
   StopStream
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress16), data = Range(117, 118, 119, 120, 121), message = )
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@7897ffd7,Map())
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress18, stress16), data = Range(122), message = Add topic stress19)
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, stress8, stress1, stress5, stress18, stress16), data = Range(123, 124), message = )
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125]

== Stream ==
Output Mode: Append
Stream state: {KafkaSource[SubscribePattern[stress.*]]: {"stress5":{"2":1,"1":2,"0":2},"stress1":{"2":1,"4":1,"1":1,"3":1,"0":1},"stress4":{"2":1,"1":1,"3":2,"0":1},"stress3":{"2":2,"1":1,"3":1,"0":1},"stress2":{"1":3,"0":2}}}
Thread state: alive


== Sink ==
0: 


== Plan ==
== Parsed Logical Plan ==
SerializeFromObject [input[0, int, true] AS value#4816]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#4815: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#4814: scala.Tuple2
      +- Project [cast(key#4876 as string) AS key#4805, cast(value#4877 as string) AS value#4806]
         +- LogicalRDD [key#4876, value#4877, topic#4878, partition#4879, offset#4880L, timestamp#4881, timestampType#4882]

== Analyzed Logical Plan ==
value: int
SerializeFromObject [input[0, int, true] AS value#4816]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#4815: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#4814: scala.Tuple2
      +- Project [cast(key#4876 as string) AS key#4805, cast(value#4877 as string) AS value#4806]
         +- LogicalRDD [key#4876, value#4877, topic#4878, partition#4879, offset#4880L, timestamp#4881, timestampType#4882]

== Optimized Logical Plan ==
SerializeFromObject [input[0, int, true] AS value#4816]
+- MapElements <function1>, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#4815: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#4814: scala.Tuple2
      +- Project [cast(key#4876 as string) AS key#4805, cast(value#4877 as string) AS value#4806]
         +- LogicalRDD [key#4876, value#4877, topic#4878, partition#4879, offset#4880L, timestamp#4881, timestampType#4882]

== Physical Plan ==
*SerializeFromObject [input[0, int, true] AS value#4816]
+- *MapElements <function1>, obj#4815: int
   +- *DeserializeToObject newInstance(class scala.Tuple2), obj#4814: scala.Tuple2
      +- *Project [cast(key#4876 as string) AS key#4805, cast(value#4877 as string) AS value#4806]
         +- Scan ExistingRDD[key#4876,value#4877,topic#4878,partition#4879,offset#4880L,timestamp#4881,timestampType#4882]
         
         
	at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
	at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
	at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
	at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
	at org.apache.spark.sql.streaming.StreamTest$class.failTest$1(StreamTest.scala:341)
	at org.apache.spark.sql.streaming.StreamTest$class.liftedTree1$1(StreamTest.scala:555)
	at org.apache.spark.sql.streaming.StreamTest$class.testStream(StreamTest.scala:350)
	at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaSourceSuite.scala:41)
	at org.apache.spark.sql.streaming.StreamTest$class.runStressTest(StreamTest.scala:643)
	at org.apache.spark.sql.kafka010.KafkaSourceTest.runStressTest(KafkaSourceSuite.scala:41)
	at org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$11.apply$mcV$sp(KafkaSourceSuite.scala:768)
	at org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$11.apply(KafkaSourceSuite.scala:748)
	at org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$11.apply(KafkaSourceSuite.scala:748)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaSourceSuite.scala:41)
	at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
	at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaSourceSuite.scala:41)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:357)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:502)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)