sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: 
Timed out waiting for stream: The code passed to failAfter did not complete within 60 seconds.
java.lang.Thread.getStackTrace(Thread.java:1559)
	org.scalatest.concurrent.TimeLimits.failAfterImpl(TimeLimits.scala:234)
	org.scalatest.concurrent.TimeLimits.failAfterImpl$(TimeLimits.scala:233)
	org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.failAfterImpl(FlatMapGroupsWithStateSuite.scala:48)
	org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:230)
	org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:229)
	org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.failAfter(FlatMapGroupsWithStateSuite.scala:48)
	org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7(StreamTest.scala:463)
	org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7$adapted(StreamTest.scala:462)
	scala.collection.mutable.HashMap.$anonfun$foreach$1(HashMap.scala:149)

	Caused by: null
	java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.reportInterruptAfterWait(AbstractQueuedSynchronizer.java:2014)
		java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2173)
		org.apache.spark.sql.execution.streaming.StreamExecution.awaitOffset(StreamExecution.scala:457)
		org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$8(StreamTest.scala:464)
		scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
		org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
		org.scalatest.concurrent.TimeLimits.failAfterImpl(TimeLimits.scala:239)
		org.scalatest.concurrent.TimeLimits.failAfterImpl$(TimeLimits.scala:233)
		org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.failAfterImpl(FlatMapGroupsWithStateSuite.scala:48)
		org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:230)
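
The frames above show how the timeout is produced: StreamTest wraps each blocking action in ScalaTest's TimeLimits.failAfter, and when the 60-second budget expires the signaler interrupts the thread parked in StreamExecution.awaitOffset, which is why the nested "Caused by" is an InterruptedException with a null message (note the reportInterruptAfterWait frame). A minimal sketch of that wrapper, assuming a thread-interrupting ThreadSignaler (the suite's actual signaler choice is not visible in this trace):

    import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
    import org.scalatest.time.SpanSugar._

    object AwaitSketch extends TimeLimits {
      // Assumption: a signaler that interrupts the stuck thread on timeout.
      implicit val signaler: Signaler = ThreadSignaler

      def awaitWithLimit(block: => Unit): Unit =
        failAfter(60.seconds) { // the 60-second budget from the failure message
          block                 // e.g. a condition wait such as awaitOffset
        }
    }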


== Progress ==
   StartStream(ProcessingTimeTrigger(1000),org.apache.spark.sql.streaming.util.StreamManualClock@5dbb4746,Map(),null)
   AddData to MemoryStream[value#37312]: a
   AdvanceManualClock(1000)
=> CheckNewAnswer: [a,1]
   AssertOnQuery(<condition>, Check total state rows = List(1), updated state rows = List(1))
   AddData to MemoryStream[value#37312]: b
   AdvanceManualClock(1000)
   CheckNewAnswer: [b,1]
   AssertOnQuery(<condition>, Check total state rows = List(2), updated state rows = List(1))
   AddData to MemoryStream[value#37312]: b
   AdvanceManualClock(10000)
   CheckNewAnswer: [a,-1],[b,2]
   AssertOnQuery(<condition>, Check total state rows = List(1), updated state rows = List(2))
   StopStream
   StartStream(ProcessingTimeTrigger(1000),org.apache.spark.sql.streaming.util.StreamManualClock@5dbb4746,Map(),null)
   AddData to MemoryStream[value#37312]: c
   AdvanceManualClock(11000)
   CheckNewAnswer: [b,-1],[c,1]
   AssertOnQuery(<condition>, Check total state rows = List(1), updated state rows = List(2))
   AdvanceManualClock(12000)
   AssertOnQuery(<condition>, )
   AssertOnQuery(<condition>, Execute)
   CheckNewAnswer: [c,-1]
   AssertOnQuery(<condition>, Check total state rows = List(0), updated state rows = List(0))
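
Rendered back into the StreamTest DSL (inside a suite extending StreamTest), the progress trace corresponds to roughly the following action sequence; "=>" above marks CheckNewAnswer: [a,1] as the action that never completed. This is a sketch: inputData and clock are assumed names, the AssertOnQuery state-row checks are elided, and result is the flatMapGroupsWithState query sketched after the plan below.

    val inputData = MemoryStream[String]   // MemoryStream[value#37312] above
    val clock = new StreamManualClock

    testStream(result, OutputMode.Update)(
      StartStream(Trigger.ProcessingTime("1 second"), triggerClock = clock),
      AddData(inputData, "a"),
      AdvanceManualClock(1000),
      CheckNewAnswer(("a", "1")),               // => the action that timed out
      AddData(inputData, "b"),
      AdvanceManualClock(1000),
      CheckNewAnswer(("b", "1")),
      AddData(inputData, "b"),
      AdvanceManualClock(10 * 1000),
      CheckNewAnswer(("a", "-1"), ("b", "2")),  // "a" times out, "b" keeps counting
      StopStream,
      StartStream(Trigger.ProcessingTime("1 second"), triggerClock = clock),
      AddData(inputData, "c"),
      AdvanceManualClock(11 * 1000),
      CheckNewAnswer(("b", "-1"), ("c", "1")),
      AdvanceManualClock(12 * 1000),
      CheckNewAnswer(("c", "-1"))               // all remaining state expires
    )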

== Stream ==
Output Mode: Update
Stream state: {MemoryStream[value#37312]: 0}
Thread state: alive
Thread stack trace: java.lang.Object.wait(Native Method)
org.apache.spark.util.ManualClock.waitTillTime(ManualClock.scala:61)
org.apache.spark.sql.streaming.util.StreamManualClock.waitTillTime(StreamManualClock.scala:34)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:66)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:175)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:332)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
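
Read together with the "Caused by" above, both sides of the stall are visible: the test thread is parked in StreamExecution.awaitOffset waiting for the triggered batch to commit, while the stream thread is parked in ManualClock.waitTillTime waiting for the manual trigger clock to advance. A simplified sketch of the clock the ProcessingTimeExecutor blocks on (the real org.apache.spark.util.ManualClock is equivalent in spirit):

    // The trigger thread sits in waitTillTime between triggers; the stack
    // above ("Thread state: alive") is blocked in exactly this kind of wait.
    class ManualClockSketch(private var time: Long = 0L) {
      def getTimeMillis(): Long = synchronized { time }

      // AdvanceManualClock drives this from the test thread.
      def advance(timeToAdd: Long): Unit = synchronized {
        time += timeToAdd
        notifyAll()
      }

      def waitTillTime(targetTime: Long): Long = synchronized {
        while (time < targetTime) {
          wait(10)
        }
        time
      }
    }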


== Sink ==
0: [a,1]


== Plan ==
== Parsed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@c1611e7
+- SerializeFromObject [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1, true, false) AS _1#37321, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#37322]
   +- FlatMapGroupsWithState org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite$$Lambda$8260/1132553517@145e62f5, cast(value#37316 as string).toString, cast(value#37312 as string).toString, [value#37316], [value#37312], obj#37320: scala.Tuple2, class[count[0]: bigint], Update, false, ProcessingTimeTimeout
      +- AppendColumns org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite$$Lambda$8261/2009461501@7ab15bc, class java.lang.String, [StructField(value,StringType,true)], cast(value#37312 as string).toString, [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, input[0, java.lang.String, true], true, false) AS value#37316]
         +- StreamingDataSourceV2Relation [value#37312], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@3eae8cdb, MemoryStream[value#37312], -1, 0

== Analyzed Logical Plan ==
_1: string, _2: string
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@c1611e7
+- SerializeFromObject [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1, true, false) AS _1#37321, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#37322]
   +- FlatMapGroupsWithState org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite$$Lambda$8260/1132553517@145e62f5, cast(value#37316 as string).toString, cast(value#37312 as string).toString, [value#37316], [value#37312], obj#37320: scala.Tuple2, class[count[0]: bigint], Update, false, ProcessingTimeTimeout
      +- AppendColumns org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite$$Lambda$8261/2009461501@7ab15bc, class java.lang.String, [StructField(value,StringType,true)], cast(value#37312 as string).toString, [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, input[0, java.lang.String, true], true, false) AS value#37316]
         +- StreamingDataSourceV2Relation [value#37312], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@3eae8cdb, MemoryStream[value#37312], -1, 0

== Optimized Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@c1611e7
+- SerializeFromObject [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1, true, false) AS _1#37321, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#37322]
   +- FlatMapGroupsWithState org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite$$Lambda$8260/1132553517@145e62f5, value#37316.toString, value#37312.toString, [value#37316], [value#37312], obj#37320: scala.Tuple2, class[count[0]: bigint], Update, false, ProcessingTimeTimeout
      +- AppendColumns org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite$$Lambda$8261/2009461501@7ab15bc, class java.lang.String, [StructField(value,StringType,true)], value#37312.toString, [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, input[0, java.lang.String, true], true, false) AS value#37316]
         +- StreamingDataSourceV2Relation [value#37312], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@3eae8cdb, MemoryStream[value#37312], -1, 0

== Physical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@c1611e7
+- *(3) SerializeFromObject [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1, true, false) AS _1#37321, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#37322]
   +- FlatMapGroupsWithState org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite$$Lambda$8260/1132553517@145e62f5, value#37316.toString, value#37312.toString, [value#37316], [value#37312], obj#37320: scala.Tuple2, state info [ checkpoint = file:/home/jenkins/workspace/spark-master-test-sbt-hadoop-3.2-ubuntu-testing/target/tmp/streaming.metadata-9c587cec-ffee-4b23-adfe-e7349291c74d/state, runId = 8276650a-5c89-4c47-a070-3e42472ef06b, opId = 0, ver = 0, numPartitions = 5], class[count[0]: bigint], 2, Update, ProcessingTimeTimeout, 1000, 0
      +- *(2) Sort [value#37316 ASC NULLS FIRST], false, 0
         +- Exchange hashpartitioning(value#37316, 5), true
            +- AppendColumns org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite$$Lambda$8261/2009461501@7ab15bc, value#37312.toString, [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, input[0, java.lang.String, true], true, false) AS value#37316]
               +- *(1) Project [value#37312]
                  +- MicroBatchScan[value#37312] MemoryStreamDataSource
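
The plan shape maps back onto the query under test: groupByKey produces the AppendColumns node plus the Exchange/Sort on value#37316, the FlatMapGroupsWithState node carries the class[count[0]: bigint] state encoder with Update mode and ProcessingTimeTimeout, and SerializeFromObject encodes the resulting (String, String) pairs. A sketch under those assumptions (RunningCount, stateFunc, and the "10 seconds" timeout are inferred from the outputs and clock advances, not read from the suite):

    import org.apache.spark.sql.streaming.{GroupState, GroupStateTimeout, OutputMode}

    // Inferred state class: the plan's state encoder is class[count[0]: bigint].
    case class RunningCount(count: Long)

    val stateFunc = (key: String, values: Iterator[String], state: GroupState[RunningCount]) => {
      if (state.hasTimedOut) {
        state.remove()                          // total state rows drop after a timeout batch
        Iterator((key, "-1"))                   // the [a,-1] / [b,-1] / [c,-1] rows
      } else {
        val count = state.getOption.map(_.count).getOrElse(0L) + values.size
        state.update(RunningCount(count))
        state.setTimeoutDuration("10 seconds")  // inferred from the 10-12 s clock advances
        Iterator((key, count.toString))
      }
    }

    // Assumes spark.implicits._ in scope for the String and tuple encoders.
    val result = inputData.toDS()
      .groupByKey(x => x)                       // -> AppendColumns + Exchange + Sort
      .flatMapGroupsWithState(OutputMode.Update,
        GroupStateTimeout.ProcessingTimeTimeout)(stateFunc)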

	at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
	at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
	at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
	at org.scalatest.Assertions.fail(Assertions.scala:1089)
	at org.scalatest.Assertions.fail$(Assertions.scala:1085)
	at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
	at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:444)
	at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:780)
	at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:756)
	at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:326)
	at org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.testStream(FlatMapGroupsWithStateSuite.scala:48)
	at org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.$anonfun$new$92(FlatMapGroupsWithStateSuite.scala:798)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:47)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:31)
	at org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(FlatMapGroupsWithStateSuite.scala:48)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:231)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:229)
	at org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.withSQLConf(FlatMapGroupsWithStateSuite.scala:48)
	at org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.$anonfun$testWithAllStateVersions$2(FlatMapGroupsWithStateSuite.scala:1274)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1147)
	at org.scalatest.Suite.run$(Suite.scala:1129)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)