      org.scalatest.exceptions.TestFailedException: 

== Results ==
!== Correct Answer - 10 ==     == Spark Answer - 0 ==
!struct<_1:timestamp,_2:int>   struct<>
![1969-12-31 16:00:00.0,0]     
![1969-12-31 16:00:00.1,1]     
![1969-12-31 16:00:00.2,2]     
![1969-12-31 16:00:00.3,3]     
![1969-12-31 16:00:00.4,4]     
![1969-12-31 16:00:00.5,5]     
![1969-12-31 16:00:00.6,6]     
![1969-12-31 16:00:00.7,7]     
![1969-12-31 16:00:00.8,8]     
![1969-12-31 16:00:00.9,9]     
    

== Progress ==
   AdvanceRateManualClock(1)
=> CheckLastBatch: [1969-12-31 16:00:00.0,0],[1969-12-31 16:00:00.1,1],[1969-12-31 16:00:00.2,2],[1969-12-31 16:00:00.3,3],[1969-12-31 16:00:00.4,4],[1969-12-31 16:00:00.5,5],[1969-12-31 16:00:00.6,6],[1969-12-31 16:00:00.7,7],[1969-12-31 16:00:00.8,8],[1969-12-31 16:00:00.9,9]
   StopStream
   StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@32d29185,Map(),null)
   AdvanceRateManualClock(2)
   CheckLastBatch: [1969-12-31 16:00:01.0,10],[1969-12-31 16:00:01.1,11],[1969-12-31 16:00:01.2,12],[1969-12-31 16:00:01.3,13],[1969-12-31 16:00:01.4,14],[1969-12-31 16:00:01.5,15],[1969-12-31 16:00:01.6,16],[1969-12-31 16:00:01.7,17],[1969-12-31 16:00:01.8,18],[1969-12-31 16:00:01.9,19]
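
For context, a minimal Scala sketch of the scenario these actions describe, reconstructed from the Progress log above and the source options visible in the Plan section below (rowsPerSecond=10, numPartitions=1, useManualClock=true). This is not the suite's verbatim source: AdvanceRateManualClock is the suite-defined action named in the log, and the rest uses Spark's standard StreamTest DSL. The CheckLastBatch marked "=>" is the step that failed with an empty batch.

import java.sql.Timestamp
import spark.implicits._   // tuple encoder for CheckLastBatch

// Hypothetical reconstruction of the query under test; "ratev2" is the
// source name shown in the plan's Streaming RelationV2 node.
val input = spark.readStream
  .format("ratev2")
  .option("rowsPerSecond", "10")
  .option("numPartitions", "1")
  .option("useManualClock", "true")
  .load()

testStream(input)(
  AdvanceRateManualClock(1),   // advance the source's manual clock by 1 second
  // 10 rows/sec => values 0..9, timestamps 100 ms apart starting at epoch 0
  CheckLastBatch((0 until 10).map(v => new Timestamp(v * 100) -> v): _*),
  StopStream,
  StartStream(),               // restart; the source should resume after value 9
  AdvanceRateManualClock(2),   // advance by 2, per the Progress log
  CheckLastBatch((10 until 20).map(v => new Timestamp(v * 100) -> v): _*)
)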

== Stream ==
Output Mode: Append
Stream state: {org.apache.spark.sql.execution.streaming.sources.RateStreamMicroBatchReader@5f22085a: {"0":{"value":-1,"runTimeMs":0}}}
Thread state: alive
Thread stack trace: sun.misc.Unsafe.park(Native Method)
java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)
java.util.concurrent.locks.AbstractQueuedSynchronizer.parkAndCheckInterrupt(AbstractQueuedSynchronizer.java:836)
java.util.concurrent.locks.AbstractQueuedSynchronizer.doAcquireSharedInterruptibly(AbstractQueuedSynchronizer.java:997)
java.util.concurrent.locks.AbstractQueuedSynchronizer.acquireSharedInterruptibly(AbstractQueuedSynchronizer.java:1304)
scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:202)
scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:218)
scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:153)
org.apache.spark.util.ThreadUtils$.awaitReady(ThreadUtils.scala:222)
org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:633)
org.apache.spark.SparkContext.runJob(SparkContext.scala:2030)
org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doExecute(WriteToDataSourceV2.scala:84)
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247)
org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:294)
org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectFromPlan(Dataset.scala:3272)
org.apache.spark.sql.Dataset$$anonfun$collect$1.apply(Dataset.scala:2722)
org.apache.spark.sql.Dataset$$anonfun$collect$1.apply(Dataset.scala:2722)
org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253)
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252)
org.apache.spark.sql.Dataset.collect(Dataset.scala:2722)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$org$apache$spark$sql$execution$streaming$MicroBatchExecution$$runBatch$3$$anonfun$apply$18.apply(MicroBatchExecution.scala:511)
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$org$apache$spark$sql$execution$streaming$MicroBatchExecution$$runBatch$3.apply(MicroBatchExecution.scala:506)
org.apache.spark.sql.execution.streaming.ProgressReporter$class.reportTimeTaken(ProgressReporter.scala:271)
org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:58)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.org$apache$spark$sql$execution$streaming$MicroBatchExecution$$runBatch(MicroBatchExecution.scala:505)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1$$anonfun$apply$mcZ$sp$1.apply$mcV$sp(MicroBatchExecution.scala:147)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1$$anonfun$apply$mcZ$sp$1.apply(MicroBatchExecution.scala:135)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1$$anonfun$apply$mcZ$sp$1.apply(MicroBatchExecution.scala:135)
org.apache.spark.sql.execution.streaming.ProgressReporter$class.reportTimeTaken(ProgressReporter.scala:271)
org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:58)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1.apply$mcZ$sp(MicroBatchExecution.scala:135)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:131)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:279)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:189)


== Sink ==
0: 


== Plan ==
== Parsed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWriter@52172c39
+- Project [timestamp#281809 AS timestamp#281776, value#281810L AS value#281777L]
   +- Streaming RelationV2 ratev2[timestamp#281809, value#281810L] (Options: [rowsPerSecond=10,numPartitions=1,useManualClock=true])

== Analyzed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWriter@52172c39
+- Project [timestamp#281809 AS timestamp#281776, value#281810L AS value#281777L]
   +- Streaming RelationV2 ratev2[timestamp#281809, value#281810L] (Options: [rowsPerSecond=10,numPartitions=1,useManualClock=true])

== Optimized Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWriter@52172c39
+- Streaming RelationV2 ratev2[timestamp#281809, value#281810L] (Options: [rowsPerSecond=10,numPartitions=1,useManualClock=true])

== Physical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWriter@52172c39
+- *(1) ScanV2 ratev2[timestamp#281809, value#281810L] (Options: [rowsPerSecond=10,numPartitions=1,useManualClock=true])
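
The four plan dumps above follow the standard QueryExecution explain format; as a hedged aside, the same output can be produced for a running query via StreamingQuery.explain, which prints the plans of the most recent micro-batch. In the sketch below, the memory sink and query name are illustrative, not taken from this test, and input is the hypothetical rate-source DataFrame from the earlier sketch.

val query = input.writeStream
  .format("memory")
  .queryName("rate_output")   // illustrative name, not from this suite
  .start()

// Prints "== Parsed Logical Plan ==" through "== Physical Plan ==" for the
// last completed micro-batch, matching the layout of the dump above.
query.explain(extended = true)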
         
         
      at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
      at org.scalatest.Assertions$class.fail(Assertions.scala:1089)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
      at org.apache.spark.sql.streaming.StreamTest$class.failTest$1(StreamTest.scala:430)
      at org.apache.spark.sql.streaming.StreamTest$$anonfun$executeAction$1$18.apply(StreamTest.scala:669)
      at org.apache.spark.sql.streaming.StreamTest$$anonfun$executeAction$1$18.apply(StreamTest.scala:669)
      at scala.Option.foreach(Option.scala:257)
      at org.apache.spark.sql.streaming.StreamTest$class.executeAction$1(StreamTest.scala:668)
      at org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1.apply(StreamTest.scala:704)
      at org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1.apply(StreamTest.scala:693)
      at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
      at org.apache.spark.sql.streaming.StreamTest$class.liftedTree1$1(StreamTest.scala:693)
      at org.apache.spark.sql.streaming.StreamTest$class.testStream(StreamTest.scala:692)
      at org.apache.spark.sql.execution.streaming.RateSourceV2Suite.testStream(RateSourceV2Suite.scala:34)
      at org.apache.spark.sql.execution.streaming.RateSourceV2Suite$$anonfun$1.apply$mcV$sp(RateSourceV2Suite.scala:66)
      at org.apache.spark.sql.execution.streaming.RateSourceV2Suite$$anonfun$1.apply(RateSourceV2Suite.scala:59)
      at org.apache.spark.sql.execution.streaming.RateSourceV2Suite$$anonfun$1.apply(RateSourceV2Suite.scala:59)
      at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
      at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
      at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
      at org.apache.spark.sql.execution.streaming.RateSourceV2Suite.org$scalatest$BeforeAndAfterEach$$super$runTest(RateSourceV2Suite.scala:34)
      at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:221)
      at org.apache.spark.sql.execution.streaming.RateSourceV2Suite.runTest(RateSourceV2Suite.scala:34)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
      at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
      at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
      at org.scalatest.Suite$class.run(Suite.scala:1147)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
      at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
      at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
      at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
      at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:52)
      at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1210)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1257)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1255)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1255)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
      at org.scalatest.Suite$class.run(Suite.scala:1144)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
      at org.scalatest.tools.Runner$.main(Runner.scala:827)
      at org.scalatest.tools.Runner.main(Runner.scala)