Failed to execute query using catalyst:
Error: execute, tree:
Exchange hashpartitioning(p_mfgr#204718, 5), true, [id=#152235]
+- Scan hive default.part [p_mfgr#204718, p_name#204717, p_size#204721], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204716, p_name#204717, p_mfgr#204718, p_brand#204719, p_type#204720, p_size#204721, p_container#204722, p_retailprice#204723, p_comment#204724]

org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(p_mfgr#204718, 5), true, [id=#152235]
+- Scan hive default.part [p_mfgr#204718, p_name#204717, p_size#204721], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204716, p_name#204717, p_mfgr#204718, p_brand#204719, p_type#204720, p_size#204721, p_container#204722, p_retailprice#204723, p_comment#204724]

 at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:90)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
 at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
 at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:124)
 at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:115)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
 at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
 at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:124)
 at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:115)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
 at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
 at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:124)
 at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:115)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
 at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
 at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:45)
 at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:329)
 at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:378)
 at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:408)
 at org.apache.spark.sql.execution.HiveResult$.hiveResultString(HiveResult.scala:52)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$32(HiveComparisonTest.scala:351)
 at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
 at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
 at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.getResult$1(HiveComparisonTest.scala:351)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$31(HiveComparisonTest.scala:353)
 at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
 at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
 at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
 at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
 at scala.collection.TraversableLike.map(TraversableLike.scala:237)
 at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
 at scala.collection.AbstractTraversable.map(Traversable.scala:108)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.doTest$1(HiveComparisonTest.scala:347)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$10(HiveComparisonTest.scala:467)
 at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
 at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
 at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
 at org.scalatest.Transformer.apply(Transformer.scala:22)
 at org.scalatest.Transformer.apply(Transformer.scala:20)
 at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
 at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
 at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
 at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
 at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
 at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
 at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
 at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
 at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
 at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
 at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
 at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
 at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
 at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
 at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
 at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
 at scala.collection.immutable.List.foreach(List.scala:392)
 at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
 at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
 at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
 at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
 at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
 at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
 at org.scalatest.Suite.run(Suite.scala:1147)
 at org.scalatest.Suite.run$(Suite.scala:1129)
 at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
 at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
 at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
 at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
 at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
 at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
 at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
 at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
 at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
 at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
 at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
 at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
 at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
 at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
 at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
 at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
 at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
 at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
 at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
 at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
 at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
 at org.scalatest.Suite.run(Suite.scala:1144)
 at org.scalatest.Suite.run$(Suite.scala:1129)
 at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
 at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
 at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
 at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
 at scala.collection.immutable.List.foreach(List.scala:392)
 at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
 at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
 at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
 at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
 at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
 at org.scalatest.tools.Runner$.main(Runner.scala:827)
 at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.hive.StatisticsSuite.<init>(StatisticsSuite.scala:46)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:66)
org.scalatest.tools.DiscoverySuite.$anonfun$nestedSuites$1(DiscoverySuite.scala:38)
scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
scala.collection.Iterator.foreach(Iterator.scala:941)
scala.collection.Iterator.foreach$(Iterator.scala:941)
scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
scala.collection.IterableLike.foreach(IterableLike.scala:74)
scala.collection.IterableLike.foreach$(IterableLike.scala:73)
scala.collection.AbstractIterable.foreach(Iterable.scala:56)
scala.collection.TraversableLike.map(TraversableLike.scala:237)
scala.collection.TraversableLike.map$(TraversableLike.scala:230)
scala.collection.AbstractTraversable.map(Traversable.scala:108)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:1165)

The currently active SparkContext was created at:

(No active SparkContext.)
 
 at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:109)
 at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1466)
 at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:90)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader$lzycompute(HiveTableScanExec.scala:110)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader(HiveTableScanExec.scala:105)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec.$anonfun$doExecute$1(HiveTableScanExec.scala:188)
 at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2488)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:188)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:64)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:64)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency$lzycompute(ShuffleExchangeExec.scala:74)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency(ShuffleExchangeExec.scala:72)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.createShuffledRDD(ShuffleExchangeExec.scala:82)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.$anonfun$doExecute$1(ShuffleExchangeExec.scala:93)
 at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
 ... 152 more
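The "Caused by" trace above shows the real failure: the shared SparkContext (created in StatisticsSuite, per the creation trace) had already been stopped by the time HadoopTableReader tried to broadcast the Hadoop configuration for the table scan, so SparkContext.assertNotStopped threw. A minimal sketch of that failure mode (self-contained; the local master and app name are illustrative, not taken from the test run):

    import org.apache.spark.{SparkConf, SparkContext}

    object StoppedContextRepro {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setMaster("local[2]").setAppName("stopped-context-repro"))
        sc.stop() // analogous to an earlier suite tearing down the shared context
        // Fails assertNotStopped with:
        // "Cannot call methods on a stopped SparkContext."
        sc.parallelize(1 to 10).count()
      }
    }

Because the context is shared across suites, which test first touches the stopped context depends on suite ordering, which is presumably why the error surfaces in HiveWindowFunctionQuerySuite rather than in the suite that created the context.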

select p_mfgr, p_name, p_size,
rank() over(distribute by p_mfgr sort by p_name) as r,
dense_rank() over(distribute by p_mfgr sort by p_name) as dr,
cume_dist() over(distribute by p_mfgr sort by p_name) as cud,
sum(p_size) over (distribute by p_mfgr sort by p_name
range between unbounded preceding and current row) as s1,
sum(p_size) over (distribute by p_mfgr sort by p_size
range between 5 preceding and current row) as s2,
first_value(p_size) over w1 as fv1
from part
window w1 as (distribute by p_mfgr sort by p_mfgr, p_name
 rows between 2 preceding and 2 following)
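Hive's "distribute by ... sort by" window syntax corresponds to "partition by ... order by". For reference, a sketch of the same query through Spark's DataFrame window API (assumes an existing Hive-enabled SparkSession named spark and the TPC-H-style part table; this is not code from the test itself):

    import org.apache.spark.sql.expressions.Window
    import org.apache.spark.sql.functions._

    // distribute by p_mfgr sort by p_name == partition by p_mfgr order by p_name
    val byName = Window.partitionBy("p_mfgr").orderBy("p_name")
    val s1Frame = byName.rangeBetween(Window.unboundedPreceding, Window.currentRow)
    val s2Frame = Window.partitionBy("p_mfgr").orderBy("p_size")
      .rangeBetween(-5, Window.currentRow)
    val w1 = Window.partitionBy("p_mfgr").orderBy("p_mfgr", "p_name")
      .rowsBetween(-2, 2)

    val df = spark.table("part").select(
      col("p_mfgr"), col("p_name"), col("p_size"),
      rank().over(byName).as("r"),
      dense_rank().over(byName).as("dr"),
      cume_dist().over(byName).as("cud"),
      sum("p_size").over(s1Frame).as("s1"),
      sum("p_size").over(s2Frame).as("s2"),
      first("p_size").over(w1).as("fv1"))

    df.explain(true) // prints the same four plan sections shown below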
== Parsed Logical Plan ==
'WithWindowDefinition Map(w1 -> windowspecdefinition('p_mfgr, 'p_mfgr ASC NULLS FIRST, 'p_name ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)))
+- 'Project ['p_mfgr, 'p_name, 'p_size, 'rank() windowspecdefinition('p_mfgr, 'p_name ASC NULLS FIRST, unspecifiedframe$()) AS r#204705, 'dense_rank() windowspecdefinition('p_mfgr, 'p_name ASC NULLS FIRST, unspecifiedframe$()) AS dr#204706, 'cume_dist() windowspecdefinition('p_mfgr, 'p_name ASC NULLS FIRST, unspecifiedframe$()) AS cud#204707, 'sum('p_size) windowspecdefinition('p_mfgr, 'p_name ASC NULLS FIRST, specifiedwindowframe(RangeFrame, unboundedpreceding$(), currentrow$())) AS s1#204708, 'sum('p_size) windowspecdefinition('p_mfgr, 'p_size ASC NULLS FIRST, specifiedwindowframe(RangeFrame, -5, currentrow$())) AS s2#204709, unresolvedwindowexpression('first_value('p_size), WindowSpecReference(w1)) AS fv1#204710]
   +- 'UnresolvedRelation [part]

== Analyzed Logical Plan ==
p_mfgr: string, p_name: string, p_size: int, r: int, dr: int, cud: double, s1: bigint, s2: bigint, fv1: int
Project [p_mfgr#204718, p_name#204717, p_size#204721, r#204705, dr#204706, cud#204707, s1#204708L, s2#204709L, fv1#204710]
+- Project [p_mfgr#204718, p_name#204717, p_size#204721, r#204705, dr#204706, cud#204707, s1#204708L, fv1#204710, s2#204709L, r#204705, dr#204706, cud#204707, s1#204708L, s2#204709L, fv1#204710]
   +- Window [sum(cast(p_size#204721 as bigint)) windowspecdefinition(p_mfgr#204718, p_size#204721 ASC NULLS FIRST, specifiedwindowframe(RangeFrame, -5, currentrow$())) AS s2#204709L], [p_mfgr#204718], [p_size#204721 ASC NULLS FIRST]
      +- Window [first(p_size#204721, false) windowspecdefinition(p_mfgr#204718, p_mfgr#204718 ASC NULLS FIRST, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS fv1#204710], [p_mfgr#204718], [p_mfgr#204718 ASC NULLS FIRST, p_name#204717 ASC NULLS FIRST]
         +- Window [rank(p_name#204717) windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS r#204705, dense_rank(p_name#204717) windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS dr#204706, cume_dist() windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RangeFrame, unboundedpreceding$(), currentrow$())) AS cud#204707, sum(cast(p_size#204721 as bigint)) windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RangeFrame, unboundedpreceding$(), currentrow$())) AS s1#204708L], [p_mfgr#204718], [p_name#204717 ASC NULLS FIRST]
            +- Project [p_mfgr#204718, p_name#204717, p_size#204721]
               +- SubqueryAlias `default`.`part`
                  +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204716, p_name#204717, p_mfgr#204718, p_brand#204719, p_type#204720, p_size#204721, p_container#204722, p_retailprice#204723, p_comment#204724]

== Optimized Logical Plan ==
Project [p_mfgr#204718, p_name#204717, p_size#204721, r#204705, dr#204706, cud#204707, s1#204708L, s2#204709L, fv1#204710]
+- Window [sum(cast(p_size#204721 as bigint)) windowspecdefinition(p_mfgr#204718, p_size#204721 ASC NULLS FIRST, specifiedwindowframe(RangeFrame, -5, currentrow$())) AS s2#204709L], [p_mfgr#204718], [p_size#204721 ASC NULLS FIRST]
   +- Window [first(p_size#204721, false) windowspecdefinition(p_mfgr#204718, p_mfgr#204718 ASC NULLS FIRST, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS fv1#204710], [p_mfgr#204718], [p_mfgr#204718 ASC NULLS FIRST, p_name#204717 ASC NULLS FIRST]
      +- Window [rank(p_name#204717) windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS r#204705, dense_rank(p_name#204717) windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS dr#204706, cume_dist() windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RangeFrame, unboundedpreceding$(), currentrow$())) AS cud#204707, sum(cast(p_size#204721 as bigint)) windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RangeFrame, unboundedpreceding$(), currentrow$())) AS s1#204708L], [p_mfgr#204718], [p_name#204717 ASC NULLS FIRST]
         +- Project [p_mfgr#204718, p_name#204717, p_size#204721]
            +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204716, p_name#204717, p_mfgr#204718, p_brand#204719, p_type#204720, p_size#204721, p_container#204722, p_retailprice#204723, p_comment#204724]

== Physical Plan ==
*(4) Project [p_mfgr#204718, p_name#204717, p_size#204721, r#204705, dr#204706, cud#204707, s1#204708L, s2#204709L, fv1#204710]
+- Window [sum(cast(p_size#204721 as bigint)) windowspecdefinition(p_mfgr#204718, p_size#204721 ASC NULLS FIRST, specifiedwindowframe(RangeFrame, -5, currentrow$())) AS s2#204709L], [p_mfgr#204718], [p_size#204721 ASC NULLS FIRST]
   +- *(3) Sort [p_mfgr#204718 ASC NULLS FIRST, p_size#204721 ASC NULLS FIRST], false, 0
      +- Window [first(p_size#204721, false) windowspecdefinition(p_mfgr#204718, p_mfgr#204718 ASC NULLS FIRST, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS fv1#204710], [p_mfgr#204718], [p_mfgr#204718 ASC NULLS FIRST, p_name#204717 ASC NULLS FIRST]
         +- *(2) Sort [p_mfgr#204718 ASC NULLS FIRST, p_mfgr#204718 ASC NULLS FIRST, p_name#204717 ASC NULLS FIRST], false, 0
            +- Window [rank(p_name#204717) windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS r#204705, dense_rank(p_name#204717) windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS dr#204706, cume_dist() windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RangeFrame, unboundedpreceding$(), currentrow$())) AS cud#204707, sum(cast(p_size#204721 as bigint)) windowspecdefinition(p_mfgr#204718, p_name#204717 ASC NULLS FIRST, specifiedwindowframe(RangeFrame, unboundedpreceding$(), currentrow$())) AS s1#204708L], [p_mfgr#204718], [p_name#204717 ASC NULLS FIRST]
               +- *(1) Sort [p_mfgr#204718 ASC NULLS FIRST, p_name#204717 ASC NULLS FIRST], false, 0
                  +- Exchange hashpartitioning(p_mfgr#204718, 5), true, [id=#152235]
                     +- Scan hive default.part [p_mfgr#204718, p_name#204717, p_size#204721], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204716, p_name#204717, p_mfgr#204718, p_brand#204719, p_type#204720, p_size#204721, p_container#204722, p_retailprice#204723, p_comment#204724]

== HIVE - 26 row(s) ==
Manufacturer#1 almond antique burnished rose metallic 2 1 1 0.3333333333333333 4 4 2
Manufacturer#1 almond antique burnished rose metallic 2 1 1 0.3333333333333333 4 4 2
Manufacturer#1 almond antique chartreuse lavender yellow 34 3 2 0.5 38 34 2
Manufacturer#1 almond antique salmon chartreuse burlywood 6 4 3 0.6666666666666666 44 10 2
Manufacturer#1 almond aquamarine burnished black steel 28 5 4 0.8333333333333334 72 28 34
Manufacturer#1 almond aquamarine pink moccasin thistle 42 6 5 1.0 114 42 6
Manufacturer#2 almond antique violet chocolate turquoise 14 1 1 0.2 14 14 14
Manufacturer#2 almond antique violet turquoise frosted 40 2 2 0.4 54 40 14
Manufacturer#2 almond aquamarine midnight light salmon 2 3 3 0.6 56 2 14
Manufacturer#2 almond aquamarine rose maroon antique 25 4 4 0.8 81 25 40
Manufacturer#2 almond aquamarine sandy cyan gainsboro 18 5 5 1.0 99 32 2
Manufacturer#3 almond antique chartreuse khaki white 17 1 1 0.2 17 31 17
Manufacturer#3 almond antique forest lavender goldenrod 14 2 2 0.4 31 14 17
Manufacturer#3 almond antique metallic orange dim 19 3 3 0.6 50 50 17
Manufacturer#3 almond antique misty red olive 1 4 4 0.8 51 1 14
Manufacturer#3 almond antique olive coral navajo 45 5 5 1.0 96 45 19
Manufacturer#4 almond antique gainsboro frosted violet 10 1 1 0.2 10 17 10
Manufacturer#4 almond antique violet mint lemon 39 2 2 0.4 49 39 10
Manufacturer#4 almond aquamarine floral ivory bisque 27 3 3 0.6 76 27 10
Manufacturer#4 almond aquamarine yellow dodger mint 7 4 4 0.8 83 7 39
Manufacturer#4 almond azure aquamarine papaya violet 12 5 5 1.0 95 29 27
Manufacturer#5 almond antique blue firebrick mint 31 1 1 0.2 31 31 31
Manufacturer#5 almond antique medium spring khaki 6 2 2 0.4 37 8 31
Manufacturer#5 almond antique sky peru orange 2 3 3 0.6 39 2 31
Manufacturer#5 almond aquamarine dodger light gainsboro 46 4 4 0.8 85 46 6
Manufacturer#5 almond azure blanched chiffon midnight 23 5 5 1.0 108 23 2
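For context, HiveComparisonTest runs each query through Catalyst and compares the string-rendered rows against a cached Hive "golden" answer like the 26 rows above; here execution itself threw, so the test called fail() with the full dump, surfacing as the TestFailedException below. A simplified, hypothetical sketch of such a golden comparison (the real logic lives in HiveComparisonTest.scala):

    // Hypothetical helper, not the actual HiveComparisonTest code.
    def assertMatchesGolden(actual: Seq[String], golden: Seq[String]): Unit = {
      if (actual != golden) {
        val diffs = golden.zipAll(actual, "<missing>", "<missing>")
          .collect { case (g, a) if g != a => s"expected: $g | actual: $a" }
        sys.error(s"${diffs.size} row(s) differ from the Hive golden answer:\n" +
          diffs.mkString("\n"))
      }
    }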

org.scalatest.exceptions.TestFailedException:
at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
at org.scalatest.Assertions.fail(Assertions.scala:1089)
at org.scalatest.Assertions.fail$(Assertions.scala:1085)
at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$31(HiveComparisonTest.scala:366)
at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
at scala.collection.TraversableLike.map(TraversableLike.scala:237)
at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
at scala.collection.AbstractTraversable.map(Traversable.scala:108)
at org.apache.spark.sql.hive.execution.HiveComparisonTest.doTest$1(HiveComparisonTest.scala:347)
at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$10(HiveComparisonTest.scala:467)
at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
at org.scalatest.Suite.run(Suite.scala:1147)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
at org.scalatest.Suite.run(Suite.scala:1144)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
at org.scalatest.tools.Runner$.main(Runner.scala:827)
at org.scalatest.tools.Runner.main(Runner.scala)