      org.scalatest.exceptions.TestFailedException: 
Failed to execute query using catalyst:
Error: execute, tree:
Exchange hashpartitioning(p_mfgr#204784, 5), true, [id=#152347]
+- *(2) HashAggregate(keys=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789], functions=[min(p_retailprice#204789), max(p_retailprice#204789)], output=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, mi#204779, ma#204780])
   +- Exchange hashpartitioning(p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, 5), true, [id=#152343]
      +- *(1) HashAggregate(keys=[p_mfgr#204784, p_name#204783, p_size#204787, knownfloatingpointnormalized(normalizenanandzero(p_retailprice#204789)) AS p_retailprice#204789], functions=[partial_min(p_retailprice#204789), partial_max(p_retailprice#204789)], output=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, min#204797, max#204798])
         +- Scan hive default.part [p_name#204783, p_mfgr#204784, p_size#204787, p_retailprice#204789], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204782, p_name#204783, p_mfgr#204784, p_brand#204785, p_type#204786, p_size#204787, p_container#204788, p_retailprice#204789, p_comment#204790]

org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(p_mfgr#204784, 5), true, [id=#152347]
+- *(2) HashAggregate(keys=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789], functions=[min(p_retailprice#204789), max(p_retailprice#204789)], output=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, mi#204779, ma#204780])
   +- Exchange hashpartitioning(p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, 5), true, [id=#152343]
      +- *(1) HashAggregate(keys=[p_mfgr#204784, p_name#204783, p_size#204787, knownfloatingpointnormalized(normalizenanandzero(p_retailprice#204789)) AS p_retailprice#204789], functions=[partial_min(p_retailprice#204789), partial_max(p_retailprice#204789)], output=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, min#204797, max#204798])
         +- Scan hive default.part [p_name#204783, p_mfgr#204784, p_size#204787, p_retailprice#204789], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204782, p_name#204783, p_mfgr#204784, p_brand#204785, p_type#204786, p_size#204787, p_container#204788, p_retailprice#204789, p_comment#204790]

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:90)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:124)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:115)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:45)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:329)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:378)
	at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:408)
	at org.apache.spark.sql.execution.HiveResult$.hiveResultString(HiveResult.scala:52)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$32(HiveComparisonTest.scala:351)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.getResult$1(HiveComparisonTest.scala:351)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$31(HiveComparisonTest.scala:353)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
	at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
	at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
	at scala.collection.TraversableLike.map(TraversableLike.scala:237)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
	at scala.collection.AbstractTraversable.map(Traversable.scala:108)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.doTest$1(HiveComparisonTest.scala:347)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$10(HiveComparisonTest.scala:467)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
	at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1147)
	at org.scalatest.Suite.run$(Suite.scala:1129)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
	at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
	at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
	at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
	at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
	at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
	at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
	at org.scalatest.Suite.run(Suite.scala:1144)
	at org.scalatest.Suite.run$(Suite.scala:1129)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
	at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
	at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
	at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
	at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
	at org.scalatest.tools.Runner$.main(Runner.scala:827)
	at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, 5), true, [id=#152343]
+- *(1) HashAggregate(keys=[p_mfgr#204784, p_name#204783, p_size#204787, knownfloatingpointnormalized(normalizenanandzero(p_retailprice#204789)) AS p_retailprice#204789], functions=[partial_min(p_retailprice#204789), partial_max(p_retailprice#204789)], output=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, min#204797, max#204798])
   +- Scan hive default.part [p_name#204783, p_mfgr#204784, p_size#204787, p_retailprice#204789], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204782, p_name#204783, p_mfgr#204784, p_brand#204785, p_type#204786, p_size#204787, p_container#204788, p_retailprice#204789, p_comment#204790]

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:90)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:160)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:64)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:64)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency$lzycompute(ShuffleExchangeExec.scala:74)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency(ShuffleExchangeExec.scala:72)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.createShuffledRDD(ShuffleExchangeExec.scala:82)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.$anonfun$doExecute$1(ShuffleExchangeExec.scala:93)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
	... 118 more
Caused by: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.hive.StatisticsSuite.<init>(StatisticsSuite.scala:46)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:66)
org.scalatest.tools.DiscoverySuite.$anonfun$nestedSuites$1(DiscoverySuite.scala:38)
scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
scala.collection.Iterator.foreach(Iterator.scala:941)
scala.collection.Iterator.foreach$(Iterator.scala:941)
scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
scala.collection.IterableLike.foreach(IterableLike.scala:74)
scala.collection.IterableLike.foreach$(IterableLike.scala:73)
scala.collection.AbstractIterable.foreach(Iterable.scala:56)
scala.collection.TraversableLike.map(TraversableLike.scala:237)
scala.collection.TraversableLike.map$(TraversableLike.scala:230)
scala.collection.AbstractTraversable.map(Traversable.scala:108)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:1165)

The currently active SparkContext was created at:

(No active SparkContext.)
         
	at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:109)
	at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1466)
	at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:90)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader$lzycompute(HiveTableScanExec.scala:110)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader(HiveTableScanExec.scala:105)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.$anonfun$doExecute$1(HiveTableScanExec.scala:188)
	at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2488)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:188)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:160)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:64)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:64)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency$lzycompute(ShuffleExchangeExec.scala:74)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency(ShuffleExchangeExec.scala:72)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.createShuffledRDD(ShuffleExchangeExec.scala:82)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.$anonfun$doExecute$1(ShuffleExchangeExec.scala:93)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
	... 142 more

select  p_mfgr,p_name, p_size, p_retailprice,
sum(p_retailprice) over w1 as s,
min(p_retailprice) as mi ,
max(p_retailprice) as ma ,
avg(p_retailprice) over w1 as ag
from part
group by p_mfgr,p_name, p_size, p_retailprice
window w1 as (distribute by p_mfgr sort by p_mfgr, p_name
             rows between 2 preceding and 2 following)
== Parsed Logical Plan ==
'WithWindowDefinition Map(w1 -> windowspecdefinition('p_mfgr, 'p_mfgr ASC NULLS FIRST, 'p_name ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)))
+- 'Aggregate ['p_mfgr, 'p_name, 'p_size, 'p_retailprice], ['p_mfgr, 'p_name, 'p_size, 'p_retailprice, unresolvedwindowexpression('sum('p_retailprice), WindowSpecReference(w1)) AS s#204778, 'min('p_retailprice) AS mi#204779, 'max('p_retailprice) AS ma#204780, unresolvedwindowexpression('avg('p_retailprice), WindowSpecReference(w1)) AS ag#204781]
   +- 'UnresolvedRelation [part]

== Analyzed Logical Plan ==
p_mfgr: string, p_name: string, p_size: int, p_retailprice: double, s: double, mi: double, ma: double, ag: double
Project [p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, s#204778, mi#204779, ma#204780, ag#204781]
+- Project [p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, mi#204779, ma#204780, s#204778, ag#204781, s#204778, ag#204781]
   +- Window [sum(p_retailprice#204789) windowspecdefinition(p_mfgr#204784, p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS s#204778, avg(p_retailprice#204789) windowspecdefinition(p_mfgr#204784, p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ag#204781], [p_mfgr#204784], [p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST]
      +- Aggregate [p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789], [p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, min(p_retailprice#204789) AS mi#204779, max(p_retailprice#204789) AS ma#204780]
         +- SubqueryAlias `default`.`part`
            +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204782, p_name#204783, p_mfgr#204784, p_brand#204785, p_type#204786, p_size#204787, p_container#204788, p_retailprice#204789, p_comment#204790]

== Optimized Logical Plan ==
Project [p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, s#204778, mi#204779, ma#204780, ag#204781]
+- Window [sum(p_retailprice#204789) windowspecdefinition(p_mfgr#204784, p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS s#204778, avg(p_retailprice#204789) windowspecdefinition(p_mfgr#204784, p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ag#204781], [p_mfgr#204784], [p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST]
   +- Aggregate [p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789], [p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, min(p_retailprice#204789) AS mi#204779, max(p_retailprice#204789) AS ma#204780]
      +- Project [p_name#204783, p_mfgr#204784, p_size#204787, p_retailprice#204789]
         +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204782, p_name#204783, p_mfgr#204784, p_brand#204785, p_type#204786, p_size#204787, p_container#204788, p_retailprice#204789, p_comment#204790]

== Physical Plan ==
*(4) Project [p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, s#204778, mi#204779, ma#204780, ag#204781]
+- Window [sum(p_retailprice#204789) windowspecdefinition(p_mfgr#204784, p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS s#204778, avg(p_retailprice#204789) windowspecdefinition(p_mfgr#204784, p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ag#204781], [p_mfgr#204784], [p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST]
   +- *(3) Sort [p_mfgr#204784 ASC NULLS FIRST, p_mfgr#204784 ASC NULLS FIRST, p_name#204783 ASC NULLS FIRST], false, 0
      +- Exchange hashpartitioning(p_mfgr#204784, 5), true, [id=#152347]
         +- *(2) HashAggregate(keys=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789], functions=[min(p_retailprice#204789), max(p_retailprice#204789)], output=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, mi#204779, ma#204780])
            +- Exchange hashpartitioning(p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, 5), true, [id=#152343]
               +- *(1) HashAggregate(keys=[p_mfgr#204784, p_name#204783, p_size#204787, knownfloatingpointnormalized(normalizenanandzero(p_retailprice#204789)) AS p_retailprice#204789], functions=[partial_min(p_retailprice#204789), partial_max(p_retailprice#204789)], output=[p_mfgr#204784, p_name#204783, p_size#204787, p_retailprice#204789, min#204797, max#204798])
                  +- Scan hive default.part [p_name#204783, p_mfgr#204784, p_size#204787, p_retailprice#204789], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204782, p_name#204783, p_mfgr#204784, p_brand#204785, p_type#204786, p_size#204787, p_container#204788, p_retailprice#204789, p_comment#204790]

== HIVE - 25 row(s) ==
Manufacturer#1	almond antique burnished rose metallic	2	1173.15	4529.5	1173.15	1173.15	1509.8333333333333
Manufacturer#1	almond antique chartreuse lavender yellow	34	1753.76	5943.92	1753.76	1753.76	1485.98
Manufacturer#1	almond antique salmon chartreuse burlywood	6	1602.59	7576.58	1602.59	1602.59	1515.316
Manufacturer#1	almond aquamarine burnished black steel	28	1414.42	6403.43	1414.42	1414.42	1600.8575
Manufacturer#1	almond aquamarine pink moccasin thistle	42	1632.66	4649.67	1632.66	1632.66	1549.89
Manufacturer#2	almond antique violet chocolate turquoise	14	1690.68	5523.360000000001	1690.68	1690.68	1841.1200000000001
Manufacturer#2	almond antique violet turquoise frosted	40	1800.7	7222.02	1800.7	1800.7	1805.505
Manufacturer#2	almond aquamarine midnight light salmon	2	2031.98	8923.62	2031.98	2031.98	1784.7240000000002
Manufacturer#2	almond aquamarine rose maroon antique	25	1698.66	7232.9400000000005	1698.66	1698.66	1808.2350000000001
Manufacturer#2	almond aquamarine sandy cyan gainsboro	18	1701.6	5432.24	1701.6	1701.6	1810.7466666666667
Manufacturer#3	almond antique chartreuse khaki white	17	1671.68	4272.34	1671.68	1671.68	1424.1133333333335
Manufacturer#3	almond antique forest lavender goldenrod	14	1190.27	6195.32	1190.27	1190.27	1548.83
Manufacturer#3	almond antique metallic orange dim	19	1410.39	7532.61	1410.39	1410.39	1506.522
Manufacturer#3	almond antique misty red olive	1	1922.98	5860.929999999999	1922.98	1922.98	1465.2324999999998
Manufacturer#3	almond antique olive coral navajo	45	1337.29	4670.66	1337.29	1337.29	1556.8866666666665
Manufacturer#4	almond antique gainsboro frosted violet	10	1620.67	4202.35	1620.67	1620.67	1400.7833333333335
Manufacturer#4	almond antique violet mint lemon	39	1375.42	6047.27	1375.42	1375.42	1511.8175
Manufacturer#4	almond aquamarine floral ivory bisque	27	1206.26	7337.620000000001	1206.26	1206.26	1467.5240000000001
Manufacturer#4	almond aquamarine yellow dodger mint	7	1844.92	5716.950000000001	1844.92	1844.92	1429.2375000000002
Manufacturer#4	almond azure aquamarine papaya violet	12	1290.35	4341.530000000001	1290.35	1290.35	1447.176666666667
Manufacturer#5	almond antique blue firebrick mint	31	1789.69	5190.08	1789.69	1789.69	1730.0266666666666
Manufacturer#5	almond antique medium spring khaki	6	1611.66	6208.18	1611.66	1611.66	1552.045
Manufacturer#5	almond antique sky peru orange	2	1788.73	7672.66	1788.73	1788.73	1534.532
Manufacturer#5	almond aquamarine dodger light gainsboro	46	1018.1	5882.970000000001	1018.1	1018.1	1470.7425000000003
Manufacturer#5	almond azure blanched chiffon midnight	23	1464.48	4271.3099999999995	1464.48	1464.48	1423.7699999999998
                
      at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
      at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
      at org.scalatest.Assertions.fail(Assertions.scala:1089)
      at org.scalatest.Assertions.fail$(Assertions.scala:1085)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$31(HiveComparisonTest.scala:366)
      at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
      at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
      at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
      at scala.collection.TraversableLike.map(TraversableLike.scala:237)
      at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
      at scala.collection.AbstractTraversable.map(Traversable.scala:108)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest.doTest$1(HiveComparisonTest.scala:347)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$10(HiveComparisonTest.scala:467)
      at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
      at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
      at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
      at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
      at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
      at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
      at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
      at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
      at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
      at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
      at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
      at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
      at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
      at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
      at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
      at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
      at org.scalatest.Suite.run(Suite.scala:1147)
      at org.scalatest.Suite.run$(Suite.scala:1129)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
      at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
      at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
      at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
      at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
      at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
      at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
      at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
      at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
      at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
      at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
      at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
      at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
      at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
      at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
      at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
      at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
      at org.scalatest.Suite.run(Suite.scala:1144)
      at org.scalatest.Suite.run$(Suite.scala:1129)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
      at org.scalatest.tools.Runner$.main(Runner.scala:827)
      at org.scalatest.tools.Runner.main(Runner.scala)
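
The root cause recorded above is the IllegalStateException "Cannot call methods on a stopped SparkContext.": per the trace, the shared SparkContext created in StatisticsSuite had already been stopped by the time HiveTableScanExec invoked SparkContext.broadcast, which fails in SparkContext.assertNotStopped. As a minimal sketch (not part of this report; the object name, master, and app name are illustrative), the same exception can be reproduced in local mode by using a context after stop():

import org.apache.spark.{SparkConf, SparkContext}

object StoppedContextRepro {
  def main(args: Array[String]): Unit = {
    // Create a local SparkContext, then stop it immediately.
    val conf = new SparkConf().setMaster("local[2]").setAppName("stopped-context-repro")
    val sc = new SparkContext(conf)
    sc.stop()
    // Any subsequent driver-side call trips SparkContext.assertNotStopped,
    // mirroring the broadcast call in the stack trace above:
    sc.broadcast(Seq(1, 2, 3)) // java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
  }
}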