sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: 
Failed to execute query using catalyst:
Error: execute, tree:
Exchange hashpartitioning(p_mfgr#154183, 5)
+- *(2) HashAggregate(keys=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188], functions=[min(p_retailprice#154188), max(p_retailprice#154188)], output=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, mi#154178, ma#154179])
   +- Exchange hashpartitioning(p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, 5)
      +- *(1) HashAggregate(keys=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188], functions=[partial_min(p_retailprice#154188), partial_max(p_retailprice#154188)], output=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, min#154196, max#154197])
         +- Scan hive default.part [p_name#154182, p_mfgr#154183, p_size#154186, p_retailprice#154188], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154181, p_name#154182, p_mfgr#154183, p_brand#154184, p_type#154185, p_size#154186, p_container#154187, p_retailprice#154188, p_comment#154189]

org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(p_mfgr#154183, 5)
+- *(2) HashAggregate(keys=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188], functions=[min(p_retailprice#154188), max(p_retailprice#154188)], output=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, mi#154178, ma#154179])
   +- Exchange hashpartitioning(p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, 5)
      +- *(1) HashAggregate(keys=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188], functions=[partial_min(p_retailprice#154188), partial_max(p_retailprice#154188)], output=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, min#154196, max#154197])
         +- Scan hive default.part [p_name#154182, p_mfgr#154183, p_size#154186, p_retailprice#154188], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154181, p_name#154182, p_mfgr#154183, p_brand#154184, p_type#154185, p_size#154186, p_container#154187, p_retailprice#154188, p_comment#154189]

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:121)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:302)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:41)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:294)
	at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:324)
	at org.apache.spark.sql.execution.QueryExecution.hiveResultString(QueryExecution.scala:121)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25$$anonfun$getResult$1$1.apply(HiveComparisonTest.scala:348)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25$$anonfun$getResult$1$1.apply(HiveComparisonTest.scala:348)
	at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.getResult$1(HiveComparisonTest.scala:348)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:350)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:345)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.doTest$1(HiveComparisonTest.scala:345)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.apply(HiveComparisonTest.scala:462)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:203)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite$class.run(Suite.scala:1147)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:258)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, 5)
+- *(1) HashAggregate(keys=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188], functions=[partial_min(p_retailprice#154188), partial_max(p_retailprice#154188)], output=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, min#154196, max#154197])
   +- Scan hive default.part [p_name#154182, p_mfgr#154183, p_size#154186, p_retailprice#154188], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154181, p_name#154182, p_mfgr#154183, p_brand#154184, p_type#154185, p_size#154186, p_container#154187, p_retailprice#154188, p_comment#154189]

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:151)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:128)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
	... 96 more
Caused by: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.apply(HiveComparisonTest.scala:456)
org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
org.scalatest.Transformer.apply(Transformer.scala:22)
org.scalatest.Transformer.apply(Transformer.scala:20)
org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
org.scalatest.FunSuite.runTest(FunSuite.scala:1560)
org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
scala.collection.immutable.List.foreach(List.scala:392)
org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)

The currently active SparkContext was created at:

(No active SparkContext.)
         
	at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
	at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1492)
	at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:89)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.org$apache$spark$sql$hive$execution$HiveTableScanExec$$hadoopReader$lzycompute(HiveTableScanExec.scala:105)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.org$apache$spark$sql$hive$execution$HiveTableScanExec$$hadoopReader(HiveTableScanExec.scala:105)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:188)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:188)
	at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2457)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:187)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:151)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:128)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
	... 116 more

select  p_mfgr,p_name, p_size, p_retailprice,
sum(p_retailprice) over w1 as s,
min(p_retailprice) as mi ,
max(p_retailprice) as ma ,
avg(p_retailprice) over w1 as ag
from part
group by p_mfgr,p_name, p_size, p_retailprice
window w1 as (distribute by p_mfgr sort by p_mfgr, p_name
             rows between 2 preceding and 2 following)
== Parsed Logical Plan ==
'WithWindowDefinition Map(w1 -> windowspecdefinition('p_mfgr, 'p_mfgr ASC NULLS FIRST, 'p_name ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)))
+- 'Aggregate ['p_mfgr, 'p_name, 'p_size, 'p_retailprice], ['p_mfgr, 'p_name, 'p_size, 'p_retailprice, unresolvedwindowexpression('sum('p_retailprice), WindowSpecReference(w1)) AS s#154177, 'min('p_retailprice) AS mi#154178, 'max('p_retailprice) AS ma#154179, unresolvedwindowexpression('avg('p_retailprice), WindowSpecReference(w1)) AS ag#154180]
   +- 'UnresolvedRelation `part`

== Analyzed Logical Plan ==
p_mfgr: string, p_name: string, p_size: int, p_retailprice: double, s: double, mi: double, ma: double, ag: double
Project [p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, s#154177, mi#154178, ma#154179, ag#154180]
+- Project [p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, mi#154178, ma#154179, s#154177, ag#154180, s#154177, ag#154180]
   +- Window [sum(p_retailprice#154188) windowspecdefinition(p_mfgr#154183, p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS s#154177, avg(p_retailprice#154188) windowspecdefinition(p_mfgr#154183, p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ag#154180], [p_mfgr#154183], [p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST]
      +- Aggregate [p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188], [p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, min(p_retailprice#154188) AS mi#154178, max(p_retailprice#154188) AS ma#154179]
         +- SubqueryAlias `default`.`part`
            +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154181, p_name#154182, p_mfgr#154183, p_brand#154184, p_type#154185, p_size#154186, p_container#154187, p_retailprice#154188, p_comment#154189]

== Optimized Logical Plan ==
Project [p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, s#154177, mi#154178, ma#154179, ag#154180]
+- Window [sum(p_retailprice#154188) windowspecdefinition(p_mfgr#154183, p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS s#154177, avg(p_retailprice#154188) windowspecdefinition(p_mfgr#154183, p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ag#154180], [p_mfgr#154183], [p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST]
   +- Aggregate [p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188], [p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, min(p_retailprice#154188) AS mi#154178, max(p_retailprice#154188) AS ma#154179]
      +- Project [p_name#154182, p_mfgr#154183, p_size#154186, p_retailprice#154188]
         +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154181, p_name#154182, p_mfgr#154183, p_brand#154184, p_type#154185, p_size#154186, p_container#154187, p_retailprice#154188, p_comment#154189]

== Physical Plan ==
*(4) Project [p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, s#154177, mi#154178, ma#154179, ag#154180]
+- Window [sum(p_retailprice#154188) windowspecdefinition(p_mfgr#154183, p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS s#154177, avg(p_retailprice#154188) windowspecdefinition(p_mfgr#154183, p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ag#154180], [p_mfgr#154183], [p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST]
   +- *(3) Sort [p_mfgr#154183 ASC NULLS FIRST, p_mfgr#154183 ASC NULLS FIRST, p_name#154182 ASC NULLS FIRST], false, 0
      +- Exchange hashpartitioning(p_mfgr#154183, 5)
         +- *(2) HashAggregate(keys=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188], functions=[min(p_retailprice#154188), max(p_retailprice#154188)], output=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, mi#154178, ma#154179])
            +- Exchange hashpartitioning(p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, 5)
               +- *(1) HashAggregate(keys=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188], functions=[partial_min(p_retailprice#154188), partial_max(p_retailprice#154188)], output=[p_mfgr#154183, p_name#154182, p_size#154186, p_retailprice#154188, min#154196, max#154197])
                  +- Scan hive default.part [p_name#154182, p_mfgr#154183, p_size#154186, p_retailprice#154188], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154181, p_name#154182, p_mfgr#154183, p_brand#154184, p_type#154185, p_size#154186, p_container#154187, p_retailprice#154188, p_comment#154189]
== HIVE - 25 row(s) ==
Manufacturer#1	almond antique burnished rose metallic	2	1173.15	4529.5	1173.15	1173.15	1509.8333333333333
Manufacturer#1	almond antique chartreuse lavender yellow	34	1753.76	5943.92	1753.76	1753.76	1485.98
Manufacturer#1	almond antique salmon chartreuse burlywood	6	1602.59	7576.58	1602.59	1602.59	1515.316
Manufacturer#1	almond aquamarine burnished black steel	28	1414.42	6403.43	1414.42	1414.42	1600.8575
Manufacturer#1	almond aquamarine pink moccasin thistle	42	1632.66	4649.67	1632.66	1632.66	1549.89
Manufacturer#2	almond antique violet chocolate turquoise	14	1690.68	5523.360000000001	1690.68	1690.68	1841.1200000000001
Manufacturer#2	almond antique violet turquoise frosted	40	1800.7	7222.02	1800.7	1800.7	1805.505
Manufacturer#2	almond aquamarine midnight light salmon	2	2031.98	8923.62	2031.98	2031.98	1784.7240000000002
Manufacturer#2	almond aquamarine rose maroon antique	25	1698.66	7232.9400000000005	1698.66	1698.66	1808.2350000000001
Manufacturer#2	almond aquamarine sandy cyan gainsboro	18	1701.6	5432.24	1701.6	1701.6	1810.7466666666667
Manufacturer#3	almond antique chartreuse khaki white	17	1671.68	4272.34	1671.68	1671.68	1424.1133333333335
Manufacturer#3	almond antique forest lavender goldenrod	14	1190.27	6195.32	1190.27	1190.27	1548.83
Manufacturer#3	almond antique metallic orange dim	19	1410.39	7532.61	1410.39	1410.39	1506.522
Manufacturer#3	almond antique misty red olive	1	1922.98	5860.929999999999	1922.98	1922.98	1465.2324999999998
Manufacturer#3	almond antique olive coral navajo	45	1337.29	4670.66	1337.29	1337.29	1556.8866666666665
Manufacturer#4	almond antique gainsboro frosted violet	10	1620.67	4202.35	1620.67	1620.67	1400.7833333333335
Manufacturer#4	almond antique violet mint lemon	39	1375.42	6047.27	1375.42	1375.42	1511.8175
Manufacturer#4	almond aquamarine floral ivory bisque	27	1206.26	7337.620000000001	1206.26	1206.26	1467.5240000000001
Manufacturer#4	almond aquamarine yellow dodger mint	7	1844.92	5716.950000000001	1844.92	1844.92	1429.2375000000002
Manufacturer#4	almond azure aquamarine papaya violet	12	1290.35	4341.530000000001	1290.35	1290.35	1447.176666666667
Manufacturer#5	almond antique blue firebrick mint	31	1789.69	5190.08	1789.69	1789.69	1730.0266666666666
Manufacturer#5	almond antique medium spring khaki	6	1611.66	6208.18	1611.66	1611.66	1552.045
Manufacturer#5	almond antique sky peru orange	2	1788.73	7672.66	1788.73	1788.73	1534.532
Manufacturer#5	almond aquamarine dodger light gainsboro	46	1018.1	5882.970000000001	1018.1	1018.1	1470.7425000000003
Manufacturer#5	almond azure blanched chiffon midnight	23	1464.48	4271.3099999999995	1464.48	1464.48	1423.7699999999998
                
	at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528)
	at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
	at org.scalatest.Assertions$class.fail(Assertions.scala:1089)
	at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:363)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:345)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.doTest$1(HiveComparisonTest.scala:345)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.apply(HiveComparisonTest.scala:462)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:203)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite$class.run(Suite.scala:1147)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:258)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)