Failed to execute query using catalyst:
Error: execute, tree:
Exchange hashpartitioning(p_mfgr#204767, 5), true, [id=#152304]
+- Scan hive default.part [p_mfgr#204767, p_name#204766, p_size#204770, p_retailprice#204772], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204765, p_name#204766, p_mfgr#204767, p_brand#204768, p_type#204769, p_size#204770, p_container#204771, p_retailprice#204772, p_comment#204773]

org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(p_mfgr#204767, 5), true, [id=#152304]
+- Scan hive default.part [p_mfgr#204767, p_name#204766, p_size#204770, p_retailprice#204772], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204765, p_name#204766, p_mfgr#204767, p_brand#204768, p_type#204769, p_size#204770, p_container#204771, p_retailprice#204772, p_comment#204773]

 at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:90)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
 at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
 at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:124)
 at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:115)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
 at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
 at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
 at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:45)
 at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:329)
 at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:378)
 at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:408)
 at org.apache.spark.sql.execution.HiveResult$.hiveResultString(HiveResult.scala:52)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$32(HiveComparisonTest.scala:351)
 at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
 at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
 at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.getResult$1(HiveComparisonTest.scala:351)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$31(HiveComparisonTest.scala:353)
 at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
 at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
 at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
 at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
 at scala.collection.TraversableLike.map(TraversableLike.scala:237)
 at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
 at scala.collection.AbstractTraversable.map(Traversable.scala:108)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.doTest$1(HiveComparisonTest.scala:347)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$10(HiveComparisonTest.scala:467)
 at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
 at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
 at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
 at org.scalatest.Transformer.apply(Transformer.scala:22)
 at org.scalatest.Transformer.apply(Transformer.scala:20)
 at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
 at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
 at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
 at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
 at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
 at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
 at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
 at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
 at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
 at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
 at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
 at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
 at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
 at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
 at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
 at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
 at scala.collection.immutable.List.foreach(List.scala:392)
 at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
 at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
 at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
 at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
 at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
 at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
 at org.scalatest.Suite.run(Suite.scala:1147)
 at org.scalatest.Suite.run$(Suite.scala:1129)
 at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
 at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
 at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
 at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
 at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
 at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
 at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
 at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
 at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
 at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
 at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
 at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
 at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
 at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
 at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
 at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
 at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
 at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
 at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
 at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
 at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
 at org.scalatest.Suite.run(Suite.scala:1144)
 at org.scalatest.Suite.run$(Suite.scala:1129)
 at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
 at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
 at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
 at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
 at scala.collection.immutable.List.foreach(List.scala:392)
 at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
 at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
 at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
 at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
 at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
 at org.scalatest.tools.Runner$.main(Runner.scala:827)
 at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.hive.StatisticsSuite.<init>(StatisticsSuite.scala:46)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:66)
org.scalatest.tools.DiscoverySuite.$anonfun$nestedSuites$1(DiscoverySuite.scala:38)
scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
scala.collection.Iterator.foreach(Iterator.scala:941)
scala.collection.Iterator.foreach$(Iterator.scala:941)
scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
scala.collection.IterableLike.foreach(IterableLike.scala:74)
scala.collection.IterableLike.foreach$(IterableLike.scala:73)
scala.collection.AbstractIterable.foreach(Iterable.scala:56)
scala.collection.TraversableLike.map(TraversableLike.scala:237)
scala.collection.TraversableLike.map$(TraversableLike.scala:230)
scala.collection.AbstractTraversable.map(Traversable.scala:108)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:1165)

The currently active SparkContext was created at:

(No active SparkContext.)
 
 at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:109)
 at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1466)
 at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:90)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader$lzycompute(HiveTableScanExec.scala:110)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader(HiveTableScanExec.scala:105)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec.$anonfun$doExecute$1(HiveTableScanExec.scala:188)
 at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2488)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:188)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
 at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:64)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:64)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency$lzycompute(ShuffleExchangeExec.scala:74)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency(ShuffleExchangeExec.scala:72)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.createShuffledRDD(ShuffleExchangeExec.scala:82)
 at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.$anonfun$doExecute$1(ShuffleExchangeExec.scala:93)
 at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
 ... 118 more

select p_mfgr,p_name, p_size,
sum(p_retailprice) over w1 as s,
min(p_retailprice) over w1 as mi,
max(p_retailprice) over w1 as ma,
avg(p_retailprice) over w1 as ag
from part
window w1 as (distribute by p_mfgr sort by p_mfgr, p_name
 rows between 2 preceding and 2 following)
== Parsed Logical Plan ==
'WithWindowDefinition Map(w1 -> windowspecdefinition('p_mfgr, 'p_mfgr ASC NULLS FIRST, 'p_name ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)))
+- 'Project ['p_mfgr, 'p_name, 'p_size, unresolvedwindowexpression('sum('p_retailprice), WindowSpecReference(w1)) AS s#204761, unresolvedwindowexpression('min('p_retailprice), WindowSpecReference(w1)) AS mi#204762, unresolvedwindowexpression('max('p_retailprice), WindowSpecReference(w1)) AS ma#204763, unresolvedwindowexpression('avg('p_retailprice), WindowSpecReference(w1)) AS ag#204764]
 +- 'UnresolvedRelation [part]

== Analyzed Logical Plan ==
p_mfgr: string, p_name: string, p_size: int, s: double, mi: double, ma: double, ag: double
Project [p_mfgr#204767, p_name#204766, p_size#204770, s#204761, mi#204762, ma#204763, ag#204764]
+- Project [p_mfgr#204767, p_name#204766, p_size#204770, p_retailprice#204772, s#204761, mi#204762, ma#204763, ag#204764, s#204761, mi#204762, ma#204763, ag#204764]
 +- Window [sum(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS s#204761, min(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS mi#204762, max(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ma#204763, avg(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ag#204764], [p_mfgr#204767], [p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST]
 +- Project [p_mfgr#204767, p_name#204766, p_size#204770, p_retailprice#204772]
 +- SubqueryAlias `default`.`part`
 +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204765, p_name#204766, p_mfgr#204767, p_brand#204768, p_type#204769, p_size#204770, p_container#204771, p_retailprice#204772, p_comment#204773]

== Optimized Logical Plan ==
Project [p_mfgr#204767, p_name#204766, p_size#204770, s#204761, mi#204762, ma#204763, ag#204764]
+- Window [sum(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS s#204761, min(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS mi#204762, max(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ma#204763, avg(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ag#204764], [p_mfgr#204767], [p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST]
 +- Project [p_mfgr#204767, p_name#204766, p_size#204770, p_retailprice#204772]
 +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204765, p_name#204766, p_mfgr#204767, p_brand#204768, p_type#204769, p_size#204770, p_container#204771, p_retailprice#204772, p_comment#204773]

== Physical Plan ==
*(2) Project [p_mfgr#204767, p_name#204766, p_size#204770, s#204761, mi#204762, ma#204763, ag#204764]
+- Window [sum(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS s#204761, min(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS mi#204762, max(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ma#204763, avg(p_retailprice#204772) windowspecdefinition(p_mfgr#204767, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, 2)) AS ag#204764], [p_mfgr#204767], [p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST]
 +- *(1) Sort [p_mfgr#204767 ASC NULLS FIRST, p_mfgr#204767 ASC NULLS FIRST, p_name#204766 ASC NULLS FIRST], false, 0
 +- Exchange hashpartitioning(p_mfgr#204767, 5), true, [id=#152304]
 +- Scan hive default.part [p_mfgr#204767, p_name#204766, p_size#204770, p_retailprice#204772], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#204765, p_name#204766, p_mfgr#204767, p_brand#204768, p_type#204769, p_size#204770, p_container#204771, p_retailprice#204772, p_comment#204773]

== HIVE - 26 row(s) ==
Manufacturer#1 almond antique burnished rose metallic 2 4100.06 1173.15 1753.76 1366.6866666666667
Manufacturer#1 almond antique burnished rose metallic 2 5702.650000000001 1173.15 1753.76 1425.6625000000001
Manufacturer#1 almond antique chartreuse lavender yellow 34 7117.070000000001 1173.15 1753.76 1423.4140000000002
Manufacturer#1 almond antique salmon chartreuse burlywood 6 7576.58 1173.15 1753.76 1515.316
Manufacturer#1 almond aquamarine burnished black steel 28 6403.43 1414.42 1753.76 1600.8575
Manufacturer#1 almond aquamarine pink moccasin thistle 42 4649.67 1414.42 1632.66 1549.89
Manufacturer#2 almond antique violet chocolate turquoise 14 5523.360000000001 1690.68 2031.98 1841.1200000000001
Manufacturer#2 almond antique violet turquoise frosted 40 7222.02 1690.68 2031.98 1805.505
Manufacturer#2 almond aquamarine midnight light salmon 2 8923.62 1690.68 2031.98 1784.7240000000002
Manufacturer#2 almond aquamarine rose maroon antique 25 7232.9400000000005 1698.66 2031.98 1808.2350000000001
Manufacturer#2 almond aquamarine sandy cyan gainsboro 18 5432.24 1698.66 2031.98 1810.7466666666667
Manufacturer#3 almond antique chartreuse khaki white 17 4272.34 1190.27 1671.68 1424.1133333333335
Manufacturer#3 almond antique forest lavender goldenrod 14 6195.32 1190.27 1922.98 1548.83
Manufacturer#3 almond antique metallic orange dim 19 7532.61 1190.27 1922.98 1506.522
Manufacturer#3 almond antique misty red olive 1 5860.929999999999 1190.27 1922.98 1465.2324999999998
Manufacturer#3 almond antique olive coral navajo 45 4670.66 1337.29 1922.98 1556.8866666666665
Manufacturer#4 almond antique gainsboro frosted violet 10 4202.35 1206.26 1620.67 1400.7833333333335
Manufacturer#4 almond antique violet mint lemon 39 6047.27 1206.26 1844.92 1511.8175
Manufacturer#4 almond aquamarine floral ivory bisque 27 7337.620000000001 1206.26 1844.92 1467.5240000000001
Manufacturer#4 almond aquamarine yellow dodger mint 7 5716.950000000001 1206.26 1844.92 1429.2375000000002
Manufacturer#4 almond azure aquamarine papaya violet 12 4341.530000000001 1206.26 1844.92 1447.176666666667
Manufacturer#5 almond antique blue firebrick mint 31 5190.08 1611.66 1789.69 1730.0266666666666
Manufacturer#5 almond antique medium spring khaki 6 6208.18 1018.1 1789.69 1552.045
Manufacturer#5 almond antique sky peru orange 2 7672.66 1018.1 1789.69 1534.532
Manufacturer#5 almond aquamarine dodger light gainsboro 46 5882.970000000001 1018.1 1788.73 1470.7425000000003
Manufacturer#5 almond azure blanched chiffon midnight 23 4271.3099999999995 1018.1 1788.73 1423.7699999999998

org.scalatest.exceptions.TestFailedException: Failed to execute query using catalyst (full message duplicates the error, query, plans, and expected Hive output above)
at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
at org.scalatest.Assertions.fail(Assertions.scala:1089)
at org.scalatest.Assertions.fail$(Assertions.scala:1085)
at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$31(HiveComparisonTest.scala:366)
at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
at scala.collection.TraversableLike.map(TraversableLike.scala:237)
at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
at scala.collection.AbstractTraversable.map(Traversable.scala:108)
at org.apache.spark.sql.hive.execution.HiveComparisonTest.doTest$1(HiveComparisonTest.scala:347)
at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$10(HiveComparisonTest.scala:467)
at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
at org.scalatest.Suite.run(Suite.scala:1147)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
at org.scalatest.Suite.run(Suite.scala:1144)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
at org.scalatest.tools.Runner$.main(Runner.scala:827)
at org.scalatest.tools.Runner.main(Runner.scala)