sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: 
Failed to execute query using catalyst:
Error: execute, tree:
Exchange hashpartitioning(p_mfgr#154217, 5)
+- *(1) Project [p_mfgr#154217, p_name#154216, lv_col#154224, p_size#154220]
   +- Generate explode([1,2,3]), [p_mfgr#154217, p_name#154216, p_size#154220], false, [lv_col#154224]
      +- Scan hive default.part [p_mfgr#154217, p_name#154216, p_size#154220], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154215, p_name#154216, p_mfgr#154217, p_brand#154218, p_type#154219, p_size#154220, p_container#154221, p_retailprice#154222, p_comment#154223]

org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(p_mfgr#154217, 5)
+- *(1) Project [p_mfgr#154217, p_name#154216, lv_col#154224, p_size#154220]
   +- Generate explode([1,2,3]), [p_mfgr#154217, p_name#154216, p_size#154220], false, [lv_col#154224]
      +- Scan hive default.part [p_mfgr#154217, p_name#154216, p_size#154220], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154215, p_name#154216, p_mfgr#154217, p_brand#154218, p_type#154219, p_size#154220, p_container#154221, p_retailprice#154222, p_comment#154223]

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:121)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:302)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:294)
	at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:324)
	at org.apache.spark.sql.execution.QueryExecution.hiveResultString(QueryExecution.scala:121)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25$$anonfun$getResult$1$1.apply(HiveComparisonTest.scala:348)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25$$anonfun$getResult$1$1.apply(HiveComparisonTest.scala:348)
	at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.getResult$1(HiveComparisonTest.scala:348)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:350)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:345)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.doTest$1(HiveComparisonTest.scala:345)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.apply(HiveComparisonTest.scala:462)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:203)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite$class.run(Suite.scala:1147)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:258)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.apply(HiveComparisonTest.scala:456)
org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
org.scalatest.Transformer.apply(Transformer.scala:22)
org.scalatest.Transformer.apply(Transformer.scala:20)
org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
org.scalatest.FunSuite.runTest(FunSuite.scala:1560)
org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
scala.collection.immutable.List.foreach(List.scala:392)
org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)

The currently active SparkContext was created at:

(No active SparkContext.)
         
	at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
	at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1492)
	at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:89)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.org$apache$spark$sql$hive$execution$HiveTableScanExec$$hadoopReader$lzycompute(HiveTableScanExec.scala:105)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.org$apache$spark$sql$hive$execution$HiveTableScanExec$$hadoopReader(HiveTableScanExec.scala:105)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:188)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:188)
	at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2457)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:187)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.GenerateExec.doExecute(GenerateExec.scala:80)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:41)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:128)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
	... 87 more
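
The root cause is the stock IllegalStateException that Spark raises whenever a job is submitted after the SparkContext has been stopped; the trace above shows the stopped context was the one created at HiveComparisonTest.scala:456, presumably shut down earlier in the run. A minimal, self-contained sketch of how this exception arises (the app name and master below are illustrative, not taken from this suite):

import org.apache.spark.sql.SparkSession

object StoppedContextRepro {
  def main(args: Array[String]): Unit = {
    // Build a local session; master and app name are placeholders.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("stopped-context-repro")
      .getOrCreate()

    val df = spark.range(10)

    // Stopping the session stops the underlying SparkContext.
    spark.stop()

    // Any later job submission trips SparkContext.assertNotStopped and throws:
    //   java.lang.IllegalStateException:
    //   Cannot call methods on a stopped SparkContext.
    df.collect()
  }
}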

select p_mfgr, p_name,
lv_col, p_size, sum(p_size) over w1   as s
from (select p_mfgr, p_name, p_size, array(1,2,3) arr from part) p
lateral view explode(arr) part_lv as lv_col
window w1 as (distribute by p_mfgr sort by p_size, lv_col
             rows between 2 preceding and current row)
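
For reference, a hedged sketch of how this query could be run standalone, outside the test harness. It assumes a Hive-enabled SparkSession and an existing TPC-H-style `part` table in the `default` database; the builder settings are illustrative:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .master("local[2]")
  .enableHiveSupport()
  .getOrCreate()

val result = spark.sql("""
  select p_mfgr, p_name,
         lv_col, p_size, sum(p_size) over w1 as s
  from (select p_mfgr, p_name, p_size, array(1,2,3) arr from part) p
  lateral view explode(arr) part_lv as lv_col
  window w1 as (distribute by p_mfgr sort by p_size, lv_col
                rows between 2 preceding and current row)
""")

// Prints the parsed/analyzed/optimized/physical plans, as in the dump below.
result.explain(true)
result.show(numRows = 78, truncate = false)
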
== Parsed Logical Plan ==
'WithWindowDefinition Map(w1 -> windowspecdefinition('p_mfgr, 'p_size ASC NULLS FIRST, 'lv_col ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, currentrow$())))
+- 'Project ['p_mfgr, 'p_name, 'lv_col, 'p_size, unresolvedwindowexpression('sum('p_size), WindowSpecReference(w1)) AS s#154214]
   +- 'Generate 'explode('arr), false, part_lv, ['lv_col]
      +- 'SubqueryAlias `p`
         +- 'Project ['p_mfgr, 'p_name, 'p_size, 'array(1, 2, 3) AS arr#154213]
            +- 'UnresolvedRelation `part`

== Analyzed Logical Plan ==
p_mfgr: string, p_name: string, lv_col: int, p_size: int, s: bigint
Project [p_mfgr#154217, p_name#154216, lv_col#154224, p_size#154220, s#154214L]
+- Project [p_mfgr#154217, p_name#154216, lv_col#154224, p_size#154220, s#154214L, s#154214L]
   +- Window [sum(cast(p_size#154220 as bigint)) windowspecdefinition(p_mfgr#154217, p_size#154220 ASC NULLS FIRST, lv_col#154224 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, currentrow$())) AS s#154214L], [p_mfgr#154217], [p_size#154220 ASC NULLS FIRST, lv_col#154224 ASC NULLS FIRST]
      +- Project [p_mfgr#154217, p_name#154216, lv_col#154224, p_size#154220]
         +- Generate explode(arr#154213), false, part_lv, [lv_col#154224]
            +- SubqueryAlias `p`
               +- Project [p_mfgr#154217, p_name#154216, p_size#154220, array(1, 2, 3) AS arr#154213]
                  +- SubqueryAlias `default`.`part`
                     +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154215, p_name#154216, p_mfgr#154217, p_brand#154218, p_type#154219, p_size#154220, p_container#154221, p_retailprice#154222, p_comment#154223]

== Optimized Logical Plan ==
Window [sum(cast(p_size#154220 as bigint)) windowspecdefinition(p_mfgr#154217, p_size#154220 ASC NULLS FIRST, lv_col#154224 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, currentrow$())) AS s#154214L], [p_mfgr#154217], [p_size#154220 ASC NULLS FIRST, lv_col#154224 ASC NULLS FIRST]
+- Project [p_mfgr#154217, p_name#154216, lv_col#154224, p_size#154220]
   +- Generate explode([1,2,3]), [3], false, part_lv, [lv_col#154224]
      +- Project [p_mfgr#154217, p_name#154216, p_size#154220]
         +- HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154215, p_name#154216, p_mfgr#154217, p_brand#154218, p_type#154219, p_size#154220, p_container#154221, p_retailprice#154222, p_comment#154223]

== Physical Plan ==
Window [sum(cast(p_size#154220 as bigint)) windowspecdefinition(p_mfgr#154217, p_size#154220 ASC NULLS FIRST, lv_col#154224 ASC NULLS FIRST, specifiedwindowframe(RowFrame, -2, currentrow$())) AS s#154214L], [p_mfgr#154217], [p_size#154220 ASC NULLS FIRST, lv_col#154224 ASC NULLS FIRST]
+- *(2) Sort [p_mfgr#154217 ASC NULLS FIRST, p_size#154220 ASC NULLS FIRST, lv_col#154224 ASC NULLS FIRST], false, 0
   +- Exchange hashpartitioning(p_mfgr#154217, 5)
      +- *(1) Project [p_mfgr#154217, p_name#154216, lv_col#154224, p_size#154220]
         +- Generate explode([1,2,3]), [p_mfgr#154217, p_name#154216, p_size#154220], false, [lv_col#154224]
            +- Scan hive default.part [p_mfgr#154217, p_name#154216, p_size#154220], HiveTableRelation `default`.`part`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [p_partkey#154215, p_name#154216, p_mfgr#154217, p_brand#154218, p_type#154219, p_size#154220, p_container#154221, p_retailprice#154222, p_comment#154223]
== HIVE - 78 row(s) ==
Manufacturer#1	almond antique burnished rose metallic	1	2	2
Manufacturer#1	almond antique burnished rose metallic	1	2	4
Manufacturer#1	almond antique burnished rose metallic	2	2	6
Manufacturer#1	almond antique burnished rose metallic	2	2	6
Manufacturer#1	almond antique burnished rose metallic	3	2	6
Manufacturer#1	almond antique burnished rose metallic	3	2	6
Manufacturer#1	almond antique salmon chartreuse burlywood	1	6	10
Manufacturer#1	almond antique salmon chartreuse burlywood	2	6	14
Manufacturer#1	almond antique salmon chartreuse burlywood	3	6	18
Manufacturer#1	almond aquamarine burnished black steel	1	28	40
Manufacturer#1	almond aquamarine burnished black steel	2	28	62
Manufacturer#1	almond aquamarine burnished black steel	3	28	84
Manufacturer#1	almond antique chartreuse lavender yellow	1	34	90
Manufacturer#1	almond antique chartreuse lavender yellow	2	34	96
Manufacturer#1	almond antique chartreuse lavender yellow	3	34	102
Manufacturer#1	almond aquamarine pink moccasin thistle	1	42	110
Manufacturer#1	almond aquamarine pink moccasin thistle	2	42	118
Manufacturer#1	almond aquamarine pink moccasin thistle	3	42	126
Manufacturer#2	almond aquamarine midnight light salmon	1	2	2
Manufacturer#2	almond aquamarine midnight light salmon	2	2	4
Manufacturer#2	almond aquamarine midnight light salmon	3	2	6
Manufacturer#2	almond antique violet chocolate turquoise	1	14	18
Manufacturer#2	almond antique violet chocolate turquoise	2	14	30
Manufacturer#2	almond antique violet chocolate turquoise	3	14	42
Manufacturer#2	almond aquamarine sandy cyan gainsboro	1	18	46
Manufacturer#2	almond aquamarine sandy cyan gainsboro	2	18	50
Manufacturer#2	almond aquamarine sandy cyan gainsboro	3	18	54
Manufacturer#2	almond aquamarine rose maroon antique	1	25	61
Manufacturer#2	almond aquamarine rose maroon antique	2	25	68
Manufacturer#2	almond aquamarine rose maroon antique	3	25	75
Manufacturer#2	almond antique violet turquoise frosted	1	40	90
Manufacturer#2	almond antique violet turquoise frosted	2	40	105
Manufacturer#2	almond antique violet turquoise frosted	3	40	120
Manufacturer#3	almond antique misty red olive	1	1	1
Manufacturer#3	almond antique misty red olive	2	1	2
Manufacturer#3	almond antique misty red olive	3	1	3
Manufacturer#3	almond antique forest lavender goldenrod	1	14	16
Manufacturer#3	almond antique forest lavender goldenrod	2	14	29
Manufacturer#3	almond antique forest lavender goldenrod	3	14	42
Manufacturer#3	almond antique chartreuse khaki white	1	17	45
Manufacturer#3	almond antique chartreuse khaki white	2	17	48
Manufacturer#3	almond antique chartreuse khaki white	3	17	51
Manufacturer#3	almond antique metallic orange dim	1	19	53
Manufacturer#3	almond antique metallic orange dim	2	19	55
Manufacturer#3	almond antique metallic orange dim	3	19	57
Manufacturer#3	almond antique olive coral navajo	1	45	83
Manufacturer#3	almond antique olive coral navajo	2	45	109
Manufacturer#3	almond antique olive coral navajo	3	45	135
Manufacturer#4	almond aquamarine yellow dodger mint	1	7	7
Manufacturer#4	almond aquamarine yellow dodger mint	2	7	14
Manufacturer#4	almond aquamarine yellow dodger mint	3	7	21
Manufacturer#4	almond antique gainsboro frosted violet	1	10	24
Manufacturer#4	almond antique gainsboro frosted violet	2	10	27
Manufacturer#4	almond antique gainsboro frosted violet	3	10	30
Manufacturer#4	almond azure aquamarine papaya violet	1	12	32
Manufacturer#4	almond azure aquamarine papaya violet	2	12	34
Manufacturer#4	almond azure aquamarine papaya violet	3	12	36
Manufacturer#4	almond aquamarine floral ivory bisque	1	27	51
Manufacturer#4	almond aquamarine floral ivory bisque	2	27	66
Manufacturer#4	almond aquamarine floral ivory bisque	3	27	81
Manufacturer#4	almond antique violet mint lemon	1	39	93
Manufacturer#4	almond antique violet mint lemon	2	39	105
Manufacturer#4	almond antique violet mint lemon	3	39	117
Manufacturer#5	almond antique sky peru orange	1	2	2
Manufacturer#5	almond antique sky peru orange	2	2	4
Manufacturer#5	almond antique sky peru orange	3	2	6
Manufacturer#5	almond antique medium spring khaki	1	6	10
Manufacturer#5	almond antique medium spring khaki	2	6	14
Manufacturer#5	almond antique medium spring khaki	3	6	18
Manufacturer#5	almond azure blanched chiffon midnight	1	23	35
Manufacturer#5	almond azure blanched chiffon midnight	2	23	52
Manufacturer#5	almond azure blanched chiffon midnight	3	23	69
Manufacturer#5	almond antique blue firebrick mint	1	31	77
Manufacturer#5	almond antique blue firebrick mint	2	31	85
Manufacturer#5	almond antique blue firebrick mint	3	31	93
Manufacturer#5	almond aquamarine dodger light gainsboro	1	46	108
Manufacturer#5	almond aquamarine dodger light gainsboro	2	46	123
Manufacturer#5	almond aquamarine dodger light gainsboro	3	46	138
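
As a sanity check on the expected rows (hypothetical, not part of the suite): the frame `rows between 2 preceding and current row` is just a sliding sum over at most three p_size values within each p_mfgr partition. For the first six Manufacturer#1 rows above (p_size = 2, 2, 2, 2, 2, 2) this reproduces s = 2, 4, 6, 6, 6, 6:

val sizes = Seq(2, 2, 2, 2, 2, 2)
// Sum each value together with up to two preceding values.
val rollingSums = sizes.indices.map(i => sizes.slice(math.max(0, i - 2), i + 1).sum)
println(rollingSums)  // Vector(2, 4, 6, 6, 6, 6)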
                
	at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528)
	at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
	at org.scalatest.Assertions$class.fail(Assertions.scala:1089)
	at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:363)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:345)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.doTest$1(HiveComparisonTest.scala:345)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.apply(HiveComparisonTest.scala:462)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:203)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite$class.run(Suite.scala:1147)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:258)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)