sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: 
Failed to execute query using catalyst:
Error: execute, tree:
Exchange SinglePartition
+- *(3) Project [s#153536, f#153533, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542]
   +- Window [sum(_w0#153543) windowspecdefinition(i#153531, specifiedwindowframe(RowFrame, unboundedpreceding$(), unboundedfollowing$())) AS sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542], [i#153531]
      +- *(2) Sort [i#153531 ASC NULLS FIRST], false, 0
         +- Exchange hashpartitioning(i#153531, 5)
            +- *(1) Project [s#153536, cast(f#153533 as double) AS _w0#153543, i#153531, f#153533]
               +- *(1) Filter ((s#153536 = tom allen) || (s#153536 = bob steinbeck))
                  +- Scan hive default.over1k [f#153533, i#153531, s#153536], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#153529, si#153530, i#153531, b#153532L, f#153533, d#153534, bo#153535, s#153536, ts#153537, dec#153538, bin#153539]

org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange SinglePartition
+- *(3) Project [s#153536, f#153533, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542]
   +- Window [sum(_w0#153543) windowspecdefinition(i#153531, specifiedwindowframe(RowFrame, unboundedpreceding$(), unboundedfollowing$())) AS sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542], [i#153531]
      +- *(2) Sort [i#153531 ASC NULLS FIRST], false, 0
         +- Exchange hashpartitioning(i#153531, 5)
            +- *(1) Project [s#153536, cast(f#153533 as double) AS _w0#153543, i#153531, f#153533]
               +- *(1) Filter ((s#153536 = tom allen) || (s#153536 = bob steinbeck))
                  +- Scan hive default.over1k [f#153533, i#153531, s#153536], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#153529, si#153530, i#153531, b#153532L, f#153533, d#153534, bo#153535, s#153536, ts#153537, dec#153538, bin#153539]

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:121)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:302)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:41)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:294)
	at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:324)
	at org.apache.spark.sql.execution.QueryExecution.hiveResultString(QueryExecution.scala:121)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25$$anonfun$getResult$1$1.apply(HiveComparisonTest.scala:348)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25$$anonfun$getResult$1$1.apply(HiveComparisonTest.scala:348)
	at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.getResult$1(HiveComparisonTest.scala:348)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:350)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:345)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.doTest$1(HiveComparisonTest.scala:345)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.apply(HiveComparisonTest.scala:462)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:203)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite$class.run(Suite.scala:1147)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:258)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(i#153531, 5)
+- *(1) Project [s#153536, cast(f#153533 as double) AS _w0#153543, i#153531, f#153533]
   +- *(1) Filter ((s#153536 = tom allen) || (s#153536 = bob steinbeck))
      +- Scan hive default.over1k [f#153533, i#153531, s#153536], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#153529, si#153530, i#153531, b#153532L, f#153533, d#153534, bo#153535, s#153536, ts#153537, dec#153538, bin#153539]

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:121)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:302)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:41)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:128)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
	... 96 more
Caused by: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.apply(HiveComparisonTest.scala:456)
org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
org.scalatest.Transformer.apply(Transformer.scala:22)
org.scalatest.Transformer.apply(Transformer.scala:20)
org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
org.scalatest.FunSuite.runTest(FunSuite.scala:1560)
org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
scala.collection.immutable.List.foreach(List.scala:392)
org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)

The currently active SparkContext was created at:

(No active SparkContext.)
         
	at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:100)
	at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1492)
	at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:89)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.org$apache$spark$sql$hive$execution$HiveTableScanExec$$hadoopReader$lzycompute(HiveTableScanExec.scala:105)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.org$apache$spark$sql$hive$execution$HiveTableScanExec$$hadoopReader(HiveTableScanExec.scala:105)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:188)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:188)
	at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2457)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:187)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:374)
	at org.apache.spark.sql.execution.FilterExec.inputRDDs(basicPhysicalOperators.scala:121)
	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:41)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:610)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:128)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
	... 132 more

select s, sum(f) over (partition by i), row_number() over (order by f)
from over1k where s = 'tom allen' or s = 'bob steinbeck'
== Parsed Logical Plan ==
'Project ['s, unresolvedalias('sum('f) windowspecdefinition('i, unspecifiedframe$()), None), unresolvedalias('row_number() windowspecdefinition('f ASC NULLS FIRST, unspecifiedframe$()), None)]
+- 'Filter (('s = tom allen) || ('s = bob steinbeck))
   +- 'UnresolvedRelation `over1k`

== Analyzed Logical Plan ==
s: string, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING): double, row_number() OVER (ORDER BY f ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW): int
Project [s#153536, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542, row_number() OVER (ORDER BY f ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)#153541]
+- Project [s#153536, _w0#153543, i#153531, f#153533, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542, row_number() OVER (ORDER BY f ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)#153541, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542, row_number() OVER (ORDER BY f ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)#153541]
   +- Window [row_number() windowspecdefinition(f#153533 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS row_number() OVER (ORDER BY f ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)#153541], [f#153533 ASC NULLS FIRST]
      +- Window [sum(_w0#153543) windowspecdefinition(i#153531, specifiedwindowframe(RowFrame, unboundedpreceding$(), unboundedfollowing$())) AS sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542], [i#153531]
         +- Project [s#153536, cast(f#153533 as double) AS _w0#153543, i#153531, f#153533]
            +- Filter ((s#153536 = tom allen) || (s#153536 = bob steinbeck))
               +- SubqueryAlias `default`.`over1k`
                  +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#153529, si#153530, i#153531, b#153532L, f#153533, d#153534, bo#153535, s#153536, ts#153537, dec#153538, bin#153539]

== Optimized Logical Plan ==
Project [s#153536, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542, row_number() OVER (ORDER BY f ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)#153541]
+- Window [row_number() windowspecdefinition(f#153533 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS row_number() OVER (ORDER BY f ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)#153541], [f#153533 ASC NULLS FIRST]
   +- Project [s#153536, f#153533, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542]
      +- Window [sum(_w0#153543) windowspecdefinition(i#153531, specifiedwindowframe(RowFrame, unboundedpreceding$(), unboundedfollowing$())) AS sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542], [i#153531]
         +- Project [s#153536, cast(f#153533 as double) AS _w0#153543, i#153531, f#153533]
            +- Filter ((s#153536 = tom allen) || (s#153536 = bob steinbeck))
               +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#153529, si#153530, i#153531, b#153532L, f#153533, d#153534, bo#153535, s#153536, ts#153537, dec#153538, bin#153539]

== Physical Plan ==
*(5) Project [s#153536, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542, row_number() OVER (ORDER BY f ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)#153541]
+- Window [row_number() windowspecdefinition(f#153533 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS row_number() OVER (ORDER BY f ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)#153541], [f#153533 ASC NULLS FIRST]
   +- *(4) Sort [f#153533 ASC NULLS FIRST], false, 0
      +- Exchange SinglePartition
         +- *(3) Project [s#153536, f#153533, sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542]
            +- Window [sum(_w0#153543) windowspecdefinition(i#153531, specifiedwindowframe(RowFrame, unboundedpreceding$(), unboundedfollowing$())) AS sum(CAST(f AS DOUBLE)) OVER (PARTITION BY i ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)#153542], [i#153531]
               +- *(2) Sort [i#153531 ASC NULLS FIRST], false, 0
                  +- Exchange hashpartitioning(i#153531, 5)
                     +- *(1) Project [s#153536, cast(f#153533 as double) AS _w0#153543, i#153531, f#153533]
                        +- *(1) Filter ((s#153536 = tom allen) || (s#153536 = bob steinbeck))
                           +- Scan hive default.over1k [f#153533, i#153531, s#153536], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#153529, si#153530, i#153531, b#153532L, f#153533, d#153534, bo#153535, s#153536, ts#153537, dec#153538, bin#153539]
== HIVE - 1 row(s) ==
bob steinbeck	9.699999809265137	1
                
	at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528)
	at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
	at org.scalatest.Assertions$class.fail(Assertions.scala:1089)
	at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:363)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6$$anonfun$25.apply(HiveComparisonTest.scala:345)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.doTest$1(HiveComparisonTest.scala:345)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$6.apply(HiveComparisonTest.scala:462)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:103)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:203)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite$class.run(Suite.scala:1147)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:52)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:258)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)