select ts, dec, rnk
from
  (select ts, dec,
          rank() over (partition by ts order by dec) as rnk
   from
     (select other.ts, other.dec
      from over1k other
      join over1k on (other.b = over1k.b)
     ) joined
  ) ranked
where dec = 89.5
order by ts, dec, rnk
== Parsed Logical Plan ==
'Sort ['ts ASC NULLS FIRST, 'dec ASC NULLS FIRST, 'rnk ASC NULLS FIRST], true
+- 'Project ['ts, 'dec, 'rnk]
   +- 'Filter ('dec = 89.5)
      +- 'SubqueryAlias `ranked`
         +- 'Project ['ts, 'dec, 'rank() windowspecdefinition('ts, 'dec ASC NULLS FIRST, unspecifiedframe$()) AS rnk#204287]
            +- 'SubqueryAlias `joined`
               +- 'Project ['other.ts, 'other.dec]
                  +- 'Join Inner, ('other.b = 'over1k.b)
                     :- 'SubqueryAlias `other`
                     :  +- 'UnresolvedRelation [over1k]
                     +- 'UnresolvedRelation [over1k]

== Analyzed Logical Plan ==
ts: timestamp, dec: decimal(4,2), rnk: int
Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST], true
+- Project [ts#204298, dec#204299, rnk#204287]
   +- Filter (cast(dec#204299 as decimal(4,2)) = cast(89.5 as decimal(4,2)))
      +- SubqueryAlias `ranked`
         +- Project [ts#204298, dec#204299, rnk#204287]
            +- Project [ts#204298, dec#204299, rnk#204287, rnk#204287]
               +- Window [rank(dec#204299) windowspecdefinition(ts#204298, dec#204299 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS rnk#204287], [ts#204298], [dec#204299 ASC NULLS FIRST]
                  +- Project [ts#204298, dec#204299]
                     +- SubqueryAlias `joined`
                        +- Project [ts#204298, dec#204299]
                           +- Join Inner, (b#204293L = b#204304L)
                              :- SubqueryAlias `other`
                              :  +- SubqueryAlias `default`.`over1k`
                              :     +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204290, si#204291, i#204292, b#204293L, f#204294, d#204295, bo#204296, s#204297, ts#204298, dec#204299, bin#204300]
                              +- SubqueryAlias `default`.`over1k`
                                 +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204301, si#204302, i#204303, b#204304L, f#204305, d#204306, bo#204307, s#204308, ts#204309, dec#204310, bin#204311]

== Optimized Logical Plan ==
Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST], true
+- Filter (isnotnull(dec#204299) AND (dec#204299 = 89.50))
   +- Window [rank(dec#204299) windowspecdefinition(ts#204298, dec#204299 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS rnk#204287], [ts#204298], [dec#204299 ASC NULLS FIRST]
      +- Project [ts#204298, dec#204299]
         +- Join Inner, (b#204293L = b#204304L)
            :- Project [b#204293L, ts#204298, dec#204299]
            :  +- Filter isnotnull(b#204293L)
            :     +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204290, si#204291, i#204292, b#204293L, f#204294, d#204295, bo#204296, s#204297, ts#204298, dec#204299, bin#204300]
            +- Project [b#204304L]
               +- Filter isnotnull(b#204304L)
                  +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204301, si#204302, i#204303, b#204304L, f#204305, d#204306, bo#204307, s#204308, ts#204309, dec#204310, bin#204311]

== Physical Plan ==
*(5) Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST, 5), true, [id=#151854]
   +- *(4) Filter (isnotnull(dec#204299) AND (dec#204299 = 89.50))
      +- Window [rank(dec#204299) windowspecdefinition(ts#204298, dec#204299 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS rnk#204287], [ts#204298], [dec#204299 ASC NULLS FIRST]
         +- *(3) Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST], false, 0
            +- Exchange hashpartitioning(ts#204298, 5), true, [id=#151846]
               +- *(2) Project [ts#204298, dec#204299]
                  +- *(2) BroadcastHashJoin [b#204293L], [b#204304L], Inner, BuildRight
                     :- *(2) Filter isnotnull(b#204293L)
                     :  +- Scan hive default.over1k [b#204293L, ts#204298, dec#204299], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204290, si#204291, i#204292, b#204293L, f#204294, d#204295, bo#204296, s#204297, ts#204298, dec#204299, bin#204300]
                     +- BroadcastExchange HashedRelationBroadcastMode(List(input[0, bigint, false])), [id=#151841]
                        +- *(1) Filter isnotnull(b#204304L)
                           +- Scan hive default.over1k [b#204304L], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204301, si#204302, i#204303, b#204304L, f#204305, d#204306, bo#204307, s#204308, ts#204309, dec#204310, bin#204311]

== HIVE - 0 row(s) ==
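The test failure reproduced below ultimately bottoms out in java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext: the BroadcastExchange for the self-join tries to submit a Spark job, but the shared test SparkContext (the one created by StatisticsSuite, per the creation site in the trace) has already been stopped, so the failure is environmental rather than a defect in the window-function query itself. A minimal Scala sketch of how that error arises, assuming an ordinary local SparkSession (the object name and master URL are illustrative, not taken from this report):

import org.apache.spark.sql.SparkSession

// Minimal sketch (illustrative, not part of the test suite): once a
// SparkContext has been stopped, any later job submission fails with
// "Cannot call methods on a stopped SparkContext".
object StoppedContextSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("stopped-context-sketch")
      .getOrCreate()

    // Simulates what an earlier suite does to the shared context.
    spark.sparkContext.stop()

    // This action now throws java.lang.IllegalStateException,
    // mirroring the root cause in the trace below.
    spark.range(10).collect()
  }
}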


      org.scalatest.exceptions.TestFailedException: 
Failed to execute query using catalyst:
Error: execute, tree:
Exchange rangepartitioning(ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST, 5), true, [id=#151854]
+- *(4) Filter (isnotnull(dec#204299) AND (dec#204299 = 89.50))
   +- Window [rank(dec#204299) windowspecdefinition(ts#204298, dec#204299 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS rnk#204287], [ts#204298], [dec#204299 ASC NULLS FIRST]
      +- *(3) Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST], false, 0
         +- Exchange hashpartitioning(ts#204298, 5), true, [id=#151846]
            +- *(2) Project [ts#204298, dec#204299]
               +- *(2) BroadcastHashJoin [b#204293L], [b#204304L], Inner, BuildRight
                  :- *(2) Filter isnotnull(b#204293L)
                  :  +- Scan hive default.over1k [b#204293L, ts#204298, dec#204299], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204290, si#204291, i#204292, b#204293L, f#204294, d#204295, bo#204296, s#204297, ts#204298, dec#204299, bin#204300]
                  +- BroadcastExchange HashedRelationBroadcastMode(List(input[0, bigint, false])), [id=#151841]
                     +- *(1) Filter isnotnull(b#204304L)
                        +- Scan hive default.over1k [b#204304L], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204301, si#204302, i#204303, b#204304L, f#204305, d#204306, bo#204307, s#204308, ts#204309, dec#204310, bin#204311]

org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange rangepartitioning(ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST, 5), true, [id=#151854]
+- *(4) Filter (isnotnull(dec#204299) AND (dec#204299 = 89.50))
   +- Window [rank(dec#204299) windowspecdefinition(ts#204298, dec#204299 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS rnk#204287], [ts#204298], [dec#204299 ASC NULLS FIRST]
      +- *(3) Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST], false, 0
         +- Exchange hashpartitioning(ts#204298, 5), true, [id=#151846]
            +- *(2) Project [ts#204298, dec#204299]
               +- *(2) BroadcastHashJoin [b#204293L], [b#204304L], Inner, BuildRight
                  :- *(2) Filter isnotnull(b#204293L)
                  :  +- Scan hive default.over1k [b#204293L, ts#204298, dec#204299], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204290, si#204291, i#204292, b#204293L, f#204294, d#204295, bo#204296, s#204297, ts#204298, dec#204299, bin#204300]
                  +- BroadcastExchange HashedRelationBroadcastMode(List(input[0, bigint, false])), [id=#151841]
                     +- *(1) Filter isnotnull(b#204304L)
                        +- Scan hive default.over1k [b#204304L], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204301, si#204302, i#204303, b#204304L, f#204305, d#204306, bo#204307, s#204308, ts#204309, dec#204310, bin#204311]

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:90)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:124)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:329)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:378)
	at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:408)
	at org.apache.spark.sql.execution.HiveResult$.hiveResultString(HiveResult.scala:52)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$32(HiveComparisonTest.scala:351)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.getResult$1(HiveComparisonTest.scala:351)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$31(HiveComparisonTest.scala:353)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
	at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
	at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
	at scala.collection.TraversableLike.map(TraversableLike.scala:237)
	at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
	at scala.collection.AbstractTraversable.map(Traversable.scala:108)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.doTest$1(HiveComparisonTest.scala:347)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$10(HiveComparisonTest.scala:467)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
	at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
	at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
	at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite.run(Suite.scala:1147)
	at org.scalatest.Suite.run$(Suite.scala:1129)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
	at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
	at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
	at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
	at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
	at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
	at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
	at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
	at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
	at org.scalatest.Suite.run(Suite.scala:1144)
	at org.scalatest.Suite.run$(Suite.scala:1129)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
	at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
	at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
	at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
	at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
	at org.scalatest.tools.Runner$.main(Runner.scala:827)
	at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange hashpartitioning(ts#204298, 5), true, [id=#151846]
+- *(2) Project [ts#204298, dec#204299]
   +- *(2) BroadcastHashJoin [b#204293L], [b#204304L], Inner, BuildRight
      :- *(2) Filter isnotnull(b#204293L)
      :  +- Scan hive default.over1k [b#204293L, ts#204298, dec#204299], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204290, si#204291, i#204292, b#204293L, f#204294, d#204295, bo#204296, s#204297, ts#204298, dec#204299, bin#204300]
      +- BroadcastExchange HashedRelationBroadcastMode(List(input[0, bigint, false])), [id=#151841]
         +- *(1) Filter isnotnull(b#204304L)
            +- Scan hive default.over1k [b#204304L], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204301, si#204302, i#204303, b#204304L, f#204305, d#204306, bo#204307, s#204308, ts#204309, dec#204310, bin#204311]

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:90)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:124)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.window.WindowExec.doExecute(WindowExec.scala:115)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.FilterExec.inputRDDs(basicPhysicalOperators.scala:132)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:64)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:64)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency$lzycompute(ShuffleExchangeExec.scala:74)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency(ShuffleExchangeExec.scala:72)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.createShuffledRDD(ShuffleExchangeExec.scala:82)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.$anonfun$doExecute$1(ShuffleExchangeExec.scala:93)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
	... 101 more
Caused by: java.util.concurrent.ExecutionException: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.hive.StatisticsSuite.<init>(StatisticsSuite.scala:46)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:66)
org.scalatest.tools.DiscoverySuite.$anonfun$nestedSuites$1(DiscoverySuite.scala:38)
scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
scala.collection.Iterator.foreach(Iterator.scala:941)
scala.collection.Iterator.foreach$(Iterator.scala:941)
scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
scala.collection.IterableLike.foreach(IterableLike.scala:74)
scala.collection.IterableLike.foreach$(IterableLike.scala:73)
scala.collection.AbstractIterable.foreach(Iterable.scala:56)
scala.collection.TraversableLike.map(TraversableLike.scala:237)
scala.collection.TraversableLike.map$(TraversableLike.scala:230)
scala.collection.AbstractTraversable.map(Traversable.scala:108)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:1165)

The currently active SparkContext was created at:

(No active SparkContext.)
         
	at java.util.concurrent.FutureTask.report(FutureTask.java:122)
	at java.util.concurrent.FutureTask.get(FutureTask.java:206)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:167)
	at org.apache.spark.sql.execution.InputAdapter.doExecuteBroadcast(WholeStageCodegenExec.scala:514)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeBroadcast$1(SparkPlan.scala:202)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:198)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareBroadcast(BroadcastHashJoinExec.scala:116)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenInner(BroadcastHashJoinExec.scala:210)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:100)
	at org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:193)
	at org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:148)
	at org.apache.spark.sql.execution.FilterExec.consume(basicPhysicalOperators.scala:96)
	at org.apache.spark.sql.execution.FilterExec.doConsume(basicPhysicalOperators.scala:217)
	at org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:193)
	at org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:148)
	at org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.InputRDDCodegen.doProduce(WholeStageCodegenExec.scala:482)
	at org.apache.spark.sql.execution.InputRDDCodegen.doProduce$(WholeStageCodegenExec.scala:455)
	at org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:94)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:89)
	at org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:89)
	at org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.FilterExec.doProduce(basicPhysicalOperators.scala:136)
	at org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:94)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:89)
	at org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:89)
	at org.apache.spark.sql.execution.FilterExec.produce(basicPhysicalOperators.scala:96)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:95)
	at org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:94)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:89)
	at org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:89)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:39)
	at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:49)
	at org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:94)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:89)
	at org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:89)
	at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:39)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:629)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:689)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:64)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:64)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency$lzycompute(ShuffleExchangeExec.scala:74)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency(ShuffleExchangeExec.scala:72)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.createShuffledRDD(ShuffleExchangeExec.scala:82)
	at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.$anonfun$doExecute$1(ShuffleExchangeExec.scala:93)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
	... 142 more
Caused by: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.hive.StatisticsSuite.<init>(StatisticsSuite.scala:46)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
java.lang.Class.newInstance(Class.java:442)
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:66)
org.scalatest.tools.DiscoverySuite.$anonfun$nestedSuites$1(DiscoverySuite.scala:38)
scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
scala.collection.Iterator.foreach(Iterator.scala:941)
scala.collection.Iterator.foreach$(Iterator.scala:941)
scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
scala.collection.IterableLike.foreach(IterableLike.scala:74)
scala.collection.IterableLike.foreach$(IterableLike.scala:73)
scala.collection.AbstractIterable.foreach(Iterable.scala:56)
scala.collection.TraversableLike.map(TraversableLike.scala:237)
scala.collection.TraversableLike.map$(TraversableLike.scala:230)
scala.collection.AbstractTraversable.map(Traversable.scala:108)
org.scalatest.tools.DiscoverySuite.<init>(DiscoverySuite.scala:37)
org.scalatest.tools.Runner$.genDiscoSuites$1(Runner.scala:1165)

The currently active SparkContext was created at:

(No active SparkContext.)
         
	at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:109)
	at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1466)
	at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:90)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader$lzycompute(HiveTableScanExec.scala:110)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader(HiveTableScanExec.scala:105)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.$anonfun$doExecute$1(HiveTableScanExec.scala:188)
	at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2488)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:188)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:524)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:452)
	at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:451)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:495)
	at org.apache.spark.sql.execution.FilterExec.inputRDDs(basicPhysicalOperators.scala:132)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:717)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:329)
	at org.apache.spark.sql.execution.SparkPlan.executeCollectIterator(SparkPlan.scala:388)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anon$1.$anonfun$call$1(BroadcastExchangeExec.scala:89)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withExecutionId$1(SQLExecution.scala:136)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.execution.SQLExecution$.withExecutionId(SQLExecution.scala:134)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anon$1.call(BroadcastExchangeExec.scala:83)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anon$1.call(BroadcastExchangeExec.scala:78)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
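
The root cause above is Spark's guard in SparkContext.assertNotStopped: the broadcast build side of the join (HadoopTableReader calling SparkContext.broadcast) runs against a SparkContext that was stopped elsewhere — the log shows it was originally created by StatisticsSuite. A minimal sketch of the same failure mode outside the test harness follows; the object name, local master, and broadcast value are illustrative only, not part of the failing suite.

import org.apache.spark.{SparkConf, SparkContext}

object StoppedContextSketch {
  def main(args: Array[String]): Unit = {
    // Create a local context, then stop it, as a stand-in for the context
    // that another suite had already shut down.
    val sc = new SparkContext(
      new SparkConf().setMaster("local[2]").setAppName("stopped-context-sketch"))
    sc.stop()

    // Any later call that reaches assertNotStopped, e.g. broadcast(), throws:
    //   java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
    sc.broadcast(Seq(1, 2, 3))
  }
}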

select ts, dec, rnk
from
  (select ts, dec,
          rank() over (partition by ts order by dec)  as rnk
          from
            (select other.ts, other.dec
             from over1k other
             join over1k on (other.b = over1k.b)
            ) joined
  ) ranked
where dec = 89.5
order by ts, dec, rnk
== Parsed Logical Plan ==
'Sort ['ts ASC NULLS FIRST, 'dec ASC NULLS FIRST, 'rnk ASC NULLS FIRST], true
+- 'Project ['ts, 'dec, 'rnk]
   +- 'Filter ('dec = 89.5)
      +- 'SubqueryAlias `ranked`
         +- 'Project ['ts, 'dec, 'rank() windowspecdefinition('ts, 'dec ASC NULLS FIRST, unspecifiedframe$()) AS rnk#204287]
            +- 'SubqueryAlias `joined`
               +- 'Project ['other.ts, 'other.dec]
                  +- 'Join Inner, ('other.b = 'over1k.b)
                     :- 'SubqueryAlias `other`
                     :  +- 'UnresolvedRelation [over1k]
                     +- 'UnresolvedRelation [over1k]

== Analyzed Logical Plan ==
ts: timestamp, dec: decimal(4,2), rnk: int
Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST], true
+- Project [ts#204298, dec#204299, rnk#204287]
   +- Filter (cast(dec#204299 as decimal(4,2)) = cast(89.5 as decimal(4,2)))
      +- SubqueryAlias `ranked`
         +- Project [ts#204298, dec#204299, rnk#204287]
            +- Project [ts#204298, dec#204299, rnk#204287, rnk#204287]
               +- Window [rank(dec#204299) windowspecdefinition(ts#204298, dec#204299 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS rnk#204287], [ts#204298], [dec#204299 ASC NULLS FIRST]
                  +- Project [ts#204298, dec#204299]
                     +- SubqueryAlias `joined`
                        +- Project [ts#204298, dec#204299]
                           +- Join Inner, (b#204293L = b#204304L)
                              :- SubqueryAlias `other`
                              :  +- SubqueryAlias `default`.`over1k`
                              :     +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204290, si#204291, i#204292, b#204293L, f#204294, d#204295, bo#204296, s#204297, ts#204298, dec#204299, bin#204300]
                              +- SubqueryAlias `default`.`over1k`
                                 +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204301, si#204302, i#204303, b#204304L, f#204305, d#204306, bo#204307, s#204308, ts#204309, dec#204310, bin#204311]

== Optimized Logical Plan ==
Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST], true
+- Filter (isnotnull(dec#204299) AND (dec#204299 = 89.50))
   +- Window [rank(dec#204299) windowspecdefinition(ts#204298, dec#204299 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS rnk#204287], [ts#204298], [dec#204299 ASC NULLS FIRST]
      +- Project [ts#204298, dec#204299]
         +- Join Inner, (b#204293L = b#204304L)
            :- Project [b#204293L, ts#204298, dec#204299]
            :  +- Filter isnotnull(b#204293L)
            :     +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204290, si#204291, i#204292, b#204293L, f#204294, d#204295, bo#204296, s#204297, ts#204298, dec#204299, bin#204300]
            +- Project [b#204304L]
               +- Filter isnotnull(b#204304L)
                  +- HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204301, si#204302, i#204303, b#204304L, f#204305, d#204306, bo#204307, s#204308, ts#204309, dec#204310, bin#204311]

== Physical Plan ==
*(5) Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST], true, 0
+- Exchange rangepartitioning(ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST, rnk#204287 ASC NULLS FIRST, 5), true, [id=#151854]
   +- *(4) Filter (isnotnull(dec#204299) AND (dec#204299 = 89.50))
      +- Window [rank(dec#204299) windowspecdefinition(ts#204298, dec#204299 ASC NULLS FIRST, specifiedwindowframe(RowFrame, unboundedpreceding$(), currentrow$())) AS rnk#204287], [ts#204298], [dec#204299 ASC NULLS FIRST]
         +- *(3) Sort [ts#204298 ASC NULLS FIRST, dec#204299 ASC NULLS FIRST], false, 0
            +- Exchange hashpartitioning(ts#204298, 5), true, [id=#151846]
               +- *(2) Project [ts#204298, dec#204299]
                  +- *(2) BroadcastHashJoin [b#204293L], [b#204304L], Inner, BuildRight
                     :- *(2) Filter isnotnull(b#204293L)
                     :  +- Scan hive default.over1k [b#204293L, ts#204298, dec#204299], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204290, si#204291, i#204292, b#204293L, f#204294, d#204295, bo#204296, s#204297, ts#204298, dec#204299, bin#204300]
                     +- BroadcastExchange HashedRelationBroadcastMode(List(input[0, bigint, false])), [id=#151841]
                        +- *(1) Filter isnotnull(b#204304L)
                           +- Scan hive default.over1k [b#204304L], HiveTableRelation `default`.`over1k`, org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, [t#204301, si#204302, i#204303, b#204304L, f#204305, d#204306, bo#204307, s#204308, ts#204309, dec#204310, bin#204311]

== HIVE - 0 row(s) ==
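
The block above is the expected (Hive golden) answer for this query: zero rows. For reference, a hedged sketch of re-running the same SQL interactively against a Hive-enabled session is shown below; it assumes the default.over1k test table is already loaded, and the session setup (app name, local master) is illustrative rather than the suite's own configuration.

import org.apache.spark.sql.SparkSession

object WindowRankRepro {
  def main(args: Array[String]): Unit = {
    // Hive support is required so that default.over1k resolves to the Hive table.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("windowing-rank-repro")
      .enableHiveSupport()
      .getOrCreate()

    // The query text is taken verbatim from the failure message above.
    spark.sql(
      """select ts, dec, rnk
        |from
        |  (select ts, dec,
        |          rank() over (partition by ts order by dec) as rnk
        |   from
        |     (select other.ts, other.dec
        |      from over1k other
        |      join over1k on (other.b = over1k.b)
        |     ) joined
        |  ) ranked
        |where dec = 89.5
        |order by ts, dec, rnk""".stripMargin).show()

    spark.stop()
  }
}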

                
      at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
      at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
      at org.scalatest.Assertions.fail(Assertions.scala:1089)
      at org.scalatest.Assertions.fail$(Assertions.scala:1085)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$31(HiveComparisonTest.scala:366)
      at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
      at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
      at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
      at scala.collection.TraversableLike.map(TraversableLike.scala:237)
      at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
      at scala.collection.AbstractTraversable.map(Traversable.scala:108)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest.doTest$1(HiveComparisonTest.scala:347)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest.$anonfun$createQueryTest$10(HiveComparisonTest.scala:467)
      at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
      at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
      at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
      at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
      at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
      at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
      at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
      at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveWindowFunctionQuerySuite.scala:35)
      at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
      at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
      at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.runTest(HiveWindowFunctionQuerySuite.scala:35)
      at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
      at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
      at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
      at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
      at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
      at org.scalatest.Suite.run(Suite.scala:1147)
      at org.scalatest.Suite.run$(Suite.scala:1129)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
      at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
      at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
      at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
      at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
      at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
      at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
      at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.org$scalatest$BeforeAndAfter$$super$run(HiveWindowFunctionQuerySuite.scala:35)
      at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
      at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
      at org.apache.spark.sql.hive.execution.HiveWindowFunctionQuerySuite.run(HiveWindowFunctionQuerySuite.scala:35)
      at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
      at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
      at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
      at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
      at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
      at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
      at org.scalatest.Suite.run(Suite.scala:1144)
      at org.scalatest.Suite.run$(Suite.scala:1129)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
      at org.scalatest.tools.Runner$.main(Runner.scala:827)
      at org.scalatest.tools.Runner.main(Runner.scala)