      org.scalatest.exceptions.TestFailedException: 
Exception thrown while executing query:
== Parsed Logical Plan ==
Sort [a#130340L ASC], true
+- Relation[a#130340L,b#130341] JSONRelation

== Analyzed Logical Plan ==
a: bigint, b: string
Sort [a#130340L ASC], true
+- Relation[a#130340L,b#130341] JSONRelation

== Optimized Logical Plan ==
Sort [a#130340L ASC], true
+- Relation[a#130340L,b#130341] JSONRelation

== Physical Plan ==
Sort [a#130340L ASC], true, 0
+- ConvertToUnsafe
   +- Exchange rangepartitioning(a#130340L ASC,2), None
      +- ConvertToSafe
         +- Scan JSONRelation[a#130340L,b#130341] InputPaths: file:/home/jenkins/workspace/spark-branch-1.6-test-maven-pre-yarn-1.2.1/sql/hive/target/tmp/spark-068da416-c2da-4b00-bede-247340bf0893
== Exception ==
org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange rangepartitioning(a#130340L ASC,2), None
+- ConvertToSafe
   +- Scan JSONRelation[a#130340L,b#130341] InputPaths: file:/home/jenkins/workspace/spark-branch-1.6-test-maven-pre-yarn-1.2.1/sql/hive/target/tmp/spark-068da416-c2da-4b00-bede-247340bf0893

org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange rangepartitioning(a#130340L ASC,2), None
+- ConvertToSafe
   +- Scan JSONRelation[a#130340L,b#130341] InputPaths: file:/home/jenkins/workspace/spark-branch-1.6-test-maven-pre-yarn-1.2.1/sql/hive/target/tmp/spark-068da416-c2da-4b00-bede-247340bf0893

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
	at org.apache.spark.sql.execution.Exchange.doExecute(Exchange.scala:247)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
	at org.apache.spark.sql.execution.ConvertToUnsafe.doExecute(rowFormatConverters.scala:38)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
	at org.apache.spark.sql.execution.Sort.doExecute(Sort.scala:64)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:166)
	at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:174)
	at org.apache.spark.sql.DataFrame$$anonfun$org$apache$spark$sql$DataFrame$$execute$1$1.apply(DataFrame.scala:1499)
	at org.apache.spark.sql.DataFrame$$anonfun$org$apache$spark$sql$DataFrame$$execute$1$1.apply(DataFrame.scala:1499)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:56)
	at org.apache.spark.sql.DataFrame.withNewExecutionId(DataFrame.scala:2086)
	at org.apache.spark.sql.DataFrame.org$apache$spark$sql$DataFrame$$execute$1(DataFrame.scala:1498)
	at org.apache.spark.sql.DataFrame$$anonfun$org$apache$spark$sql$DataFrame$$collect$1.apply(DataFrame.scala:1503)
	at org.apache.spark.sql.DataFrame$$anonfun$org$apache$spark$sql$DataFrame$$collect$1.apply(DataFrame.scala:1503)
	at org.apache.spark.sql.DataFrame.withCallback(DataFrame.scala:2099)
	at org.apache.spark.sql.DataFrame.org$apache$spark$sql$DataFrame$$collect(DataFrame.scala:1503)
	at org.apache.spark.sql.DataFrame.collect(DataFrame.scala:1480)
	at org.apache.spark.sql.QueryTest$.checkAnswer(QueryTest.scala:313)
	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:133)
	at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2$$anonfun$apply$mcV$sp$22.apply(hadoopFsRelationSuites.scala:177)
	at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2$$anonfun$apply$mcV$sp$22.apply(hadoopFsRelationSuites.scala:173)
	at org.apache.spark.sql.test.SQLTestUtils$class.withTempPath(SQLTestUtils.scala:125)
	at org.apache.spark.sql.sources.HadoopFsRelationTest.withTempPath(hadoopFsRelationSuites.scala:37)
	at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2.apply$mcV$sp(hadoopFsRelationSuites.scala:173)
	at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2.apply(hadoopFsRelationSuites.scala:173)
	at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2.apply(hadoopFsRelationSuites.scala:173)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:42)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.spark.sql.sources.HadoopFsRelationTest.org$scalatest$BeforeAndAfterAll$$super$run(hadoopFsRelationSuites.scala:37)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.spark.sql.sources.HadoopFsRelationTest.run(hadoopFsRelationSuites.scala:37)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: org.apache.spark.SparkException: Job 5532 cancelled because SparkContext was shut down
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:806)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:804)
	at scala.collection.mutable.HashSet.foreach(HashSet.scala:79)
	at org.apache.spark.scheduler.DAGScheduler.cleanUpAfterSchedulerStop(DAGScheduler.scala:804)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onStop(DAGScheduler.scala:1658)
	at org.apache.spark.util.EventLoop.stop(EventLoop.scala:84)
	at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:1581)
	at org.apache.spark.SparkContext$$anonfun$stop$9.apply$mcV$sp(SparkContext.scala:1740)
	at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1229)
	at org.apache.spark.SparkContext.stop(SparkContext.scala:1739)
	at org.apache.spark.SparkContext$$anonfun$3.apply$mcV$sp(SparkContext.scala:596)
	at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:267)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:239)
	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1765)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply$mcV$sp(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:239)
	at scala.util.Try$.apply(Try.scala:161)
	at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:218)
	at java.lang.Thread.run(Thread.java:745)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:620)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1832)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1845)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1858)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1929)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:927)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
	at org.apache.spark.rdd.RDD.collect(RDD.scala:926)
	at org.apache.spark.RangePartitioner$.sketch(Partitioner.scala:264)
	at org.apache.spark.RangePartitioner.<init>(Partitioner.scala:126)
	at org.apache.spark.sql.execution.Exchange.prepareShuffleDependency(Exchange.scala:179)
	at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:254)
	at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:248)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
	... 89 more

          
      at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
      at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
      at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:134)
      at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2$$anonfun$apply$mcV$sp$22.apply(hadoopFsRelationSuites.scala:177)
      at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2$$anonfun$apply$mcV$sp$22.apply(hadoopFsRelationSuites.scala:173)
      at org.apache.spark.sql.test.SQLTestUtils$class.withTempPath(SQLTestUtils.scala:125)
      at org.apache.spark.sql.sources.HadoopFsRelationTest.withTempPath(hadoopFsRelationSuites.scala:37)
      at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2.apply$mcV$sp(hadoopFsRelationSuites.scala:173)
      at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2.apply(hadoopFsRelationSuites.scala:173)
      at org.apache.spark.sql.sources.HadoopFsRelationTest$$anonfun$2.apply(hadoopFsRelationSuites.scala:173)
      at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
      at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:42)
      at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
      at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
      at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
      at scala.collection.immutable.List.foreach(List.scala:318)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
      at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
      at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
      at org.scalatest.Suite$class.run(Suite.scala:1424)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
      at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
      at org.apache.spark.sql.sources.HadoopFsRelationTest.org$scalatest$BeforeAndAfterAll$$super$run(hadoopFsRelationSuites.scala:37)
      at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
      at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
      at org.apache.spark.sql.sources.HadoopFsRelationTest.run(hadoopFsRelationSuites.scala:37)
      at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108)
      at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
      at org.scalatest.Suite$class.run(Suite.scala:1421)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
      at scala.collection.immutable.List.foreach(List.scala:318)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
      at org.scalatest.tools.Runner$.main(Runner.scala:860)
      at org.scalatest.tools.Runner.main(Runner.scala)