Failed to execute query using catalyst:
Error: Exception thrown in awaitResult: 
org.apache.spark.SparkException: Exception thrown in awaitResult: 
 at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:194)
 at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:102)
 at org.apache.spark.sql.execution.InputAdapter.doExecuteBroadcast(WholeStageCodegenExec.scala:229)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:121)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:121)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
 at org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:120)
 at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareBroadcast(BroadcastHashJoinExec.scala:98)
 at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenInner(BroadcastHashJoinExec.scala:197)
 at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:82)
 at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
 at org.apache.spark.sql.execution.FilterExec.consume(basicPhysicalOperators.scala:79)
 at org.apache.spark.sql.execution.FilterExec.doConsume(basicPhysicalOperators.scala:194)
 at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
 at org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:218)
 at org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:244)
 at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
 at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
 at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
 at org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:218)
 at org.apache.spark.sql.execution.FilterExec.doProduce(basicPhysicalOperators.scala:113)
 at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
 at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
 at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
 at org.apache.spark.sql.execution.FilterExec.produce(basicPhysicalOperators.scala:79)
 at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:77)
 at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
 at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
 at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
 at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:38)
 at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:40)
 at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
 at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
 at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
 at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:30)
 at org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:304)
 at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:343)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
 at org.apache.spark.sql.hive.execution.InsertIntoHiveTable.sideEffectResult$lzycompute(InsertIntoHiveTable.scala:228)
 at org.apache.spark.sql.hive.execution.InsertIntoHiveTable.sideEffectResult(InsertIntoHiveTable.scala:141)
 at org.apache.spark.sql.hive.execution.InsertIntoHiveTable.executeCollect(InsertIntoHiveTable.scala:300)
 at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:306)
 at org.apache.spark.sql.execution.QueryExecution.hiveResultString(QueryExecution.scala:126)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:392)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:342)
 at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
 at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
 at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
 at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
 at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
 at scala.collection.AbstractTraversable.map(Traversable.scala:104)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.doTest$1(HiveComparisonTest.scala:342)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:505)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
 at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
 at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
 at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
 at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
 at org.scalatest.Transformer.apply(Transformer.scala:22)
 at org.scalatest.Transformer.apply(Transformer.scala:20)
 at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
 at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:56)
 at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
 at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
 at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
 at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
 at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
 at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:33)
 at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
 at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:33)
 at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
 at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
 at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
 at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
 at scala.collection.immutable.List.foreach(List.scala:381)
 at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
 at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
 at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
 at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
 at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
 at org.scalatest.Suite$class.run(Suite.scala:1424)
 at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
 at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
 at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
 at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
 at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
 at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:28)
 at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
 at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
 at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:33)
 at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
 at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:33)
 at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
 at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
 at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
 at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
 at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
 at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
 at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
 at org.scalatest.Suite$class.run(Suite.scala:1421)
 at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
 at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
 at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
 at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
 at scala.collection.immutable.List.foreach(List.scala:381)
 at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
 at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
 at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
 at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
 at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
 at org.scalatest.tools.Runner$.main(Runner.scala:860)
 at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: java.lang.RuntimeException: java.io.FileNotFoundException: /home/jenkins/.m2/repository/org/apache/hadoop/hadoop-common/2.4.0/hadoop-common-2.4.0.jar (No such file or directory)
 at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:2332)
 at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2185)
 at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2102)
 at org.apache.hadoop.conf.Configuration.set(Configuration.java:979)
 at org.apache.hadoop.conf.Configuration.set(Configuration.java:953)
 at org.apache.hadoop.mapred.JobConf.setJar(JobConf.java:517)
 at org.apache.hadoop.mapred.JobConf.setJarByClass(JobConf.java:535)
 at org.apache.hadoop.mapred.JobConf.<init>(JobConf.java:429)
 at org.apache.hadoop.hive.conf.HiveConf.initialize(HiveConf.java:2681)
 at org.apache.hadoop.hive.conf.HiveConf.<init>(HiveConf.java:2641)
 at org.apache.hadoop.hive.ql.session.SessionState.getSessionConf(SessionState.java:833)
 at org.apache.hadoop.hive.ql.metadata.Partition.getDeserializer(Partition.java:250)
 at org.apache.spark.sql.hive.HadoopTableReader$$anonfun$3.apply(TableReader.scala:133)
 at org.apache.spark.sql.hive.HadoopTableReader$$anonfun$3.apply(TableReader.scala:132)
 at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
 at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
 at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
 at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
 at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
 at scala.collection.AbstractTraversable.map(Traversable.scala:104)
 at org.apache.spark.sql.hive.HadoopTableReader.makeRDDForPartitionedTable(TableReader.scala:132)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:150)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:150)
 at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2210)
 at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:149)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
 at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:233)
 at org.apache.spark.sql.execution.FilterExec.inputRDDs(basicPhysicalOperators.scala:109)
 at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:348)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
 at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
 at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
 at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
 at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
 at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:236)
 at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:283)
 at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:74)
 at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:71)
 at org.apache.spark.sql.execution.SQLExecution$.withExecutionId(SQLExecution.scala:91)
 at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:71)
 at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:71)
 at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
 at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
 at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
 at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
 at java.lang.Thread.run(Thread.java:745)
Caused by: java.io.FileNotFoundException: /home/jenkins/.m2/repository/org/apache/hadoop/hadoop-common/2.4.0/hadoop-common-2.4.0.jar (No such file or directory)
 at java.util.zip.ZipFile.open(Native Method)
 at java.util.zip.ZipFile.<init>(ZipFile.java:219)
 at java.util.zip.ZipFile.<init>(ZipFile.java:149)
 at java.util.jar.JarFile.<init>(JarFile.java:166)
 at java.util.jar.JarFile.<init>(JarFile.java:103)
 at sun.net.www.protocol.jar.URLJarFile.<init>(URLJarFile.java:93)
 at sun.net.www.protocol.jar.URLJarFile.getJarFile(URLJarFile.java:69)
 at sun.net.www.protocol.jar.JarFileFactory.get(JarFileFactory.java:99)
 at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:122)
 at sun.net.www.protocol.jar.JarURLConnection.getInputStream(JarURLConnection.java:150)
 at java.net.URL.openStream(URL.java:1038)
 at org.apache.hadoop.conf.Configuration.parse(Configuration.java:2161)
 at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:2232)
 ... 51 more

insert overwrite table tst partition(d='2009-01-01')
select tst.a, src.value from tst join src ON (tst.a = src.key)
== Parsed Logical Plan ==
'InsertIntoTable 'UnresolvedRelation `tst`, None, Map(d -> Some(2009-01-01)), true, false
+- 'Project ['tst.a,'src.value]
   +- 'Join Inner, Some(('tst.a = 'src.key))
      :- 'UnresolvedRelation `tst`, None
      +- 'UnresolvedRelation `src`, None

== Analyzed Logical Plan ==

InsertIntoHiveTable MetastoreRelation default, tst, None, Map(d -> Some(2009-01-01)), true, false
+- Project [a#82402,value#82405]
   +- Join Inner, Some((cast(a#82402 as double) = cast(key#82404 as double)))
      :- MetastoreRelation default, tst, None
      +- MetastoreRelation default, src, None

== Optimized Logical Plan ==
InsertIntoHiveTable MetastoreRelation default, tst, None, Map(d -> Some(2009-01-01)), true, false
+- Project [a#82402,value#82405]
   +- Join Inner, Some((cast(a#82402 as double) = cast(key#82404 as double)))
      :- Project [a#82402]
      :  +- Filter isnotnull(a#82402)
      :     +- MetastoreRelation default, tst, None
      +- Filter isnotnull(key#82404)
         +- InMemoryRelation [key#82404,value#82405], true, 5, StorageLevel(disk=true, memory=true, offheap=false, deserialized=true, replication=1), HiveTableScan [key#82388,value#82389], MetastoreRelation default, src, None, Some(src)

== Physical Plan ==
InsertIntoHiveTable MetastoreRelation default, tst, None, Map(d -> Some(2009-01-01)), true, false
+- WholeStageCodegen
   :  +- Project [a#82402,value#82405]
   :     +- BroadcastHashJoin [cast(a#82402 as double)], [cast(key#82404 as double)], Inner, BuildLeft, None
   :        :- INPUT
   :        +- Filter isnotnull(key#82404)
   :           +- INPUT
   :- BroadcastExchange HashedRelationBroadcastMode(List(cast(input[0, string] as double)))
   :  +- WholeStageCodegen
   :     :  +- Filter isnotnull(a#82402)
   :     :     +- INPUT
   :     +- HiveTableScan [a#82402], MetastoreRelation default, tst, None
   +- InMemoryTableScan [key#82404,value#82405], [isnotnull(key#82404)], InMemoryRelation [key#82404,value#82405], true, 5, StorageLevel(disk=true, memory=true, offheap=false, deserialized=true, replication=1), HiveTableScan [key#82388,value#82389], MetastoreRelation default, src, None, Some(src)
== HIVE - 0 row(s) ==
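
For reference, a minimal sketch of a setup consistent with the plans above. This is hypothetical DDL, not the suite's actual fixture code: the column types are inferred from the casts in the join condition (both join keys are strings cast to double), and the CACHE TABLE step accounts for the InMemoryRelation over `src`:

CREATE TABLE tst (a STRING) PARTITIONED BY (d STRING);
CREATE TABLE src (key STRING, value STRING);
-- caching src explains the InMemoryRelation / InMemoryTableScan nodes in the plans
CACHE TABLE src;

INSERT OVERWRITE TABLE tst PARTITION (d = '2009-01-01')
SELECT tst.a, src.value FROM tst JOIN src ON (tst.a = src.key);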


org.scalatest.exceptions.TestFailedException:
[exception message omitted; identical to the failure output, query, and plans above]
at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:406)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:342)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.doTest$1(HiveComparisonTest.scala:342)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:505)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:56)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:33)
at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:33)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
at org.scalatest.Suite$class.run(Suite.scala:1424)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:28)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:33)
at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:33)
at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
at org.scalatest.Suite$class.run(Suite.scala:1421)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
at org.scalatest.tools.Runner$.main(Runner.scala:860)
at org.scalatest.tools.Runner.main(Runner.scala)