      org.scalatest.exceptions.TestFailedException: 
Failed to execute query using catalyst:
Error: Exception thrown in awaitResult: 
org.apache.spark.SparkException: Exception thrown in awaitResult: 
	at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:194)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:102)
	at org.apache.spark.sql.execution.InputAdapter.doExecuteBroadcast(WholeStageCodegenExec.scala:229)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:121)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:121)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:120)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareBroadcast(BroadcastHashJoinExec.scala:98)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:242)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
	at org.apache.spark.sql.execution.FilterExec.consume(basicPhysicalOperators.scala:79)
	at org.apache.spark.sql.execution.FilterExec.doConsume(basicPhysicalOperators.scala:194)
	at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
	at org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:218)
	at org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:244)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:218)
	at org.apache.spark.sql.execution.FilterExec.doProduce(basicPhysicalOperators.scala:113)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.FilterExec.produce(basicPhysicalOperators.scala:79)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:77)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:38)
	at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:40)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:30)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:304)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:343)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
	at org.apache.spark.sql.execution.UnionExec$$anonfun$doExecute$1.apply(basicPhysicalOperators.scala:472)
	at org.apache.spark.sql.execution.UnionExec$$anonfun$doExecute$1.apply(basicPhysicalOperators.scala:472)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.immutable.List.map(List.scala:285)
	at org.apache.spark.sql.execution.UnionExec.doExecute(basicPhysicalOperators.scala:472)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
	at org.apache.spark.sql.hive.execution.InsertIntoHiveTable.sideEffectResult$lzycompute(InsertIntoHiveTable.scala:228)
	at org.apache.spark.sql.hive.execution.InsertIntoHiveTable.sideEffectResult(InsertIntoHiveTable.scala:141)
	at org.apache.spark.sql.hive.execution.InsertIntoHiveTable.executeCollect(InsertIntoHiveTable.scala:300)
	at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:306)
	at org.apache.spark.sql.execution.QueryExecution.hiveResultString(QueryExecution.scala:126)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:392)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:342)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.doTest$1(HiveComparisonTest.scala:342)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:505)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:33)
	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:33)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:28)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:33)
	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:33)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: java.lang.RuntimeException: java.io.FileNotFoundException: /home/jenkins/.m2/repository/org/apache/hadoop/hadoop-common/2.4.0/hadoop-common-2.4.0.jar (No such file or directory)
	at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:2332)
	at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2185)
	at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2102)
	at org.apache.hadoop.conf.Configuration.set(Configuration.java:979)
	at org.apache.hadoop.conf.Configuration.set(Configuration.java:953)
	at org.apache.hadoop.mapred.JobConf.setJar(JobConf.java:517)
	at org.apache.hadoop.mapred.JobConf.setJarByClass(JobConf.java:535)
	at org.apache.hadoop.mapred.JobConf.<init>(JobConf.java:429)
	at org.apache.hadoop.hive.conf.HiveConf.initialize(HiveConf.java:2681)
	at org.apache.hadoop.hive.conf.HiveConf.<init>(HiveConf.java:2641)
	at org.apache.hadoop.hive.ql.session.SessionState.getSessionConf(SessionState.java:833)
	at org.apache.hadoop.hive.ql.metadata.Partition.getDeserializer(Partition.java:250)
	at org.apache.spark.sql.hive.HadoopTableReader$$anonfun$3.apply(TableReader.scala:133)
	at org.apache.spark.sql.hive.HadoopTableReader$$anonfun$3.apply(TableReader.scala:132)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.HadoopTableReader.makeRDDForPartitionedTable(TableReader.scala:132)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:150)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:150)
	at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2210)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:149)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:233)
	at org.apache.spark.sql.execution.FilterExec.inputRDDs(basicPhysicalOperators.scala:109)
	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:36)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:348)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:236)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:283)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:74)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:71)
	at org.apache.spark.sql.execution.SQLExecution$.withExecutionId(SQLExecution.scala:91)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:71)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:71)
	at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
	at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
Caused by: java.io.FileNotFoundException: /home/jenkins/.m2/repository/org/apache/hadoop/hadoop-common/2.4.0/hadoop-common-2.4.0.jar (No such file or directory)
	at java.util.zip.ZipFile.open(Native Method)
	at java.util.zip.ZipFile.<init>(ZipFile.java:219)
	at java.util.zip.ZipFile.<init>(ZipFile.java:149)
	at java.util.jar.JarFile.<init>(JarFile.java:166)
	at java.util.jar.JarFile.<init>(JarFile.java:103)
	at sun.net.www.protocol.jar.URLJarFile.<init>(URLJarFile.java:93)
	at sun.net.www.protocol.jar.URLJarFile.getJarFile(URLJarFile.java:69)
	at sun.net.www.protocol.jar.JarFileFactory.get(JarFileFactory.java:99)
	at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:122)
	at sun.net.www.protocol.jar.JarURLConnection.getInputStream(JarURLConnection.java:150)
	at java.net.URL.openStream(URL.java:1038)
	at org.apache.hadoop.conf.Configuration.parse(Configuration.java:2161)
	at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:2232)
	... 52 more
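
The root cause here is environmental rather than a planner bug: the causal chain shows Configuration.set triggering a lazy reload of Hadoop's default resources (getProps -> loadResources -> loadResource) while a fresh JobConf/HiveConf was being built for a partition deserializer inside the broadcast-exchange thread, and the classpath jar backing that resource, hadoop-common-2.4.0.jar in the Jenkins ~/.m2 cache, had been deleted out from under the running JVM. ThreadUtils.awaitResult on the driver then re-wrapped the broadcast thread's failure as the SparkException above. A minimal Scala sketch of the same failure mode follows; the entry name core-default.xml is an assumption, since the trace records only the jar path:

    import java.net.URL

    object MissingJarRepro {
      def main(args: Array[String]): Unit = {
        // A jar: URL can outlive its backing file inside a running JVM;
        // opening it after the file is deleted fails exactly as in the
        // chain above (JarURLConnection -> JarFile -> ZipFile.open).
        val url = new URL(
          "jar:file:/home/jenkins/.m2/repository/org/apache/hadoop/" +
            "hadoop-common/2.4.0/hadoop-common-2.4.0.jar!/core-default.xml")
        url.openStream() // java.io.FileNotFoundException (No such file or directory)
      }
    }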

insert overwrite table dst_union22 partition (ds='2')
select * from
(
select k1 as k1, k2 as k2, k3 as k3, k4 as k4 from dst_union22_delta where ds = '1' and k0 <= 50
union all
select a.k1 as k1, a.k2 as k2, b.k3 as k3, b.k4 as k4
from dst_union22 a left outer join (select * from dst_union22_delta where ds = '1' and k0 > 50) b on
a.k1 = b.k1 and a.ds='1'
where a.k1 > 20
)
subq
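
For reference, the statement rewrites partition ds='2' of dst_union22 from two branches: delta rows with k0 <= 50 taken verbatim, unioned with existing rows having k1 > 20 left-outer-joined against delta rows with k0 > 50. A hedged reconstruction of the table layout (all data columns assumed string, consistent with the casts to double in the analyzed plan below; the actual test creates these tables through HiveComparisonTest fixtures):

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().enableHiveSupport().getOrCreate()
    spark.sql("""create table dst_union22 (k1 string, k2 string, k3 string,
                 k4 string) partitioned by (ds string)""")
    spark.sql("""create table dst_union22_delta (k0 string, k1 string, k2 string,
                 k3 string, k4 string, k5 string) partitioned by (ds string)""")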
== Parsed Logical Plan ==
'InsertIntoTable 'UnresolvedRelation `dst_union22`, None, Map(ds -> Some(2)), true, false
+- 'Project [*]
   +- 'SubqueryAlias subq
      +- 'Union
         :- 'Project ['k1 AS k1#125546,'k2 AS k2#125547,'k3 AS k3#125548,'k4 AS k4#125549]
         :  +- 'Filter (('ds = 1) && ('k0 <= 50))
         :     +- 'UnresolvedRelation `dst_union22_delta`, None
         +- 'Project ['a.k1 AS k1#125550,'a.k2 AS k2#125551,'b.k3 AS k3#125552,'b.k4 AS k4#125553]
            +- 'Filter ('a.k1 > 20)
               +- 'Join LeftOuter, Some((('a.k1 = 'b.k1) && ('a.ds = 1)))
                  :- 'UnresolvedRelation `dst_union22`, Some(a)
                  +- 'SubqueryAlias b
                     +- 'Project [*]
                        +- 'Filter (('ds = 1) && ('k0 > 50))
                           +- 'UnresolvedRelation `dst_union22_delta`, None

== Analyzed Logical Plan ==

InsertIntoHiveTable MetastoreRelation default, dst_union22, None, Map(ds -> Some(2)), true, false
+- Project [k1#125546,k2#125547,k3#125548,k4#125549]
   +- SubqueryAlias subq
      +- Union
         :- Project [k1#125556 AS k1#125546,k2#125557 AS k2#125547,k3#125558 AS k3#125548,k4#125559 AS k4#125549]
         :  +- Filter ((ds#125554 = 1) && (cast(k0#125555 as double) <= cast(50 as double)))
         :     +- MetastoreRelation default, dst_union22_delta, None
         +- Project [k1#125562 AS k1#125550,k2#125563 AS k2#125551,k3#125570 AS k3#125552,k4#125571 AS k4#125553]
            +- Filter (cast(k1#125562 as double) > cast(20 as double))
               +- Join LeftOuter, Some(((k1#125562 = k1#125568) && (ds#125561 = 1)))
                  :- MetastoreRelation default, dst_union22, Some(a)
                  +- SubqueryAlias b
                     +- Project [k0#125567,k1#125568,k2#125569,k3#125570,k4#125571,k5#125572,ds#125566]
                        +- Filter ((ds#125566 = 1) && (cast(k0#125567 as double) > cast(50 as double)))
                           +- MetastoreRelation default, dst_union22_delta, None

== Optimized Logical Plan ==
InsertIntoHiveTable MetastoreRelation default, dst_union22, None, Map(ds -> Some(2)), true, false
+- Union
   :- Project [k1#125556 AS k1#125546,k2#125557 AS k2#125547,k3#125558 AS k3#125548,k4#125559 AS k4#125549]
   :  +- Filter (((isnotnull(ds#125554) && isnotnull(k0#125555)) && (ds#125554 = 1)) && (cast(k0#125555 as double) <= 50.0))
   :     +- MetastoreRelation default, dst_union22_delta, None
   +- Project [k1#125562 AS k1#125550,k2#125563 AS k2#125551,k3#125570 AS k3#125552,k4#125571 AS k4#125553]
      +- Join LeftOuter, Some(((ds#125561 = 1) && (k1#125562 = k1#125568)))
         :- Project [k1#125562,k2#125563,ds#125561]
         :  +- Filter (isnotnull(k1#125562) && (cast(k1#125562 as double) > 20.0))
         :     +- MetastoreRelation default, dst_union22, Some(a)
         +- Project [k1#125568,k3#125570,k4#125571]
            +- Filter (((isnotnull(ds#125566) && isnotnull(k0#125567)) && (ds#125566 = 1)) && (cast(k0#125567 as double) > 50.0))
               +- MetastoreRelation default, dst_union22_delta, None

== Physical Plan ==
InsertIntoHiveTable MetastoreRelation default, dst_union22, None, Map(ds -> Some(2)), true, false
+- Union
   :- WholeStageCodegen
   :  :  +- Project [k1#125556 AS k1#125546,k2#125557 AS k2#125547,k3#125558 AS k3#125548,k4#125559 AS k4#125549]
   :  :     +- Filter (isnotnull(k0#125555) && (cast(k0#125555 as double) <= 50.0))
   :  :        +- INPUT
   :  +- HiveTableScan [k0#125555,k4#125559,k3#125558,k2#125557,k1#125556], MetastoreRelation default, dst_union22_delta, None, [isnotnull(ds#125554),(ds#125554 = 1)]
   +- WholeStageCodegen
      :  +- Project [k1#125562 AS k1#125550,k2#125563 AS k2#125551,k3#125570 AS k3#125552,k4#125571 AS k4#125553]
      :     +- BroadcastHashJoin [ds#125561,k1#125562], [1,k1#125568], LeftOuter, BuildRight, None
      :        :- Filter (isnotnull(k1#125562) && (cast(k1#125562 as double) > 20.0))
      :        :  +- INPUT
      :        +- INPUT
      :- HiveTableScan [k1#125562,k2#125563,ds#125561], MetastoreRelation default, dst_union22, Some(a)
      +- BroadcastExchange HashedRelationBroadcastMode(List(1, input[0, string]))
         +- WholeStageCodegen
            :  +- Project [k1#125568,k3#125570,k4#125571]
            :     +- Filter (isnotnull(k0#125567) && (cast(k0#125567 as double) > 50.0))
            :        +- INPUT
            +- HiveTableScan [k1#125568,k3#125570,k4#125571,k0#125567], MetastoreRelation default, dst_union22_delta, None, [isnotnull(ds#125566),(ds#125566 = 1)]
== HIVE - 0 row(s) ==
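
In the physical plan above, the join's right side is built through BroadcastExchange (BuildRight), and BroadcastHashJoinExec.prepareBroadcast blocks on that exchange's relation future; that await is where the broadcast thread's failure surfaced on the driver. As an aside, the broadcast path can be opted out of via a standard Spark SQL conf (reusing the session from the earlier sketch); this only reshapes the plan to a shuffle-based join and would not cure the missing jar:

    // -1 disables automatic broadcast joins, so the planner falls back to
    // a shuffle-based join instead of BroadcastHashJoin + BroadcastExchange.
    spark.conf.set("spark.sql.autoBroadcastJoinThreshold", "-1")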

                
      at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
      at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:406)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:342)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
      at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
      at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
      at scala.collection.AbstractTraversable.map(Traversable.scala:104)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.doTest$1(HiveComparisonTest.scala:342)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:505)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
      at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
      at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:56)
      at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
      at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
      at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:33)
      at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
      at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:33)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
      at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
      at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
      at org.scalatest.Suite$class.run(Suite.scala:1424)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
      at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:28)
      at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
      at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
      at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:33)
      at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
      at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:33)
      at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
      at org.scalatest.Suite$class.run(Suite.scala:1421)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
      at org.scalatest.tools.Runner$.main(Runner.scala:860)
      at org.scalatest.tools.Runner.main(Runner.scala)