      org.scalatest.exceptions.TestFailedException: 
Failed to execute query using catalyst:
Error: execute, tree:
Exchange rangepartitioning(gen_attr_93258#93265 ASC, gen_attr_93259#93266 ASC, 31), None
+- WholeStageCodegen
   :  +- BroadcastHashJoin [gen_attr_93259#93266], [gen_attr_93261#93267], LeftSemi, BuildRight, None
   :     :- Project [name#93272 AS gen_attr_93258#93265,id#93273 AS gen_attr_93259#93266]
   :     :  +- Filter isnotnull(id#93273)
   :     :     +- INPUT
   :     +- INPUT
   :- HiveTableScan [name#93272,id#93273], MetastoreRelation default, sales, None
   +- BroadcastExchange HashedRelationBroadcastMode(List(cast(input[0, int] as bigint)))
      +- WholeStageCodegen
         :  +- Project [id#93275 AS gen_attr_93261#93267]
         :     +- INPUT
         +- HiveTableScan [id#93275], MetastoreRelation default, things, None

org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
Exchange rangepartitioning(gen_attr_93258#93265 ASC, gen_attr_93259#93266 ASC, 31), None
+- WholeStageCodegen
   :  +- BroadcastHashJoin [gen_attr_93259#93266], [gen_attr_93261#93267], LeftSemi, BuildRight, None
   :     :- Project [name#93272 AS gen_attr_93258#93265,id#93273 AS gen_attr_93259#93266]
   :     :  +- Filter isnotnull(id#93273)
   :     :     +- INPUT
   :     +- INPUT
   :- HiveTableScan [name#93272,id#93273], MetastoreRelation default, sales, None
   +- BroadcastExchange HashedRelationBroadcastMode(List(cast(input[0, int] as bigint)))
      +- WholeStageCodegen
         :  +- Project [id#93275 AS gen_attr_93261#93267]
         :     +- INPUT
         +- HiveTableScan [id#93275], MetastoreRelation default, things, None

	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:50)
	at org.apache.spark.sql.execution.exchange.ShuffleExchange.doExecute(ShuffleExchange.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:233)
	at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:116)
	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:36)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:348)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:236)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:283)
	at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:306)
	at org.apache.spark.sql.execution.QueryExecution.hiveResultString(QueryExecution.scala:126)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:392)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:342)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.doTest$1(HiveComparisonTest.scala:342)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:505)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:56)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:33)
	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:33)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
	at org.scalatest.Suite$class.run(Suite.scala:1424)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:28)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:33)
	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:33)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
	at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
	at org.scalatest.Suite$class.run(Suite.scala:1421)
	at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
	at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
	at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
	at org.scalatest.tools.Runner$.main(Runner.scala:860)
	at org.scalatest.tools.Runner.main(Runner.scala)
Caused by: org.apache.spark.SparkException: Exception thrown in awaitResult: 
	at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:194)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:102)
	at org.apache.spark.sql.execution.InputAdapter.doExecuteBroadcast(WholeStageCodegenExec.scala:229)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:121)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:121)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:120)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareBroadcast(BroadcastHashJoinExec.scala:98)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenSemi(BroadcastHashJoinExec.scala:318)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:84)
	at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
	at org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:30)
	at org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:62)
	at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
	at org.apache.spark.sql.execution.FilterExec.consume(basicPhysicalOperators.scala:79)
	at org.apache.spark.sql.execution.FilterExec.doConsume(basicPhysicalOperators.scala:194)
	at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
	at org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:218)
	at org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:244)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:218)
	at org.apache.spark.sql.execution.FilterExec.doProduce(basicPhysicalOperators.scala:113)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.FilterExec.produce(basicPhysicalOperators.scala:79)
	at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:40)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:30)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:77)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:38)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:304)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:343)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
	at org.apache.spark.sql.execution.exchange.ShuffleExchange.prepareShuffleDependency(ShuffleExchange.scala:86)
	at org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:122)
	at org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:113)
	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
	... 90 more
Caused by: java.lang.RuntimeException: java.io.FileNotFoundException: /home/jenkins/.m2/repository/org/apache/hadoop/hadoop-common/2.4.0/hadoop-common-2.4.0.jar (No such file or directory)
	at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:2332)
	at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2185)
	at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2102)
	at org.apache.hadoop.conf.Configuration.set(Configuration.java:979)
	at org.apache.hadoop.conf.Configuration.set(Configuration.java:953)
	at org.apache.hadoop.mapred.JobConf.setJar(JobConf.java:517)
	at org.apache.hadoop.mapred.JobConf.setJarByClass(JobConf.java:535)
	at org.apache.hadoop.mapred.JobConf.<init>(JobConf.java:429)
	at org.apache.hadoop.hive.conf.HiveConf.initialize(HiveConf.java:2681)
	at org.apache.hadoop.hive.conf.HiveConf.<init>(HiveConf.java:2641)
	at org.apache.hadoop.hive.ql.session.SessionState.getSessionConf(SessionState.java:833)
	at org.apache.hadoop.hive.ql.metadata.Partition.getDeserializer(Partition.java:250)
	at org.apache.spark.sql.hive.HadoopTableReader$$anonfun$3.apply(TableReader.scala:133)
	at org.apache.spark.sql.hive.HadoopTableReader$$anonfun$3.apply(TableReader.scala:132)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.hive.HadoopTableReader.makeRDDForPartitionedTable(TableReader.scala:132)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:150)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec$$anonfun$10.apply(HiveTableScanExec.scala:150)
	at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2210)
	at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:149)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
	at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:233)
	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:36)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:348)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:113)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:132)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:129)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:236)
	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:283)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:74)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1$$anonfun$apply$1.apply(BroadcastExchangeExec.scala:71)
	at org.apache.spark.sql.execution.SQLExecution$.withExecutionId(SQLExecution.scala:91)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:71)
	at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec$$anonfun$relationFuture$1.apply(BroadcastExchangeExec.scala:71)
	at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24)
	at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
Caused by: java.io.FileNotFoundException: /home/jenkins/.m2/repository/org/apache/hadoop/hadoop-common/2.4.0/hadoop-common-2.4.0.jar (No such file or directory)
	at java.util.zip.ZipFile.open(Native Method)
	at java.util.zip.ZipFile.<init>(ZipFile.java:219)
	at java.util.zip.ZipFile.<init>(ZipFile.java:149)
	at java.util.jar.JarFile.<init>(JarFile.java:166)
	at java.util.jar.JarFile.<init>(JarFile.java:103)
	at sun.net.www.protocol.jar.URLJarFile.<init>(URLJarFile.java:93)
	at sun.net.www.protocol.jar.URLJarFile.getJarFile(URLJarFile.java:69)
	at sun.net.www.protocol.jar.JarFileFactory.get(JarFileFactory.java:99)
	at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:122)
	at sun.net.www.protocol.jar.JarURLConnection.getInputStream(JarURLConnection.java:150)
	at java.net.URL.openStream(URL.java:1038)
	at org.apache.hadoop.conf.Configuration.parse(Configuration.java:2161)
	at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:2232)
	... 51 more

SELECT name,id FROM sales LEFT SEMI JOIN things ON (sales.id = things.id) ORDER BY name ASC, id ASC
== Parsed Logical Plan ==
'Project ['gen_attr_93258 AS name#93270,'gen_attr_93259 AS id#93271]
+- 'SubqueryAlias sales
   +- 'Sort ['gen_attr_93258 ASC,'gen_attr_93259 ASC], true
      +- 'Project ['gen_attr_93258,'gen_attr_93259]
         +- 'Join LeftSemi, Some(('gen_attr_93259 = 'gen_attr_93261))
            :- 'SubqueryAlias gen_subquery_0
            :  +- 'Project ['name AS gen_attr_93258#93265,'id AS gen_attr_93259#93266]
            :     +- 'UnresolvedRelation `default`.`sales`, None
            +- 'SubqueryAlias gen_subquery_1
               +- 'Project ['id AS gen_attr_93261#93267,'name AS gen_attr_93262#93268,'ds AS gen_attr_93260#93269]
                  +- 'UnresolvedRelation `default`.`things`, None

== Analyzed Logical Plan ==
name: string, id: int
Project [gen_attr_93258#93265 AS name#93270,gen_attr_93259#93266 AS id#93271]
+- SubqueryAlias sales
   +- Sort [gen_attr_93258#93265 ASC,gen_attr_93259#93266 ASC], true
      +- Project [gen_attr_93258#93265,gen_attr_93259#93266]
         +- Join LeftSemi, Some((gen_attr_93259#93266 = gen_attr_93261#93267))
            :- SubqueryAlias gen_subquery_0
            :  +- Project [name#93272 AS gen_attr_93258#93265,id#93273 AS gen_attr_93259#93266]
            :     +- MetastoreRelation default, sales, None
            +- SubqueryAlias gen_subquery_1
               +- Project [id#93275 AS gen_attr_93261#93267,name#93276 AS gen_attr_93262#93268,ds#93274 AS gen_attr_93260#93269]
                  +- MetastoreRelation default, things, None

== Optimized Logical Plan ==
Project [gen_attr_93258#93265 AS name#93270,gen_attr_93259#93266 AS id#93271]
+- Sort [gen_attr_93258#93265 ASC,gen_attr_93259#93266 ASC], true
   +- Join LeftSemi, Some((gen_attr_93259#93266 = gen_attr_93261#93267))
      :- Project [name#93272 AS gen_attr_93258#93265,id#93273 AS gen_attr_93259#93266]
      :  +- Filter isnotnull(id#93273)
      :     +- MetastoreRelation default, sales, None
      +- Project [id#93275 AS gen_attr_93261#93267]
         +- MetastoreRelation default, things, None

== Physical Plan ==
WholeStageCodegen
:  +- Project [gen_attr_93258#93265 AS name#93270,gen_attr_93259#93266 AS id#93271]
:     +- Sort [gen_attr_93258#93265 ASC,gen_attr_93259#93266 ASC], true, 0
:        +- INPUT
+- Exchange rangepartitioning(gen_attr_93258#93265 ASC, gen_attr_93259#93266 ASC, 31), None
   +- WholeStageCodegen
      :  +- BroadcastHashJoin [gen_attr_93259#93266], [gen_attr_93261#93267], LeftSemi, BuildRight, None
      :     :- Project [name#93272 AS gen_attr_93258#93265,id#93273 AS gen_attr_93259#93266]
      :     :  +- Filter isnotnull(id#93273)
      :     :     +- INPUT
      :     +- INPUT
      :- HiveTableScan [name#93272,id#93273], MetastoreRelation default, sales, None
      +- BroadcastExchange HashedRelationBroadcastMode(List(cast(input[0, int] as bigint)))
         +- WholeStageCodegen
            :  +- Project [id#93275 AS gen_attr_93261#93267]
            :     +- INPUT
            +- HiveTableScan [id#93275], MetastoreRelation default, things, None
== HIVE - 2 row(s) ==
Hank	2
Joe	2
                
      at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
      at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:406)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$26.apply(HiveComparisonTest.scala:342)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
      at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
      at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
      at scala.collection.AbstractTraversable.map(Traversable.scala:104)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.doTest$1(HiveComparisonTest.scala:342)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:505)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
      at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:257)
      at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
      at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:56)
      at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
      at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
      at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:33)
      at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
      at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:33)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
      at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
      at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
      at org.scalatest.Suite$class.run(Suite.scala:1424)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
      at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:28)
      at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
      at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
      at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:33)
      at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
      at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:33)
      at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
      at org.scalatest.Suite$class.run(Suite.scala:1421)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
      at org.scalatest.tools.Runner$.main(Runner.scala:860)
      at org.scalatest.tools.Runner.main(Runner.scala)