sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException:
Failed to execute query using catalyst:
Error: assertion failed
java.lang.AssertionError: assertion failed
at scala.Predef$.assert(Predef.scala:165)
at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegen.scala:84)
at org.apache.spark.sql.execution.aggregate.TungstenAggregate.consume(TungstenAggregate.scala:31)
at org.apache.spark.sql.execution.aggregate.TungstenAggregate.doProduce(TungstenAggregate.scala:194)
at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegen.scala:57)
at org.apache.spark.sql.execution.aggregate.TungstenAggregate.produce(TungstenAggregate.scala:31)
at org.apache.spark.sql.execution.WholeStageCodegen.doExecute(WholeStageCodegen.scala:221)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:108)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:106)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:106)
at org.apache.spark.sql.execution.Union$$anonfun$doExecute$1.apply(basicOperators.scala:299)
at org.apache.spark.sql.execution.Union$$anonfun$doExecute$1.apply(basicOperators.scala:299)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.immutable.List.foreach(List.scala:318)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at scala.collection.AbstractTraversable.map(Traversable.scala:105)
at org.apache.spark.sql.execution.Union.doExecute(basicOperators.scala:299)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:108)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:106)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:106)
at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:83)
at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:77)
at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:46)
at org.apache.spark.sql.execution.aggregate.TungstenAggregate.doExecute(TungstenAggregate.scala:77)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:108)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:106)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:106)
at org.apache.spark.sql.execution.Exchange.prepareShuffleDependency(Exchange.scala:143)
at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:241)
at org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:235)
at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:46)
at org.apache.spark.sql.execution.Exchange.doExecute(Exchange.scala:234)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:108)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:106)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:106)
at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:83)
at org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:77)
at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:46)
at org.apache.spark.sql.execution.aggregate.TungstenAggregate.doExecute(TungstenAggregate.scala:77)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:108)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:106)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:106)
at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:142)
at org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:150)
at org.apache.spark.sql.hive.HiveContext$QueryExecution.stringResult(HiveContext.scala:618)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$36.apply(HiveComparisonTest.scala:443)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$36.apply(HiveComparisonTest.scala:401)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at scala.collection.AbstractTraversable.map(Traversable.scala:105)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.doTest$1(HiveComparisonTest.scala:401)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:557)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:271)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:271)
at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:42)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:34)
at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:34)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
at scala.collection.immutable.List.foreach(List.scala:318)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
at org.scalatest.Suite$class.run(Suite.scala:1424)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
at org.apache.spark.sql.hive.execution.HiveComparisonTest.org$scalatest$BeforeAndAfterAll$$super$run(HiveComparisonTest.scala:45)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:34)
at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:34)
at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462)
at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671)
at sbt.ForkMain$Run$2.call(ForkMain.java:296)
at sbt.ForkMain$Run$2.call(ForkMain.java:286)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
== Parsed Logical Plan ==
'Aggregate ['unionsrc.key], [unresolvedalias('unionsrc.key,None),unresolvedalias('count(1) AS _c1#137708,None)]
+- 'Subquery unionsrc
+- 'Union
:- 'Project [unresolvedalias('gen_subquery_112.key,None),unresolvedalias('gen_subquery_112.value,None)]
: +- 'Subquery gen_subquery_112
: +- 'Union
: :- 'Project [unresolvedalias(tst1 AS key#137702,None),unresolvedalias(cast('count(1) as string) AS value#137703,None)]
: : +- 'UnresolvedRelation `default`.`src`, Some(s1)
: +- 'Project [unresolvedalias(cast('s2.key as string) AS key#137704,None),unresolvedalias('s2.value AS value#137705,None)]
: +- 'UnresolvedRelation `default`.`src1`, Some(s2)
+- 'Project [unresolvedalias(cast('s3.key as string) AS key#137706,None),unresolvedalias('s3.value AS value#137707,None)]
+- 'UnresolvedRelation `default`.`src1`, Some(s3)
== Analyzed Logical Plan ==
key: string, _c1: bigint
Aggregate [key#137702], [key#137702,(count(1),mode=Complete,isDistinct=false) AS _c1#137708L]
+- Subquery unionsrc
+- Union
:- Project [key#137702,value#137703]
: +- Subquery gen_subquery_112
: +- Union
: :- Aggregate [tst1 AS key#137702,cast((count(1),mode=Complete,isDistinct=false) as string) AS value#137703]
: : +- MetastoreRelation default, src, Some(s1)
: +- Project [cast(key#137711 as string) AS key#137704,value#137712 AS value#137705]
: +- MetastoreRelation default, src1, Some(s2)
+- Project [cast(key#137713 as string) AS key#137706,value#137714 AS value#137707]
+- MetastoreRelation default, src1, Some(s3)
== Optimized Logical Plan ==
Aggregate [key#137702], [key#137702,(count(1),mode=Complete,isDistinct=false) AS _c1#137708L]
+- Union
:- Aggregate [tst1 AS key#137702]
: +- Project
: +- InMemoryRelation [key#137709,value#137710], true, 5, StorageLevel(true, true, false, true, 1), HiveTableScan [key#137596,value#137597], MetastoreRelation default, src, None, Some(src)
:- Project [cast(key#137711 as string) AS key#137704]
: +- InMemoryRelation [key#137711,value#137712], true, 5, StorageLevel(true, true, false, true, 1), HiveTableScan [key#137633,value#137634], MetastoreRelation default, src1, None, Some(src1)
+- Project [cast(key#137713 as string) AS key#137706]
+- InMemoryRelation [key#137713,value#137714], true, 5, StorageLevel(true, true, false, true, 1), HiveTableScan [key#137633,value#137634], MetastoreRelation default, src1, None, Some(src1)
== Physical Plan ==
TungstenAggregate(key=[key#137702], functions=[(count(1),mode=Final,isDistinct=false)], output=[key#137702,_c1#137708L])
+- Exchange hashpartitioning(key#137702,2), None
+- TungstenAggregate(key=[key#137702], functions=[(count(1),mode=Partial,isDistinct=false)], output=[key#137702,count#137747L])
+- Union
:- WholeStageCodegen
: : +- TungstenAggregate(key=[], functions=[], output=[key#137702])
: : +- INPUT
: +- Exchange SinglePartition, None
: +- WholeStageCodegen
: : +- TungstenAggregate(key=[], functions=[], output=[])
: : +- INPUT
: +- InMemoryColumnarTableScan InMemoryRelation [key#137709,value#137710], true, 5, StorageLevel(true, true, false, true, 1), HiveTableScan [key#137596,value#137597], MetastoreRelation default, src, None, Some(src)
:- WholeStageCodegen
: : +- Project [cast(key#137711 as string) AS key#137704]
: : +- INPUT
: +- InMemoryColumnarTableScan [key#137711], InMemoryRelation [key#137711,value#137712], true, 5, StorageLevel(true, true, false, true, 1), HiveTableScan [key#137633,value#137634], MetastoreRelation default, src1, None, Some(src1)
+- WholeStageCodegen
: +- Project [cast(key#137713 as string) AS key#137706]
: +- INPUT
+- InMemoryColumnarTableScan [key#137713], InMemoryRelation [key#137713,value#137714], true, 5, StorageLevel(true, true, false, true, 1), HiveTableScan [key#137633,value#137634], MetastoreRelation default, src1, None, Some(src1)
== HIVE - 17 row(s) ==
NULL 20
128 2
146 2
150 2
213 2
224 2
238 2
255 2
273 2
278 2
311 2
369 2
401 2
406 2
66 2
98 2
tst1 1
at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$36.apply(HiveComparisonTest.scala:456)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$36.apply(HiveComparisonTest.scala:401)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at scala.collection.AbstractTraversable.map(Traversable.scala:105)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.doTest$1(HiveComparisonTest.scala:401)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:557)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:271)
at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:271)
at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:42)
at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:34)
at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:34)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
at scala.collection.immutable.List.foreach(List.scala:318)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
at org.scalatest.Suite$class.run(Suite.scala:1424)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
at org.apache.spark.sql.hive.execution.HiveComparisonTest.org$scalatest$BeforeAndAfterAll$$super$run(HiveComparisonTest.scala:45)
at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:34)
at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:34)
at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462)
at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671)
at sbt.ForkMain$Run$2.call(ForkMain.java:296)
at sbt.ForkMain$Run$2.call(ForkMain.java:286)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)