sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: spark-submit returned with exit code 1.
Command line: './bin/spark-submit' '--name' 'prepare testing tables' '--master' 'local[2]' '--conf' 'spark.ui.enabled=false' '--conf' 'spark.master.rest.enabled=false' '--conf' 'spark.sql.warehouse.dir=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.7/target/tmp/warehouse-8dfea7e1-ea1c-4689-b00a-25ad10012771' '--conf' 'spark.sql.test.version.index=3' '--driver-java-options' '-Dderby.system.home=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.7/target/tmp/warehouse-8dfea7e1-ea1c-4689-b00a-25ad10012771' '/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.7/target/tmp/test1381911505394337515.py'
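For reference, the contents of the generated test1381911505394337515.py script are not preserved in this log. Judging from the Python traceback at the bottom (the failure is raised at line 5 of the script, on SparkSession.builder.enableHiveSupport().getOrCreate()) and the job name 'prepare testing tables', a hypothetical minimal sketch of what such a script does might be:

    # Hypothetical sketch only -- the real generated script is not shown in
    # this log; the single confirmed line is the Hive-enabled getOrCreate().
    from pyspark.sql import SparkSession

    spark = SparkSession.builder.enableHiveSupport().getOrCreate()  # fails here
    index = spark.conf.get("spark.sql.test.version.index")

    # Presumably creates tables whose metadata a later Spark version reads back.
    spark.sql("CREATE TABLE test_tbl_{} AS SELECT 1 AS i".format(index))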

2018-05-21 23:07:34.148 - stderr> SLF4J: Class path contains multiple SLF4J bindings.
2018-05-21 23:07:34.148 - stderr> SLF4J: Found binding in [jar:file:/tmp/test-spark/spark-2.2.1/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2018-05-21 23:07:34.148 - stderr> SLF4J: Found binding in [jar:file:/home/sparkivy/per-executor-caches/5/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2018-05-21 23:07:34.148 - stderr> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
2018-05-21 23:07:34.154 - stderr> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2018-05-21 23:07:35.282 - stdout> 23:07:35.282 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2018-05-21 23:07:37.762 - stdout> 23:07:37.762 ERROR DataNucleus.General: ResourceBundle org.datanucleus.api.jdo.Localisation for locale en_US was not found!
2018-05-21 23:07:37.791 - stdout> 23:07:37.791 ERROR DataNucleus.General: ResourceBundle org.datanucleus.Localisation for locale en_US was not found!
2018-05-21 23:07:37.832 - stdout> 23:07:37.831 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Retrying creating default database after error: No resources could be found to annotate error message key:"EXC_GetPMFUnexpectedException"
2018-05-21 23:07:37.832 - stdout> javax.jdo.JDOFatalInternalException: No resources could be found to annotate error message key:"EXC_GetPMFUnexpectedException"
2018-05-21 23:07:37.832 - stdout> 	at javax.jdo.spi.I18NHelper.assertBundle(I18NHelper.java:269)
2018-05-21 23:07:37.832 - stdout> 	at javax.jdo.spi.I18NHelper.msg(I18NHelper.java:147)
2018-05-21 23:07:37.832 - stdout> 	at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1193)
2018-05-21 23:07:37.832 - stdout> 	at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
2018-05-21 23:07:37.832 - stdout> 	at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:365)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:394)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:291)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2018-05-21 23:07:37.832 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2018-05-21 23:07:37.833 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2018-05-21 23:07:37.833 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2018-05-21 23:07:37.833 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2018-05-21 23:07:37.833 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
2018-05-21 23:07:37.833 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2018-05-21 23:07:37.833 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2018-05-21 23:07:37.833 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2018-05-21 23:07:37.833 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1059)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:137)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:136)
2018-05-21 23:07:37.833 - stdout> 	at scala.Option.getOrElse(Option.scala:121)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:136)
2018-05-21 23:07:37.833 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:133)
2018-05-21 23:07:37.833 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2018-05-21 23:07:37.833 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2018-05-21 23:07:37.833 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2018-05-21 23:07:37.833 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2018-05-21 23:07:37.833 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
2018-05-21 23:07:37.833 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2018-05-21 23:07:37.833 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2018-05-21 23:07:37.833 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2018-05-21 23:07:37.833 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2018-05-21 23:07:37.833 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2018-05-21 23:07:37.833 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2018-05-21 23:07:37.869 - stdout> 23:07:37.868 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Retrying creating default database after error: No resources could be found to annotate error message key:"EXC_GetPMFUnexpectedException"
2018-05-21 23:07:37.869 - stdout> javax.jdo.JDOFatalInternalException: No resources could be found to annotate error message key:"EXC_GetPMFUnexpectedException"
2018-05-21 23:07:37.869 - stdout> 	at javax.jdo.spi.I18NHelper.assertBundle(I18NHelper.java:269)
2018-05-21 23:07:37.869 - stdout> 	at javax.jdo.spi.I18NHelper.msg(I18NHelper.java:147)
2018-05-21 23:07:37.869 - stdout> 	at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1193)
2018-05-21 23:07:37.869 - stdout> 	at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
2018-05-21 23:07:37.869 - stdout> 	at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:365)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:394)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:291)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2018-05-21 23:07:37.869 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2018-05-21 23:07:37.869 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2018-05-21 23:07:37.869 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2018-05-21 23:07:37.869 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
2018-05-21 23:07:37.869 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2018-05-21 23:07:37.869 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2018-05-21 23:07:37.869 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2018-05-21 23:07:37.869 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
2018-05-21 23:07:37.869 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1059)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:137)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:136)
2018-05-21 23:07:37.87 - stdout> 	at scala.Option.getOrElse(Option.scala:121)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:136)
2018-05-21 23:07:37.87 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:133)
2018-05-21 23:07:37.87 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2018-05-21 23:07:37.87 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2018-05-21 23:07:37.87 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2018-05-21 23:07:37.87 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2018-05-21 23:07:37.87 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
2018-05-21 23:07:37.87 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2018-05-21 23:07:37.87 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2018-05-21 23:07:37.87 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2018-05-21 23:07:37.87 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2018-05-21 23:07:37.87 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2018-05-21 23:07:37.87 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2018-05-21 23:07:37.892 - stdout> Traceback (most recent call last):
2018-05-21 23:07:37.892 - stdout>   File "/tmp/test-spark/spark-2.2.1/python/lib/pyspark.zip/pyspark/sql/utils.py", line 63, in deco
2018-05-21 23:07:37.892 - stdout>   File "/tmp/test-spark/spark-2.2.1/python/lib/py4j-0.10.4-src.zip/py4j/protocol.py", line 319, in get_return_value
2018-05-21 23:07:37.897 - stdout> py4j.protocol.Py4JJavaError
2018-05-21 23:07:37.897 - stdout> ERROR:root:Exception while sending command.
2018-05-21 23:07:37.897 - stdout> Traceback (most recent call last):
2018-05-21 23:07:37.897 - stdout>   File "/tmp/test-spark/spark-2.2.1/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 1035, in send_command
2018-05-21 23:07:37.897 - stdout> py4j.protocol.Py4JNetworkError: Answer from Java side is empty
2018-05-21 23:07:37.897 - stdout> 
2018-05-21 23:07:37.897 - stdout> During handling of the above exception, another exception occurred:
2018-05-21 23:07:37.897 - stdout> 
2018-05-21 23:07:37.897 - stdout> Traceback (most recent call last):
2018-05-21 23:07:37.897 - stdout>   File "/tmp/test-spark/spark-2.2.1/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 883, in send_command
2018-05-21 23:07:37.897 - stdout>   File "/tmp/test-spark/spark-2.2.1/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 1040, in send_command
2018-05-21 23:07:37.897 - stdout> py4j.protocol.Py4JNetworkError: Error while receiving
2018-05-21 23:07:37.897 - stdout> 
2018-05-21 23:07:37.897 - stdout> During handling of the above exception, another exception occurred:
2018-05-21 23:07:37.897 - stdout> 
2018-05-21 23:07:37.897 - stdout> Traceback (most recent call last):
2018-05-21 23:07:37.897 - stdout>   File "/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.7/target/tmp/test1381911505394337515.py", line 5, in <module>
2018-05-21 23:07:37.897 - stdout>     spark = SparkSession.builder.enableHiveSupport().getOrCreate()
2018-05-21 23:07:37.897 - stdout>   File "/tmp/test-spark/spark-2.2.1/python/lib/pyspark.zip/pyspark/sql/session.py", line 183, in getOrCreate
2018-05-21 23:07:37.898 - stdout>   File "/tmp/test-spark/spark-2.2.1/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 1133, in __call__
2018-05-21 23:07:37.898 - stdout>   File "/tmp/test-spark/spark-2.2.1/python/lib/pyspark.zip/pyspark/sql/utils.py", line 79, in deco
2018-05-21 23:07:37.898 - stdout> pyspark.sql.utils.IllegalArgumentException: "Error while instantiating 'org.apache.spark.sql.hive.HiveSessionStateBuilder':"
	at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528)
	at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
	at org.scalatest.Assertions$class.fail(Assertions.scala:1089)
	at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
	at org.apache.spark.sql.hive.SparkSubmitTestUtils$class.runSparkSubmit(SparkSubmitTestUtils.scala:84)
	at org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite.runSparkSubmit(HiveExternalCatalogVersionsSuite.scala:43)
	at org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite$$anonfun$beforeAll$1.apply(HiveExternalCatalogVersionsSuite.scala:176)
	at org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite$$anonfun$beforeAll$1.apply(HiveExternalCatalogVersionsSuite.scala:161)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite.beforeAll(HiveExternalCatalogVersionsSuite.scala:161)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:212)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:52)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)