org.scalatest.exceptions.TestFailedException: spark-submit returned with exit code 1. Command line: './bin/spark-submit' '--name' 'prepare testing tables' '--master' 'local[2]' '--conf' 'spark.ui.enabled=false' '--conf' 'spark.master.rest.enabled=false' '--conf' 'spark.sql.warehouse.dir=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/warehouse-353c1144-67e4-487d-a003-5cd40a0fc2ac' '--conf' 'spark.sql.test.version.index=2' '--driver-java-options' '-Dderby.system.home=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/warehouse-353c1144-67e4-487d-a003-5cd40a0fc2ac' '/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/test4456103069289142190.py'
2017-12-20 18:25:23.572 - stderr> SLF4J: Class path contains multiple SLF4J bindings.
2017-12-20 18:25:23.572 - stderr> SLF4J: Found binding in [jar:file:/tmp/test-spark/spark-2.2.0/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2017-12-20 18:25:23.572 - stderr> SLF4J: Found binding in [jar:file:/home/sparkivy/per-executor-caches/6/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2017-12-20 18:25:23.572 - stderr> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
2017-12-20 18:25:23.577 - stderr> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2017-12-20 18:25:24.687 - stdout> 18:25:24.687 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2017-12-20 18:25:27.115 - stdout> 18:25:27.115 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus.store.rdbms" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/home/sparkivy/per-executor-caches/6/.ivy2/cache/org.datanucleus/datanucleus-rdbms/jars/datanucleus-rdbms-3.2.9.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/tmp/test-spark/spark-2.2.0/jars/datanucleus-rdbms-3.2.9.jar."
2017-12-20 18:25:27.133 - stdout> 18:25:27.133 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/home/sparkivy/per-executor-caches/6/.ivy2/cache/org.datanucleus/datanucleus-core/jars/datanucleus-core-3.2.10.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/tmp/test-spark/spark-2.2.0/jars/datanucleus-core-3.2.10.jar."
2017-12-20 18:25:27.137 - stdout> 18:25:27.137 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus.api.jdo" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/home/sparkivy/per-executor-caches/6/.ivy2/cache/org.datanucleus/datanucleus-api-jdo/jars/datanucleus-api-jdo-3.2.6.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/tmp/test-spark/spark-2.2.0/jars/datanucleus-api-jdo-3.2.6.jar."
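The command line above runs a temporary, generated PySpark script (test4456103069289142190.py, job name 'prepare testing tables') against the downloaded Spark 2.2.0. The script's contents are not included in this log; as a rough, assumed sketch, something as small as the following would already exercise the failure path shown in the traces below (SparkSession.sessionState building the HiveExternalCatalog via py4j). The table name and SQL here are purely illustrative, not taken from the actual test file.

    # Hypothetical sketch only: the real generated script is not shown in this log.
    from pyspark.sql import SparkSession

    # Hive support is what pulls in HiveExternalCatalog and the Hive metastore client.
    spark = SparkSession.builder \
        .appName("prepare testing tables") \
        .enableHiveSupport() \
        .getOrCreate()

    # Any catalog access forces SparkSession.sessionState to initialize; that is
    # where the JDOException about package.jdo surfaces in the traces below.
    spark.sql("CREATE TABLE IF NOT EXISTS tbl1 (id INT) USING parquet")

    spark.stop()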
2017-12-20 18:25:31.313 - stdout> 18:25:31.313 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.316 - stdout> 18:25:31.314 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-20 18:25:31.316 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.316 - stdout> at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
2017-12-20 18:25:31.316 - stdout> at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-20 18:25:31.316 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.316 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.316 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.316 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-20 18:25:31.316 - stdout> at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) 2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) 2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) 2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) 2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) 2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) 2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) 2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) 2017-12-20 18:25:31.316 - stdout> at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191) 2017-12-20 18:25:31.316 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-20 18:25:31.316 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-20 18:25:31.316 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-20 18:25:31.316 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54) 2017-12-20 18:25:31.316 - stdout> at 
org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.316 - stdout> at scala.Option.getOrElse(Option.scala:121) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129) 2017-12-20 18:25:31.316 - stdout> at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126) 2017-12-20 18:25:31.316 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 2017-12-20 18:25:31.316 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 2017-12-20 18:25:31.316 - stdout> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 2017-12-20 18:25:31.316 - stdout> at java.lang.reflect.Method.invoke(Method.java:497) 2017-12-20 18:25:31.316 - stdout> at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) 2017-12-20 18:25:31.316 - stdout> at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) 2017-12-20 18:25:31.316 - stdout> at py4j.Gateway.invoke(Gateway.java:280) 2017-12-20 18:25:31.316 - stdout> at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) 2017-12-20 18:25:31.316 - stdout> at py4j.commands.CallCommand.execute(CallCommand.java:79) 2017-12-20 18:25:31.317 - stdout> at py4j.GatewayConnection.run(GatewayConnection.java:214) 2017-12-20 18:25:31.317 - stdout> at java.lang.Thread.run(Thread.java:745) 2017-12-20 18:25:31.317 - stdout> NestedThrowablesStackTrace: 2017-12-20 18:25:31.317 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.317 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952) 2017-12-20 18:25:31.317 - stdout> at 
org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.store.query.AbstractJDOQLQuery.compileInternal(AbstractJDOQLQuery.java:367) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:240) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.store.query.Query.executeQuery(Query.java:1744) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.store.query.Query.execute(Query.java:1654) 2017-12-20 18:25:31.317 - stdout> at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) 2017-12-20 18:25:31.317 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-20 18:25:31.317 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-20 18:25:31.317 - stdout> at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-20 18:25:31.317 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) 2017-12-20 18:25:31.317 - stdout> at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191) 2017-12-20 18:25:31.317 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-20 18:25:31.317 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-20 18:25:31.317 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-20 18:25:31.317 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93) 2017-12-20 
18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35) 2017-12-20 18:25:31.317 - stdout> at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289) 2017-12-20 18:25:31.318 - stdout> at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050) 2017-12-20 18:25:31.318 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.318 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.318 - stdout> at scala.Option.getOrElse(Option.scala:121) 2017-12-20 18:25:31.318 - stdout> at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129) 2017-12-20 18:25:31.318 - stdout> at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126) 2017-12-20 18:25:31.318 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 2017-12-20 18:25:31.318 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 2017-12-20 18:25:31.318 - stdout> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 2017-12-20 18:25:31.318 - stdout> at java.lang.reflect.Method.invoke(Method.java:497) 2017-12-20 18:25:31.318 - stdout> at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) 2017-12-20 18:25:31.318 - stdout> at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) 2017-12-20 18:25:31.318 - stdout> at py4j.Gateway.invoke(Gateway.java:280) 2017-12-20 18:25:31.318 - stdout> at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) 2017-12-20 18:25:31.318 - stdout> at py4j.commands.CallCommand.execute(CallCommand.java:79) 2017-12-20 18:25:31.318 - stdout> at py4j.GatewayConnection.run(GatewayConnection.java:214) 2017-12-20 18:25:31.318 - stdout> at java.lang.Thread.run(Thread.java:745) 2017-12-20 18:25:31.398 - stdout> 18:25:31.398 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.4 - stdout> 18:25:31.398 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Retrying creating default database after error: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.4 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.4 - stdout> at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600) 2017-12-20 18:25:31.4 - stdout> at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6721) 2017-12-20 18:25:31.4 - stdout> at 
org.apache.hadoop.hive.metastore.ObjectStore.getMetaStoreSchemaVersion(ObjectStore.java:6703) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6661) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6645) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 2017-12-20 18:25:31.4 - stdout> at java.lang.reflect.Method.invoke(Method.java:497) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114) 2017-12-20 18:25:31.4 - stdout> at com.sun.proxy.$Proxy12.verifySchema(Unknown Source) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:572) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-20 18:25:31.4 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) 2017-12-20 18:25:31.4 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) 2017-12-20 18:25:31.4 - stdout> at 
org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-20 18:25:31.4 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.4 - stdout> at scala.Option.getOrElse(Option.scala:121) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129) 2017-12-20 18:25:31.4 - stdout> at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126) 
2017-12-20 18:25:31.4 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 2017-12-20 18:25:31.4 - stdout> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 2017-12-20 18:25:31.4 - stdout> at java.lang.reflect.Method.invoke(Method.java:497) 2017-12-20 18:25:31.4 - stdout> at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) 2017-12-20 18:25:31.4 - stdout> at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) 2017-12-20 18:25:31.4 - stdout> at py4j.Gateway.invoke(Gateway.java:280) 2017-12-20 18:25:31.4 - stdout> at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) 2017-12-20 18:25:31.4 - stdout> at py4j.commands.CallCommand.execute(CallCommand.java:79) 2017-12-20 18:25:31.4 - stdout> at py4j.GatewayConnection.run(GatewayConnection.java:214) 2017-12-20 18:25:31.4 - stdout> at java.lang.Thread.run(Thread.java:745) 2017-12-20 18:25:31.4 - stdout> NestedThrowablesStackTrace: 2017-12-20 18:25:31.401 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.401 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.ExecutionContextImpl.hasPersistenceInformationForClass(ExecutionContextImpl.java:5768) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:258) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.store.query.Query.executeQuery(Query.java:1744) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.store.query.Query.execute(Query.java:1654) 2017-12-20 18:25:31.401 - stdout> at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6721) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.getMetaStoreSchemaVersion(ObjectStore.java:6703) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6661) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6645) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 2017-12-20 18:25:31.401 - stdout> at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 2017-12-20 18:25:31.401 - stdout> at java.lang.reflect.Method.invoke(Method.java:497) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114) 2017-12-20 18:25:31.401 - stdout> at com.sun.proxy.$Proxy12.verifySchema(Unknown Source) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:572) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-20 18:25:31.401 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) 2017-12-20 18:25:31.401 - stdout> at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-20 18:25:31.401 - stdout> at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-20 18:25:31.401 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.401 - stdout> at scala.Option.getOrElse(Option.scala:121) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129) 2017-12-20 18:25:31.401 - stdout> at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 2017-12-20 18:25:31.401 - stdout> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 2017-12-20 18:25:31.401 - stdout> at 
java.lang.reflect.Method.invoke(Method.java:497) 2017-12-20 18:25:31.401 - stdout> at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) 2017-12-20 18:25:31.401 - stdout> at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) 2017-12-20 18:25:31.401 - stdout> at py4j.Gateway.invoke(Gateway.java:280) 2017-12-20 18:25:31.401 - stdout> at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) 2017-12-20 18:25:31.402 - stdout> at py4j.commands.CallCommand.execute(CallCommand.java:79) 2017-12-20 18:25:31.402 - stdout> at py4j.GatewayConnection.run(GatewayConnection.java:214) 2017-12-20 18:25:31.402 - stdout> at java.lang.Thread.run(Thread.java:745) 2017-12-20 18:25:31.41 - stdout> 18:25:31.410 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.411 - stdout> 18:25:31.410 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled 2017-12-20 18:25:31.411 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.411 - stdout> at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600) 2017-12-20 18:25:31.411 - stdout> at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230) 2017-12-20 18:25:31.411 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183) 2017-12-20 18:25:31.411 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) 2017-12-20 18:25:31.412 - stdout> at 
org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) 2017-12-20 18:25:31.412 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-20 18:25:31.412 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-20 18:25:31.412 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-20 18:25:31.412 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) 2017-12-20 18:25:31.412 - stdout> at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191) 2017-12-20 18:25:31.412 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-20 18:25:31.412 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-20 18:25:31.412 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-20 18:25:31.412 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) 2017-12-20 
18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130) 2017-12-20 18:25:31.412 - stdout> at scala.Option.getOrElse(Option.scala:121) 2017-12-20 18:25:31.412 - stdout> at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129) 2017-12-20 18:25:31.413 - stdout> at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126) 2017-12-20 18:25:31.413 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 2017-12-20 18:25:31.413 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 2017-12-20 18:25:31.413 - stdout> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 2017-12-20 18:25:31.413 - stdout> at java.lang.reflect.Method.invoke(Method.java:497) 2017-12-20 18:25:31.413 - stdout> at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) 2017-12-20 18:25:31.413 - stdout> at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) 2017-12-20 18:25:31.413 - stdout> at py4j.Gateway.invoke(Gateway.java:280) 2017-12-20 18:25:31.413 - stdout> at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) 2017-12-20 18:25:31.413 - stdout> at py4j.commands.CallCommand.execute(CallCommand.java:79) 2017-12-20 18:25:31.413 - stdout> at py4j.GatewayConnection.run(GatewayConnection.java:214) 2017-12-20 18:25:31.413 - stdout> at java.lang.Thread.run(Thread.java:745) 2017-12-20 18:25:31.413 - stdout> NestedThrowablesStackTrace: 2017-12-20 18:25:31.413 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.413 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240) 2017-12-20 18:25:31.413 - stdout> at 
org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.store.query.AbstractJDOQLQuery.compileInternal(AbstractJDOQLQuery.java:367) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:240) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.store.query.Query.executeQuery(Query.java:1744) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.store.query.Query.execute(Query.java:1654) 2017-12-20 18:25:31.413 - stdout> at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) 2017-12-20 18:25:31.413 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) 2017-12-20 18:25:31.413 - stdout> at 
org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-20 18:25:31.413 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.413 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.413 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.414 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.414 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.414 - stdout> 	at scala.Option.getOrElse(Option.scala:121)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-20 18:25:31.414 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-20 18:25:31.414 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
2017-12-20 18:25:31.414 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-20 18:25:31.414 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-20 18:25:31.414 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-20 18:25:31.414 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-20 18:25:31.414 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-20 18:25:31.414 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-20 18:25:31.416 - stdout> 18:25:31.416 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.44 - stdout> 18:25:31.440 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.44 - stdout> 18:25:31.440 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-20 18:25:31.44 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.44 - stdout> 	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
2017-12-20 18:25:31.441 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) 201

sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: spark-submit returned with exit code 1.
Command line: './bin/spark-submit' '--name' 'prepare testing tables' '--master' 'local[2]' '--conf' 'spark.ui.enabled=false' '--conf' 'spark.master.rest.enabled=false' '--conf' 'spark.sql.warehouse.dir=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/warehouse-353c1144-67e4-487d-a003-5cd40a0fc2ac' '--conf' 'spark.sql.test.version.index=2' '--driver-java-options' '-Dderby.system.home=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/warehouse-353c1144-67e4-487d-a003-5cd40a0fc2ac' '/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/test4456103069289142190.py'

2017-12-20 18:25:23.572 - stderr> SLF4J: Class path contains multiple SLF4J bindings.
2017-12-20 18:25:23.572 - stderr> SLF4J: Found binding in [jar:file:/tmp/test-spark/spark-2.2.0/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2017-12-20 18:25:23.572 - stderr> SLF4J: Found binding in [jar:file:/home/sparkivy/per-executor-caches/6/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2017-12-20 18:25:23.572 - stderr> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
2017-12-20 18:25:23.577 - stderr> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2017-12-20 18:25:24.687 - stdout> 18:25:24.687 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2017-12-20 18:25:27.115 - stdout> 18:25:27.115 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus.store.rdbms" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/home/sparkivy/per-executor-caches/6/.ivy2/cache/org.datanucleus/datanucleus-rdbms/jars/datanucleus-rdbms-3.2.9.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/tmp/test-spark/spark-2.2.0/jars/datanucleus-rdbms-3.2.9.jar."
2017-12-20 18:25:27.133 - stdout> 18:25:27.133 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/home/sparkivy/per-executor-caches/6/.ivy2/cache/org.datanucleus/datanucleus-core/jars/datanucleus-core-3.2.10.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/tmp/test-spark/spark-2.2.0/jars/datanucleus-core-3.2.10.jar."
2017-12-20 18:25:27.137 - stdout> 18:25:27.137 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus.api.jdo" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/home/sparkivy/per-executor-caches/6/.ivy2/cache/org.datanucleus/datanucleus-api-jdo/jars/datanucleus-api-jdo-3.2.6.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/tmp/test-spark/spark-2.2.0/jars/datanucleus-api-jdo-3.2.6.jar."
2017-12-20 18:25:31.313 - stdout> 18:25:31.313 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.316 - stdout> 18:25:31.314 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-20 18:25:31.316 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.316 - stdout> 	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
2017-12-20 18:25:31.316 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-20 18:25:31.316 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.316 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.316 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.316 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
2017-12-20 18:25:31.316 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.316 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.316 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.316 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.316 - stdout> 	at scala.Option.getOrElse(Option.scala:121)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129)
2017-12-20 18:25:31.316 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126)
2017-12-20 18:25:31.316 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-20 18:25:31.316 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-20 18:25:31.316 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-20 18:25:31.316 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-20 18:25:31.316 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
2017-12-20 18:25:31.316 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-20 18:25:31.316 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-20 18:25:31.316 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-20 18:25:31.316 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-20 18:25:31.317 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-20 18:25:31.317 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-20 18:25:31.317 - stdout> NestedThrowablesStackTrace:
2017-12-20 18:25:31.317 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.317 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.store.query.AbstractJDOQLQuery.compileInternal(AbstractJDOQLQuery.java:367)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:240)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.store.query.Query.executeQuery(Query.java:1744)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.store.query.Query.execute(Query.java:1654)
2017-12-20 18:25:31.317 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-20 18:25:31.317 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.317 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.317 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.317 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
2017-12-20 18:25:31.317 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.317 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.317 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.317 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
2017-12-20 18:25:31.317 - stdout> 	at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
2017-12-20 18:25:31.318 - stdout> 	at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050)
2017-12-20 18:25:31.318 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.318 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.318 - stdout> 	at scala.Option.getOrElse(Option.scala:121)
2017-12-20 18:25:31.318 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129)
2017-12-20 18:25:31.318 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126)
2017-12-20 18:25:31.318 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-20 18:25:31.318 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-20 18:25:31.318 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-20 18:25:31.318 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-20 18:25:31.318 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
2017-12-20 18:25:31.318 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-20 18:25:31.318 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-20 18:25:31.318 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-20 18:25:31.318 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-20 18:25:31.318 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-20 18:25:31.318 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-20 18:25:31.398 - stdout> 18:25:31.398 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.4 - stdout> 18:25:31.398 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Retrying creating default database after error: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.4 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.4 - stdout> 	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
2017-12-20 18:25:31.4 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6721)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getMetaStoreSchemaVersion(ObjectStore.java:6703)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6661)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6645)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-20 18:25:31.4 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
2017-12-20 18:25:31.4 - stdout> 	at com.sun.proxy.$Proxy12.verifySchema(Unknown Source)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:572)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.4 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.4 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.4 - stdout> 	at scala.Option.getOrElse(Option.scala:121)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129)
2017-12-20 18:25:31.4 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-20 18:25:31.4 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-20 18:25:31.4 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-20 18:25:31.4 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
2017-12-20 18:25:31.4 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-20 18:25:31.4 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-20 18:25:31.4 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-20 18:25:31.4 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-20 18:25:31.4 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-20 18:25:31.4 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-20 18:25:31.4 - stdout> NestedThrowablesStackTrace:
2017-12-20 18:25:31.401 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.401 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.ExecutionContextImpl.hasPersistenceInformationForClass(ExecutionContextImpl.java:5768)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:258)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.store.query.Query.executeQuery(Query.java:1744)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.store.query.Query.execute(Query.java:1654)
2017-12-20 18:25:31.401 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6721)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getMetaStoreSchemaVersion(ObjectStore.java:6703)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6661)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6645)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-20 18:25:31.401 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
2017-12-20 18:25:31.401 - stdout> 	at com.sun.proxy.$Proxy12.verifySchema(Unknown Source)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:572)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.401 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.401 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.401 - stdout> 	at scala.Option.getOrElse(Option.scala:121)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129)
2017-12-20 18:25:31.401 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-20 18:25:31.401 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-20 18:25:31.401 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-20 18:25:31.401 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
2017-12-20 18:25:31.401 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-20 18:25:31.401 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-20 18:25:31.401 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-20 18:25:31.402 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-20 18:25:31.402 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-20 18:25:31.402 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-20 18:25:31.41 - stdout> 18:25:31.410 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.411 - stdout> 18:25:31.410 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-20 18:25:31.411 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.411 - stdout> 	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
2017-12-20 18:25:31.411 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
2017-12-20 18:25:31.411 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-20 18:25:31.411 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-20 18:25:31.412 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.412 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.412 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.412 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
2017-12-20 18:25:31.412 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.412 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.412 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.412 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.412 - stdout> 	at scala.Option.getOrElse(Option.scala:121)
2017-12-20 18:25:31.412 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126)
2017-12-20 18:25:31.413 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-20 18:25:31.413 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-20 18:25:31.413 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-20 18:25:31.413 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-20 18:25:31.413 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
2017-12-20 18:25:31.413 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-20 18:25:31.413 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-20 18:25:31.413 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-20 18:25:31.413 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-20 18:25:31.413 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-20 18:25:31.413 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-20 18:25:31.413 - stdout> NestedThrowablesStackTrace:
2017-12-20 18:25:31.413 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.413 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.store.query.AbstractJDOQLQuery.compileInternal(AbstractJDOQLQuery.java:367)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:240)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.store.query.Query.executeQuery(Query.java:1744)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.store.query.Query.execute(Query.java:1654)
2017-12-20 18:25:31.413 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-20 18:25:31.413 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-20 18:25:31.413 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.413 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.413 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.414 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-20 18:25:31.414 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:194)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:193)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1050)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:130)
2017-12-20 18:25:31.414 - stdout> 	at scala.Option.getOrElse(Option.scala:121)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:129)
2017-12-20 18:25:31.414 - stdout> 	at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:126)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-20 18:25:31.414 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-20 18:25:31.414 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-20 18:25:31.414 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
2017-12-20 18:25:31.414 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-20 18:25:31.414 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-20 18:25:31.414 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-20 18:25:31.414 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-20 18:25:31.414 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-20 18:25:31.414 - stdout> 	at java.lang.Thread.run(Thread.java:745)
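[Annotation] The py4j frames at the bottom of both traces show the exception is raised on the driver the first time the submitted Python script touches SparkSession.sessionState: that lazily builds SharedState.externalCatalog, which calls HiveExternalCatalog.databaseExists and in turn forces IsolatedClientLoader to construct the Hive metastore client, where the JDOException surfaces. A minimal, hypothetical PySpark sketch of that trigger path (not the actual test script, whose contents are not shown here):

    from pyspark.sql import SparkSession

    # Hive support routes catalog lookups through HiveExternalCatalog.
    spark = (SparkSession.builder
             .appName("prepare testing tables")
             .enableHiveSupport()
             .getOrCreate())

    # The first catalog access lazily instantiates sessionState ->
    # SharedState.externalCatalog -> HiveExternalCatalog.client, which is the
    # point where the "Error opening the Meta-Data file ... package.jdo"
    # exception above is thrown.
    spark.sql("CREATE DATABASE IF NOT EXISTS test_db")

Any equivalent first use of the catalog (for example spark.catalog.listDatabases()) would exercise the same code path; the specific SQL statement here is only illustrative.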
2017-12-20 18:25:31.416 - stdout> 18:25:31.416 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.44 - stdout> 18:25:31.440 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
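[Annotation] Both ERROR entries point at the same resource, package.jdo at the root of the bundled hive-metastore jar. As a quick, hypothetical diagnostic (assuming access to the same host and the jar path copied from the log), one can check whether that entry is readable at all; a clean read suggests the archive itself is intact and the failure lies in how DataNucleus resolves the jar:file: URL rather than in the jar contents:

    import zipfile

    # Path copied verbatim from the log lines above.
    jar = "/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar"

    # package.jdo sits at the root of the jar (the part after "!/" in the URL).
    with zipfile.ZipFile(jar) as zf:
        with zf.open("package.jdo") as entry:
            print(len(entry.read()), "bytes read from package.jdo")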
2017-12-20 18:25:31.44 - stdout> 18:25:31.440 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-20 18:25:31.44 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.2.0/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-20 18:25:31.44 - stdout> 	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
2017-12-20 18:25:31.441 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-20 18:25:31.441 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(Hive