org.scalatest.exceptions.TestFailedException: spark-submit returned with exit code 1. Command line: './bin/spark-submit' '--name' 'prepare testing tables' '--master' 'local[2]' '--conf' 'spark.ui.enabled=false' '--conf' 'spark.master.rest.enabled=false' '--conf' 'spark.sql.warehouse.dir=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/warehouse-5e2a46d1-85d7-431f-92c0-832a1fcb2771' '--conf' 'spark.sql.test.version.index=0' '--driver-java-options' '-Dderby.system.home=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/warehouse-5e2a46d1-85d7-431f-92c0-832a1fcb2771' '/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/test8075371215206768958.py'

2017-12-21 06:08:09.74 - stderr> SLF4J: Class path contains multiple SLF4J bindings.
2017-12-21 06:08:09.74 - stderr> SLF4J: Found binding in [jar:file:/tmp/test-spark/spark-2.0.2/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2017-12-21 06:08:09.74 - stderr> SLF4J: Found binding in [jar:file:/home/sparkivy/per-executor-caches/2/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2017-12-21 06:08:09.74 - stderr> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
2017-12-21 06:08:09.74 - stderr> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2017-12-21 06:08:10.03 - stdout> 06:08:10.030 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2017-12-21 06:08:13.482 - stdout> 06:08:13.482 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus.store.rdbms" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/tmp/test-spark/spark-2.0.2/jars/datanucleus-rdbms-3.2.9.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/home/sparkivy/per-executor-caches/2/.ivy2/cache/org.datanucleus/datanucleus-rdbms/jars/datanucleus-rdbms-3.2.9.jar."
2017-12-21 06:08:13.49 - stdout> 06:08:13.490 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/tmp/test-spark/spark-2.0.2/jars/datanucleus-core-3.2.10.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/home/sparkivy/per-executor-caches/2/.ivy2/cache/org.datanucleus/datanucleus-core/jars/datanucleus-core-3.2.10.jar."
2017-12-21 06:08:13.494 - stdout> 06:08:13.494 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus.api.jdo" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/tmp/test-spark/spark-2.0.2/jars/datanucleus-api-jdo-3.2.6.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/home/sparkivy/per-executor-caches/2/.ivy2/cache/org.datanucleus/datanucleus-api-jdo/jars/datanucleus-api-jdo-3.2.6.jar."
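Before the failure itself, the warnings above already show the same libraries being picked up twice: once from the unpacked Spark 2.0.2 distribution under /tmp/test-spark and once from the Ivy cache under /home/sparkivy. A minimal, hypothetical Scala check (not part of the test suite) that surfaces such duplicate jars on a JVM classpath might look like this:

// Hypothetical diagnostic: list jar file names that occur more than once on the
// JVM classpath, which is the situation the DataNucleus "already registered"
// warnings above are complaining about.
object DuplicateJarCheck {
  def main(args: Array[String]): Unit = {
    val jars = sys.props("java.class.path")
      .split(java.io.File.pathSeparator)
      .filter(_.endsWith(".jar"))
    jars.groupBy(p => new java.io.File(p).getName)   // group full paths by jar file name
      .filter { case (_, paths) => paths.length > 1 } // keep only names seen more than once
      .foreach { case (name, paths) =>
        println(s"$name appears ${paths.length} times:")
        paths.foreach(p => println(s"  $p"))
      }
  }
}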
2017-12-21 06:08:29.779 - stdout> 06:08:29.779 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.781 - stdout> 06:08:29.780 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-21 06:08:29.781 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
    at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
    at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
    at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
    at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
    at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
    at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
    at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
    at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
    at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:280)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:214)
    at java.lang.Thread.run(Thread.java:745)
NestedThrowablesStackTrace:
Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
    at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
    at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
    at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
    at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
    at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
    at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960)
    at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952)
    at org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129)
    at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87)
    at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481)
    at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113)
    at org.datanucleus.store.query.AbstractJDOQLQuery.compileInternal(AbstractJDOQLQuery.java:367)
    at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:240)
    at org.datanucleus.store.query.Query.executeQuery(Query.java:1744)
    at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)
    at org.datanucleus.store.query.Query.execute(Query.java:1654)
    at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)
    at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
    at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
    at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
    at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
    at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
    at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
    at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:280)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:214)
    at java.lang.Thread.run(Thread.java:745)
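Each of the failures in this log comes down to the same event: DataNucleus cannot read package.jdo out of hive-metastore-1.2.1.spark2.jar. As a rough sketch of the failing lookup, the resource can be opened by hand with the JDK's jar: URL handling; the URL below is taken verbatim from the log, and the snippet is illustrative rather than anything the suite actually runs:

import java.net.URL
import scala.io.Source

object PackageJdoCheck {
  def main(args: Array[String]): Unit = {
    // The same kind of jar: URL that MetaDataParser.parseMetaDataURL fails on above.
    val url = new URL("jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo")
    val in = url.openStream() // throws an IOException if the jar or the entry is unreadable
    try {
      val lines = Source.fromInputStream(in).getLines().size
      println(s"package.jdo read OK ($lines lines)")
    } finally in.close()
  }
}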
2017-12-21 06:08:29.856 - stdout> 06:08:29.856 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.858 - stdout> 06:08:29.857 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Retrying creating default database after error: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.858 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
    at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
    at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
    at org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6721)
    at org.apache.hadoop.hive.metastore.ObjectStore.getMetaStoreSchemaVersion(ObjectStore.java:6703)
    at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6661)
    at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6645)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
    at com.sun.proxy.$Proxy10.verifySchema(Unknown Source)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:572)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
    at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
    at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
    at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:280)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:214)
    at java.lang.Thread.run(Thread.java:745)
NestedThrowablesStackTrace:
Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
    at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
    at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
    at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
    at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
    at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
    at org.datanucleus.ExecutionContextImpl.hasPersistenceInformationForClass(ExecutionContextImpl.java:5768)
    at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:258)
    at org.datanucleus.store.query.Query.executeQuery(Query.java:1744)
    at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)
    at org.datanucleus.store.query.Query.execute(Query.java:1654)
    at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)
    at org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6721)
    at org.apache.hadoop.hive.metastore.ObjectStore.getMetaStoreSchemaVersion(ObjectStore.java:6703)
    at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6661)
    at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6645)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
    at com.sun.proxy.$Proxy10.verifySchema(Unknown Source)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:572)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
    at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
    at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
    at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:280)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:214)
    at java.lang.Thread.run(Thread.java:745)
2017-12-21 06:08:29.869 - stdout> 06:08:29.869 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.87 - stdout> 06:08:29.869 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-21 06:08:29.87 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
"jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-21 06:08:29.87 - stdout> at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600) 2017-12-21 06:08:29.87 - stdout> at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) 2017-12-21 06:08:29.87 - stdout> at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) 2017-12-21 06:08:29.87 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-21 06:08:29.87 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-21 06:08:29.871 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-21 06:08:29.871 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-21 06:08:29.871 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) 2017-12-21 06:08:29.871 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) 2017-12-21 06:08:29.871 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) 2017-12-21 06:08:29.871 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) 2017-12-21 06:08:29.871 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) 2017-12-21 06:08:29.871 - 
stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) 2017-12-21 06:08:29.871 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) 2017-12-21 06:08:29.871 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) 2017-12-21 06:08:29.871 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) 2017-12-21 06:08:29.871 - stdout> at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189) 2017-12-21 06:08:29.871 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-21 06:08:29.871 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-21 06:08:29.871 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-21 06:08:29.871 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64) 2017-12-21 06:08:29.871 - stdout> at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582) 2017-12-21 06:08:29.871 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 2017-12-21 06:08:29.871 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 2017-12-21 06:08:29.871 - stdout> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 2017-12-21 06:08:29.871 - stdout> at java.lang.reflect.Method.invoke(Method.java:497) 2017-12-21 06:08:29.871 - stdout> at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237) 2017-12-21 06:08:29.871 - stdout> at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) 2017-12-21 06:08:29.871 - stdout> at 
py4j.Gateway.invoke(Gateway.java:280) 2017-12-21 06:08:29.871 - stdout> at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) 2017-12-21 06:08:29.871 - stdout> at py4j.commands.CallCommand.execute(CallCommand.java:79) 2017-12-21 06:08:29.871 - stdout> at py4j.GatewayConnection.run(GatewayConnection.java:214) 2017-12-21 06:08:29.871 - stdout> at java.lang.Thread.run(Thread.java:745) 2017-12-21 06:08:29.871 - stdout> NestedThrowablesStackTrace: 2017-12-21 06:08:29.871 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-21 06:08:29.871 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-21 06:08:29.871 - stdout> at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145) 2017-12-21 06:08:29.871 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240) 2017-12-21 06:08:29.871 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.store.query.AbstractJDOQLQuery.compileInternal(AbstractJDOQLQuery.java:367) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:240) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.store.query.Query.executeQuery(Query.java:1744) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.store.query.Query.execute(Query.java:1654) 2017-12-21 06:08:29.872 - stdout> at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) 2017-12-21 06:08:29.872 - stdout> at 
org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) 2017-12-21 06:08:29.872 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-21 06:08:29.872 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-21 06:08:29.872 - stdout> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-21 06:08:29.872 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) 2017-12-21 06:08:29.872 - stdout> at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) 2017-12-21 06:08:29.872 - stdout> at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189) 2017-12-21 06:08:29.872 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 2017-12-21 06:08:29.872 - stdout> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 2017-12-21 06:08:29.872 - stdout> at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 2017-12-21 06:08:29.873 - stdout> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64) 2017-12-21 06:08:29.873 - stdout> at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582) 2017-12-21 06:08:29.873 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 2017-12-21 06:08:29.873 - stdout> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 2017-12-21 06:08:29.873 - stdout> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 2017-12-21 06:08:29.873 - stdout> at java.lang.reflect.Method.invoke(Method.java:497) 2017-12-21 06:08:29.873 - stdout> at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237) 2017-12-21 06:08:29.873 - stdout> at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) 2017-12-21 06:08:29.873 - stdout> at py4j.Gateway.invoke(Gateway.java:280) 2017-12-21 06:08:29.873 - stdout> at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) 2017-12-21 06:08:29.873 - stdout> at py4j.commands.CallCommand.execute(CallCommand.java:79) 2017-12-21 06:08:29.873 - stdout> at py4j.GatewayConnection.run(GatewayConnection.java:214) 2017-12-21 06:08:29.873 - stdout> at java.lang.Thread.run(Thread.java:745) 2017-12-21 06:08:29.875 - stdout> 06:08:29.875 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-21 06:08:29.898 - stdout> 06:08:29.898 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo" 2017-12-21 06:08:29.899 - stdout> 06:08:29.898 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: 
2017-12-21 06:08:29.899 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
    at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
    at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
    at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
    at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
    at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
    at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
    at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
    at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
    at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
    at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
    at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
    at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:280)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:214)
    at java.lang.Thread.run(Thread.java:745)
NestedThrowablesStackTrace:
Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
    at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
    at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
    at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
    at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
    at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
    at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960)
    at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952)
    at org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129)
    at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87)
    at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481)
    at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113)
[log truncated here]

sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: spark-submit returned with exit code 1.
Command line: './bin/spark-submit' '--name' 'prepare testing tables' '--master' 'local[2]' '--conf' 'spark.ui.enabled=false' '--conf' 'spark.master.rest.enabled=false' '--conf' 'spark.sql.warehouse.dir=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/warehouse-5e2a46d1-85d7-431f-92c0-832a1fcb2771' '--conf' 'spark.sql.test.version.index=0' '--driver-java-options' '-Dderby.system.home=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/warehouse-5e2a46d1-85d7-431f-92c0-832a1fcb2771' '/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6/target/tmp/test8075371215206768958.py'

2017-12-21 06:08:09.74 - stderr> SLF4J: Class path contains multiple SLF4J bindings.
2017-12-21 06:08:09.74 - stderr> SLF4J: Found binding in [jar:file:/tmp/test-spark/spark-2.0.2/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2017-12-21 06:08:09.74 - stderr> SLF4J: Found binding in [jar:file:/home/sparkivy/per-executor-caches/2/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2017-12-21 06:08:09.74 - stderr> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
2017-12-21 06:08:09.74 - stderr> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2017-12-21 06:08:10.03 - stdout> 06:08:10.030 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2017-12-21 06:08:13.482 - stdout> 06:08:13.482 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus.store.rdbms" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/tmp/test-spark/spark-2.0.2/jars/datanucleus-rdbms-3.2.9.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/home/sparkivy/per-executor-caches/2/.ivy2/cache/org.datanucleus/datanucleus-rdbms/jars/datanucleus-rdbms-3.2.9.jar."
2017-12-21 06:08:13.49 - stdout> 06:08:13.490 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/tmp/test-spark/spark-2.0.2/jars/datanucleus-core-3.2.10.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/home/sparkivy/per-executor-caches/2/.ivy2/cache/org.datanucleus/datanucleus-core/jars/datanucleus-core-3.2.10.jar."
2017-12-21 06:08:13.494 - stdout> 06:08:13.494 WARN DataNucleus.General: Plugin (Bundle) "org.datanucleus.api.jdo" is already registered. Ensure you dont have multiple JAR versions of the same plugin in the classpath. The URL "file:/tmp/test-spark/spark-2.0.2/jars/datanucleus-api-jdo-3.2.6.jar" is already registered, and you are trying to register an identical plugin located at URL "file:/home/sparkivy/per-executor-caches/2/.ivy2/cache/org.datanucleus/datanucleus-api-jdo/jars/datanucleus-api-jdo-3.2.6.jar."
2017-12-21 06:08:29.779 - stdout> 06:08:29.779 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.781 - stdout> 06:08:29.780 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-21 06:08:29.781 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.781 - stdout> 	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
2017-12-21 06:08:29.781 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
2017-12-21 06:08:29.781 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-21 06:08:29.781 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-21 06:08:29.781 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-21 06:08:29.781 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-21 06:08:29.781 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-21 06:08:29.781 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-21 06:08:29.781 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-21 06:08:29.781 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-21 06:08:29.781 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-21 06:08:29.782 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.782 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.782 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.782 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
2017-12-21 06:08:29.782 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.782 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.782 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.782 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
2017-12-21 06:08:29.782 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-21 06:08:29.782 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-21 06:08:29.782 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-21 06:08:29.782 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-21 06:08:29.782 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
2017-12-21 06:08:29.782 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-21 06:08:29.782 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-21 06:08:29.782 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-21 06:08:29.782 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-21 06:08:29.782 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-21 06:08:29.782 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-21 06:08:29.782 - stdout> NestedThrowablesStackTrace:
2017-12-21 06:08:29.782 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.782 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.store.query.AbstractJDOQLQuery.compileInternal(AbstractJDOQLQuery.java:367)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:240)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.store.query.Query.executeQuery(Query.java:1744)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.store.query.Query.execute(Query.java:1654)
2017-12-21 06:08:29.782 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-21 06:08:29.782 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-21 06:08:29.783 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.783 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.783 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.783 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
2017-12-21 06:08:29.783 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.783 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.783 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.783 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
2017-12-21 06:08:29.783 - stdout> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
2017-12-21 06:08:29.783 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-21 06:08:29.783 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-21 06:08:29.783 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-21 06:08:29.783 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-21 06:08:29.783 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
2017-12-21 06:08:29.783 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-21 06:08:29.783 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-21 06:08:29.783 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-21 06:08:29.783 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-21 06:08:29.783 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-21 06:08:29.783 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-21 06:08:29.856 - stdout> 06:08:29.856 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.858 - stdout> 06:08:29.857 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Retrying creating default database after error: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.858 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.858 - stdout> 	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
2017-12-21 06:08:29.858 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6721)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getMetaStoreSchemaVersion(ObjectStore.java:6703)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6661)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6645)
2017-12-21 06:08:29.858 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-21 06:08:29.858 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-21 06:08:29.858 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-21 06:08:29.858 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
2017-12-21 06:08:29.858 - stdout> 	at com.sun.proxy.$Proxy10.verifySchema(Unknown Source)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:572)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-21 06:08:29.858 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.858 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.858 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.858 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
2017-12-21 06:08:29.858 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.858 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.858 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.858 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
2017-12-21 06:08:29.858 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-21 06:08:29.859 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-21 06:08:29.859 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
2017-12-21 06:08:29.859 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-21 06:08:29.859 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-21 06:08:29.859 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-21 06:08:29.859 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-21 06:08:29.859 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-21 06:08:29.859 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-21 06:08:29.859 - stdout> NestedThrowablesStackTrace:
2017-12-21 06:08:29.859 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.859 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.ExecutionContextImpl.hasPersistenceInformationForClass(ExecutionContextImpl.java:5768)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:258)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.store.query.Query.executeQuery(Query.java:1744)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.store.query.Query.execute(Query.java:1654)
2017-12-21 06:08:29.859 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getMSchemaVersion(ObjectStore.java:6721)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.getMetaStoreSchemaVersion(ObjectStore.java:6703)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6661)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6645)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-21 06:08:29.859 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
2017-12-21 06:08:29.859 - stdout> 	at com.sun.proxy.$Proxy10.verifySchema(Unknown Source)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:572)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.859 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.859 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.859 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
2017-12-21 06:08:29.859 - stdout> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
2017-12-21 06:08:29.86 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-21 06:08:29.86 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-21 06:08:29.86 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-21 06:08:29.86 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-21 06:08:29.86 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
2017-12-21 06:08:29.86 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-21 06:08:29.86 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-21 06:08:29.86 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-21 06:08:29.86 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-21 06:08:29.86 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-21 06:08:29.86 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-21 06:08:29.869 - stdout> 06:08:29.869 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.87 - stdout> 06:08:29.869 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-21 06:08:29.87 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.87 - stdout> 	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:600)
2017-12-21 06:08:29.87 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:230)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-21 06:08:29.87 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-21 06:08:29.87 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.87 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.871 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.871 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
2017-12-21 06:08:29.871 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.871 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.871 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.871 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
2017-12-21 06:08:29.871 - stdout> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
2017-12-21 06:08:29.871 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-21 06:08:29.871 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-21 06:08:29.871 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-21 06:08:29.871 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-21 06:08:29.871 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
2017-12-21 06:08:29.871 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-21 06:08:29.871 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-21 06:08:29.871 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-21 06:08:29.871 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-21 06:08:29.871 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-21 06:08:29.871 - stdout> 	at java.lang.Thread.run(Thread.java:745)
2017-12-21 06:08:29.871 - stdout> NestedThrowablesStackTrace:
2017-12-21 06:08:29.871 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.871 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.871 - stdout> 	at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
2017-12-21 06:08:29.871 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
2017-12-21 06:08:29.871 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.store.query.AbstractJDOQLQuery.compileInternal(AbstractJDOQLQuery.java:367)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:240)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.store.query.Query.executeQuery(Query.java:1744)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.store.query.Query.execute(Query.java:1654)
2017-12-21 06:08:29.872 - stdout> 	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
2017-12-21 06:08:29.872 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.872 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.872 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.872 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
2017-12-21 06:08:29.872 - stdout> 	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189)
2017-12-21 06:08:29.872 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2017-12-21 06:08:29.872 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2017-12-21 06:08:29.872 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2017-12-21 06:08:29.873 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
2017-12-21 06:08:29.873 - stdout> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
2017-12-21 06:08:29.873 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
2017-12-21 06:08:29.873 - stdout> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
2017-12-21 06:08:29.873 - stdout> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
2017-12-21 06:08:29.873 - stdout> 	at java.lang.reflect.Method.invoke(Method.java:497)
2017-12-21 06:08:29.873 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
2017-12-21 06:08:29.873 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2017-12-21 06:08:29.873 - stdout> 	at py4j.Gateway.invoke(Gateway.java:280)
2017-12-21 06:08:29.873 - stdout> 	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
2017-12-21 06:08:29.873 - stdout> 	at py4j.commands.CallCommand.execute(CallCommand.java:79)
2017-12-21 06:08:29.873 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:214)
2017-12-21 06:08:29.873 - stdout> 	at java.lang.Thread.run(Thread.java:745)
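Every failure recorded above bottoms out in the same root cause: DataNucleus cannot open the package.jdo metadata entry inside the bundled hive-metastore jar. In practice this usually points at a corrupt or truncated jar (for example, a partially downloaded Spark archive under /tmp/test-spark) rather than at metastore configuration. A minimal sketch for checking that hypothesis, assuming the jar path reported in the log (jar files are ordinary zip archives, so Python's standard zipfile module suffices):

    import zipfile

    # Jar path taken from the log above; an assumption about the local layout.
    JAR = "/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar"

    with zipfile.ZipFile(JAR) as jar:
        # testzip() re-reads every entry and returns the name of the first
        # corrupt one, or None if the archive checks out.
        bad = jar.testzip()
        if bad is not None:
            print("corrupt entry: %s (re-download the Spark archive)" % bad)
        else:
            # The exact entry DataNucleus failed to open; a KeyError here
            # means the entry is missing from the jar altogether.
            data = jar.read("package.jdo")
            print("package.jdo readable, %d bytes" % len(data))

If the archive verifies cleanly, the duplicate DataNucleus plugin registrations warned about earlier in the log (the same jars present in both the Spark distribution and the ivy cache) would be the next thing to rule out.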
2017-12-21 06:08:29.875 - stdout> 06:08:29.875 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.898 - stdout> 06:08:29.898 ERROR DataNucleus.MetaData: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.899 - stdout> 06:08:29.898 WARN org.apache.hadoop.hive.metastore.MetaStoreDirectSql: Database initialization failed; direct SQL is disabled
2017-12-21 06:08:29.899 - stdout> javax.jdo.JDOException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
	[... 61 stack frames elided: near-identical to the JDOException trace above, differing only in the absence of the Hive.getAllDatabases, Hive.reloadFunctions, and Hive.<clinit> frames ...]
2017-12-21 06:08:29.9 - stdout> NestedThrowablesStackTrace:
2017-12-21 06:08:29.9 - stdout> Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.9 - stdout> org.datanucleus.exceptions.NucleusException: Error opening the Meta-Data file "jar:file:/tmp/test-spark/spark-2.0.2/jars/hive-metastore-1.2.1.spark2.jar!/package.jdo"
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.metadata.xml.MetaDataParser.parseMetaDataURL(MetaDataParser.java:145)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.parseFile(JDOMetaDataManager.java:240)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.loadXMLMetaDataForClass(JDOMetaDataManager.java:773)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.api.jdo.metadata.JDOMetaDataManager.getMetaDataForClassInternal(JDOMetaDataManager.java:383)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.metadata.MetaDataManager.getMetaDataForClass(MetaDataManager.java:1570)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:960)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.getType(JavaQueryCompiler.java:952)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.query.expression.PrimaryExpression.bind(PrimaryExpression.java:129)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.query.expression.DyadicExpression.bind(DyadicExpression.java:87)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.query.compiler.JavaQueryCompiler.compileFilter(JavaQueryCompiler.java:481)
2017-12-21 06:08:29.9 - stdout> 	at org.datanucleus.query.compiler.JDOQLCompiler.compile(JDOQLCompiler.java:113)