sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: spark-submit returned with exit code 1.
Command line: './bin/spark-submit' '--name' 'prepare testing tables' '--master' 'local[2]' '--conf' 'spark.ui.enabled=false' '--conf' 'spark.master.rest.enabled=false' '--conf' 'spark.sql.warehouse.dir=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6-ubuntu-test/target/tmp/warehouse-25c42b7a-b90f-4fe6-8eaa-d7ff1ee56551' '--conf' 'spark.sql.test.version.index=1' '--driver-java-options' '-Dderby.system.home=/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6-ubuntu-test/target/tmp/warehouse-25c42b7a-b90f-4fe6-8eaa-d7ff1ee56551' '/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6-ubuntu-test/target/tmp/test3168909500728411004.py'

2018-09-04 13:06:16.133 - stderr> SLF4J: Class path contains multiple SLF4J bindings.
2018-09-04 13:06:16.134 - stderr> SLF4J: Found binding in [jar:file:/tmp/test-spark/spark-2.2.2/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2018-09-04 13:06:16.134 - stderr> SLF4J: Found binding in [jar:file:/home/jenkins/sparkivy/per-executor-caches/13/.ivy2/cache/org.slf4j/slf4j-log4j12/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
2018-09-04 13:06:16.134 - stderr> SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
2018-09-04 13:06:16.139 - stderr> SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
2018-09-04 13:06:16.294 - stdout> 13:06:16.293 WARN org.apache.spark.util.Utils: Your hostname, amp-jenkins-staging-worker-02 resolves to a loopback address: 127.0.1.1; using 192.168.10.32 instead (on interface eno1)
2018-09-04 13:06:16.295 - stdout> 13:06:16.295 WARN org.apache.spark.util.Utils: Set SPARK_LOCAL_IP if you need to bind to another address
2018-09-04 13:06:17.321 - stdout> 13:06:17.321 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2018-09-04 13:06:17.997 - stdout> 13:06:17.997 ERROR org.apache.spark.SparkContext: Error initializing SparkContext.
2018-09-04 13:06:17.998 - stdout> java.util.ServiceConfigurationError: org.apache.hadoop.fs.FileSystem: Error reading configuration file
2018-09-04 13:06:17.998 - stdout> 	at java.util.ServiceLoader.fail(ServiceLoader.java:232)
2018-09-04 13:06:17.998 - stdout> 	at java.util.ServiceLoader.parse(ServiceLoader.java:309)
2018-09-04 13:06:17.998 - stdout> 	at java.util.ServiceLoader.access$200(ServiceLoader.java:185)
2018-09-04 13:06:17.998 - stdout> 	at java.util.ServiceLoader$LazyIterator.hasNextService(ServiceLoader.java:357)
2018-09-04 13:06:17.998 - stdout> 	at java.util.ServiceLoader$LazyIterator.hasNext(ServiceLoader.java:393)
2018-09-04 13:06:17.998 - stdout> 	at java.util.ServiceLoader$1.hasNext(ServiceLoader.java:474)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.hadoop.fs.FileSystem.loadFileSystems(FileSystem.java:2628)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2650)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2667)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2703)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2685)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:373)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.spark.SparkContext.addFile(SparkContext.scala:1534)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.spark.SparkContext.addFile(SparkContext.scala:1505)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:470)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:470)
2018-09-04 13:06:17.998 - stdout> 	at scala.collection.immutable.List.foreach(List.scala:381)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.spark.SparkContext.<init>(SparkContext.scala:470)
2018-09-04 13:06:17.998 - stdout> 	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:58)
2018-09-04 13:06:17.998 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2018-09-04 13:06:17.998 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2018-09-04 13:06:17.998 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2018-09-04 13:06:17.998 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
2018-09-04 13:06:17.998 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:247)
2018-09-04 13:06:17.998 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2018-09-04 13:06:17.998 - stdout> 	at py4j.Gateway.invoke(Gateway.java:238)
2018-09-04 13:06:17.998 - stdout> 	at py4j.commands.ConstructorCommand.invokeConstructor(ConstructorCommand.java:80)
2018-09-04 13:06:17.998 - stdout> 	at py4j.commands.ConstructorCommand.execute(ConstructorCommand.java:69)
2018-09-04 13:06:17.998 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:238)
2018-09-04 13:06:17.998 - stdout> 	at java.lang.Thread.run(Thread.java:748)
2018-09-04 13:06:17.998 - stdout> Caused by: java.io.FileNotFoundException: /tmp/test-spark/spark-2.2.2/jars/hadoop-hdfs-2.7.3.jar (No such file or directory)
2018-09-04 13:06:17.999 - stdout> 	at java.util.zip.ZipFile.open(Native Method)
2018-09-04 13:06:17.999 - stdout> 	at java.util.zip.ZipFile.<init>(ZipFile.java:225)
2018-09-04 13:06:17.999 - stdout> 	at java.util.zip.ZipFile.<init>(ZipFile.java:155)
2018-09-04 13:06:17.999 - stdout> 	at java.util.jar.JarFile.<init>(JarFile.java:166)
2018-09-04 13:06:17.999 - stdout> 	at java.util.jar.JarFile.<init>(JarFile.java:103)
2018-09-04 13:06:17.999 - stdout> 	at sun.net.www.protocol.jar.URLJarFile.<init>(URLJarFile.java:93)
2018-09-04 13:06:17.999 - stdout> 	at sun.net.www.protocol.jar.URLJarFile.getJarFile(URLJarFile.java:69)
2018-09-04 13:06:17.999 - stdout> 	at sun.net.www.protocol.jar.JarFileFactory.get(JarFileFactory.java:84)
2018-09-04 13:06:17.999 - stdout> 	at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:122)
2018-09-04 13:06:17.999 - stdout> 	at sun.net.www.protocol.jar.JarURLConnection.getInputStream(JarURLConnection.java:152)
2018-09-04 13:06:17.999 - stdout> 	at java.net.URL.openStream(URL.java:1045)
2018-09-04 13:06:17.999 - stdout> 	at java.util.ServiceLoader.parse(ServiceLoader.java:304)
2018-09-04 13:06:17.999 - stdout> 	... 30 more
2018-09-04 13:06:18.019 - stdout> Traceback (most recent call last):
2018-09-04 13:06:18.019 - stdout>   File "/home/jenkins/workspace/spark-master-test-sbt-hadoop-2.6-ubuntu-test/target/tmp/test3168909500728411004.py", line 5, in <module>
2018-09-04 13:06:18.019 - stdout>     spark = SparkSession.builder.enableHiveSupport().getOrCreate()
2018-09-04 13:06:18.019 - stdout>   File "/tmp/test-spark/spark-2.2.2/python/lib/pyspark.zip/pyspark/sql/session.py", line 173, in getOrCreate
2018-09-04 13:06:18.02 - stdout>   File "/tmp/test-spark/spark-2.2.2/python/lib/pyspark.zip/pyspark/context.py", line 334, in getOrCreate
2018-09-04 13:06:18.02 - stdout>   File "/tmp/test-spark/spark-2.2.2/python/lib/pyspark.zip/pyspark/context.py", line 118, in __init__
2018-09-04 13:06:18.02 - stdout>   File "/tmp/test-spark/spark-2.2.2/python/lib/pyspark.zip/pyspark/context.py", line 180, in _do_init
2018-09-04 13:06:18.02 - stdout>   File "/tmp/test-spark/spark-2.2.2/python/lib/pyspark.zip/pyspark/context.py", line 273, in _initialize_context
2018-09-04 13:06:18.02 - stdout>   File "/tmp/test-spark/spark-2.2.2/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py", line 1525, in __call__
2018-09-04 13:06:18.021 - stdout>   File "/tmp/test-spark/spark-2.2.2/python/lib/py4j-0.10.7-src.zip/py4j/protocol.py", line 328, in get_return_value
2018-09-04 13:06:18.022 - stdout> py4j.protocol.Py4JJavaError: An error occurred while calling None.org.apache.spark.api.java.JavaSparkContext.
2018-09-04 13:06:18.022 - stdout> : java.util.ServiceConfigurationError: org.apache.hadoop.fs.FileSystem: Error reading configuration file
2018-09-04 13:06:18.022 - stdout> 	at java.util.ServiceLoader.fail(ServiceLoader.java:232)
2018-09-04 13:06:18.022 - stdout> 	at java.util.ServiceLoader.parse(ServiceLoader.java:309)
2018-09-04 13:06:18.022 - stdout> 	at java.util.ServiceLoader.access$200(ServiceLoader.java:185)
2018-09-04 13:06:18.022 - stdout> 	at java.util.ServiceLoader$LazyIterator.hasNextService(ServiceLoader.java:357)
2018-09-04 13:06:18.022 - stdout> 	at java.util.ServiceLoader$LazyIterator.hasNext(ServiceLoader.java:393)
2018-09-04 13:06:18.022 - stdout> 	at java.util.ServiceLoader$1.hasNext(ServiceLoader.java:474)
2018-09-04 13:06:18.022 - stdout> 	at org.apache.hadoop.fs.FileSystem.loadFileSystems(FileSystem.java:2628)
2018-09-04 13:06:18.022 - stdout> 	at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2650)
2018-09-04 13:06:18.022 - stdout> 	at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2667)
2018-09-04 13:06:18.022 - stdout> 	at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)
2018-09-04 13:06:18.022 - stdout> 	at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2703)
2018-09-04 13:06:18.022 - stdout> 	at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2685)
2018-09-04 13:06:18.022 - stdout> 	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:373)
2018-09-04 13:06:18.023 - stdout> 	at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295)
2018-09-04 13:06:18.023 - stdout> 	at org.apache.spark.SparkContext.addFile(SparkContext.scala:1534)
2018-09-04 13:06:18.023 - stdout> 	at org.apache.spark.SparkContext.addFile(SparkContext.scala:1505)
2018-09-04 13:06:18.023 - stdout> 	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:470)
2018-09-04 13:06:18.023 - stdout> 	at org.apache.spark.SparkContext$$anonfun$13.apply(SparkContext.scala:470)
2018-09-04 13:06:18.023 - stdout> 	at scala.collection.immutable.List.foreach(List.scala:381)
2018-09-04 13:06:18.023 - stdout> 	at org.apache.spark.SparkContext.<init>(SparkContext.scala:470)
2018-09-04 13:06:18.023 - stdout> 	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:58)
2018-09-04 13:06:18.023 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
2018-09-04 13:06:18.023 - stdout> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
2018-09-04 13:06:18.023 - stdout> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
2018-09-04 13:06:18.023 - stdout> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
2018-09-04 13:06:18.023 - stdout> 	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:247)
2018-09-04 13:06:18.023 - stdout> 	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
2018-09-04 13:06:18.023 - stdout> 	at py4j.Gateway.invoke(Gateway.java:238)
2018-09-04 13:06:18.023 - stdout> 	at py4j.commands.ConstructorCommand.invokeConstructor(ConstructorCommand.java:80)
2018-09-04 13:06:18.023 - stdout> 	at py4j.commands.ConstructorCommand.execute(ConstructorCommand.java:69)
2018-09-04 13:06:18.023 - stdout> 	at py4j.GatewayConnection.run(GatewayConnection.java:238)
2018-09-04 13:06:18.023 - stdout> 	at java.lang.Thread.run(Thread.java:748)
2018-09-04 13:06:18.023 - stdout> Caused by: java.io.FileNotFoundException: /tmp/test-spark/spark-2.2.2/jars/hadoop-hdfs-2.7.3.jar (No such file or directory)
2018-09-04 13:06:18.023 - stdout> 	at java.util.zip.ZipFile.open(Native Method)
2018-09-04 13:06:18.023 - stdout> 	at java.util.zip.ZipFile.<init>(ZipFile.java:225)
2018-09-04 13:06:18.023 - stdout> 	at java.util.zip.ZipFile.<init>(ZipFile.java:155)
2018-09-04 13:06:18.023 - stdout> 	at java.util.jar.JarFile.<init>(JarFile.java:166)
2018-09-04 13:06:18.023 - stdout> 	at java.util.jar.JarFile.<init>(JarFile.java:103)
2018-09-04 13:06:18.023 - stdout> 	at sun.net.www.protocol.jar.URLJarFile.<init>(URLJarFile.java:93)
2018-09-04 13:06:18.023 - stdout> 	at sun.net.www.protocol.jar.URLJarFile.getJarFile(URLJarFile.java:69)
2018-09-04 13:06:18.023 - stdout> 	at sun.net.www.protocol.jar.JarFileFactory.get(JarFileFactory.java:84)
2018-09-04 13:06:18.023 - stdout> 	at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:122)
2018-09-04 13:06:18.023 - stdout> 	at sun.net.www.protocol.jar.JarURLConnection.getInputStream(JarURLConnection.java:152)
2018-09-04 13:06:18.023 - stdout> 	at java.net.URL.openStream(URL.java:1045)
2018-09-04 13:06:18.023 - stdout> 	at java.util.ServiceLoader.parse(ServiceLoader.java:304)
2018-09-04 13:06:18.023 - stdout> 	... 30 more
2018-09-04 13:06:18.023 - stdout> 
2018-09-04 13:06:18.068 - stdout> 13:06:18.068 ERROR org.apache.spark.util.Utils: Uncaught exception in thread Thread-5
2018-09-04 13:06:18.068 - stdout> java.lang.NoClassDefFoundError: Could not initialize class java.nio.file.FileSystems$DefaultFileSystemHolder
2018-09-04 13:06:18.068 - stdout> 	at java.nio.file.FileSystems.getDefault(FileSystems.java:176)
2018-09-04 13:06:18.068 - stdout> 	at java.nio.file.Paths.get(Paths.java:138)
2018-09-04 13:06:18.068 - stdout> 	at org.apache.spark.util.Utils$.isSymlink(Utils.scala:1045)
2018-09-04 13:06:18.068 - stdout> 	at org.apache.spark.util.Utils$.deleteRecursively(Utils.scala:1015)
2018-09-04 13:06:18.068 - stdout> 	at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:65)
2018-09-04 13:06:18.068 - stdout> 	at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:62)
2018-09-04 13:06:18.068 - stdout> 	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
2018-09-04 13:06:18.068 - stdout> 	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
2018-09-04 13:06:18.068 - stdout> 	at org.apache.spark.util.ShutdownHookManager$$anonfun$1.apply$mcV$sp(ShutdownHookManager.scala:62)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:216)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1951)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply$mcV$sp(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at scala.util.Try$.apply(Try.scala:192)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:178)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:54)
2018-09-04 13:06:18.069 - stdout> 13:06:18.069 WARN org.apache.hadoop.util.ShutdownHookManager: ShutdownHook '$anon$2' failed, java.lang.NoClassDefFoundError: Could not initialize class java.nio.file.FileSystems$DefaultFileSystemHolder
2018-09-04 13:06:18.069 - stdout> java.lang.NoClassDefFoundError: Could not initialize class java.nio.file.FileSystems$DefaultFileSystemHolder
2018-09-04 13:06:18.069 - stdout> 	at java.nio.file.FileSystems.getDefault(FileSystems.java:176)
2018-09-04 13:06:18.069 - stdout> 	at java.nio.file.Paths.get(Paths.java:138)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.Utils$.isSymlink(Utils.scala:1045)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.Utils$.deleteRecursively(Utils.scala:1015)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:65)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:62)
2018-09-04 13:06:18.069 - stdout> 	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
2018-09-04 13:06:18.069 - stdout> 	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.ShutdownHookManager$$anonfun$1.apply$mcV$sp(ShutdownHookManager.scala:62)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:216)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1951)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply$mcV$sp(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at scala.util.Try$.apply(Try.scala:192)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:188)
2018-09-04 13:06:18.069 - stdout> 	at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:178)
2018-09-04 13:06:18.07 - stdout> 	at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:54)
           
	at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:528)
	at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
	at org.scalatest.Assertions$class.fail(Assertions.scala:1089)
	at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
	at org.apache.spark.sql.hive.SparkSubmitTestUtils$class.runSparkSubmit(SparkSubmitTestUtils.scala:84)
	at org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite.runSparkSubmit(HiveExternalCatalogVersionsSuite.scala:43)
	at org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite$$anonfun$beforeAll$1.apply(HiveExternalCatalogVersionsSuite.scala:184)
	at org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite$$anonfun$beforeAll$1.apply(HiveExternalCatalogVersionsSuite.scala:169)
	at scala.collection.immutable.List.foreach(List.scala:392)
	at org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite.beforeAll(HiveExternalCatalogVersionsSuite.scala:169)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:212)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:52)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)