Details for spark-branch-2.2-test-sbt-hadoop-2.6 build #192

View on Jenkins

Duration
169 minutes
Start time
2017-06-13 14:42:32
Commit
039c465062b15aa8793fa72d19f3d2ff3b53a99c
Executor
amp-jenkins-worker-03
Status
FAILURE

Failed tests

org.apache.spark.sql.hive.client.VersionsSuite: 13: create client 797878 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: createDatabase 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: setCurrentDatabase 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getDatabase 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: databaseExists 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: listDatabases 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: alterDatabase 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: dropDatabase 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: createTable 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: loadTable 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: tableExists 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getTable 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getTableOption 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: alterTable(table: CatalogTable) 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: alterTable(tableName: String, table: CatalogTable) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: listTables(database) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: listTables(database, pattern) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: dropTable 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: sql create partitioned table 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: createPartitions 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getPartitionNames(catalogTable) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getPartitions(catalogTable) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getPartitionsByFilter 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getPartition 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getPartitionOption(db: String, table: String, spec: TablePartitionSpec) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getPartitionOption(table: CatalogTable, spec: TablePartitionSpec) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getPartitions(db: String, table: String) 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: loadPartition 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: loadDynamicPartitions 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: renamePartitions 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: alterPartitions 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: dropPartitions 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: createFunction 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: functionExists 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: renameFunction 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: alterFunction 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getFunction 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getFunctionOption 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: listFunctions 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: dropFunction 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: sql set command 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: sql create index and reset 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: version 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: getConf 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: setOut 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: setInfo 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: setError 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: newSession 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: withHiveState and addJar 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: reset 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 13: CREATE TABLE AS SELECT 925 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: create client 60087 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: createDatabase 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: setCurrentDatabase 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getDatabase 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: databaseExists 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: listDatabases 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: alterDatabase 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: dropDatabase 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: createTable 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: loadTable 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: tableExists 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getTable 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getTableOption 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: alterTable(table: CatalogTable) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: alterTable(tableName: String, table: CatalogTable) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: listTables(database) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: listTables(database, pattern) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: dropTable 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: sql create partitioned table 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: createPartitions 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getPartitionNames(catalogTable) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getPartitions(catalogTable) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getPartitionsByFilter 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getPartition 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getPartitionOption(db: String, table: String, spec: TablePartitionSpec) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getPartitionOption(table: CatalogTable, spec: TablePartitionSpec) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getPartitions(db: String, table: String) 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: loadPartition 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: loadDynamicPartitions 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: renamePartitions 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: alterPartitions 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: dropPartitions 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: createFunction 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: functionExists 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: renameFunction 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: alterFunction 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getFunction 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getFunctionOption 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: listFunctions 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: dropFunction 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: sql set command 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: sql create index and reset 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: version 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: getConf 1 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: setOut 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: setInfo 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: setError 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: newSession 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: withHiveState and addJar 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: reset 0 ms
org.apache.spark.sql.hive.client.VersionsSuite: 14: CREATE TABLE AS SELECT 850 ms

Test time report

Right click on the visualization to go back up a level. Click on a node to expand it. Hover over a node to see the combined duration of tests under that node.