Query [id = e96095da-0755-4483-ab82-970946a972eb, runId = 97b2019d-4cec-471c-b5d4-dfdd8ce7ceb3] terminated with exception: Writing job aborted.
org.apache.spark.sql.streaming.StreamingQueryException: Writing job aborted.
=== Streaming Query ===
Identifier: [id = e96095da-0755-4483-ab82-970946a972eb, runId = 97b2019d-4cec-471c-b5d4-dfdd8ce7ceb3]
Current Committed Offsets: {}
Current Available Offsets: {KafkaSourceV1[Subscribe[topic-29]]: {"topic-29":{"0":10}}}
Current State: ACTIVE
Thread State: RUNNABLE
Logical Plan:
WriteToMicroBatchDataSource org.apache.spark.sql.execution.streaming.sources.ForeachWriterTable$$anon$1$$anon$2@4a2a97dd
+- SerializeFromObject [input[0, int, false] AS value#8319]
   +- MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$4259/2012495678@7f34844, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#8318: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#8317: scala.Tuple2
         +- Project [cast(key#8293 as string) AS key#8307, cast(value#8294 as string) AS value#8308]
            +- StreamingExecutionRelation KafkaSourceV1[Subscribe[topic-29]], [key#8293, value#8294, topic#8295, partition#8296, offset#8297L, timestamp#8298, timestampType#8299]
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:352)
at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:243)
Cause: org.apache.spark.SparkException: Writing job aborted.
at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.doWrite(WriteToDataSourceV2Exec.scala:227)
at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.doWrite$(WriteToDataSourceV2Exec.scala:178)
at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doWrite(WriteToDataSourceV2Exec.scala:141)
at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doExecute(WriteToDataSourceV2Exec.scala:148)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:131)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:155)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247)
at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:296)
at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3371)
at org.apache.spark.sql.Dataset.$anonfun$collect$1(Dataset.scala:2720)
at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3360)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3356)
at org.apache.spark.sql.Dataset.collect(Dataset.scala:2720)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$15(MicroBatchExecution.scala:555)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$14(MicroBatchExecution.scala:550)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:327)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:325)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:550)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:210)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:327)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:325)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:178)
at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:172)
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:331)
at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:243)
Cause: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 144.0 failed 1 times, most recent failure: Lost task 0.0 in stage 144.0 (TID 737, localhost, executor driver): org.scalatest.exceptions.TestFailedDueToTimeoutException: The code passed to eventually never returned normally. Attempted 309 times over 1.0029187045666665 minutes. Last failure message: assertion failed: /brokers/topics/topic-29 still exists.
at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:432)
at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308)
at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307)
at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletionWithRetries(KafkaTestUtils.scala:417)
at org.apache.spark.sql.kafka010.KafkaTestUtils.deleteTopic(KafkaTestUtils.scala:226)
at org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$anon$1.open(KafkaMicroBatchSourceSuite.scala:581)
at org.apache.spark.sql.execution.streaming.sources.ForeachDataWriter.<init>(ForeachWriterTable.scala:132)
at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:111)
at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:103)
at org.apache.spark.sql.execution.streaming.sources.MicroBatchWriterFactory.createWriter(MicroBatchWrite.scala:48)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2Exec.scala:247)
at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.$anonfun$doWrite$2(WriteToDataSourceV2Exec.scala:199)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:121)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:428)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1349)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:431)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.AssertionError: assertion failed: /brokers/topics/topic-29 still exists
at scala.Predef$.assert(Predef.scala:223)
at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletion(KafkaTestUtils.scala:389)
at org.apache.spark.sql.kafka010.KafkaTestUtils.$anonfun$verifyTopicDeletionWithRetries$1(KafkaTestUtils.scala:419)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.scalatest.concurrent.Eventually.makeAValiantAttempt$1(Eventually.scala:395)
at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:409)
... 23 more
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:1945)
at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:1933)
at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:1932)
at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1932)
at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:942)
at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:942)
at scala.Option.foreach(Option.scala:274)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:942)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2162)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2111)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2100)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:746)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2013)
at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.doWrite(WriteToDataSourceV2Exec.scala:196)
at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.doWrite$(WriteToDataSourceV2Exec.scala:178)
at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doWrite(WriteToDataSourceV2Exec.scala:141)
at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doExecute(WriteToDataSourceV2Exec.scala:148)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:131)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:155)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247)
at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:296)
at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3371)
at org.apache.spark.sql.Dataset.$anonfun$collect$1(Dataset.scala:2720)
at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3360)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3356)
at org.apache.spark.sql.Dataset.collect(Dataset.scala:2720)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$15(MicroBatchExecution.scala:555)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$14(MicroBatchExecution.scala:550)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:327)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:325)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:550)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:210)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:327)
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:325)
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:178)
at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:172)
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:331)
at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:243)
Cause: org.scalatest.exceptions.TestFailedDueToTimeoutException: The code passed to eventually never returned normally. Attempted 309 times over 1.0029187045666665 minutes. Last failure message: assertion failed: /brokers/topics/topic-29 still exists.
at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:432)
at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308)
at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307)
at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletionWithRetries(KafkaTestUtils.scala:417)
at org.apache.spark.sql.kafka010.KafkaTestUtils.deleteTopic(KafkaTestUtils.scala:226)
at org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$anon$1.open(KafkaMicroBatchSourceSuite.scala:581)
at org.apache.spark.sql.execution.streaming.sources.ForeachDataWriter.<init>(ForeachWriterTable.scala:132)
at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:111)
at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:103)
at org.apache.spark.sql.execution.streaming.sources.MicroBatchWriterFactory.createWriter(MicroBatchWrite.scala:48)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2Exec.scala:247)
at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.$anonfun$doWrite$2(WriteToDataSourceV2Exec.scala:199)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:121)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:428)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1349)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:431)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Cause: java.lang.AssertionError: assertion failed: /brokers/topics/topic-29 still exists
at scala.Predef$.assert(Predef.scala:223)
at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletion(KafkaTestUtils.scala:389)
at org.apache.spark.sql.kafka010.KafkaTestUtils.$anonfun$verifyTopicDeletionWithRetries$1(KafkaTestUtils.scala:419)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.scalatest.concurrent.Eventually.makeAValiantAttempt$1(Eventually.scala:395)
at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:409)
at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308)
at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307)
at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletionWithRetries(KafkaTestUtils.scala:417)
at org.apache.spark.sql.kafka010.KafkaTestUtils.deleteTopic(KafkaTestUtils.scala:226)
at org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$anon$1.open(KafkaMicroBatchSourceSuite.scala:581)
at org.apache.spark.sql.execution.streaming.sources.ForeachDataWriter.<init>(ForeachWriterTable.scala:132)
at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:111)
at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:103)
at org.apache.spark.sql.execution.streaming.sources.MicroBatchWriterFactory.createWriter(MicroBatchWrite.scala:48)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2Exec.scala:247)
at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.$anonfun$doWrite$2(WriteToDataSourceV2Exec.scala:199)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:121)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:428)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1349)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:431)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
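
The executor-side frames show why the write aborts: the test's ForeachWriter calls KafkaTestUtils.deleteTopic from open() (KafkaMicroBatchSourceSuite.scala:581), and deleteTopic's eventually-based verification gave up after 309 attempts over roughly one minute because the /brokers/topics/topic-29 znode was never removed from ZooKeeper. Below is a minimal sketch of the shape of that failing path, reconstructed from the frames and the logical plan above. The startQuery wrapper, the brokerAddress option wiring, and the empty process/close bodies are illustrative assumptions, not the suite's exact code; KafkaTestUtils is a test-only helper in the spark-sql-kafka-0-10 module, not a public API.

import org.apache.spark.sql.{ForeachWriter, SparkSession}
import org.apache.spark.sql.kafka010.KafkaTestUtils
import org.apache.spark.sql.streaming.StreamingQuery

// Sketch only: mirrors the plan above (cast key/value to string, map Tuple2 -> Int,
// write via foreach), with a writer whose open() deletes the source topic.
def startQuery(spark: SparkSession, testUtils: KafkaTestUtils, topic: String): StreamingQuery = {
  import spark.implicits._

  val kafka = spark.readStream
    .format("kafka")
    .option("kafka.bootstrap.servers", testUtils.brokerAddress) // assumption: suite's broker
    .option("subscribe", topic)
    .load()
    .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
    .as[(String, String)]

  kafka.map(kv => kv._2.toInt).writeStream
    .foreach(new ForeachWriter[Int] {
      override def open(partitionId: Long, epochId: Long): Boolean = {
        // This is the call on the executor task's stack in the trace above;
        // it blocks in verifyTopicDeletionWithRetries until the topic is gone.
        testUtils.deleteTopic(topic)
        true
      }
      override def process(value: Int): Unit = ()              // assumption: rows ignored here
      override def close(errorOrNull: Throwable): Unit = ()    // assumption: nothing to clean up
    })
    .start()
}

Topic deletion in Kafka completes asynchronously on the broker side, which is presumably why the verification is wrapped in eventually with retries in the first place, and why it can still time out on a slow or heavily loaded machine, surfacing as the TestFailedDueToTimeoutException that aborts the write stage.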