Query [id = 80700dfe-4d2d-4051-aed4-1aa5afebadec, runId = 57646de3-bed7-4f2c-9578-ded01fd978ab] terminated with exception: Writing job aborted.

      org.apache.spark.sql.streaming.StreamingQueryException: Writing job aborted.
=== Streaming Query ===
Identifier: [id = 80700dfe-4d2d-4051-aed4-1aa5afebadec, runId = 57646de3-bed7-4f2c-9578-ded01fd978ab]
Current Committed Offsets: {}
Current Available Offsets: {KafkaSourceV1[Subscribe[topic-35]]: {"topic-35":{"0":10}}}

Current State: ACTIVE
Thread State: RUNNABLE

Logical Plan:
WriteToMicroBatchDataSource org.apache.spark.sql.execution.streaming.sources.ForeachWriterTable$$anon$1$$anon$2@52c08376
+- SerializeFromObject [input[0, int, false] AS value#16981]
   +- MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$5556/1985822273@400fe93c, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#16980: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#16979: scala.Tuple2
         +- Project [cast(key#16955 as string) AS key#16969, cast(value#16956 as string) AS value#16970]
            +- StreamingExecutionRelation KafkaSourceV1[Subscribe[topic-35]], [key#16955, value#16956, topic#16957, partition#16958, offset#16959L, timestamp#16960, timestampType#16961]

      at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:353)
      at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
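
For context, the logical plan above corresponds to a test query that subscribes to topic-35, casts the Kafka key/value columns to strings, and maps each record to an Int before handing it to a ForeachWriter sink. Below is a minimal Scala sketch of such a query, reconstructed from the plan and the KafkaMicroBatchSourceSuiteBase frames further down; the option values and the testUtils helper usage are assumptions, not the suite's exact code.

    import org.apache.spark.sql.ForeachWriter
    import spark.implicits._  // assumes a SparkSession named `spark` in scope

    val input = spark.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", testUtils.brokerAddress)  // assumption: suite's KafkaTestUtils
      .option("subscribe", "topic-35")
      .load()
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")  // the Project node above
      .as[(String, String)]                                        // the DeserializeToObject node
      .map(_._2.toInt)                                             // the MapElements/SerializeFromObject nodes

    val query = input.writeStream
      .foreach(new ForeachWriter[Int] {
        // The frames further down show the test's writer deleting the
        // topic from inside open(), which is the call that times out.
        override def open(partitionId: Long, epochId: Long): Boolean = {
          testUtils.deleteTopic("topic-35")  // assumption: mirrors KafkaMicroBatchSourceSuite.scala:585
          true
        }
        override def process(value: Int): Unit = ()
        override def close(errorOrNull: Throwable): Unit = ()
      })
      .start()
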
      Cause: org.apache.spark.SparkException: Writing job aborted.
      at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2(WriteToDataSourceV2Exec.scala:404)
      at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2$(WriteToDataSourceV2Exec.scala:355)
      at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.writeWithV2(WriteToDataSourceV2Exec.scala:318)
      at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doExecute(WriteToDataSourceV2Exec.scala:325)
      at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
      at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
      at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
      at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
      at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
      at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:329)
      at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:378)
      at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3416)
      at org.apache.spark.sql.Dataset.$anonfun$collect$1(Dataset.scala:2746)
      at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3406)
      at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
      at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
      at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
      at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3402)
      at org.apache.spark.sql.Dataset.collect(Dataset.scala:2746)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$16(MicroBatchExecution.scala:557)
      at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
      at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
      at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$15(MicroBatchExecution.scala:552)
      at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:328)
      at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:326)
      at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:552)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:213)
      at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
      at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:328)
      at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:326)
      at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:181)
      at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:175)
      at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:332)
      at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
      Cause: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 162.0 failed 1 times, most recent failure: Lost task 0.0 in stage 162.0 (TID 844, 192.168.10.32, executor driver): org.scalatest.exceptions.TestFailedDueToTimeoutException: The code passed to eventually never returned normally. Attempted 309 times over 1.0029671913333333 minutes. Last failure message: assertion failed: /brokers/topics/topic-35 still exists.
	at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:432)
	at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
	at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
	at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
	at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308)
	at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307)
	at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
	at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletionWithRetries(KafkaTestUtils.scala:554)
	at org.apache.spark.sql.kafka010.KafkaTestUtils.deleteTopic(KafkaTestUtils.scala:342)
	at org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$anon$1.open(KafkaMicroBatchSourceSuite.scala:585)
	at org.apache.spark.sql.execution.streaming.sources.ForeachDataWriter.<init>(ForeachWriterTable.scala:136)
	at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:115)
	at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:107)
	at org.apache.spark.sql.execution.streaming.sources.MicroBatchWriterFactory.createWriter(MicroBatchWrite.scala:48)
	at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2Exec.scala:424)
	at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.$anonfun$writeWithV2$2(WriteToDataSourceV2Exec.scala:376)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:127)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:455)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:458)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.AssertionError: assertion failed: /brokers/topics/topic-35 still exists
	at scala.Predef$.assert(Predef.scala:223)
	at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletion(KafkaTestUtils.scala:526)
	at org.apache.spark.sql.kafka010.KafkaTestUtils.$anonfun$verifyTopicDeletionWithRetries$1(KafkaTestUtils.scala:556)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.scalatest.concurrent.Eventually.makeAValiantAttempt$1(Eventually.scala:395)
	at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:409)
	... 23 more

Driver stacktrace:
      at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:1979)
      at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:1967)
      at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:1966)
      at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
      at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
      at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1966)
      at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:946)
      at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:946)
      at scala.Option.foreach(Option.scala:407)
      at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:946)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2196)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2145)
      at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2134)
      at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
      at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:748)
      at org.apache.spark.SparkContext.runJob(SparkContext.scala:2080)
      at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2(WriteToDataSourceV2Exec.scala:373)
      at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2$(WriteToDataSourceV2Exec.scala:355)
      at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.writeWithV2(WriteToDataSourceV2Exec.scala:318)
      at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doExecute(WriteToDataSourceV2Exec.scala:325)
      at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:189)
      at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:227)
      at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
      at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:224)
      at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:185)
      at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:329)
      at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:378)
      at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3416)
      at org.apache.spark.sql.Dataset.$anonfun$collect$1(Dataset.scala:2746)
      at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3406)
      at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
      at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
      at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
      at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3402)
      at org.apache.spark.sql.Dataset.collect(Dataset.scala:2746)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$16(MicroBatchExecution.scala:557)
      at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
      at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
      at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$15(MicroBatchExecution.scala:552)
      at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:328)
      at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:326)
      at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:552)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:213)
      at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
      at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:328)
      at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:326)
      at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:181)
      at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
      at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:175)
      at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:332)
      at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
      Cause: org.scalatest.exceptions.TestFailedDueToTimeoutException: The code passed to eventually never returned normally. Attempted 309 times over 1.0029671913333333 minutes. Last failure message: assertion failed: /brokers/topics/topic-35 still exists.
      at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:432)
      at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
      at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
      at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
      at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308)
      at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307)
      at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
      at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletionWithRetries(KafkaTestUtils.scala:554)
      at org.apache.spark.sql.kafka010.KafkaTestUtils.deleteTopic(KafkaTestUtils.scala:342)
      at org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$anon$1.open(KafkaMicroBatchSourceSuite.scala:585)
      at org.apache.spark.sql.execution.streaming.sources.ForeachDataWriter.<init>(ForeachWriterTable.scala:136)
      at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:115)
      at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:107)
      at org.apache.spark.sql.execution.streaming.sources.MicroBatchWriterFactory.createWriter(MicroBatchWrite.scala:48)
      at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2Exec.scala:424)
      at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.$anonfun$writeWithV2$2(WriteToDataSourceV2Exec.scala:376)
      at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
      at org.apache.spark.scheduler.Task.run(Task.scala:127)
      at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:455)
      at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
      at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:458)
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
      at java.lang.Thread.run(Thread.java:748)
      Cause: java.lang.AssertionError: assertion failed: /brokers/topics/topic-35 still exists
      at scala.Predef$.assert(Predef.scala:223)
      at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletion(KafkaTestUtils.scala:526)
      at org.apache.spark.sql.kafka010.KafkaTestUtils.$anonfun$verifyTopicDeletionWithRetries$1(KafkaTestUtils.scala:556)
      at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
      at org.scalatest.concurrent.Eventually.makeAValiantAttempt$1(Eventually.scala:395)
      at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:409)
      at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
      at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
      at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
      at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308)
      at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307)
      at org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
      at org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletionWithRetries(KafkaTestUtils.scala:554)
      at org.apache.spark.sql.kafka010.KafkaTestUtils.deleteTopic(KafkaTestUtils.scala:342)
      at org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$anon$1.open(KafkaMicroBatchSourceSuite.scala:585)
      at org.apache.spark.sql.execution.streaming.sources.ForeachDataWriter.<init>(ForeachWriterTable.scala:136)
      at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:115)
      at org.apache.spark.sql.execution.streaming.sources.ForeachWriterFactory.createWriter(ForeachWriterTable.scala:107)
      at org.apache.spark.sql.execution.streaming.sources.MicroBatchWriterFactory.createWriter(MicroBatchWrite.scala:48)
      at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2Exec.scala:424)
      at org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.$anonfun$writeWithV2$2(WriteToDataSourceV2Exec.scala:376)
      at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
      at org.apache.spark.scheduler.Task.run(Task.scala:127)
      at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:455)
      at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1377)
      at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:458)
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
      at java.lang.Thread.run(Thread.java:748)
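
Reading the cause chain bottom-up: the stream's ForeachWriter.open() (KafkaMicroBatchSourceSuite.scala:585) calls KafkaTestUtils.deleteTopic, which waits for the topic's ZooKeeper node (/brokers/topics/topic-35) to disappear. That wait is a ScalaTest eventually loop; here it retried 309 times over roughly one minute, the node never went away, and the final AssertionError propagated up as a task failure, aborting the V2 write job and terminating the streaming query.

A minimal sketch of that verification pattern, assuming a kafka.zk.KafkaZkClient and a one-minute patience window (the real KafkaTestUtils helpers differ in detail):

    import kafka.zk.KafkaZkClient
    import org.scalatest.concurrent.Eventually._
    import org.scalatest.time.{Seconds, Span}

    def verifyTopicDeletionWithRetries(zkClient: KafkaZkClient, topic: String): Unit = {
      // eventually() re-runs the block until it passes or the timeout
      // expires, then rethrows the last failure -- which is exactly the
      // "assertion failed: /brokers/topics/topic-35 still exists" above.
      eventually(timeout(Span(60, Seconds))) {
        assert(!zkClient.topicExists(topic), s"/brokers/topics/$topic still exists")
      }
    }
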