Query memory [id = ec88627a-ab61-4d4c-9f45-34b8a1c74fe6, runId = eff5e928-3c87-4a39-a1f8-2783797b7b17] terminated with exception: null
      org.apache.spark.sql.streaming.StreamingQueryException: null
=== Streaming Query ===
Identifier: memory [id = ec88627a-ab61-4d4c-9f45-34b8a1c74fe6, runId = eff5e928-3c87-4a39-a1f8-2783797b7b17]
Current Committed Offsets: {}
Current Available Offsets: {}

Current State: ACTIVE
Thread State: RUNNABLE

Logical Plan:
SerializeFromObject [input[0, int, false] AS value#2783]
+- MapElements org.apache.spark.sql.kafka010.KafkaSourceStressForDontFailOnDataLossSuite$$Lambda$5101/0x00000008018c4840@2f00c332, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#2782: int
   +- DeserializeToObject newInstance(class scala.Tuple2), obj#2781: scala.Tuple2
      +- Project [cast(key#2757 as string) AS key#2771, cast(value#2758 as string) AS value#2772]
         +- StreamingDataSourceV2Relation [key#2757, value#2758, topic#2759, partition#2760, offset#2761L, timestamp#2762, timestampType#2763], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@198774ea, KafkaSource[SubscribePattern[failOnDataLoss.*]]

      at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:351)
      at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:242)
      Cause: java.lang.InterruptedException
      at org.apache.spark.sql.kafka010.KafkaOffsetReader.withRetriesWithoutInterrupt(KafkaOffsetReader.scala:376)
      at org.apache.spark.sql.kafka010.KafkaOffsetReader.$anonfun$fetchEarliestOffsets$1(KafkaOffsetReader.scala:187)
      at org.apache.spark.sql.kafka010.KafkaOffsetReader.runUninterruptibly(KafkaOffsetReader.scala:327)
      at org.apache.spark.sql.kafka010.KafkaOffsetReader.fetchEarliestOffsets(KafkaOffsetReader.scala:187)
      at org.apache.spark.sql.kafka010.KafkaContinuousStream.initialOffset(KafkaContinuousStream.scala:67)
      at org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution$$anonfun$3.$anonfun$applyOrElse$4(ContinuousExecution.scala:171)
      at scala.Option.getOrElse(Option.scala:138)
      at org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution$$anonfun$3.applyOrElse(ContinuousExecution.scala:171)
      at org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution$$anonfun$3.applyOrElse(ContinuousExecution.scala:167)
      at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$1(TreeNode.scala:258)
      at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:72)
      at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:258)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:263)
      at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:328)
      at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:189)
      at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326)
      at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:263)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:263)
      at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:328)
      at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:189)
      at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326)
      at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:263)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:263)
      at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:328)
      at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:189)
      at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326)
      at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:263)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:263)
      at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:328)
      at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:189)
      at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:326)
      at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:263)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
      at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
      at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:247)
      at org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution.runContinuous(ContinuousExecution.scala:167)
      at org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution.runActivatedStream(ContinuousExecution.scala:109)
      at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:330)
      at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:242)
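
For reference, the logical plan above corresponds roughly to a query of the following shape. This is a minimal sketch reconstructed from the plan and from the continuous-mode frames in the trace, not the suite's actual code; the broker address, the trigger interval, and the body of the map step (the plan only shows a Tuple2-to-int MapElements) are assumptions.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.streaming.Trigger

val spark = SparkSession.builder().appName("repro-sketch").getOrCreate()
import spark.implicits._

// KafkaSource[SubscribePattern[failOnDataLoss.*]] with failOnDataLoss disabled,
// matching the source relation at the bottom of the plan
val kafka = spark.readStream
  .format("kafka")
  .option("kafka.bootstrap.servers", "localhost:9092") // placeholder; the suite uses an embedded broker
  .option("subscribePattern", "failOnDataLoss.*")
  .option("failOnDataLoss", "false")
  .load()

// Project [cast(key as string), cast(value as string)], followed by the
// DeserializeToObject / MapElements (Tuple2 => int) steps in the plan
val values = kafka
  .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
  .as[(String, String)]
  .map { case (_, v) => v.toInt } // assumed body; only the Tuple2 => int shape is visible

// Memory sink named "memory" (the query identifier in the header), run with a
// continuous trigger, which is what puts ContinuousExecution on the stack
val query = values.writeStream
  .format("memory")
  .queryName("memory")
  .trigger(Trigger.Continuous("1 second")) // assumed interval
  .start()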
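
Note that the cause chain is an interrupt, not a data-loss failure: the stream thread is interrupted (typically because the query is being stopped) while ContinuousExecution is resolving the initial offset through KafkaOffsetReader.fetchEarliestOffsets, and withRetriesWithoutInterrupt surfaces the pending interrupt as the InterruptedException that StreamingQueryException then wraps with a null message. Below is a hypothetical sketch of that "retry, but surface interrupts" contract, purely illustrative and not Spark's actual implementation:

// Retry `body` up to `maxAttempts` times, but if the calling thread has been
// interrupted (i.e. the query is stopping), stop retrying and propagate an
// InterruptedException, which is the behavior visible in the trace above.
def fetchWithRetries[T](maxAttempts: Int)(body: => T): T = {
  var attempt = 0
  var result: Option[T] = None
  while (result.isEmpty) {
    attempt += 1
    try {
      result = Some(body)
    } catch {
      case e: Throwable =>
        // Thread.interrupted() also clears the flag; an interrupt here means
        // "stop the query", so we give up rather than retry.
        if (Thread.interrupted()) {
          throw new InterruptedException("stream thread interrupted during offset fetch")
        }
        if (attempt >= maxAttempts) throw e
    }
  }
  result.get
}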