      org.scalatest.exceptions.TestFailedException: 
Timed out waiting for stream: The code passed to failAfter did not complete within 60 seconds.
java.lang.Thread.getStackTrace(Thread.java:1559)
	org.scalatest.concurrent.TimeLimits.failAfterImpl(TimeLimits.scala:234)
	org.scalatest.concurrent.TimeLimits.failAfterImpl$(TimeLimits.scala:233)
	org.apache.spark.sql.streaming.StreamingInnerJoinSuite.failAfterImpl(StreamingJoinSuite.scala:41)
	org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:230)
	org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:229)
	org.apache.spark.sql.streaming.StreamingInnerJoinSuite.failAfter(StreamingJoinSuite.scala:41)
	org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7(StreamTest.scala:463)
	org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7$adapted(StreamTest.scala:462)
	scala.collection.mutable.HashMap.$anonfun$foreach$1(HashMap.scala:149)

	Caused by: 	null
	java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2151)
		org.apache.spark.sql.execution.streaming.StreamExecution.awaitOffset(StreamExecution.scala:457)
		org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$8(StreamTest.scala:464)
		scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
		org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
		org.scalatest.concurrent.TimeLimits.failAfterImpl(TimeLimits.scala:239)
		org.scalatest.concurrent.TimeLimits.failAfterImpl$(TimeLimits.scala:233)
		org.apache.spark.sql.streaming.StreamingInnerJoinSuite.failAfterImpl(StreamingJoinSuite.scala:41)
		org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:230)
		org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:229)
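
The outer failure is ScalaTest's TimeLimits at work: StreamTest wraps each wait
in failAfter, and the nested trace shows the test thread parked in
StreamExecution.awaitOffset, waiting for the stream to catch up to the data it
just added. A minimal sketch of that failAfter pattern (hedged; the Signaler
wiring shown matches the ScalaTest 3.0.x APIs appearing in this trace):

  import org.scalatest.FunSuite
  import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
  import org.scalatest.time.SpanSugar._

  class TimeoutExampleSuite extends FunSuite with TimeLimits {
    // failAfter needs an implicit Signaler to interrupt stalled code;
    // ThreadSignaler interrupts the thread running the block.
    implicit val signaler: Signaler = ThreadSignaler

    test("work must finish within the time limit") {
      failAfter(60.seconds) {
        // If this block does not return within 60 seconds, ScalaTest throws
        // TestFailedDueToTimeoutException with the message seen above:
        // "The code passed to failAfter did not complete within 60 seconds."
        Thread.sleep(10) // stand-in for awaiting stream progress
      }
    }
  }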


== Progress ==
   AddData to MemoryStream[value#869588]: 1
=> CheckAnswer: 
   AddData to MemoryStream[value#869590]: 1,10
   CheckNewAnswer: [1,2,3]
   AddData to MemoryStream[value#869588]: 10
   CheckNewAnswer: [10,20,30]
   AddData to MemoryStream[value#869590]: 1
   CheckNewAnswer: [1,2,3]
   StopStream
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@15b55ba5,Map(),null)
   AddData to MemoryStream[value#869588]: 1
   CheckNewAnswer: [1,2,3],[1,2,3]
   StopStream
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@6b21d70d,Map(),null)
   AddData to MemoryStream[value#869588]: 100
   AddData to MemoryStream[value#869590]: 100
   CheckNewAnswer: [100,200,300]
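
The == Progress == list above is the StreamTest DSL action sequence; the =>
arrow marks the step in flight when the timeout fired, here the very first
CheckAnswer after adding 1 to the left input. A hedged reconstruction of the
scenario from this log and the plans below (input1/input2 are assumed names;
this presumes a StreamTest suite with testImplicits in scope):

  import org.apache.spark.sql.execution.streaming.MemoryStream

  val input1 = MemoryStream[Int]  // left side, value#869588 above
  val input2 = MemoryStream[Int]  // right side, value#869590 above

  // Matches the projections in the == Plan == section below:
  // left key = value, leftValue = value * 2; right key = value, rightValue = value * 3.
  val left   = input1.toDF().select($"value" as "key", ($"value" * 2) as "leftValue")
  val right  = input2.toDF().select($"value" as "key", ($"value" * 3) as "rightValue")
  val joined = left.join(right, "key")

  testStream(joined)(
    AddData(input1, 1),
    CheckAnswer(),                        // => the step that timed out
    AddData(input2, 1, 10),
    CheckNewAnswer((1, 2, 3)),            // 1 now present on both sides
    AddData(input1, 10),
    CheckNewAnswer((10, 20, 30)),
    AddData(input2, 1),
    CheckNewAnswer((1, 2, 3)),            // join state retains earlier left rows
    StopStream,
    StartStream(),
    AddData(input1, 1),
    CheckNewAnswer((1, 2, 3), (1, 2, 3)), // both buffered right-side 1s match
    StopStream,
    StartStream(),
    AddData(input1, 100),
    AddData(input2, 100),
    CheckNewAnswer((100, 200, 300))
  )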

== Stream ==
Output Mode: Append
Stream state: {}
Thread state: alive
Thread stack trace: scala.runtime.Statics.anyHash(Statics.java:122)
scala.collection.immutable.HashMap.elemHashCode(HashMap.scala:87)
scala.collection.immutable.HashMap.computeHash(HashMap.scala:96)
scala.collection.immutable.HashMap.$plus(HashMap.scala:65)
scala.collection.immutable.HashMap.$plus(HashMap.scala:39)
scala.collection.mutable.MapBuilder.$plus$eq(MapBuilder.scala:32)
scala.collection.mutable.MapBuilder.$plus$eq(MapBuilder.scala:28)
scala.collection.generic.Growable.$anonfun$$plus$plus$eq$1(Growable.scala:62)
scala.collection.generic.Growable$$Lambda$22/513169028.apply(Unknown Source)
scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)
scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)
scala.collection.mutable.MapBuilder.$plus$plus$eq(MapBuilder.scala:28)
scala.collection.generic.GenMapFactory.apply(GenMapFactory.scala:51)
scala.sys.package$.env(package.scala:64)
org.apache.spark.util.Utils$.isTesting(Utils.scala:1883)
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.assertNotAnalysisRule(AnalysisHelper.scala:134)
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.assertNotAnalysisRule$(AnalysisHelper.scala:133)
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.assertNotAnalysisRule(LogicalPlan.scala:29)
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:148)
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:287)
org.apache.spark.sql.catalyst.trees.TreeNode$$Lambda$658/1004430079.apply(Unknown Source)
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:372)
org.apache.spark.sql.catalyst.trees.TreeNode$$Lambda$5395/960858222.apply(Unknown Source)
org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:210)
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:370)
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:323)
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:287)
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:287)
org.apache.spark.sql.catalyst.trees.TreeNode$$Lambda$658/1004430079.apply(Unknown Source)
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:372)
org.apache.spark.sql.catalyst.trees.TreeNode$$Lambda$5395/960858222.apply(Unknown Source)
org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:210)
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:370)
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:323)
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:287)
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:149)
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:147)
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:271)
org.apache.spark.sql.catalyst.optimizer.ConstantFolding$.apply(expressions.scala:44)
org.apache.spark.sql.catalyst.optimizer.ConstantFolding$.apply(expressions.scala:43)
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:109)
org.apache.spark.sql.catalyst.rules.RuleExecutor$$Lambda$663/172518776.apply(Unknown Source)
scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
scala.collection.immutable.List.foldLeft(List.scala:89)
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:106)
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:98)
org.apache.spark.sql.catalyst.rules.RuleExecutor$$Lambda$662/49318987.apply(Unknown Source)
scala.collection.immutable.List.foreach(List.scala:392)
org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:98)
org.apache.spark.sql.execution.streaming.IncrementalExecution.$anonfun$optimizedPlan$1(IncrementalExecution.scala:80)
org.apache.spark.sql.execution.streaming.IncrementalExecution$$Lambda$10065/1057143263.apply(Unknown Source)
org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
org.apache.spark.sql.execution.streaming.IncrementalExecution.optimizedPlan$lzycompute(IncrementalExecution.scala:80)
org.apache.spark.sql.execution.streaming.IncrementalExecution.optimizedPlan(IncrementalExecution.scala:79)
org.apache.spark.sql.execution.QueryExecution.$anonfun$sparkPlan$1(QueryExecution.scala:82)
org.apache.spark.sql.execution.QueryExecution$$Lambda$5558/111210112.apply(Unknown Source)
org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:76)
org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:76)
org.apache.spark.sql.execution.QueryExecution.$anonfun$executedPlan$1(QueryExecution.scala:92)
org.apache.spark.sql.execution.QueryExecution$$Lambda$5557/1383367438.apply(Unknown Source)
org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:92)
org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:91)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$14(MicroBatchExecution.scala:544)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$10063/1334195633.apply(Unknown Source)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:328)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:326)
org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:534)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:213)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$9997/1873130578.apply$mcV$sp(Unknown Source)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:328)
org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:326)
org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:181)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$9993/731866529.apply$mcZ$sp(Unknown Source)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:175)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:332)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
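
Note the stream thread is alive and busy, not deadlocked: it is inside
Catalyst's ConstantFolding rule, where AnalysisHelper.assertNotAnalysisRule
calls Utils.isTesting, which calls scala.sys.env. The MapBuilder/Growable
frames at the top of the trace show sys.env rebuilding a fresh immutable Map
from the whole process environment on every call, so hitting it once per
transformDown node makes plan optimization expensive. A hedged illustration
(CachedTestingFlag is a hypothetical name, not Spark's code):

  // Per the frames above, Utils.isTesting consults the environment on each
  // call, roughly:
  //   sys.env.contains("SPARK_TESTING") || sys.props.contains("spark.testing")
  // Each sys.env call copies every environment variable into a new Map.
  object CachedTestingFlag {
    // Hypothetical cached variant: read the environment exactly once.
    private lazy val cached: Boolean =
      sys.env.contains("SPARK_TESTING") || sys.props.contains("spark.testing")
    def isTesting: Boolean = cached
  }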


== Sink ==



== Plan ==
== Parsed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@688dcb91
+- Project [key#869592, leftValue#869593, rightValue#869598]
   +- Join Inner, (key#869592 = key#869597)
      :- Project [value#869588 AS key#869592, (value#869588 * 2) AS leftValue#869593]
      :  +- StreamingDataSourceV2Relation [value#869588], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@139b6999, MemoryStream[value#869588], -1, 0
      +- Project [value#869590 AS key#869597, (value#869590 * 3) AS rightValue#869598]
         +- LocalRelation <empty>, [value#869590]

== Analyzed Logical Plan ==

WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@688dcb91
+- Project [key#869592, leftValue#869593, rightValue#869598]
   +- Join Inner, (key#869592 = key#869597)
      :- Project [value#869588 AS key#869592, (value#869588 * 2) AS leftValue#869593]
      :  +- StreamingDataSourceV2Relation [value#869588], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@139b6999, MemoryStream[value#869588], -1, 0
      +- Project [value#869590 AS key#869597, (value#869590 * 3) AS rightValue#869598]
         +- LocalRelation <empty>, [value#869590]

== Optimized Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@688dcb91
+- Project [key#869592, leftValue#869593, rightValue#869598]
   +- Join Inner, (key#869592 = key#869597)
      :- Project [value#869588 AS key#869592, (value#869588 * 2) AS leftValue#869593]
      :  +- StreamingDataSourceV2Relation [value#869588], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@139b6999, MemoryStream[value#869588], -1, 0
      +- LocalRelation <empty>, [key#869597, rightValue#869598]

== Physical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@688dcb91
+- *(2) Project [key#869592, leftValue#869593, rightValue#869598]
   +- StreamingSymmetricHashJoin [key#869592], [key#869597], Inner, condition = [ leftOnly = null, rightOnly = null, both = null, full = null ], state info [ checkpoint = file:/home/jenkins/workspace/spark-master-test-maven-hadoop-2.7-ubuntu-testing@2/sql/core/target/tmp/streaming.metadata-26c64213-887c-41b2-af2f-bf64c188c2dd/state, runId = 7c286567-8f2a-4afc-8e75-c57f4837420c, opId = 0, ver = 0, numPartitions = 5], 0, state cleanup [ left = null, right = null ]
      :- Exchange hashpartitioning(key#869592, 5), true
      :  +- *(1) Project [value#869588 AS key#869592, (value#869588 * 2) AS leftValue#869593]
      :     +- *(1) Project [value#869588]
      :        +- MicroBatchScan[value#869588] MemoryStreamDataSource
      +- Exchange hashpartitioning(key#869597, 5), true
         +- LocalTableScan <empty>, [key#869597, rightValue#869598]
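
The four sections above are the standard QueryExecution plan dump. For a batch
Dataset the same output comes from Dataset.explain(true); for a running
streaming query, StreamingQuery.explain() prints the plan of the most recent
micro-batch. A small self-contained example:

  import org.apache.spark.sql.SparkSession

  object ExplainExample {
    def main(args: Array[String]): Unit = {
      val spark = SparkSession.builder().master("local[*]").appName("explain").getOrCreate()
      import spark.implicits._

      // Same shape as the left side of the join in the plans above.
      val df = Seq(1, 10).toDF("value")
        .select($"value" as "key", ($"value" * 2) as "leftValue")

      // Prints == Parsed Logical Plan ==, == Analyzed Logical Plan ==,
      // == Optimized Logical Plan == and == Physical Plan ==.
      df.explain(true)

      spark.stop()
    }
  }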

      at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
      at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
      at org.scalatest.Assertions.fail(Assertions.scala:1089)
      at org.scalatest.Assertions.fail$(Assertions.scala:1085)
      at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
      at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:444)
      at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:780)
      at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:756)
      at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:326)
      at org.apache.spark.sql.streaming.StreamingInnerJoinSuite.testStream(StreamingJoinSuite.scala:41)
      at org.apache.spark.sql.streaming.StreamingInnerJoinSuite.$anonfun$new$3(StreamingJoinSuite.scala:78)
      at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
      at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
      at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
      at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
      at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
      at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
      at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
      at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
      at org.apache.spark.sql.streaming.StreamingInnerJoinSuite.org$scalatest$BeforeAndAfter$$super$runTest(StreamingJoinSuite.scala:41)
      at org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:203)
      at org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:192)
      at org.apache.spark.sql.streaming.StreamingInnerJoinSuite.runTest(StreamingJoinSuite.scala:41)
      at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
      at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
      at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
      at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
      at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
      at org.scalatest.Suite.run(Suite.scala:1147)
      at org.scalatest.Suite.run$(Suite.scala:1129)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
      at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
      at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
      at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
      at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
      at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
      at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
      at org.apache.spark.sql.streaming.StreamingInnerJoinSuite.org$scalatest$BeforeAndAfter$$super$run(StreamingJoinSuite.scala:41)
      at org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
      at org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
      at org.apache.spark.sql.streaming.StreamingInnerJoinSuite.run(StreamingJoinSuite.scala:41)
      at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
      at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
      at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
      at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
      at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
      at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
      at org.scalatest.Suite.run(Suite.scala:1144)
      at org.scalatest.Suite.run$(Suite.scala:1129)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
      at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
      at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
      at org.scalatest.tools.Runner$.main(Runner.scala:827)
      at org.scalatest.tools.Runner.main(Runner.scala)