Timed out waiting for stream: The code passed to failAfter did not complete within 60 seconds.
 java.lang.Thread.getStackTrace(Thread.java:1559)
 org.scalatest.concurrent.TimeLimits.failAfterImpl(TimeLimits.scala:234)
 org.scalatest.concurrent.TimeLimits.failAfterImpl$(TimeLimits.scala:233)
 org.apache.spark.ml.recommendation.ALSSuite.failAfterImpl(ALSSuite.scala:49)
 org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:230)
 org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:229)
 org.apache.spark.ml.recommendation.ALSSuite.failAfter(ALSSuite.scala:49)
 org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7(StreamTest.scala:463)
 org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7$adapted(StreamTest.scala:462)
 scala.collection.mutable.HashMap.$anonfun$foreach$1(HashMap.scala:149)

 Caused by: null
 java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2151)
 org.apache.spark.sql.execution.streaming.StreamExecution.processAllAvailable(StreamExecution.scala:508)
 org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$8(StreamTest.scala:467)
 scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
 org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
 org.scalatest.concurrent.TimeLimits.failAfterImpl(TimeLimits.scala:239)
 org.scalatest.concurrent.TimeLimits.failAfterImpl$(TimeLimits.scala:233)
 org.apache.spark.ml.recommendation.ALSSuite.failAfterImpl(ALSSuite.scala:49)
 org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:230)
 org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:229)
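
The "Caused by: null" frames show where the test thread is parked: StreamExecution.processAllAvailable blocks on a condition variable until the stream reports that all posted data has been processed, and failAfter's 60-second timeout interrupts that wait (the InterruptedException carries no message, hence the null). A minimal Scala sketch of that await pattern, using hypothetical names (AwaitAllAvailable, markCaughtUp) rather than Spark's actual implementation:

    import java.util.concurrent.locks.ReentrantLock

    // Sketch only: a caller blocks on a Condition until the stream signals it
    // has caught up, which matches the AbstractQueuedSynchronizer.await frame
    // in the trace above.
    class AwaitAllAvailable {
      private val lock = new ReentrantLock()
      private val caughtUp = lock.newCondition()
      private var pending = true

      // Called by the test thread; parks until markCaughtUp() fires or the
      // thread is interrupted (failAfter's timeout interrupt surfaces as an
      // InterruptedException with a null message, as seen above).
      def processAllAvailable(): Unit = {
        lock.lock()
        try { while (pending) caughtUp.await() } finally lock.unlock()
      }

      // Called by the stream thread once it has drained all available input.
      def markCaughtUp(): Unit = {
        lock.lock()
        try { pending = false; caughtUp.signalAll() } finally lock.unlock()
      }
    }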


== Progress ==
 AddData to MemoryStream[user#41099,item#41100,rating#41101]: Rating(-1158177819,186873629,0.024627717),Rating(919154311,811833187,-0.32206595),Rating(1835442223,186873629,-0.10032342),Rating(1835442223,811833187,0.27883497)
=> CheckAnswerByFunc

== Stream ==
Output Mode: Append
Stream state: {MemoryStream[user#41099,item#41100,rating#41101]: 0}
Thread state: alive
Thread stack trace: java.lang.Thread.sleep(Native Method)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:230)
org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2914/1586715257.apply$mcZ$sp(Unknown Source)
org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56)
org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:175)
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:332)
org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
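
The stream thread itself is alive but idle: the dump above has it in Thread.sleep inside ProcessingTimeExecutor's trigger loop, waiting between micro-batches rather than deadlocked. A sketch of that loop's shape (illustrative only, not the actual TriggerExecutor source; ProcessingTimeLoop, intervalMs, and runOneBatch are assumed names):

    // Between batches the stream thread ends up in Thread.sleep, which is
    // exactly the top frame of the thread dump above.
    class ProcessingTimeLoop(intervalMs: Long) {
      def execute(runOneBatch: () => Boolean): Unit = {
        var continue = true
        while (continue) {
          val start = System.currentTimeMillis()
          continue = runOneBatch()                  // run one micro-batch
          val elapsed = System.currentTimeMillis() - start
          if (continue && elapsed < intervalMs) {
            Thread.sleep(intervalMs - elapsed)      // idle until the next trigger
          }
        }
      }
    }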


== Sink ==
0: [0.024627717,0.023472412] [-0.10032342,-0.09968895] [-0.32206595,-0.3241125] [0.27883497,0.28169233]


== Plan ==
== Parsed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@412ff256
+- Project [rating#41104, prediction#41161]
   +- Project [user#41102, item#41103, rating#41104, UDF(features#41067, features#41079) AS prediction#41161]
      +- Join LeftOuter, (UDF(item#41103) = id#41078)
         :- Join LeftOuter, (UDF(user#41102) = id#41066)
         :  :- Project [user#41109 AS user#41102, item#41110 AS item#41103, rating#41111 AS rating#41104]
         :  :  +- Project [user#41099 AS user#41109, item#41100 AS item#41110, rating#41101 AS rating#41111]
         :  :     +- StreamingDataSourceV2Relation [user#41099, item#41100, rating#41101], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@9bde026, MemoryStream[user#41099,item#41100,rating#41101], -1, 0
         :  +- Project [_1#41061 AS id#41066, _2#41062 AS features#41067]
         :     +- SerializeFromObject [knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1 AS _1#41061, staticinvoke(class org.apache.spark.sql.catalyst.expressions.UnsafeArrayData, ArrayType(FloatType,false), fromPrimitiveArray, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#41062]
         :        +- ExternalRDD [obj#41060]
         +- Project [_1#41073 AS id#41078, _2#41074 AS features#41079]
            +- SerializeFromObject [knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1 AS _1#41073, staticinvoke(class org.apache.spark.sql.catalyst.expressions.UnsafeArrayData, ArrayType(FloatType,false), fromPrimitiveArray, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#41074]
               +- ExternalRDD [obj#41072]

== Analyzed Logical Plan ==

WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@412ff256
+- Project [rating#41104, prediction#41161]
   +- Project [user#41102, item#41103, rating#41104, UDF(features#41067, features#41079) AS prediction#41161]
      +- Join LeftOuter, (UDF(item#41103) = id#41078)
         :- Join LeftOuter, (UDF(user#41102) = id#41066)
         :  :- Project [user#41109 AS user#41102, item#41110 AS item#41103, rating#41111 AS rating#41104]
         :  :  +- Project [user#41099 AS user#41109, item#41100 AS item#41110, rating#41101 AS rating#41111]
         :  :     +- StreamingDataSourceV2Relation [user#41099, item#41100, rating#41101], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@9bde026, MemoryStream[user#41099,item#41100,rating#41101], -1, 0
         :  +- Project [_1#41061 AS id#41066, _2#41062 AS features#41067]
         :     +- SerializeFromObject [knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1 AS _1#41061, staticinvoke(class org.apache.spark.sql.catalyst.expressions.UnsafeArrayData, ArrayType(FloatType,false), fromPrimitiveArray, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#41062]
         :        +- ExternalRDD [obj#41060]
         +- Project [_1#41073 AS id#41078, _2#41074 AS features#41079]
            +- SerializeFromObject [knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1 AS _1#41073, staticinvoke(class org.apache.spark.sql.catalyst.expressions.UnsafeArrayData, ArrayType(FloatType,false), fromPrimitiveArray, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#41074]
               +- ExternalRDD [obj#41072]

== Optimized Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@412ff256
+- Project [rating#41101, UDF(features#41067, features#41079) AS prediction#41161]
   +- Join LeftOuter, (UDF(item#41100) = id#41078)
      :- Project [item#41100, rating#41101, features#41067]
      :  +- Join LeftOuter, (UDF(user#41099) = id#41066)
      :     :- StreamingDataSourceV2Relation [user#41099, item#41100, rating#41101], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@9bde026, MemoryStream[user#41099,item#41100,rating#41101], -1, 0
      :     +- Project [_1#41061 AS id#41066, _2#41062 AS features#41067]
      :        +- SerializeFromObject [knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1 AS _1#41061, staticinvoke(class org.apache.spark.sql.catalyst.expressions.UnsafeArrayData, ArrayType(FloatType,false), fromPrimitiveArray, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#41062]
      :           +- ExternalRDD [obj#41060]
      +- Project [_1#41073 AS id#41078, _2#41074 AS features#41079]
         +- SerializeFromObject [knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1 AS _1#41073, staticinvoke(class org.apache.spark.sql.catalyst.expressions.UnsafeArrayData, ArrayType(FloatType,false), fromPrimitiveArray, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#41074]
            +- ExternalRDD [obj#41072]

== Physical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@412ff256
+- *(9) Project [rating#41101, UDF(features#41067, features#41079) AS prediction#41161]
   +- SortMergeJoin [UDF(item#41100)], [id#41078], LeftOuter
      :- *(6) Sort [UDF(item#41100) ASC NULLS FIRST], false, 0
      :  +- Exchange hashpartitioning(UDF(item#41100), 5)
      :     +- *(5) Project [item#41100, rating#41101, features#41067]
      :        +- SortMergeJoin [UDF(user#41099)], [id#41066], LeftOuter
      :           :- *(2) Sort [UDF(user#41099) ASC NULLS FIRST], false, 0
      :           :  +- Exchange hashpartitioning(UDF(user#41099), 5)
      :           :     +- *(1) Project [user#41099, item#41100, rating#41101]
      :           :        +- *(1) MicroBatchScan[user#41099, item#41100, rating#41101] MemoryStreamDataSource
      :           +- *(4) Sort [id#41066 ASC NULLS FIRST], false, 0
      :              +- Exchange hashpartitioning(id#41066, 5)
      :                 +- *(3) Project [_1#41061 AS id#41066, _2#41062 AS features#41067]
      :                    +- *(3) SerializeFromObject [knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1 AS _1#41061, staticinvoke(class org.apache.spark.sql.catalyst.expressions.UnsafeArrayData, ArrayType(FloatType,false), fromPrimitiveArray, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#41062]
      :                       +- Scan userFactors[obj#41060]
      +- *(8) Sort [id#41078 ASC NULLS FIRST], false, 0
         +- Exchange hashpartitioning(id#41078, 5)
            +- *(7) Project [_1#41073 AS id#41078, _2#41074 AS features#41079]
               +- *(7) SerializeFromObject [knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1 AS _1#41073, staticinvoke(class org.apache.spark.sql.catalyst.expressions.UnsafeArrayData, ArrayType(FloatType,false), fromPrimitiveArray, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false) AS _2#41074]
                  +- Scan itemFactors[obj#41072]
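
For orientation, the plans above correspond to a transform that left-joins the streaming ratings against the user and item factor tables and computes the prediction with a dot-product UDF; the UDF(...) wrapped around the join keys appears to be the model's cast/validation step. A rough Scala reconstruction under assumed names (ratings, userFactors, itemFactors, withPredictions), not the actual ALSModel.transform source:

    import org.apache.spark.sql.DataFrame
    import org.apache.spark.sql.functions.udf

    def withPredictions(ratings: DataFrame,       // streaming (user, item, rating)
                        userFactors: DataFrame,   // (id, features) per user
                        itemFactors: DataFrame    // (id, features) per item
                       ): DataFrame = {
      // Dot product of the two factor vectors; NaN when either side is
      // missing, which can happen because both joins below are LeftOuter.
      val predict = udf { (userFeatures: Seq[Float], itemFeatures: Seq[Float]) =>
        if (userFeatures != null && itemFeatures != null) {
          userFeatures.iterator.zip(itemFeatures.iterator).map { case (u, i) => u * i }.sum
        } else Float.NaN
      }
      ratings
        .join(userFactors, ratings("user") === userFactors("id"), "left")  // first LeftOuter join in the plan
        .join(itemFactors, ratings("item") === itemFactors("id"), "left")  // second LeftOuter join in the plan
        .select(ratings("rating"),
          predict(userFactors("features"), itemFactors("features")).as("prediction"))
    }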


org.scalatest.exceptions.TestFailedException:
Timed out waiting for stream: The code passed to failAfter did not complete within 60 seconds.
at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:528)
at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:527)
at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
at org.scalatest.Assertions.fail(Assertions.scala:1089)
at org.scalatest.Assertions.fail$(Assertions.scala:1085)
at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:444)
at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:780)
at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:756)
at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:326)
at org.apache.spark.ml.recommendation.ALSSuite.testStream(ALSSuite.scala:49)
at org.apache.spark.ml.util.MLTest.testTransformerOnStreamData(MLTest.scala:86)
at org.apache.spark.ml.util.MLTest.testTransformerOnStreamData$(MLTest.scala:67)
at org.apache.spark.ml.recommendation.ALSSuite.testTransformerOnStreamData(ALSSuite.scala:49)
at org.apache.spark.ml.util.MLTest.testTransformerByGlobalCheckFunc(MLTest.scala:121)
at org.apache.spark.ml.util.MLTest.testTransformerByGlobalCheckFunc$(MLTest.scala:114)
at org.apache.spark.ml.recommendation.ALSSuite.testTransformerByGlobalCheckFunc(ALSSuite.scala:49)
at org.apache.spark.ml.recommendation.ALSSuite.testALS(ALSSuite.scala:413)
at org.apache.spark.ml.recommendation.ALSSuite.$anonfun$new$44(ALSSuite.scala:479)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
at org.scalatest.Transformer.apply(Transformer.scala:22)
at org.scalatest.Transformer.apply(Transformer.scala:20)
at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:149)
at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:56)
at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:56)
at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:396)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:379)
at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
at org.scalatest.Suite.run(Suite.scala:1147)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:56)
at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:30)
at org.scalatest.Suite.run(Suite.scala:1144)
at org.scalatest.Suite.run$(Suite.scala:1129)
at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:30)
at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1346)
at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1340)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1340)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:1031)
at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:1010)
at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1506)
at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
at org.scalatest.tools.Runner$.main(Runner.scala:827)
at org.scalatest.tools.Runner.main(Runner.scala)
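
For reference, the "== Progress ==" actions above come from Spark's StreamTest DSL. A minimal sketch of that pattern against a MemoryStream[Int] (illustrative suite only; the failing test reaches it indirectly through MLTest.testTransformerByGlobalCheckFunc and verifies with CheckAnswerByFunc rather than CheckAnswer):

    import org.apache.spark.sql.execution.streaming.MemoryStream
    import org.apache.spark.sql.streaming.StreamTest

    class ExampleStreamSuite extends StreamTest {
      import testImplicits._

      test("transform a memory stream") {
        val inputData = MemoryStream[Int]
        val transformed = inputData.toDS().map(_ + 1)
        testStream(transformed)(
          AddData(inputData, 1, 2, 3),   // the "AddData to MemoryStream" step above
          CheckAnswer(2, 3, 4)           // the failing run used CheckAnswerByFunc here
        )
      }
    }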