      org.scalatest.exceptions.TestFailedException: isContain was true Interpreter output contained 'AssertionError':

scala> import org.apache.spark.rdd.RDD

scala> 
scala> lines: org.apache.spark.rdd.RDD[String] = pom.xml MapPartitionsRDD[46] at textFile at <console>:41

scala> defined class Data

scala> dataRDD: org.apache.spark.rdd.RDD[Data] = MapPartitionsRDD[47] at map at <console>:45

scala> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 24.0 failed 4 times, most recent failure: Lost task 0.3 in stage 24.0 (TID 172, 192.168.10.26, executor 1): java.lang.ClassNotFoundException: Data
	at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
	at java.lang.Class.forName0(Native Method)
	at java.lang.Class.forName(Class.java:348)
	at org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:67)
	at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1613)
	at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1518)
	at java.io.ObjectInputStream.readClass(ObjectInputStream.java:1484)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1334)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:371)
	at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
	at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:80)
	at org.apache.spark.scheduler.Task.run(Task.scala:108)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:335)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)

Driver stacktrace:
  at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1499)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1487)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1486)
  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1486)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
  at scala.Option.foreach(Option.scala:257)
  at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1714)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1669)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1658)
  at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:630)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2022)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2043)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2062)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2087)
  at org.apache.spark.rdd.RDD.count(RDD.scala:1158)
  ... 45 elided
Caused by: java.lang.ClassNotFoundException: Data
  at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  at java.lang.Class.forName0(Native Method)
  at java.lang.Class.forName(Class.java:348)
  at org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:67)
  at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1613)
  at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1518)
  at java.io.ObjectInputStream.readClass(ObjectInputStream.java:1484)
  at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1334)
  at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
  at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
  at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
  at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
  at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
  at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
  at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
  at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
  at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
  at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
  at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
  at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
  at java.io.ObjectInputStream.readObject(ObjectInputStream.java:371)
  at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
  at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
  at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:80)
  at org.apache.spark.scheduler.Task.run(Task.scala:108)
  at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:335)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
  ... 1 more

scala> repartitioned: org.apache.spark.rdd.RDD[Data] = MapPartitionsRDD[51] at repartition at <console>:47

scala> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 25.0 failed 4 times, most recent failure: Lost task 0.3 in stage 25.0 (TID 180, 192.168.10.26, executor 0): java.lang.ClassCastException: cannot assign instance of scala.collection.immutable.List$SerializationProxy to field org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$dependencies_ of type scala.collection.Seq in instance of org.apache.spark.rdd.MapPartitionsRDD
	at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2089)
	at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1261)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2006)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:371)
	at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
	at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:85)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
	at org.apache.spark.scheduler.Task.run(Task.scala:108)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:335)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)

Driver stacktrace:
  at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1499)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1487)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1486)
  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1486)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
  at scala.Option.foreach(Option.scala:257)
  at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1714)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1669)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1658)
  at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:630)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2022)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2043)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2062)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2087)
  at org.apache.spark.rdd.RDD.count(RDD.scala:1158)
  ... 45 elided
Caused by: java.lang.ClassCastException: cannot assign instance of scala.collection.immutable.List$SerializationProxy to field org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$dependencies_ of type scala.collection.Seq in instance of org.apache.spark.rdd.MapPartitionsRDD
  at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2089)
  at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1261)
  at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2006)
  at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
  at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
  at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
  at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
  at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
  at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
  at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
  at java.io.ObjectInputStream.readObject(ObjectInputStream.java:371)
  at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
  at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:85)
  at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
  at org.apache.spark.scheduler.Task.run(Task.scala:108)
  at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:335)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
  ... 1 more

scala> 
scala>      |      | getCacheSize: (rdd: org.apache.spark.rdd.RDD[_])Long

scala> cacheSize1: Long = 0

scala> cacheSize2: Long = 0

scala> 
scala> 
scala> deviation: Double = NaN

scala>      | java.lang.AssertionError: assertion failed: deviation too large: NaN, first size: 0, second size: 0
  at scala.Predef$.assert(Predef.scala:170)
  ... 46 elided

scala>      | _result_1496448063782: Int = 1

scala> 
      at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:500)
      at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
      at org.scalatest.Assertions$AssertionsHelper.macroAssert(Assertions.scala:466)
      at org.apache.spark.repl.SingletonReplSuite.assertDoesNotContain(SingletonReplSuite.scala:121)
      at org.apache.spark.repl.SingletonReplSuite$$anonfun$17.apply$mcV$sp(SingletonReplSuite.scala:394)
      at org.apache.spark.repl.SingletonReplSuite$$anonfun$17.apply(SingletonReplSuite.scala:371)
      at org.apache.spark.repl.SingletonReplSuite$$anonfun$17.apply(SingletonReplSuite.scala:371)
      at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
      at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
      at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
      at org.scalatest.Transformer.apply(Transformer.scala:22)
      at org.scalatest.Transformer.apply(Transformer.scala:20)
      at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
      at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
      at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
      at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
      at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
      at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
      at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
      at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
      at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
      at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
      at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
      at org.scalatest.Suite$class.run(Suite.scala:1424)
      at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
      at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
      at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
      at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
      at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
      at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
      at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
      at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1492)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1528)
      at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1526)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1526)
      at org.scalatest.tools.DiscoverySuite.runNestedSuites(DiscoverySuite.scala:29)
      at org.scalatest.Suite$class.run(Suite.scala:1421)
      at org.scalatest.tools.DiscoverySuite.run(DiscoverySuite.scala:29)
      at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
      at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
      at scala.collection.immutable.List.foreach(List.scala:381)
      at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
      at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
      at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
      at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
      at org.scalatest.tools.Runner$.main(Runner.scala:860)
      at org.scalatest.tools.Runner.main(Runner.scala)
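
For reference: the transcript above is consistent with a REPL session of roughly the following shape. This is a hypothetical reconstruction from the names visible in the output (lines, Data, dataRDD, repartitioned, getCacheSize, cacheSize1, cacheSize2, deviation), not the verbatim test body from SingletonReplSuite, which may differ. It shows why the final assertion reports NaN: both count() calls fail with the serialization errors above, so nothing is ever cached, both measured cache sizes are 0, and dividing 0.0 by 0 yields NaN.

// Hypothetical sketch of the failing REPL session; assumes a spark-shell
// context where `sc` is the SparkContext. Names are taken from the transcript.
import org.apache.spark.rdd.RDD

val lines = sc.textFile("pom.xml")                      // RDD[String], as in the transcript
case class Data(s: String)                              // class defined inside the REPL
val dataRDD = lines.map(line => Data(line.take(3)))     // RDD[Data]
dataRDD.cache().count()                                 // fails: executors cannot load Data

val repartitioned = dataRDD.repartition(dataRDD.partitions.length)
repartitioned.cache().count()                           // fails: ClassCastException on deserialize

// Sum the in-memory size of a given RDD's cached blocks.
def getCacheSize(rdd: RDD[_]): Long =
  sc.getRDDStorageInfo.filter(_.id == rdd.id).map(_.memSize).sum

val cacheSize1 = getCacheSize(dataRDD)       // 0 in the failing run: nothing was cached
val cacheSize2 = getCacheSize(repartitioned) // 0 as well

// With both sizes 0, 0.0 / 0 evaluates to Double.NaN, so the comparison below
// is false and the assertion fails with "deviation too large: NaN", exactly
// as shown in the transcript.
val deviation = math.abs(cacheSize2 - cacheSize1).toDouble / cacheSize1
assert(deviation < 0.2,
  s"deviation too large: $deviation, first size: $cacheSize1, second size: $cacheSize2")

The assertion failure is therefore a downstream symptom. The root failures are the executor-side ClassNotFoundException for the REPL-defined class Data and the ClassCastException (List$SerializationProxy assigned to RDD.dependencies_) when deserializing the repartitioned RDD; both are commonly symptoms of the executor not resolving classes through the REPL class loader.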