2 files changed (+2 −2):
  main/scala/org/apache/spark/storage
  test/scala/org/apache/spark

@@ -977,8 +977,7 @@ private[spark] class BlockManager(
     // [SPARK-16550] Erase the typed classTag when using default serialization, since
     // NettyBlockRpcServer crashes when deserializing repl-defined classes.
     // TODO(ekl) remove this once the classloader issue on the remote end is fixed.
-    val remoteClassTag = classTag
-    if (!serializerManager.canUseKryo(classTag)) {
+    val remoteClassTag = if (!serializerManager.canUseKryo(classTag)) {
       scala.reflect.classTag[Any]
     } else {
       classTag
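
In the removed lines the if/else was a standalone statement whose value was discarded, so remoteClassTag always kept the typed classTag and the [SPARK-16550] erasure described in the comment never took effect; the patch makes the if/else the right-hand side of the val. A minimal standalone sketch of the difference, using a hypothetical canUseKryo stand-in in place of Spark's SerializerManager:

import scala.reflect.{ClassTag, classTag}

object RemoteClassTagSketch {
  // Hypothetical stand-in for SerializerManager.canUseKryo; the real check lives in Spark.
  def canUseKryo(tag: ClassTag[_]): Boolean =
    tag == classTag[Int] || tag == classTag[String]

  // Shape of the code before the patch: the if/else result is computed and discarded,
  // so the typed tag always leaks through to the remote end.
  def remoteTagBefore[T](tag: ClassTag[T]): ClassTag[_] = {
    val remoteClassTag = tag
    if (!canUseKryo(tag)) {
      classTag[Any]
    } else {
      tag
    }
    remoteClassTag
  }

  // Shape of the code after the patch: the if/else is the right-hand side of the val.
  def remoteTagAfter[T](tag: ClassTag[T]): ClassTag[_] =
    if (!canUseKryo(tag)) classTag[Any] else tag

  def main(args: Array[String]): Unit = {
    class ReplDefined // stands in for a class defined in the REPL
    val tag = classTag[ReplDefined]
    println(remoteTagBefore(tag)) // still the typed tag: the intended erasure never happened
    println(remoteTagAfter(tag))  // the Any tag, as the BlockManager comment intends
  }
}
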
@@ -151,6 +151,7 @@ class DistributedSuite extends SparkFunSuite with Matchers with LocalSparkContext
   private def testCaching(storageLevel: StorageLevel): Unit = {
     sc = new SparkContext(clusterUrl, "test")
+    sc.jobProgressListener.waitUntilExecutorsUp(2, 30000)
     val data = sc.parallelize(1 to 1000, 10)
     val cachedData = data.persist(storageLevel)
     assert(cachedData.count === 1000)
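
The added wait guards against a race in the test: the SparkContext is created against a two-executor cluster, but the job can start before both executors have registered, so cached blocks may land on fewer executors than the test expects. A rough sketch of that kind of readiness poll, written against SparkContext.getExecutorMemoryStatus (a public API that reports one entry per registered block manager, including the driver's); the helper name and polling interval are assumptions, not Spark's own implementation:

import org.apache.spark.SparkContext

object ExecutorReadiness {
  // Poll until at least numExecutors executors have registered, or the timeout expires.
  def waitForExecutors(sc: SparkContext, numExecutors: Int, timeoutMs: Long): Boolean = {
    val deadline = System.currentTimeMillis() + timeoutMs
    while (System.currentTimeMillis() < deadline) {
      // Subtract one entry for the driver's own block manager.
      if (sc.getExecutorMemoryStatus.size - 1 >= numExecutors) {
        return true
      }
      Thread.sleep(100)
    }
    false
  }
}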