diff --git a/core/src/main/scala/org/apache/spark/deploy/client/Client.scala b/core/src/main/scala/org/apache/spark/deploy/client/Client.scala
index f60e56d959634..d0d65ca72ad7c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/Client.scala
@@ -104,8 +104,10 @@ private[spark] class Client(
       activeMasterUrl = url
       master = context.actorSelection(Master.toAkkaUrl(activeMasterUrl))
       masterAddress = activeMasterUrl match {
-        case Master.sparkUrlRegex(host, port) => Address("akka.tcp", Master.systemName, host, port.toInt)
-        case x => throw new SparkException("Invalid spark URL:"+x)
+        case Master.sparkUrlRegex(host, port) =>
+          Address("akka.tcp", Master.systemName, host, port.toInt)
+        case x =>
+          throw new SparkException("Invalid spark URL: " + x)
       }
     }
 
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 808b54c0af782..87531b6719e1b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -138,8 +138,10 @@ private[spark] class Worker(
     activeMasterWebUiUrl = uiUrl
     master = context.actorSelection(Master.toAkkaUrl(activeMasterUrl))
     masterAddress = activeMasterUrl match {
-      case Master.sparkUrlRegex(_host, _port) => Address("akka.tcp", Master.systemName, _host, _port.toInt)
-      case x => throw new SparkException("Invalid spark URL:"+x)
+      case Master.sparkUrlRegex(_host, _port) =>
+        Address("akka.tcp", Master.systemName, _host, _port.toInt)
+      case x =>
+        throw new SparkException("Invalid spark URL: " + x)
     }
     connected = true
   }
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
index df33f6bfb0bb1..b4451fc7b8e56 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
@@ -44,7 +44,7 @@ abstract class BlockObjectWriter(val blockId: BlockId) {
    * Flush the partial writes and commit them as a single atomic block. Return the
    * number of bytes written for this commit.
    */
-  def commit(): LongSpark
+  def commit(): Long
 
   /**
    * Reverts writes that haven't been flushed yet. Callers should invoke this function
diff --git a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
index 9f3f1632776b7..74133cef6c132 100644
--- a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
@@ -17,11 +17,8 @@
 
 package org.apache.spark.util
 
-import akka.actor.{IndestructibleActorSystem, ActorSystem, ExtendedActorSystem}
+import akka.actor.{ActorSystem, ExtendedActorSystem, IndestructibleActorSystem}
 import com.typesafe.config.ConfigFactory
-import scala.concurrent.duration._
-import scala.concurrent.Await
-import akka.remote.RemoteActorRefProvider
 
 /**
  * Various utility classes for working with Akka.
@@ -47,7 +44,8 @@ private[spark] object AkkaUtils {
 
     val akkaTimeout = System.getProperty("spark.akka.timeout", "100").toInt
     val akkaFrameSize = System.getProperty("spark.akka.frameSize", "10").toInt
-    val lifecycleEvents = if (System.getProperty("spark.akka.logLifecycleEvents", "false").toBoolean) "on" else "off"
+    val lifecycleEvents =
+      if (System.getProperty("spark.akka.logLifecycleEvents", "false").toBoolean) "on" else "off"
 
     val akkaHeartBeatPauses = System.getProperty("spark.akka.heartbeat.pauses", "600").toInt
     val akkaFailureDetector =