
Commit ccb327a

NirmalReddy authored and aarondav committed
Optimized imports
Optimized imports and arranged according to scala style guide @ https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide#SparkCodeStyleGuide-Imports

Author: NirmalReddy <nirmal.reddy@imaginea.com>
Author: NirmalReddy <nirmal_reddy2000@yahoo.com>

Closes apache#613 from NirmalReddy/opt-imports and squashes the following commits:

578b4f5 [NirmalReddy] imported java.lang.Double as JDouble
a2cbcc5 [NirmalReddy] addressed the comments
776d664 [NirmalReddy] Optimized imports in core
1 parent f74ae0e commit ccb327a
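
For context, the style guide linked above groups imports roughly as java/javax first, then scala, then third-party libraries, then org.apache.spark, with a blank line between groups and alphabetical order within each group; the diffs below follow that pattern. A minimal illustrative sketch of the ordering (the package name and the particular imports here are examples, not taken from this commit):

// Illustration only: import grouping per the linked style guide.
package org.apache.spark.example

import java.io.File
import java.util.concurrent.TimeUnit

import scala.collection.mutable.{ArrayBuffer, HashMap}

import com.google.common.io.Files
import io.netty.bootstrap.Bootstrap

import org.apache.spark.rdd.RDD
import org.apache.spark.util.Utils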

246 files changed, +446 -552 lines changed


core/src/main/java/org/apache/spark/network/netty/FileClient.java

Lines changed: 2 additions & 2 deletions
@@ -17,6 +17,8 @@
 
 package org.apache.spark.network.netty;
 
+import java.util.concurrent.TimeUnit;
+
 import io.netty.bootstrap.Bootstrap;
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelOption;
@@ -27,8 +29,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.concurrent.TimeUnit;
-
 class FileClient {
 
   private static final Logger LOG = LoggerFactory.getLogger(FileClient.class.getName());

core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java

Lines changed: 2 additions & 2 deletions
@@ -23,11 +23,11 @@
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.SimpleChannelInboundHandler;
 import io.netty.channel.DefaultFileRegion;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.spark.storage.BlockId;
 import org.apache.spark.storage.FileSegment;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 class FileServerHandler extends SimpleChannelInboundHandler<String> {

core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala

Lines changed: 2 additions & 1 deletion
@@ -17,7 +17,8 @@
 
 package org.apache.hadoop.mapreduce
 
-import java.lang.{Integer => JInteger, Boolean => JBoolean}
+import java.lang.{Boolean => JBoolean, Integer => JInteger}
+
 import org.apache.hadoop.conf.Configuration
 
 private[apache]
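
The java.lang aliases kept in the file above, and the JDouble alias mentioned in the squashed commits, exist because Scala's own Boolean and Double shadow the boxed java.lang types (and Int is the idiomatic integer type), so the Java classes are conventionally referenced under renamed imports such as JBoolean, JInteger and JDouble. A minimal sketch of the idea; the object and method below are made up for illustration and are not part of this commit:

import java.lang.{Double => JDouble}

// Illustration only: a Java-friendly helper must use the boxed java.lang.Double,
// since a plain `Double` in Scala refers to scala.Double.
object BoxedDoubles {
  def toJavaList(xs: Seq[Double]): java.util.List[JDouble] = {
    val list = new java.util.ArrayList[JDouble](xs.size)
    xs.foreach(x => list.add(JDouble.valueOf(x)))  // explicit boxing via the alias
    list
  }

  def main(args: Array[String]): Unit =
    println(toJavaList(Seq(1.0, 2.5)))  // prints [1.0, 2.5]
}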

core/src/main/scala/org/apache/spark/Accumulators.scala

Lines changed: 2 additions & 1 deletion
@@ -19,8 +19,9 @@ package org.apache.spark
 
 import java.io.{ObjectInputStream, Serializable}
 
-import scala.collection.mutable.Map
 import scala.collection.generic.Growable
+import scala.collection.mutable.Map
+
 import org.apache.spark.serializer.JavaSerializer
 
 /**

core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala

Lines changed: 1 addition & 2 deletions
@@ -20,12 +20,11 @@ package org.apache.spark
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 
-import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
+import org.apache.spark.executor.ShuffleReadMetrics
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.storage.{BlockId, BlockManagerId, ShuffleBlockId}
 import org.apache.spark.util.CompletionIterator
 
-
 private[spark] class BlockStoreShuffleFetcher extends ShuffleFetcher with Logging {
 
   override def fetch[T](

core/src/main/scala/org/apache/spark/CacheManager.scala

Lines changed: 2 additions & 2 deletions
@@ -18,9 +18,9 @@
 package org.apache.spark
 
 import scala.collection.mutable.{ArrayBuffer, HashSet}
-import org.apache.spark.storage.{BlockId, BlockManager, StorageLevel, RDDBlockId}
-import org.apache.spark.rdd.RDD
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.{BlockManager, RDDBlockId, StorageLevel}
 
 /** Spark class responsible for passing RDDs split contents to the BlockManager and making
   sure a node doesn't load two copies of an RDD at once.

core/src/main/scala/org/apache/spark/FutureAction.scala

Lines changed: 1 addition & 3 deletions
@@ -21,10 +21,8 @@ import scala.concurrent._
 import scala.concurrent.duration.Duration
 import scala.util.Try
 
-import org.apache.spark.scheduler.{JobSucceeded, JobWaiter}
-import org.apache.spark.scheduler.JobFailed
 import org.apache.spark.rdd.RDD
-
+import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter}
 
 /**
  * A future for the result of an action to support cancellation. This is an extension of the

core/src/main/scala/org/apache/spark/HttpFileServer.scala

Lines changed: 3 additions & 1 deletion
@@ -17,8 +17,10 @@
 
 package org.apache.spark
 
-import java.io.{File}
+import java.io.File
+
 import com.google.common.io.Files
+
 import org.apache.spark.util.Utils
 
 private[spark] class HttpFileServer extends Logging {

core/src/main/scala/org/apache/spark/HttpServer.scala

Lines changed: 1 addition & 1 deletion
@@ -18,14 +18,14 @@
 package org.apache.spark
 
 import java.io.File
-import java.net.InetAddress
 
 import org.eclipse.jetty.server.Server
 import org.eclipse.jetty.server.bio.SocketConnector
 import org.eclipse.jetty.server.handler.DefaultHandler
 import org.eclipse.jetty.server.handler.HandlerList
 import org.eclipse.jetty.server.handler.ResourceHandler
 import org.eclipse.jetty.util.thread.QueuedThreadPool
+
 import org.apache.spark.util.Utils
 
 /**

core/src/main/scala/org/apache/spark/MapOutputTracker.scala

Lines changed: 0 additions & 1 deletion
@@ -22,7 +22,6 @@ import java.util.zip.{GZIPInputStream, GZIPOutputStream}
 
 import scala.collection.mutable.HashSet
 import scala.concurrent.Await
-import scala.concurrent.duration._
 
 import akka.actor._
 import akka.pattern.ask
