
Commit 9a66cb0 (parent: a31ccc4)

resolving merge conflicts

*fingers crossed* I admit I’m not exactly sure how this works… Let’s see if I did the right thing.

132 files changed (+4986, -263 lines)


LICENSE

Lines changed: 1 addition & 0 deletions
@@ -549,3 +549,4 @@ The following components are provided under the MIT License. See project link fo
 (MIT License) pyrolite (org.spark-project:pyrolite:2.0.1 - http://pythonhosted.org/Pyro4/)
 (MIT License) scopt (com.github.scopt:scopt_2.10:3.2.0 - https://github.com/scopt/scopt)
 (The MIT License) Mockito (org.mockito:mockito-all:1.8.5 - http://www.mockito.org)
+(MIT License) jquery (https://jquery.org/license/)

bin/run-example

Lines changed: 2 additions & 1 deletion
@@ -29,7 +29,8 @@ if [ -n "$1" ]; then
 else
   echo "Usage: ./bin/run-example <example-class> [example-args]" 1>&2
   echo "  - set MASTER=XX to use a specific master" 1>&2
-  echo "  - can use abbreviated example class name (e.g. SparkPi, mllib.LinearRegression)" 1>&2
+  echo "  - can use abbreviated example class name relative to com.apache.spark.examples" 1>&2
+  echo "     (e.g. SparkPi, mllib.LinearRegression, streaming.KinesisWordCountASL)" 1>&2
   exit 1
 fi

bin/run-example2.cmd

Lines changed: 2 additions & 1 deletion
@@ -32,7 +32,8 @@ rem Test that an argument was given
 if not "x%1"=="x" goto arg_given
   echo Usage: run-example ^<example-class^> [example-args]
   echo   - set MASTER=XX to use a specific master
-  echo   - can use abbreviated example class name (e.g. SparkPi, mllib.LinearRegression)
+  echo   - can use abbreviated example class name relative to com.apache.spark.examples
+  echo      (e.g. SparkPi, mllib.LinearRegression, streaming.KinesisWordCountASL)
 goto exit
 :arg_given

core/src/main/scala/org/apache/spark/Logging.scala

Lines changed: 7 additions & 3 deletions
@@ -39,13 +39,17 @@ trait Logging {
   // be serialized and used on another machine
   @transient private var log_ : Logger = null

+  // Method to get the logger name for this object
+  protected def logName = {
+    // Ignore trailing $'s in the class names for Scala objects
+    this.getClass.getName.stripSuffix("$")
+  }
+
   // Method to get or create the logger for this object
   protected def log: Logger = {
     if (log_ == null) {
       initializeIfNecessary()
-      var className = this.getClass.getName
-      // Ignore trailing $'s in the class names for Scala objects
-      log_ = LoggerFactory.getLogger(className.stripSuffix("$"))
+      log_ = LoggerFactory.getLogger(logName)
     }
     log_
   }
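
Pulling the name computation into a protected logName hook means a subclass can change what its logger is called without re-implementing the lazy initialization in log. A minimal sketch of an override, assuming org.apache.spark.Logging is on the classpath (the class and logger name below are hypothetical):

import org.apache.spark.Logging

// Hypothetical subclass: logs under a fixed, hand-picked name
// instead of the runtime class name.
class MyService extends Logging {
  override protected def logName = "org.example.MyService"

  def start(): Unit = logInfo("service starting")  // reported as org.example.MyService
}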

core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala

Lines changed: 7 additions & 2 deletions
@@ -71,7 +71,7 @@ private[spark] class Worker(
   // TTL for app folders/data; after TTL expires it will be cleaned up
   val APP_DATA_RETENTION_SECS = conf.getLong("spark.worker.cleanup.appDataTtl", 7 * 24 * 3600)

-
+  val testing: Boolean = sys.props.contains("spark.testing")
   val masterLock: Object = new Object()
   var master: ActorSelection = null
   var masterAddress: Address = null
@@ -82,7 +82,12 @@ private[spark] class Worker(
   @volatile var connected = false
   val workerId = generateWorkerId()
   val sparkHome =
-    new File(sys.props.get("spark.test.home").orElse(sys.env.get("SPARK_HOME")).getOrElse("."))
+    if (testing) {
+      assert(sys.props.contains("spark.test.home"), "spark.test.home is not set!")
+      new File(sys.props("spark.test.home"))
+    } else {
+      new File(sys.env.get("SPARK_HOME").getOrElse("."))
+    }
   var workDir: File = null
   val executors = new HashMap[String, ExecutorRunner]
   val finishedExecutors = new HashMap[String, ExecutorRunner]
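
The net change: when spark.testing is set, a missing spark.test.home now fails loudly instead of silently falling back to SPARK_HOME or the current directory. The same resolution order, pulled out as a standalone sketch (the object and method names are mine, not Spark's):

import java.io.File

object SparkHomeResolution {
  // Sketch of the Worker's new sparkHome resolution logic.
  def resolveSparkHome(): File = {
    if (sys.props.contains("spark.testing")) {
      // Tests must set spark.test.home explicitly; fail fast otherwise.
      assert(sys.props.contains("spark.test.home"), "spark.test.home is not set!")
      new File(sys.props("spark.test.home"))
    } else {
      // Normal path: SPARK_HOME if set, else the working directory.
      new File(sys.env.get("SPARK_HOME").getOrElse("."))
    }
  }
}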

core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala

Lines changed: 2 additions & 3 deletions
@@ -46,9 +46,8 @@ private[spark] class BlockManagerSource(val blockManager: BlockManager, sc: Spar
   metricRegistry.register(MetricRegistry.name("memory", "memUsed_MB"), new Gauge[Long] {
     override def getValue: Long = {
       val storageStatusList = blockManager.master.getStorageStatus
-      val maxMem = storageStatusList.map(_.maxMem).sum
-      val remainingMem = storageStatusList.map(_.memRemaining).sum
-      (maxMem - remainingMem) / 1024 / 1024
+      val memUsed = storageStatusList.map(_.memUsed).sum
+      memUsed / 1024 / 1024
     }
   })
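
This is a pure simplification: since StorageStatus defines memRemaining as maxMem - memUsed, the old gauge's sum(maxMem) - sum(memRemaining) telescopes to sum(memUsed), which the new code computes directly. A tiny self-contained check of that identity (the Status case class and numbers are illustrative only):

object MemUsedIdentity {
  case class Status(maxMem: Long, memUsed: Long) {
    def memRemaining: Long = maxMem - memUsed
  }

  def main(args: Array[String]): Unit = {
    val statuses = Seq(Status(1000L, 300L), Status(2000L, 500L))
    val oldGauge = statuses.map(_.maxMem).sum - statuses.map(_.memRemaining).sum
    val newGauge = statuses.map(_.memUsed).sum
    assert(oldGauge == newGauge)  // both 800
  }
}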

core/src/main/scala/org/apache/spark/storage/StorageUtils.scala

Lines changed: 4 additions & 7 deletions
@@ -172,16 +172,13 @@ class StorageStatus(val blockManagerId: BlockManagerId, val maxMem: Long) {
   def memRemaining: Long = maxMem - memUsed

   /** Return the memory used by this block manager. */
-  def memUsed: Long =
-    _nonRddStorageInfo._1 + _rddBlocks.keys.toSeq.map(memUsedByRdd).sum
+  def memUsed: Long = _nonRddStorageInfo._1 + _rddBlocks.keys.toSeq.map(memUsedByRdd).sum

   /** Return the disk space used by this block manager. */
-  def diskUsed: Long =
-    _nonRddStorageInfo._2 + _rddBlocks.keys.toSeq.map(diskUsedByRdd).sum
+  def diskUsed: Long = _nonRddStorageInfo._2 + _rddBlocks.keys.toSeq.map(diskUsedByRdd).sum

   /** Return the off-heap space used by this block manager. */
-  def offHeapUsed: Long =
-    _nonRddStorageInfo._3 + _rddBlocks.keys.toSeq.map(offHeapUsedByRdd).sum
+  def offHeapUsed: Long = _nonRddStorageInfo._3 + _rddBlocks.keys.toSeq.map(offHeapUsedByRdd).sum

   /** Return the memory used by the given RDD in this block manager in O(1) time. */
   def memUsedByRdd(rddId: Int): Long = _rddStorageInfo.get(rddId).map(_._1).getOrElse(0L)
@@ -246,7 +243,7 @@ private[spark] object StorageUtils {
     val rddId = rddInfo.id
     // Assume all blocks belonging to the same RDD have the same storage level
     val storageLevel = statuses
-      .map(_.rddStorageLevel(rddId)).flatMap(s => s).headOption.getOrElse(StorageLevel.NONE)
+      .flatMap(_.rddStorageLevel(rddId)).headOption.getOrElse(StorageLevel.NONE)
     val numCachedPartitions = statuses.map(_.numRddBlocksById(rddId)).sum
     val memSize = statuses.map(_.memUsedByRdd(rddId)).sum
     val diskSize = statuses.map(_.diskUsedByRdd(rddId)).sum
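
In the storageLevel computation, .map(_.rddStorageLevel(rddId)).flatMap(s => s) builds an intermediate Seq[Option[StorageLevel]] and then flattens it; .flatMap(_.rddStorageLevel(rddId)) collapses both steps into one pass with the same result. A small sketch of the equivalence (lookup and the data are made up):

object FlatMapEquivalence {
  // Stand-in for rddStorageLevel: an Option-returning lookup.
  def lookup(n: Int): Option[String] = if (n % 2 == 0) Some(s"level-$n") else None

  def main(args: Array[String]): Unit = {
    val xs = Seq(1, 2, 3, 4)
    val twoStep = xs.map(lookup).flatMap(s => s).headOption  // Some("level-2")
    val oneStep = xs.flatMap(lookup).headOption              // Some("level-2")
    assert(twoStep == oneStep)
  }
}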

core/src/test/scala/org/apache/spark/DriverSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ import scala.language.postfixOps
 class DriverSuite extends FunSuite with Timeouts {

   test("driver should exit after finishing") {
-    val sparkHome = sys.props("spark.test.home")
+    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
     // Regression test for SPARK-530: "Spark driver process doesn't exit after finishing"
     val masters = Table(("master"), ("local"), ("local-cluster[2,1,512]"))
     forAll(masters) { (master: String) =>
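
Because ScalaTest's fail(...) throws (and returns Nothing, which conforms to String), it can sit in the getOrElse default: a missing property now stops the test with a descriptive message instead of an opaque NoSuchElementException from sys.props(...). The same pattern is applied in SparkSubmitSuite and ExecutorRunnerTest below. A minimal sketch of the idiom (the suite and test names are made up):

import org.scalatest.FunSuite

class RequiredPropSuite extends FunSuite {
  test("reads a required system property") {
    // fail(...) throws, so it only runs when the property is absent;
    // Nothing <: String keeps getOrElse's result type as String.
    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
    assert(sparkHome.nonEmpty)
  }
}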

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -295,7 +295,7 @@ class SparkSubmitSuite extends FunSuite with Matchers {

   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
   def runSparkSubmit(args: Seq[String]): String = {
-    val sparkHome = sys.props("spark.test.home")
+    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
     Utils.executeAndGetOutput(
       Seq("./bin/spark-submit") ++ args,
       new File(sparkHome),

core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@ import org.apache.spark.SparkConf
 class ExecutorRunnerTest extends FunSuite {
   test("command includes appId") {
     def f(s:String) = new File(s)
-    val sparkHome = sys.props("spark.test.home")
+    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
     val appDesc = new ApplicationDescription("app name", Some(8), 500,
       Command("foo", Seq(), Map(), Seq(), Seq(), Seq()), "appUiUrl")
     val appId = "12345-worker321-9876"
