
Commit b2c4736

Add failing tests for SPARK-2974 and SPARK-2975.
1 parent: 007298b

2 files changed (+62 / -1 lines)


core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Create a SparkConf that loads defaults from system properties and the classpath */
   def this() = this(true)

-  private val settings = new HashMap[String, String]()
+  private[spark] val settings = new HashMap[String, String]()

   if (loadDefaults) {
     // Load any spark.* system properties
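The change above widens the visibility of settings from private to private[spark], so code anywhere under the org.apache.spark package tree can read the underlying map. The new test suite below relies on this: its SparkConf subclass copies settings when it clones itself. A minimal sketch of that pattern, with a hypothetical package and class name:

package org.apache.spark.example  // hypothetical package under org.apache.spark

import org.apache.spark.SparkConf

// Because settings is now private[spark], this subclass can read it and
// reproduce its own configuration when the conf is cloned.
class CopyingConf extends SparkConf(false) {
  override def clone: SparkConf = new CopyingConf().setAll(settings)
}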
Lines changed: 61 additions & 0 deletions (new file)
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.storage
+
+import java.io.File
+
+import org.apache.spark.util.Utils
+import org.scalatest.FunSuite
+
+import org.apache.spark.SparkConf
+
+
+/**
+ * Tests for the spark.local.dirs and SPARK_LOCAL_DIRS configuration options.
+ */
+class LocalDirsSuite extends FunSuite {
+
+  test("Utils.getLocalDir() returns a valid directory, even if some local dirs are missing") {
+    // Regression test for SPARK-2974
+    assert(!new File("/NONEXISTENT_DIR").exists())
+    val conf = new SparkConf(false)
+      .set("spark.local.dir", s"/NONEXISTENT_PATH,${System.getProperty("java.io.tmpdir")}")
+    assert(new File(Utils.getLocalDir(conf)).exists())
+  }
+
+  test("SPARK_LOCAL_DIRS override also affects driver") {
+    // Regression test for SPARK-2975
+    assert(!new File("/NONEXISTENT_DIR").exists())
+    // SPARK_LOCAL_DIRS is a valid directory:
+    class MySparkConf extends SparkConf(false) {
+      override def getenv(name: String) = {
+        if (name == "SPARK_LOCAL_DIRS") System.getProperty("java.io.tmpdir")
+        else super.getenv(name)
+      }
+
+      override def clone: SparkConf = {
+        new MySparkConf().setAll(settings)
+      }
+    }
+    // spark.local.dir only contains invalid directories, but that's not a problem since
+    // SPARK_LOCAL_DIRS will override it on both the driver and workers:
+    val conf = new MySparkConf().set("spark.local.dir", "/NONEXISTENT_PATH")
+    assert(new File(Utils.getLocalDir(conf)).exists())
+  }
+}
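Both tests hinge on how Spark resolves its local scratch directories. A minimal usage sketch of the two configuration paths the suite exercises, with hypothetical directory paths:

import org.apache.spark.SparkConf

// Path 1: spark.local.dir set in the conf as a comma-separated list of directories.
// SPARK-2974 expects Utils.getLocalDir(conf) to return a usable directory even if
// some of the listed entries do not exist.
val conf = new SparkConf(false)
  .set("spark.local.dir", "/mnt/disk1,/mnt/disk2")  // hypothetical paths

// Path 2: the SPARK_LOCAL_DIRS environment variable, e.g.
//   export SPARK_LOCAL_DIRS=/mnt/fast-disk
// SPARK-2975 expects this override to take effect on the driver as well as the workers.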
