@@ -26,6 +26,8 @@ import org.scalatest.FunSuite
 
 import org.apache.hadoop.io.BytesWritable
 
+import org.apache.spark.util.Utils
+
 class SparkContextSuite extends FunSuite with LocalSparkContext {
 
   test("Only one SparkContext may be active at a time") {
@@ -79,7 +81,7 @@ class SparkContextSuite extends FunSuite with LocalSparkContext {
   }
 
   test("addFile works") {
-    val file = new File("somefile")
+    val file = File.createTempFile("someprefix", "somesuffix")
     val absolutePath = file.getAbsolutePath
     try {
       Files.write("somewords", file, UTF_8)
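This fix matters because `new File("somefile")` only constructs a path in the process working directory without creating anything, while `java.io.File.createTempFile` actually creates a uniquely named empty file on disk, by default under `java.io.tmpdir`. A JDK-only sketch (variable names are illustrative):

    import java.io.File

    // Actually creates an empty file such as /tmp/someprefix123456somesuffix,
    // so the subsequent Files.write call has a real, writable target.
    val file: File = File.createTempFile("someprefix", "somesuffix")

    // The three-argument overload places the file inside a given directory,
    // matching File.createTempFile("alien", "1", neptune) in the next hunk.
    val alien: File = File.createTempFile("alien", "1", file.getParentFile)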
@@ -102,60 +104,49 @@ class SparkContextSuite extends FunSuite with LocalSparkContext {
       }).count()
     } finally {
       sc.stop()
-      file.delete()
     }
   }
 
   test("addFile recursive works") {
-    val pluto = new File("pluto")
-    val neptune = new File(pluto, "neptune")
-    val saturn = new File(neptune, "saturn")
-    val alien1 = new File(neptune, "alien1")
-    val alien2 = new File(saturn, "alien2")
+    val pluto = Utils.createTempDir()
+    val neptune = Utils.createTempDir(pluto.getAbsolutePath)
+    val saturn = Utils.createTempDir(neptune.getAbsolutePath)
+    val alien1 = File.createTempFile("alien", "1", neptune)
+    val alien2 = File.createTempFile("alien", "2", saturn)
 
     try {
-      assert(neptune.mkdirs())
-      assert(saturn.mkdir())
-      assert(alien1.createNewFile())
-      assert(alien2.createNewFile())
-
       sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
       sc.addFile(neptune.getAbsolutePath, true)
       sc.parallelize(Array(1), 1).map(x => {
         val sep = File.separator
-        if (!new File(SparkFiles.get("neptune" + sep + "alien1")).exists()) {
+        if (!new File(SparkFiles.get(neptune.getName + sep + alien1.getName)).exists()) {
           throw new SparkException("can't access file under root added directory")
         }
-        if (!new File(SparkFiles.get("neptune" + sep + "saturn" + sep + "alien2")).exists()) {
+        if (!new File(SparkFiles.get(neptune.getName + sep + saturn.getName + sep + alien2.getName))
+          .exists()) {
           throw new SparkException("can't access file in nested directory")
         }
-        if (new File(SparkFiles.get("pluto" + sep + "neptune" + sep + "alien1")).exists()) {
+        if (new File(SparkFiles.get(pluto.getName + sep + neptune.getName + sep + alien1.getName))
+          .exists()) {
           throw new SparkException("file exists that shouldn't")
         }
         x
       }).count()
     } finally {
       sc.stop()
-      alien2.delete()
-      saturn.delete()
-      alien1.delete()
-      neptune.delete()
-      pluto.delete()
     }
   }
 
   test("addFile recursive can't add directories by default") {
-    val dir = new File("dir")
+    val dir = Utils.createTempDir()
 
     try {
      sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
-      sc.addFile(dir.getAbsolutePath)
-      assert(false, "should have thrown exception")
-    } catch {
-      case _: SparkException =>
+      intercept[SparkException] {
+        sc.addFile(dir.getAbsolutePath)
+      }
     } finally {
       sc.stop()
-      dir.delete()
     }
   }
 }
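The last hunk also replaces the hand-rolled try/catch with `assert(false, ...)` by ScalaTest's `intercept`, which fails the test unless the block throws the stated exception type, and returns the caught exception for further checks. A self-contained sketch of the idiom (the `IllegalArgumentException` example is illustrative, not from the diff):

    import org.scalatest.Assertions._

    // intercept fails the enclosing test if the body does NOT throw the
    // expected exception type; otherwise it returns the caught exception.
    val e = intercept[IllegalArgumentException] {
      require(false, "boom")
    }
    assert(e.getMessage.contains("boom"))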