@@ -28,55 +28,54 @@ import org.scalatest.{BeforeAndAfter, FunSuite}
 import org.scalatest.Matchers
 
 import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
-import org.apache.spark.util.ResetSystemProperties
 
 /**
  * Test suite for cancelling running jobs. We run the cancellation tasks for single job action
  * (e.g. count) as well as multi-job action (e.g. take). We test the local and cluster schedulers
  * in both FIFO and fair scheduling modes.
  */
 class JobCancellationSuite extends FunSuite with Matchers with BeforeAndAfter
-  with ResetSystemProperties with LocalSparkContext {
+  with LocalSparkContext {
 
   override def afterEach() {
     super.afterEach()
     resetSparkContext()
   }
 
   test("local mode, FIFO scheduler") {
-    System.setProperty("spark.scheduler.mode", "FIFO")
-    sc = new SparkContext("local[2]", "test")
+    val conf = new SparkConf().set("spark.scheduler.mode", "FIFO")
+    sc = new SparkContext("local[2]", "test", conf)
     testCount()
     testTake()
     // Make sure we can still launch tasks.
     assert(sc.parallelize(1 to 10, 2).count === 10)
   }
 
   test("local mode, fair scheduler") {
-    System.setProperty("spark.scheduler.mode", "FAIR")
+    val conf = new SparkConf().set("spark.scheduler.mode", "FAIR")
     val xmlPath = getClass.getClassLoader.getResource("fairscheduler.xml").getFile()
-    System.setProperty("spark.scheduler.allocation.file", xmlPath)
-    sc = new SparkContext("local[2]", "test")
+    conf.set("spark.scheduler.allocation.file", xmlPath)
+    sc = new SparkContext("local[2]", "test", conf)
     testCount()
     testTake()
     // Make sure we can still launch tasks.
     assert(sc.parallelize(1 to 10, 2).count === 10)
   }
 
   test("cluster mode, FIFO scheduler") {
-    System.setProperty("spark.scheduler.mode", "FIFO")
-    sc = new SparkContext("local-cluster[2,1,512]", "test")
+    val conf = new SparkConf().set("spark.scheduler.mode", "FIFO")
+    sc = new SparkContext("local-cluster[2,1,512]", "test", conf)
     testCount()
     testTake()
     // Make sure we can still launch tasks.
     assert(sc.parallelize(1 to 10, 2).count === 10)
   }
 
   test("cluster mode, fair scheduler") {
-    System.setProperty("spark.scheduler.mode", "FAIR")
+    val conf = new SparkConf().set("spark.scheduler.mode", "FAIR")
     val xmlPath = getClass.getClassLoader.getResource("fairscheduler.xml").getFile()
-    System.setProperty("spark.scheduler.allocation.file", xmlPath)
-    sc = new SparkContext("local-cluster[2,1,512]", "test")
+    conf.set("spark.scheduler.allocation.file", xmlPath)
+    sc = new SparkContext("local-cluster[2,1,512]", "test", conf)
     testCount()
     testTake()
     // Make sure we can still launch tasks.
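
For context, a minimal standalone sketch (not part of this patch) of the SparkConf-based pattern the updated tests rely on: the scheduler setting is scoped to the one SparkContext instead of mutating global JVM system properties, which is why the ResetSystemProperties mixin is no longer needed. The object name and app name below are illustrative only.

    import org.apache.spark.{SparkConf, SparkContext}

    object FairSchedulerSketch {
      def main(args: Array[String]): Unit = {
        // Same configuration key the suite sets, but carried by this conf only.
        val conf = new SparkConf()
          .setMaster("local[2]")
          .setAppName("fair-scheduler-sketch")
          .set("spark.scheduler.mode", "FAIR")

        val sc = new SparkContext(conf)
        try {
          // Trivial job to confirm the context runs with the configured scheduler.
          assert(sc.parallelize(1 to 10, 2).count() == 10)
        } finally {
          sc.stop()
        }
      }
    }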