
Commit 3ddc8bb (1 parent: 84f7ca1)

Deprecated *With functions in Scala and added a few missing Java APIs

File tree

4 files changed (+30, -5 lines)

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 2 additions & 1 deletion
@@ -240,6 +240,7 @@ class SparkContext(
     localProperties.set(props)
   }
 
+  @deprecated("Properties no longer need to be explicitly initialized.", "1.0.0")
   def initLocalProperties() {
     localProperties.set(new Properties())
   }
@@ -308,7 +309,7 @@ class SparkContext(
   private val dagSchedulerSource = new DAGSchedulerSource(this.dagScheduler, this)
   private val blockManagerSource = new BlockManagerSource(SparkEnv.get.blockManager, this)
 
-  def initDriverMetrics() {
+  private def initDriverMetrics() {
     SparkEnv.get.metricsSystem.registerSource(dagSchedulerSource)
     SparkEnv.get.metricsSystem.registerSource(blockManagerSource)
   }
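
Of these two hunks, only the first is user-facing: initLocalProperties is deprecated, presumably because setLocalProperty now handles initialization itself, while initDriverMetrics merely becomes private. A minimal caller-side sketch, assuming a local master (the app name and pool property below are illustrative, not from this commit):

import org.apache.spark.SparkContext

val sc = new SparkContext("local[2]", "local-properties-demo")

// Before this change callers were expected to call sc.initLocalProperties()
// first; that call is now deprecated and can simply be dropped.
sc.setLocalProperty("spark.scheduler.pool", "reportsPool")
println(sc.getLocalProperty("spark.scheduler.pool"))  // "reportsPool"
sc.stop()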

core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala

Lines changed: 2 additions & 0 deletions
@@ -126,6 +126,8 @@ class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])
   def subtract(other: JavaRDD[T], p: Partitioner): JavaRDD[T] =
     wrapRDD(rdd.subtract(other, p))
 
+  def generator = rdd.generator
+
   override def toString = rdd.toString
 
   /** Assign a name to this RDD */
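
The new generator accessor is a plain pass-through to the wrapped Scala RDD, following the same delegation pattern as toString directly below it. A hedged usage sketch (the context setup here is illustrative):

import java.util.Arrays
import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}

val jsc = new JavaSparkContext("local[2]", "generator-demo")
val javaRdd: JavaRDD[Integer] = jsc.parallelize(Arrays.asList(1, 2, 3))

// Same value as the underlying RDD reports, since the method just delegates.
println(javaRdd.generator == javaRdd.rdd.generator)  // true
jsc.stop()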

core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala

Lines changed: 18 additions & 0 deletions
@@ -92,6 +92,24 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWorkaround
 
   private[spark] val env = sc.env
 
+  def isLocal = sc.isLocal
+
+  def sparkUser = sc.sparkUser
+
+  def master = sc.master
+
+  def appName = sc.appName
+
+  def jars = JavaConversions.seqAsJavaList(sc.jars)
+
+  def startTime = sc.startTime
+
+  /** Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD). */
+  def defaultParallelism = sc.defaultParallelism
+
+  /** Default min number of partitions for Hadoop RDDs when not given by user */
+  def defaultMinSplits = sc.defaultMinSplits
+
   /** Distribute a local Scala collection to form an RDD. */
   def parallelize[T](list: java.util.List[T], numSlices: Int): JavaRDD[T] = {
     implicit val ctag: ClassTag[T] = fakeClassTag
core/src/main/scala/org/apache/spark/rdd/RDD.scala

Lines changed: 8 additions & 4 deletions
@@ -543,7 +543,8 @@ abstract class RDD[T: ClassTag](
    * additional parameter is produced by constructA, which is called in each
    * partition with the index of that partition.
    */
-  def mapWith[A: ClassTag, U: ClassTag]
+  @deprecated("use mapPartitionsWithIndex", "1.0.0")
+  def mapWith[A, U: ClassTag]
       (constructA: Int => A, preservesPartitioning: Boolean = false)
       (f: (T, A) => U): RDD[U] = {
     mapPartitionsWithIndex((index, iter) => {
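
As the deprecation message suggests, the replacement builds the per-partition value inside mapPartitionsWithIndex, which is exactly what mapWith did internally. A hedged migration sketch (sc and the seeded-Random use case are illustrative):

import scala.util.Random

val rdd = sc.parallelize(1 to 100, 4)

// Before (now deprecated):
//   rdd.mapWith(index => new Random(index))((x, rng) => x + rng.nextInt(10))
// After: run the old constructA logic at the top of the partition closure.
val jittered = rdd.mapPartitionsWithIndex { (index, iter) =>
  val rng = new Random(index)
  iter.map(x => x + rng.nextInt(10))
}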
@@ -557,7 +558,8 @@ abstract class RDD[T: ClassTag](
    * additional parameter is produced by constructA, which is called in each
    * partition with the index of that partition.
    */
-  def flatMapWith[A: ClassTag, U: ClassTag]
+  @deprecated("use mapPartitionsWithIndex and flatMap", "1.0.0")
+  def flatMapWith[A, U: ClassTag]
       (constructA: Int => A, preservesPartitioning: Boolean = false)
       (f: (T, A) => Seq[U]): RDD[U] = {
     mapPartitionsWithIndex((index, iter) => {
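
flatMapWith follows the same pattern, combining mapPartitionsWithIndex with a flatMap over the partition iterator; a minimal illustrative sketch (rdd as above):

// Before (now deprecated):
//   rdd.flatMapWith(index => index)((x, i) => Seq(x, x * i))
// After:
val expanded = rdd.mapPartitionsWithIndex { (index, iter) =>
  iter.flatMap(x => Seq(x, x * index))
}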
@@ -571,7 +573,8 @@ abstract class RDD[T: ClassTag](
    * This additional parameter is produced by constructA, which is called in each
    * partition with the index of that partition.
    */
-  def foreachWith[A: ClassTag](constructA: Int => A)(f: (T, A) => Unit) {
+  @deprecated("use mapPartitionsWithIndex and foreach", "1.0.0")
+  def foreachWith[A](constructA: Int => A)(f: (T, A) => Unit) {
     mapPartitionsWithIndex { (index, iter) =>
       val a = constructA(index)
       iter.map(t => {f(t, a); t})
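
For foreachWith the message points to mapPartitionsWithIndex followed by foreach; a hedged sketch of the same side-effecting pattern (rdd as above):

// Before (now deprecated):
//   rdd.foreachWith(index => s"partition-$index")((x, tag) => println(s"$tag: $x"))
// After: thread the per-partition value through, then force it with foreach.
rdd.mapPartitionsWithIndex { (index, iter) =>
  val tag = s"partition-$index"
  iter.map { x => println(s"$tag: $x"); x }
}.foreach(_ => ())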
@@ -583,7 +586,8 @@ abstract class RDD[T: ClassTag](
    * additional parameter is produced by constructA, which is called in each
    * partition with the index of that partition.
    */
-  def filterWith[A: ClassTag](constructA: Int => A)(p: (T, A) => Boolean): RDD[T] = {
+  @deprecated("use mapPartitionsWithIndex and filter", "1.0.0")
+  def filterWith[A](constructA: Int => A)(p: (T, A) => Boolean): RDD[T] = {
     mapPartitionsWithIndex((index, iter) => {
       val a = constructA(index)
       iter.filter(t => p(t, a))
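
Finally, filterWith maps onto mapPartitionsWithIndex plus a plain filter; an illustrative sketch using partition parity as the constructed value (rdd as above):

// Before (now deprecated):
//   rdd.filterWith(index => index % 2)((x, parity) => x % 2 == parity)
// After:
val kept = rdd.mapPartitionsWithIndex { (index, iter) =>
  val parity = index % 2
  iter.filter(x => x % 2 == parity)
}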
