
Commit 0af3faf

add JavaAPI test

1 parent 01745ee

3 files changed: 31 additions, 6 deletions


core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 1 deletion

@@ -385,7 +385,7 @@ class SparkContext(
    * hdfs://a-hdfs-path/part-nnnnn
    * }}}
    *
-   * Do `val rdd = mlContext.wholeTextFile("hdfs://a-hdfs-path")`,
+   * Do `val rdd = sparkContext.wholeTextFile("hdfs://a-hdfs-path")`,
    *
    * <p> then `rdd` contains
    * {{{

core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala

Lines changed: 3 additions & 2 deletions

@@ -167,7 +167,7 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
    * hdfs://a-hdfs-path/part-nnnnn
    * }}}
    *
-   * Do `val rdd = mlContext.wholeTextFile("hdfs://a-hdfs-path")`,
+   * Do `JavaPairRDD<String, String> rdd = context.wholeTextFiles("hdfs://a-hdfs-path")`,
    *
    * <p> then `rdd` contains
    * {{{
@@ -177,7 +177,8 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
    * (a-hdfs-path/part-nnnnn, its content)
    * }}}
    */
-  def wholeTextFiles(path: String): JavaRDD[(String, String)] = sc.wholeTextFiles(path)
+  def wholeTextFiles(path: String): JavaPairRDD[String, String] =
+    new JavaPairRDD(sc.wholeTextFiles(path))

   /** Get an RDD for a Hadoop SequenceFile with given key and value types.
    *

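As context for the signature change above, here is a minimal sketch of how the new Java-side method might be called. The local master, application name, and class name are illustrative assumptions, not part of this commit; the input path is the one used in the doc comment.

import java.util.List;

import scala.Tuple2;

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class WholeTextFilesExample {
  public static void main(String[] args) {
    // Illustrative local context; master and app name are placeholders.
    JavaSparkContext context = new JavaSparkContext("local", "wholeTextFilesExample");

    // One record per file in the directory: (file path, entire file content).
    JavaPairRDD<String, String> rdd = context.wholeTextFiles("hdfs://a-hdfs-path");

    List<Tuple2<String, String>> pairs = rdd.collect();
    for (Tuple2<String, String> pair : pairs) {
      System.out.println(pair._1() + " -> " + pair._2().length() + " chars");
    }

    context.stop();
  }
}

Because each record pairs a file path with its content, returning JavaPairRDD[String, String] instead of JavaRDD[(String, String)] lets Java callers use the pair-specific API directly, which is what the new test below relies on.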
core/src/test/java/org/apache/spark/JavaAPISuite.java

Lines changed: 27 additions & 3 deletions

@@ -17,9 +17,7 @@
 
 package org.apache.spark;
 
-import java.io.File;
-import java.io.IOException;
-import java.io.Serializable;
+import java.io.*;
 import java.util.*;
 
 import scala.Tuple2;
@@ -599,6 +597,32 @@ public void textFiles() throws IOException {
     Assert.assertEquals(expected, readRDD.collect());
   }
 
+  @Test
+  public void wholeTextFiles() throws IOException {
+    byte[] content1 = "spark is easy to use.\n".getBytes();
+    byte[] content2 = "spark is also easy to use.\n".getBytes();
+
+    File tempDir = Files.createTempDir();
+    String tempDirName = tempDir.getAbsolutePath();
+    DataOutputStream ds = new DataOutputStream(new FileOutputStream(tempDirName + "/part-00000"));
+    ds.write(content1);
+    ds.close();
+    ds = new DataOutputStream(new FileOutputStream(tempDirName + "/part-00001"));
+    ds.write(content2);
+    ds.close();
+
+    HashMap<String, String> container = new HashMap<String, String>();
+    container.put(tempDirName+"/part-00000", new Text(content1).toString());
+    container.put(tempDirName+"/part-00001", new Text(content2).toString());
+
+    JavaPairRDD<String, String> readRDD = sc.wholeTextFiles(tempDirName);
+    List<Tuple2<String, String>> result = readRDD.collect();
+
+    for (Tuple2<String, String> res : result) {
+      Assert.assertEquals(res._2(), container.get(res._1()));
+    }
+  }
+
   @Test
   public void textFilesCompressed() throws IOException {
     File tempDir = Files.createTempDir();
