package org.apache.spark.sql.parquet

+import java.io.File
+
import org.scalatest.{BeforeAndAfterAll, FunSuiteLike}

import org.apache.avro.{SchemaBuilder, Schema}
@@ -31,18 +33,13 @@ import parquet.hadoop.util.ContextUtil
import parquet.schema.MessageTypeParser

import org.apache.spark.sql._
-import org.apache.spark.sql.catalyst.util.getTempFilePath
-import org.apache.spark.sql.test.TestSQLContext
-import org.apache.spark.sql.TestData
-import org.apache.spark.sql.SchemaRDD
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.types.IntegerType
-import org.apache.spark.util.Utils
+import org.apache.spark.sql.test.TestSQLContext
+import org.apache.spark.sql.TestData
import org.apache.spark.sql.SchemaRDD
import org.apache.spark.sql.catalyst.util.getTempFilePath
import org.apache.spark.sql.catalyst.expressions.Row
-import org.apache.spark.sql.catalyst.types.{StringType, IntegerType, DataType}
-import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.util.Utils

// Implicits
@@ -557,6 +554,7 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll
    // has no effect in this test case
    implicit def anyToRow(value: Any): Row = value.asInstanceOf[Row]
    val tmpdir = Utils.createTempDir()
+    Utils.deleteRecursively(tmpdir)
    val result = TestSQLContext
      .parquetFile(ParquetTestData.testNestedDir1.toString)
      .toSchemaRDD
@@ -581,6 +579,7 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll
      .parquetFile(ParquetTestData.testNestedDir4.toString)
      .toSchemaRDD
    val tmpdir = Utils.createTempDir()
+    Utils.deleteRecursively(tmpdir)
    data.saveAsParquetFile(tmpdir.toString)
    TestSQLContext
      .parquetFile(tmpdir.toString)
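The two hunks above add `Utils.deleteRecursively(tmpdir)` right after `Utils.createTempDir()`. The apparent intent is a common temp-path idiom: `createTempDir()` yields a unique path but leaves the directory on disk, while the Parquet writer presumably expects to create its output directory itself, so the directory is deleted first and only the unused path is kept. A minimal, self-contained sketch of that idiom using only the JDK follows; the object and method names here are hypothetical, not part of the patch.

// Sketch: obtain a unique path that does not yet exist, assuming the
// eventual writer will create the directory itself.
import java.io.File
import java.nio.file.Files

object FreshPathSketch {
  // Create a unique temp directory, then delete it so only the unused path remains.
  def freshPath(prefix: String): File = {
    val dir = Files.createTempDirectory(prefix).toFile
    dir.delete()
    dir
  }

  def main(args: Array[String]): Unit = {
    val out = freshPath("parquet-test")
    println(s"output path: ${out.getAbsolutePath}, exists = ${out.exists()}")
  }
}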
@@ -744,18 +743,4 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll
    assert(mapResult2(0) === 1.1f)
    assert(mapResult2(2) === 1.3f)
  }
-
-  /**
-   * Creates an empty SchemaRDD backed by a ParquetRelation.
-   *
-   * TODO: since this is so experimental it is better to have it here and not
-   * in SQLContext. Also note that when creating new AttributeReferences
-   * one needs to take care not to create duplicate Attribute ID's.
-   */
-  private def createParquetFile(path: String, schema: (Tuple2[String, DataType])*): SchemaRDD = {
-    val attributes = schema.map(t => new AttributeReference(t._1, t._2)())
-    new SchemaRDD(
-      TestSQLContext,
-      ParquetRelation.createEmpty(path, attributes, sparkContext.hadoopConfiguration))
-  }
}