Skip to content

Commit 60b8e43

Browse files
committed
Create FileBasedDataSourceSuite.
1 parent f9a35f1 commit 60b8e43

File tree

3 files changed

+67
-42
lines changed

3 files changed

+67
-42
lines changed
Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark
19+
20+
import org.apache.spark.sql.QueryTest
21+
import org.apache.spark.sql.test.SharedSQLContext
22+
23+
/**
 * Cross-format tests for the built-in file-based data sources
 * (orc, parquet, csv, json, text). Each `Seq(...).foreach` below
 * registers one test per format so every source is exercised the same way.
 */
class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext {
  import testImplicits._

  // Writing a zero-row dataset must still produce a valid (empty) output
  // directory for every file-based source, including the text source.
  Seq("orc", "parquet", "csv", "json", "text").foreach { format =>
    test(s"Writing empty datasets should not fail - $format") {
      withTempDir { dir =>
        Seq("str").toDS.limit(0).write.format(format).save(dir.getCanonicalPath + "/tmp")
      }
    }
  }

  // `text` is excluded here: it has a fixed single `value` column and cannot
  // round-trip an arbitrary (unicode) column name.
  Seq("orc", "parquet", "csv", "json").foreach { format =>
    test(s"Write and read back unicode schema - $format") {
      withTempPath { path =>
        val dir = path.getCanonicalPath

        // scalastyle:off nonascii
        val df = Seq("a").toDF("한글")
        // scalastyle:on nonascii

        // `header` is required so the CSV source preserves the column name;
        // the other sources ignore the option.
        df.write.format(format).option("header", "true").save(dir)
        val answerDf = spark.read.format(format).option("header", "true").load(dir)

        assert(df.schema === answerDf.schema)
        checkAnswer(df, answerDf)
      }
    }
  }

  // Only the new OrcFileFormat supports this (hence the fully-qualified class
  // name rather than the short "orc" alias, which may resolve to the Hive one).
  Seq(classOf[org.apache.spark.sql.execution.datasources.orc.OrcFileFormat].getCanonicalName,
    "parquet").foreach { format =>
    // NOTE: fixed typo in the test title ("non-emtpy" -> "non-empty").
    test(s"SPARK-15474 Write and read back non-empty schema with empty dataframe - $format") {
      withTempPath { file =>
        val path = file.getCanonicalPath
        // A dataframe with a real three-column schema but zero rows.
        val emptyDf = Seq((true, 1, "str")).toDF.limit(0)
        emptyDf.write.format(format).save(path)

        // Reading back must recover the full schema even though no rows exist.
        val df = spark.read.format(format).load(path)
        assert(df.schema.sameType(emptyDf.schema))
        checkAnswer(df, emptyDf)
      }
    }
  }
}

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 0 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -2757,38 +2757,4 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
27572757
}
27582758
}
27592759
}
2760-
2761-
// Only New OrcFileFormat supports this
2762-
Seq(classOf[org.apache.spark.sql.execution.datasources.orc.OrcFileFormat].getCanonicalName,
2763-
"parquet").foreach { format =>
2764-
test(s"SPARK-15474 Write and read back non-emtpy schema with empty dataframe - $format") {
2765-
withTempPath { file =>
2766-
val path = file.getCanonicalPath
2767-
val emptyDf = Seq((true, 1, "str")).toDF.limit(0)
2768-
emptyDf.write.format(format).save(path)
2769-
2770-
val df = spark.read.format(format).load(path)
2771-
assert(df.schema.sameType(emptyDf.schema))
2772-
checkAnswer(df, emptyDf)
2773-
}
2774-
}
2775-
}
2776-
2777-
Seq("orc", "parquet", "csv", "json").foreach { format =>
2778-
test(s"Write and read back unicode schema - $format") {
2779-
withTempPath { path =>
2780-
val dir = path.getCanonicalPath
2781-
2782-
// scalastyle:off nonascii
2783-
val df = Seq("a").toDF("한글")
2784-
// scalastyle:on nonascii
2785-
2786-
df.write.format(format).option("header", "true").save(dir)
2787-
val answerDf = spark.read.format(format).option("header", "true").load(dir)
2788-
2789-
assert(df.schema === answerDf.schema)
2790-
checkAnswer(df, answerDf)
2791-
}
2792-
}
2793-
}
27942760
}

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2159,12 +2159,4 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
21592159
}
21602160
}
21612161
}
2162-
2163-
Seq("orc", "parquet", "csv", "json", "text").foreach { format =>
2164-
test(s"Writing empty datasets should not fail - $format") {
2165-
withTempDir { dir =>
2166-
Seq("str").toDS.limit(0).write.format(format).save(dir.getCanonicalPath + "/tmp")
2167-
}
2168-
}
2169-
}
21702162
}

0 commit comments

Comments (0)