Commit f1cc867

dongjoon-hyun authored and cloud-fan committed
[SPARK-31181][SQL][TESTS] Remove the default value assumption on CREATE TABLE test cases
### What changes were proposed in this pull request?

A few `CREATE TABLE` test cases assume a particular default value of `LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED`. This PR (SPARK-31181) makes those test cases explicit on the test-case side. The configuration change was tested via #27894 while discussing SPARK-31136; this PR contains only the test-case part of that PR.

### Why are the changes needed?

This makes our test cases more robust with respect to the default value of `LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED`. Even if we switch the conf value later, that will be a one-line change with no test case changes.

### Does this PR introduce any user-facing change?

No.

### How was this patch tested?

Pass the Jenkins with the existing tests.

Closes #27946 from dongjoon-hyun/SPARK-EXPLICIT-TEST.

Authored-by: Dongjoon Hyun <dongjoon@apache.org>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>
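For context, the flag controls what a `USING`-less `CREATE TABLE` produces. A minimal sketch of the two behaviors, assuming a local SparkSession built with Hive support (illustrative only, not part of this commit):

```scala
import org.apache.spark.sql.SparkSession

// Illustrative sketch (not part of this commit): what the legacy flag toggles.
val spark = SparkSession.builder()
  .master("local[1]")
  .enableHiveSupport()
  .getOrCreate()

// Legacy behavior: CREATE TABLE without a USING clause makes a Hive SerDe table.
spark.conf.set("spark.sql.legacy.createHiveTableByDefault.enabled", "true")
spark.sql("CREATE TABLE t_hive(a INT)")
spark.sql("DESC EXTENDED t_hive").show(truncate = false)   // Provider: hive

// New behavior: the same statement makes a native data source table.
spark.conf.set("spark.sql.legacy.createHiveTableByDefault.enabled", "false")
spark.sql("CREATE TABLE t_native(a INT)")
spark.sql("DESC EXTENDED t_native").show(truncate = false) // Provider: parquet (the default source)
```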
1 parent: ca499e9 · commit: f1cc867

9 files changed: +29 −21 lines


sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 15 additions & 12 deletions
```diff
@@ -26,6 +26,7 @@ import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition.{after, first}
 import org.apache.spark.sql.connector.expressions.{ApplyTransform, BucketTransform, DaysTransform, FieldReference, HoursTransform, IdentityTransform, LiteralValue, MonthsTransform, Transform, YearsTransform}
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructType, TimestampType}
 import org.apache.spark.unsafe.types.UTF8String
@@ -2163,18 +2164,20 @@ class DDLParserSuite extends AnalysisTest {
   }

   test("create table - without using") {
-    val sql = "CREATE TABLE 1m.2g(a INT)"
-    val expectedTableSpec = TableSpec(
-      Seq("1m", "2g"),
-      Some(new StructType().add("a", IntegerType)),
-      Seq.empty[Transform],
-      None,
-      Map.empty[String, String],
-      None,
-      Map.empty[String, String],
-      None,
-      None)
+    withSQLConf(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key -> "false") {
+      val sql = "CREATE TABLE 1m.2g(a INT)"
+      val expectedTableSpec = TableSpec(
+        Seq("1m", "2g"),
+        Some(new StructType().add("a", IntegerType)),
+        Seq.empty[Transform],
+        None,
+        Map.empty[String, String],
+        None,
+        Map.empty[String, String],
+        None,
+        None)

-    testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
+      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
+    }
   }
 }
```
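The fix pins the flag with `withSQLConf`, the helper from Spark's test utilities that sets the given entries for the duration of the block and restores the previous values afterwards, even if the body throws. A standalone sketch of that set/run/restore contract (an approximation, not Spark's actual implementation):

```scala
import scala.collection.mutable

// Stand-in for a session's SQL conf, for illustration only.
val conf = mutable.Map[String, String]()

// Approximate contract of Spark's withSQLConf test helper:
// remember old values, apply overrides, run the body, then restore.
def withSQLConf[T](pairs: (String, String)*)(body: => T): T = {
  val saved = pairs.map { case (k, _) => k -> conf.get(k) }
  pairs.foreach { case (k, v) => conf(k) = v }
  try body
  finally saved.foreach {
    case (k, Some(old)) => conf(k) = old  // restore the previous value
    case (k, None)      => conf -= k      // the key was unset before: clear it
  }
}

// Usage mirroring the test above:
withSQLConf("spark.sql.legacy.createHiveTableByDefault.enabled" -> "false") {
  assert(conf("spark.sql.legacy.createHiveTableByDefault.enabled") == "false")
}
assert(!conf.contains("spark.sql.legacy.createHiveTableByDefault.enabled"))
```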

sql/core/src/test/resources/sql-tests/inputs/describe-table-column.sql

Lines changed: 1 addition & 1 deletion
```diff
@@ -52,7 +52,7 @@ DROP TABLE desc_complex_col_table;

 --Test case insensitive

-CREATE TABLE customer(CName STRING);
+CREATE TABLE customer(CName STRING) USING PARQUET;

 INSERT INTO customer VALUES('Maria');
```

sql/core/src/test/resources/sql-tests/inputs/postgreSQL/create_view.sql

Lines changed: 1 addition & 1 deletion
```diff
@@ -41,7 +41,7 @@ DROP TABLE emp;
 -- These views are left around mainly to exercise special cases in pg_dump.

 -- [SPARK-19842] Informational Referential Integrity Constraints Support in Spark
-CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20));
+CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20)) USING PARQUET;
 --
 CREATE VIEW key_dependent_view AS
   SELECT * FROM view_base_table GROUP BY key;
```

sql/core/src/test/resources/sql-tests/results/describe-table-column.sql.out

Lines changed: 1 addition & 1 deletion
```diff
@@ -267,7 +267,7 @@ struct<>


 -- !query
-CREATE TABLE customer(CName STRING)
+CREATE TABLE customer(CName STRING) USING PARQUET
 -- !query schema
 struct<>
 -- !query output
```

sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out

Lines changed: 1 addition & 1 deletion
```diff
@@ -42,7 +42,7 @@ struct<>


 -- !query
-CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20))
+CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20)) USING PARQUET
 -- !query schema
 struct<>
 -- !query output
```

sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala

Lines changed: 2 additions & 0 deletions
```diff
@@ -257,6 +257,7 @@ class DataSourceV2SQLSuite
   }

   test("CreateTable: without USING clause") {
+    spark.conf.set(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key, "false")
     // unset this config to use the default v2 session catalog.
     spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
     val testCatalog = catalog("testcat").asTableCatalog
@@ -681,6 +682,7 @@ class DataSourceV2SQLSuite
   }

   test("CreateTableAsSelect: without USING clause") {
+    spark.conf.set(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key, "false")
     // unset this config to use the default v2 session catalog.
     spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
     val testCatalog = catalog("testcat").asTableCatalog
```
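Here the flag is set on the session directly via `spark.conf` (the `RuntimeConfig` API) rather than `withSQLConf`, matching the adjacent `spark.conf.unset` call and presumably relying on the suite's per-test session cleanup. The relevant calls, as a sketch assuming an active session `spark`:

```scala
// RuntimeConfig API used above, sketched against an active SparkSession.
spark.conf.set("spark.sql.legacy.createHiveTableByDefault.enabled", "false")
assert(spark.conf.get("spark.sql.legacy.createHiveTableByDefault.enabled") == "false")
// unset() reverts a key to its default, as the test does for the
// v2 session catalog implementation key.
spark.conf.unset("spark.sql.legacy.createHiveTableByDefault.enabled")
```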

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala

Lines changed: 2 additions & 1 deletion
```diff
@@ -40,7 +40,8 @@ import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

 class DDLParserSuite extends AnalysisTest with SharedSparkSession {
-  private lazy val parser = new SparkSqlParser(new SQLConf)
+  private lazy val parser = new SparkSqlParser(new SQLConf().copy(
+    SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED -> false))

   private def assertUnsupported(sql: String, containsThesePhrases: Seq[String] = Seq()): Unit = {
     val e = intercept[ParseException] {
```
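This suite constructs its parser once from an explicit `SQLConf`, so it pins the flag at construction time instead: `SQLConf.copy` returns a clone with the given entries overridden. A sketch of the resulting setup (the parse call is illustrative):

```scala
import org.apache.spark.sql.execution.SparkSqlParser
import org.apache.spark.sql.internal.SQLConf

// Sketch: a parser whose conf no longer depends on the suite-wide default.
val pinnedConf = new SQLConf().copy(
  SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED -> false)
val parser = new SparkSqlParser(pinnedConf)

// With the flag off, a USING-less CREATE TABLE is parsed as a native
// data source table statement rather than a Hive CREATE TABLE.
val plan = parser.parsePlan("CREATE TABLE t (a INT)")
```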

sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala

Lines changed: 3 additions & 3 deletions
```diff
@@ -846,16 +846,16 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
   test("SPARK-29174 Support LOCAL in INSERT OVERWRITE DIRECTORY to data source") {
     withTempPath { dir =>
       val path = dir.toURI.getPath
-      sql(s"""create table tab1 ( a int) location '$path'""")
+      sql(s"""create table tab1 ( a int) using parquet location '$path'""")
       sql("insert into tab1 values(1)")
       checkAnswer(sql("select * from tab1"), Seq(1).map(i => Row(i)))
-      sql("create table tab2 ( a int)")
+      sql("create table tab2 ( a int) using parquet")
       sql("insert into tab2 values(2)")
       checkAnswer(sql("select * from tab2"), Seq(2).map(i => Row(i)))
       sql(s"""insert overwrite local directory '$path' using parquet select * from tab2""")
       sql("refresh table tab1")
       checkAnswer(sql("select * from tab1"), Seq(2).map(i => Row(i)))
-   }
+    }
   }

   test("SPARK-29174 fail LOCAL in INSERT OVERWRITE DIRECT remote path") {
```

sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala

Lines changed: 3 additions & 1 deletion
```diff
@@ -1520,10 +1520,12 @@ class StatisticsSuite extends StatisticsCollectionTestBase with TestHiveSingleton
     val ext_tbl = "SPARK_30269_external"
     withTempDir { dir =>
       withTable(tbl, ext_tbl) {
-        sql(s"CREATE TABLE $tbl (key INT, value STRING, ds STRING) PARTITIONED BY (ds)")
+        sql(s"CREATE TABLE $tbl (key INT, value STRING, ds STRING)" +
+          "USING parquet PARTITIONED BY (ds)")
         sql(
           s"""
             | CREATE TABLE $ext_tbl (key INT, value STRING, ds STRING)
+            | USING PARQUET
             | PARTITIONED BY (ds)
             | LOCATION '${dir.toURI}'
           """.stripMargin)
```
