Commit c42f9f6

[SPARK-31181][SQL][TESTS] Remove the default value assumption on CREATE TABLE test cases
A few `CREATE TABLE` test cases make assumptions about the default value of `LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED`. This PR (SPARK-31181) makes those test cases explicit about the value they expect, on the test-case side. The configuration change itself was tested via #27894 while discussing SPARK-31136; this PR carries only the test-case part of that PR.

This makes our test cases more robust with respect to the default value of `LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED`: even if we later switch the conf value, the switch will be a one-liner with no test-case changes.

This PR introduces no user-facing change. It passes Jenkins with the existing tests.

Closes #27946 from dongjoon-hyun/SPARK-EXPLICIT-TEST.

Authored-by: Dongjoon Hyun <dongjoon@apache.org>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>
(cherry picked from commit f1cc867)
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
1 parent 7d4c1b8 commit c42f9f6
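
For context, a minimal sketch of the behavior this flag selects — the assumption the tests can no longer make implicitly. The conf key string and table names below are illustrative assumptions, not taken from this commit:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().enableHiveSupport().getOrCreate()

// Assumed key behind SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.
val key = "spark.sql.legacy.createHiveTableByDefault.enabled"

spark.conf.set(key, "true")
spark.sql("CREATE TABLE t1 (a INT)")   // no USING clause -> Hive serde table

spark.conf.set(key, "false")
spark.sql("CREATE TABLE t2 (a INT)")   // no USING clause -> native data source table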

File tree

8 files changed: +26 -18 lines changed

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 15 additions & 12 deletions

@@ -26,6 +26,7 @@ import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition.{after, first}
 import org.apache.spark.sql.connector.expressions.{ApplyTransform, BucketTransform, DaysTransform, FieldReference, HoursTransform, IdentityTransform, LiteralValue, MonthsTransform, Transform, YearsTransform}
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructType, TimestampType}
 import org.apache.spark.unsafe.types.UTF8String
@@ -2117,18 +2118,20 @@ class DDLParserSuite extends AnalysisTest {
   }

   test("create table - without using") {
-    val sql = "CREATE TABLE 1m.2g(a INT)"
-    val expectedTableSpec = TableSpec(
-      Seq("1m", "2g"),
-      Some(new StructType().add("a", IntegerType)),
-      Seq.empty[Transform],
-      None,
-      Map.empty[String, String],
-      None,
-      Map.empty[String, String],
-      None,
-      None)
+    withSQLConf(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key -> "false") {
+      val sql = "CREATE TABLE 1m.2g(a INT)"
+      val expectedTableSpec = TableSpec(
+        Seq("1m", "2g"),
+        Some(new StructType().add("a", IntegerType)),
+        Seq.empty[Transform],
+        None,
+        Map.empty[String, String],
+        None,
+        Map.empty[String, String],
+        None,
+        None)

-    testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
+      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
+    }
   }
 }
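
The `withSQLConf` wrapper added above scopes the override to the block. A minimal sketch of the pattern, assuming it is modeled on Spark's test-side `SQLHelper.withSQLConf` (simplified; the real helper also rejects overrides of static confs):

import org.apache.spark.sql.internal.SQLConf

def withSQLConf[T](pairs: (String, String)*)(body: => T): T = {
  val conf = SQLConf.get
  // Remember each key's previous value (None when the key was unset).
  val previous = pairs.map { case (k, _) =>
    k -> (if (conf.contains(k)) Some(conf.getConfString(k)) else None)
  }
  pairs.foreach { case (k, v) => conf.setConfString(k, v) }
  try body finally {
    // Restore the old values even when the body throws, so one test
    // cannot leak its conf into the next.
    previous.foreach {
      case (k, Some(v)) => conf.setConfString(k, v)
      case (k, None)    => conf.unsetConf(k)
    }
  }
}

With the value pinned to "false" inside the block, the `TableSpec` assertion holds no matter which default the suite runs under.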

sql/core/src/test/resources/sql-tests/inputs/describe-table-column.sql

Lines changed: 1 addition & 1 deletion

@@ -52,7 +52,7 @@ DROP TABLE desc_complex_col_table;

 --Test case insensitive

-CREATE TABLE customer(CName STRING);
+CREATE TABLE customer(CName STRING) USING PARQUET;

 INSERT INTO customer VALUES('Maria');

sql/core/src/test/resources/sql-tests/inputs/postgreSQL/create_view.sql

Lines changed: 1 addition & 1 deletion

@@ -41,7 +41,7 @@ DROP TABLE emp;
 -- These views are left around mainly to exercise special cases in pg_dump.

 -- [SPARK-19842] Informational Referential Integrity Constraints Support in Spark
-CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20));
+CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20)) USING PARQUET;
 --
 CREATE VIEW key_dependent_view AS
 SELECT * FROM view_base_table GROUP BY key;

sql/core/src/test/resources/sql-tests/results/describe-table-column.sql.out

Lines changed: 1 addition & 1 deletion

@@ -267,7 +267,7 @@ struct<>


 -- !query
-CREATE TABLE customer(CName STRING)
+CREATE TABLE customer(CName STRING) USING PARQUET
 -- !query schema
 struct<>
 -- !query output

sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out

Lines changed: 1 addition & 1 deletion

@@ -42,7 +42,7 @@ struct<>


 -- !query
-CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20))
+CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20)) USING PARQUET
 -- !query schema
 struct<>
 -- !query output

sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala

Lines changed: 2 additions & 0 deletions

@@ -257,6 +257,7 @@ class DataSourceV2SQLSuite
   }

   test("CreateTable: without USING clause") {
+    spark.conf.set(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key, "false")
     // unset this config to use the default v2 session catalog.
     spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
     val testCatalog = catalog("testcat").asTableCatalog
@@ -681,6 +682,7 @@ class DataSourceV2SQLSuite
   }

   test("CreateTableAsSelect: without USING clause") {
+    spark.conf.set(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key, "false")
     // unset this config to use the default v2 session catalog.
     spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
     val testCatalog = catalog("testcat").asTableCatalog
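
Unlike the parser suite, these two tests pin the flag by mutating the session's runtime conf with `spark.conf.set`, without restoring it afterwards; presumably the suite resets or recreates the session between tests (an assumption, not verified here). A more defensive sketch, with an illustrative statement:

val key = SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key
val previous = spark.conf.getOption(key)  // remember the old value, if any
spark.conf.set(key, "false")
try {
  spark.sql("CREATE TABLE t (id BIGINT)")  // hypothetical statement
} finally {
  previous match {
    case Some(v) => spark.conf.set(key, v)
    case None    => spark.conf.unset(key)
  }
}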

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala

Lines changed: 2 additions & 1 deletion

@@ -40,7 +40,8 @@ import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

 class DDLParserSuite extends AnalysisTest with SharedSparkSession {
-  private lazy val parser = new SparkSqlParser(new SQLConf)
+  private lazy val parser = new SparkSqlParser(new SQLConf().copy(
+    SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED -> false))

   private def assertUnsupported(sql: String, containsThesePhrases: Seq[String] = Seq()): Unit = {
     val e = intercept[ParseException] {
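
`SQLConf.copy` clones a conf with typed entry overrides, so the parser under test no longer inherits the build's default. A short usage sketch (the extra `CASE_SENSITIVE` override is illustrative, only to show that `copy` takes varargs):

import org.apache.spark.sql.execution.SparkSqlParser
import org.apache.spark.sql.internal.SQLConf

// copy(...) returns a new SQLConf; nothing shared or global is mutated.
val conf = new SQLConf().copy(
  SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED -> false,
  SQLConf.CASE_SENSITIVE -> true)
val parser = new SparkSqlParser(conf)
assert(!conf.getConf(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED))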

sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala

Lines changed: 3 additions & 1 deletion

@@ -1520,10 +1520,12 @@ class StatisticsSuite extends StatisticsCollectionTestBase with TestHiveSingleto
     val ext_tbl = "SPARK_30269_external"
     withTempDir { dir =>
       withTable(tbl, ext_tbl) {
-        sql(s"CREATE TABLE $tbl (key INT, value STRING, ds STRING) PARTITIONED BY (ds)")
+        sql(s"CREATE TABLE $tbl (key INT, value STRING, ds STRING)" +
+          "USING parquet PARTITIONED BY (ds)")
         sql(
           s"""
              | CREATE TABLE $ext_tbl (key INT, value STRING, ds STRING)
+             | USING PARQUET
              | PARTITIONED BY (ds)
              | LOCATION '${dir.toURI}'
            """.stripMargin)
