Skip to content

Commit 1a68632

Browse files
committed
fix more
1 parent 7765675 commit 1a68632

File tree

3 files changed

+33
-25
lines changed

3 files changed

+33
-25
lines changed

sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -257,6 +257,7 @@ class DataSourceV2SQLSuite
257257
}
258258

259259
test("CreateTable: without USING clause") {
260+
spark.conf.set(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key, "false")
260261
// unset this config to use the default v2 session catalog.
261262
spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
262263
val testCatalog = catalog("testcat").asTableCatalog
@@ -613,6 +614,7 @@ class DataSourceV2SQLSuite
613614
}
614615

615616
test("CreateTableAsSelect: without USING clause") {
617+
spark.conf.set(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key, "false")
616618
// unset this config to use the default v2 session catalog.
617619
spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
618620
val testCatalog = catalog("testcat").asTableCatalog

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala

Lines changed: 18 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -77,8 +77,10 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
7777

7878
private def withCreateTableStatement(sql: String)(prediction: CreateTableStatement => Unit)
7979
: Unit = {
80-
val statement = parser.parsePlan(sql).asInstanceOf[CreateTableStatement]
81-
prediction(statement)
80+
withSQLConf(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key -> "false") {
81+
val statement = parser.parsePlan(sql).asInstanceOf[CreateTableStatement]
82+
prediction(statement)
83+
}
8284
}
8385

8486
test("alter database - property values must be set") {
@@ -485,18 +487,20 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
485487
}
486488

487489
test("Test CTAS #3") {
488-
val s3 = """CREATE TABLE page_view AS SELECT * FROM src"""
489-
val statement = parser.parsePlan(s3).asInstanceOf[CreateTableAsSelectStatement]
490-
assert(statement.tableName(0) == "page_view")
491-
assert(statement.asSelect == parser.parsePlan("SELECT * FROM src"))
492-
assert(statement.partitioning.isEmpty)
493-
assert(statement.bucketSpec.isEmpty)
494-
assert(statement.properties.isEmpty)
495-
assert(statement.provider.isEmpty)
496-
assert(statement.options.isEmpty)
497-
assert(statement.location.isEmpty)
498-
assert(statement.comment.isEmpty)
499-
assert(!statement.ifNotExists)
490+
withSQLConf(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key -> "false") {
491+
val s3 = """CREATE TABLE page_view AS SELECT * FROM src"""
492+
val statement = parser.parsePlan(s3).asInstanceOf[CreateTableAsSelectStatement]
493+
assert(statement.tableName(0) == "page_view")
494+
assert(statement.asSelect == parser.parsePlan("SELECT * FROM src"))
495+
assert(statement.partitioning.isEmpty)
496+
assert(statement.bucketSpec.isEmpty)
497+
assert(statement.properties.isEmpty)
498+
assert(statement.provider.isEmpty)
499+
assert(statement.options.isEmpty)
500+
assert(statement.location.isEmpty)
501+
assert(statement.comment.isEmpty)
502+
assert(!statement.ifNotExists)
503+
}
500504
}
501505

502506
test("Test CTAS #4") {

sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -844,18 +844,20 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
844844
}
845845

846846
test("SPARK-29174 Support LOCAL in INSERT OVERWRITE DIRECTORY to data source") {
847-
withTempPath { dir =>
848-
val path = dir.toURI.getPath
849-
sql(s"""create table tab1 ( a int) location '$path'""")
850-
sql("insert into tab1 values(1)")
851-
checkAnswer(sql("select * from tab1"), Seq(1).map(i => Row(i)))
852-
sql("create table tab2 ( a int)")
853-
sql("insert into tab2 values(2)")
854-
checkAnswer(sql("select * from tab2"), Seq(2).map(i => Row(i)))
855-
sql(s"""insert overwrite local directory '$path' using parquet select * from tab2""")
856-
sql("refresh table tab1")
857-
checkAnswer(sql("select * from tab1"), Seq(2).map(i => Row(i)))
847+
withSQLConf(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key -> "false") {
848+
withTempPath { dir =>
849+
val path = dir.toURI.getPath
850+
sql(s"""create table tab1 ( a int) location '$path'""")
851+
sql("insert into tab1 values(1)")
852+
checkAnswer(sql("select * from tab1"), Seq(1).map(i => Row(i)))
853+
sql("create table tab2 ( a int)")
854+
sql("insert into tab2 values(2)")
855+
checkAnswer(sql("select * from tab2"), Seq(2).map(i => Row(i)))
856+
sql(s"""insert overwrite local directory '$path' using parquet select * from tab2""")
857+
sql("refresh table tab1")
858+
checkAnswer(sql("select * from tab1"), Seq(2).map(i => Row(i)))
858859
}
860+
}
859861
}
860862

861863
test("SPARK-29174 fail LOCAL in INSERT OVERWRITE DIRECT remote path") {

0 commit comments

Comments (0)