-
Notifications
You must be signed in to change notification settings - Fork 28.6k
[SPARK-23348][SQL] append data using saveAsTable should adjust the data types #20527
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -37,6 +37,8 @@ import org.apache.spark.util.Utils | |
|
||
|
||
class InMemoryCatalogedDDLSuite extends DDLSuite with SharedSQLContext with BeforeAndAfterEach { | ||
import testImplicits._ | ||
|
||
override def afterEach(): Unit = { | ||
try { | ||
// drop all databases, tables and functions after each test | ||
|
@@ -132,6 +134,32 @@ class InMemoryCatalogedDDLSuite extends DDLSuite with SharedSQLContext with Befo | |
checkAnswer(spark.table("t"), Row(Row("a", 1)) :: Nil) | ||
} | ||
} | ||
|
||
// TODO: This test is copied from HiveDDLSuite, unify it later.
test("SPARK-23348: append data to data source table with saveAsTable") {
  withTable("t", "t1") {
    // Create a parquet (default format) table with schema (i: Int, j: String).
    Seq(1 -> "a").toDF("i", "j").write.saveAsTable("t")
    checkAnswer(spark.table("t"), Row(1, "a"))

    // SQL INSERT appends into the existing table.
    sql("INSERT INTO t SELECT 2, 'b'")
    checkAnswer(spark.table("t"), Row(1, "a") :: Row(2, "b") :: Nil)

    // DataFrame append with matching column types.
    Seq(3 -> "c").toDF("i", "j").write.mode("append").saveAsTable("t")
    checkAnswer(spark.table("t"), Row(1, "a") :: Row(2, "b") :: Row(3, "c") :: Nil)

    // Append with swapped column types (String, Int): the incoming data is
    // adjusted to the table schema — "c" cannot be cast to Int and becomes
    // null, while 3 is cast to the string "3".
    Seq("c" -> 3).toDF("i", "j").write.mode("append").saveAsTable("t")
    checkAnswer(spark.table("t"), Row(1, "a") :: Row(2, "b") :: Row(3, "c")
      :: Row(null, "3") :: Nil)

    Seq(4 -> "d").toDF("i", "j").write.saveAsTable("t1")

    // Appending with a file format different from the existing table's must fail.
    val e = intercept[AnalysisException] {
      Seq(5 -> "e").toDF("i", "j").write.mode("append").format("json").saveAsTable("t1")
    }
    assert(e.message.contains("The format of the existing table default.t1 is " +
      "`ParquetFileFormat`. It doesn't match the specified format `JsonFileFormat`."))
  }
}
} | ||
|
||
abstract class DDLSuite extends QueryTest with SQLTestUtils { | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
nit: don't need to copy the `newQuery` if it is the same as `query`.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
it's also ok to always copy it and the code is neater.