Skip to content

Commit 3b22291

Browse files
dongjoon-hyun authored and rxin committed
[SPARK-16387][SQL] JDBC Writer should use dialect to quote field names.
## What changes were proposed in this pull request? Currently, JDBC Writer uses dialects to get datatypes, but doesn't to quote field names. This PR uses dialects to quote the field names, too. **Reported Error Scenario (MySQL case)** ```scala scala> val url="jdbc:mysql://localhost:3306/temp" scala> val prop = new java.util.Properties scala> prop.setProperty("user","root") scala> spark.createDataset(Seq("a","b","c")).toDF("order") scala> df.write.mode("overwrite").jdbc(url, "temptable", prop) ...MySQLSyntaxErrorException: ... near 'order TEXT ) ``` ## How was this patch tested? Pass the Jenkins tests and manually do the above case. Author: Dongjoon Hyun <dongjoon@apache.org> Closes #14107 from dongjoon-hyun/SPARK-16387.
1 parent 60ba436 commit 3b22291

File tree

2 files changed

+11
-4
lines changed

2 files changed

+11
-4
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -100,8 +100,9 @@ object JdbcUtils extends Logging {
100100
/**
101101
* Returns a PreparedStatement that inserts a row into table via conn.
102102
*/
103-
def insertStatement(conn: Connection, table: String, rddSchema: StructType): PreparedStatement = {
104-
val columns = rddSchema.fields.map(_.name).mkString(",")
103+
def insertStatement(conn: Connection, table: String, rddSchema: StructType, dialect: JdbcDialect)
104+
: PreparedStatement = {
105+
val columns = rddSchema.fields.map(x => dialect.quoteIdentifier(x.name)).mkString(",")
105106
val placeholders = rddSchema.fields.map(_ => "?").mkString(",")
106107
val sql = s"INSERT INTO $table ($columns) VALUES ($placeholders)"
107108
conn.prepareStatement(sql)
@@ -177,7 +178,7 @@ object JdbcUtils extends Logging {
177178
if (supportsTransactions) {
178179
conn.setAutoCommit(false) // Everything in the same db transaction.
179180
}
180-
val stmt = insertStatement(conn, table, rddSchema)
181+
val stmt = insertStatement(conn, table, rddSchema, dialect)
181182
try {
182183
var rowCount = 0
183184
while (iterator.hasNext) {
@@ -260,7 +261,7 @@ object JdbcUtils extends Logging {
260261
val sb = new StringBuilder()
261262
val dialect = JdbcDialects.get(url)
262263
df.schema.fields foreach { field =>
263-
val name = field.name
264+
val name = dialect.quoteIdentifier(field.name)
264265
val typ: String = getJdbcType(field.dataType, dialect).databaseTypeDefinition
265266
val nullable = if (field.nullable) "" else "NOT NULL"
266267
sb.append(s", $name $typ $nullable")

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -764,4 +764,10 @@ class JDBCSuite extends SparkFunSuite
764764
assertEmptyQuery(s"SELECT * FROM tempFrame where $FALSE2")
765765
}
766766
}
767+
768+
test("SPARK-16387: Reserved SQL words are not escaped by JDBC writer") {
769+
val df = spark.createDataset(Seq("a", "b", "c")).toDF("order")
770+
val schema = JdbcUtils.schemaString(df, "jdbc:mysql://localhost:3306/temp")
771+
assert(schema.contains("`order` TEXT"))
772+
}
767773
}

0 commit comments

Comments
 (0)