
Commit 1cc2b34

address comments
1 parent c6ab192 commit 1cc2b34

File tree: 10 files changed (+26, -37 lines)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonGenerator.scala

Lines changed: 1 addition & 1 deletion
@@ -178,7 +178,7 @@ private[sql] class JacksonGenerator(
     var i = 0
     while (i < row.numFields) {
       val field = schema(i)
-      if (!row.isNullAt(i) || field.dataType == NullType) {
+      if (!row.isNullAt(i)) {
         gen.writeFieldName(field.name)
         fieldWriters(i).apply(row, i)
       }
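
For context, a hedged sketch (not part of the commit) of the observable effect through the public JSON writer; the SparkSession `spark` and output path are assumptions:

    // Column c2 below is inferred as NullType.
    val df = spark.range(1).selectExpr("id AS c1", "null AS c2")
    df.write.json("/tmp/out")
    // Old condition: NullType fields were still emitted -> {"c1":0,"c2":null}
    // New condition: every null-valued field is skipped  -> {"c1":0}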

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ object DataSourceUtils {
    */
   private def verifySchema(format: FileFormat, schema: StructType, isReadPath: Boolean): Unit = {
     schema.foreach { field =>
-      if (!format.supportsDataType(field.dataType)) {
+      if (!format.supportDataType(field.dataType)) {
         throw new AnalysisException(
           s"$format data source does not support ${field.dataType.catalogString} data type.")
       }
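
As a hedged illustration of this check (not part of the diff): when a format rejects a field's type, the write fails analysis with the message built above. CSV only accepts atomic types per the CSVFileFormat hunk below, so a map column would be rejected roughly like this:

    // Hypothetical repro, assuming a SparkSession `spark`:
    spark.range(1).selectExpr("map('k', id) AS m").write.csv("/tmp/unsupported")
    // => AnalysisException: CSV data source does not support
    //    map<string,bigint> data type.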

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileFormat.scala

Lines changed: 1 addition & 1 deletion
@@ -156,7 +156,7 @@ trait FileFormat {
    * Returns whether this format supports the given [[DataType]] in read/write path.
    * By default all data types are supported.
    */
-  def supportsDataType(dataType: DataType): Boolean = true
+  def supportDataType(dataType: DataType): Boolean = true
 }

 /**
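
A minimal sketch, assuming a hypothetical custom source, of how an implementor would override the renamed hook (FileFormat's other abstract members are elided, so this is a shape rather than compiling code):

    class MyFileFormat extends FileFormat {
      // Hypothetical policy: restrict this made-up format to strings and ints.
      override def supportDataType(dataType: DataType): Boolean = dataType match {
        case StringType | IntegerType => true
        case udt: UserDefinedType[_]  => supportDataType(udt.sqlType)
        case _                        => false
      }
      // inferSchema and prepareWrite omitted; FileFormat requires them.
    }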

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/csv/CSVFileFormat.scala

Lines changed: 2 additions & 2 deletions
@@ -153,10 +153,10 @@ class CSVFileFormat extends TextBasedFileFormat with DataSourceRegister {

   override def equals(other: Any): Boolean = other.isInstanceOf[CSVFileFormat]

-  override def supportsDataType(dataType: DataType): Boolean = dataType match {
+  override def supportDataType(dataType: DataType): Boolean = dataType match {
     case _: AtomicType => true

-    case udt: UserDefinedType[_] => supportsDataType(udt.sqlType)
+    case udt: UserDefinedType[_] => supportDataType(udt.sqlType)

     case _ => false
   }
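
A hedged note on the udt branch (illustration only): a UserDefinedType is accepted exactly when its underlying sqlType is, so for CSV:

    // Hypothetical UDT instances to show the delegation:
    // supportDataType(pointUdt)   // true  if pointUdt.sqlType is, say, DoubleType (atomic)
    // supportDataType(vectorUdt)  // false if vectorUdt.sqlType is ArrayType(DoubleType),
    //                             // which hits `case _ => false` for CSV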

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala

Lines changed: 5 additions & 5 deletions
@@ -140,17 +140,17 @@ class JsonFileFormat extends TextBasedFileFormat with DataSourceRegister {

   override def equals(other: Any): Boolean = other.isInstanceOf[JsonFileFormat]

-  override def supportsDataType(dataType: DataType): Boolean = dataType match {
+  override def supportDataType(dataType: DataType): Boolean = dataType match {
     case _: AtomicType => true

-    case st: StructType => st.forall { f => supportsDataType(f.dataType) }
+    case st: StructType => st.forall { f => supportDataType(f.dataType) }

-    case ArrayType(elementType, _) => supportsDataType(elementType)
+    case ArrayType(elementType, _) => supportDataType(elementType)

     case MapType(keyType, valueType, _) =>
-      supportsDataType(keyType) && supportsDataType(valueType)
+      supportDataType(keyType) && supportDataType(valueType)

-    case udt: UserDefinedType[_] => supportsDataType(udt.sqlType)
+    case udt: UserDefinedType[_] => supportDataType(udt.sqlType)

     case _: NullType => true
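
Hedged examples of the recursion (mine, not the commit's): a nested type is supported only if every leaf type is. On a JsonFileFormat instance:

    val json = new JsonFileFormat
    json.supportDataType(ArrayType(StringType, containsNull = true))      // true
    json.supportDataType(MapType(StringType, ArrayType(LongType, true))) // true: key and value both recurse
    json.supportDataType(StructType(Seq(StructField("x", NullType))))    // true: the NullType branch above
    json.supportDataType(CalendarIntervalType)                           // false, assuming the match ends
                                                                         // with a default `case _ => false`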

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFileFormat.scala

Lines changed: 5 additions & 5 deletions
@@ -235,17 +235,17 @@ class OrcFileFormat
     }
   }

-  override def supportsDataType(dataType: DataType): Boolean = dataType match {
+  override def supportDataType(dataType: DataType): Boolean = dataType match {
     case _: AtomicType => true

-    case st: StructType => st.forall { f => supportsDataType(f.dataType) }
+    case st: StructType => st.forall { f => supportDataType(f.dataType) }

-    case ArrayType(elementType, _) => supportsDataType(elementType)
+    case ArrayType(elementType, _) => supportDataType(elementType)

     case MapType(keyType, valueType, _) =>
-      supportsDataType(keyType) && supportsDataType(valueType)
+      supportDataType(keyType) && supportDataType(valueType)

-    case udt: UserDefinedType[_] => supportsDataType(udt.sqlType)
+    case udt: UserDefinedType[_] => supportDataType(udt.sqlType)

     case _ => false
   }

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala

Lines changed: 5 additions & 5 deletions
@@ -453,17 +453,17 @@ class ParquetFileFormat
     }
   }

-  override def supportsDataType(dataType: DataType): Boolean = dataType match {
+  override def supportDataType(dataType: DataType): Boolean = dataType match {
     case _: AtomicType => true

-    case st: StructType => st.forall { f => supportsDataType(f.dataType) }
+    case st: StructType => st.forall { f => supportDataType(f.dataType) }

-    case ArrayType(elementType, _) => supportsDataType(elementType)
+    case ArrayType(elementType, _) => supportDataType(elementType)

     case MapType(keyType, valueType, _) =>
-      supportsDataType(keyType) && supportsDataType(valueType)
+      supportDataType(keyType) && supportDataType(valueType)

-    case udt: UserDefinedType[_] => supportsDataType(udt.sqlType)
+    case udt: UserDefinedType[_] => supportDataType(udt.sqlType)

     case _ => false
   }
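
The ORC and Parquet overrides match the JSON one except that neither has a `case _: NullType => true` branch, so NullType falls through to false. A hedged consequence, consistent with the SPARK-24204 test kept in the suite below:

    // Hypothetical repro, assuming a SparkSession `spark`:
    spark.range(1).selectExpr("id", "null AS n").write.parquet("/tmp/nulltype")
    // => AnalysisException (roughly): Parquet data source does not support
    //    null data type.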

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/text/TextFileFormat.scala

Lines changed: 1 addition & 1 deletion
@@ -139,7 +139,7 @@ class TextFileFormat extends TextBasedFileFormat with DataSourceRegister {
     }
   }

-  override def supportsDataType(dataType: DataType): Boolean =
+  override def supportDataType(dataType: DataType): Boolean =
     dataType == StringType
 }
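
Hedged illustration: the text source writes one string column per output line, so any non-string column type is rejected by the check in DataSourceUtils above:

    // Hypothetical, assuming a SparkSession `spark`:
    spark.range(1).selectExpr("CAST(id AS STRING) AS value").write.text("/tmp/ok")
    spark.range(1).write.text("/tmp/bad")
    // => AnalysisException (roughly): Text data source does not support
    //    bigint data type.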

sql/core/src/test/scala/org/apache/spark/sql/FileBasedDataSourceSuite.scala

Lines changed: 0 additions & 11 deletions
@@ -366,17 +366,6 @@ class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext with Befo
     }
   }

-  test("SPARK-26716 supports writing and reading Null data type - json") {
-    withTempPath { dir =>
-      val df = spark.range(10).map(id => (id, null)).toDF("c1", "c2")
-      df.write.json(dir.getCanonicalPath)
-      checkAnswer(spark.read.json(dir.getCanonicalPath), df)
-      val schema =
-        StructType(Seq(StructField("c1", LongType, true), StructField("c2", NullType, true)))
-      checkAnswer(spark.read.schema(schema).json(dir.getCanonicalPath), df)
-    }
-  }
-
   test("SPARK-24204 error handling for unsupported Null data types - csv, parquet, orc") {
     withSQLConf(SQLConf.USE_V1_SOURCE_READER_LIST.key -> "orc") {
       withTempDir { dir =>
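
A hedged gloss on why this round-trip test is removed rather than kept: with the JacksonGenerator change above, null-valued fields are omitted from the written JSON, so the first assertion can no longer pass:

    // `path` stands in for the temp dir used by the removed test.
    val df = spark.range(10).map(id => (id, null)).toDF("c1", "c2")  // c2 is NullType
    df.write.json(path)       // now writes lines like {"c1":0}
    spark.read.json(path)     // schema inference sees only c1, so
                              // checkAnswer against the two-column df fails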

sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala

Lines changed: 5 additions & 5 deletions
@@ -181,17 +181,17 @@ class OrcFileFormat extends FileFormat with DataSourceRegister with Serializable
     }
   }

-  override def supportsDataType(dataType: DataType): Boolean = dataType match {
+  override def supportDataType(dataType: DataType): Boolean = dataType match {
     case _: AtomicType => true

-    case st: StructType => st.forall { f => supportsDataType(f.dataType) }
+    case st: StructType => st.forall { f => supportDataType(f.dataType) }

-    case ArrayType(elementType, _) => supportsDataType(elementType)
+    case ArrayType(elementType, _) => supportDataType(elementType)

     case MapType(keyType, valueType, _) =>
-      supportsDataType(keyType) && supportsDataType(valueType)
+      supportDataType(keyType) && supportDataType(valueType)

-    case udt: UserDefinedType[_] => supportsDataType(udt.sqlType)
+    case udt: UserDefinedType[_] => supportDataType(udt.sqlType)

     case _ => false
   }
