Commit 1f26805

SPARK-3807: SparkSql does not work for tables created using custom serde (Incorporated Review Comments)
1 parent ba4bc0c commit 1f26805

File tree

1 file changed: +4, -5 lines


sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala

Lines changed: 4 additions & 5 deletions
@@ -80,15 +80,14 @@ case class HiveTableScan(
     ColumnProjectionUtils.appendReadColumnIDs(hiveConf, neededColumnIDs)
     ColumnProjectionUtils.appendReadColumnNames(hiveConf, attributes.map(_.name))
 
-    val td = relation.tableDesc
-    val deClass = td.getDeserializerClass;
-    val de = deClass.newInstance();
-    de.initialize(hiveConf, td.getProperties);
+    val tableDesc = relation.tableDesc
+    val deserializer = tableDesc.getDeserializerClass.newInstance
+    deserializer.initialize(hiveConf, tableDesc.getProperties)
 
     // Specifies types and object inspectors of columns to be scanned.
     val structOI = ObjectInspectorUtils
       .getStandardObjectInspector(
-        de.getObjectInspector,
+        deserializer.getObjectInspector,
         ObjectInspectorCopyOption.JAVA)
       .asInstanceOf[StructObjectInspector]
