Skip to content

Commit a510a23

Browse files
committed
Reset the original classloader after instantiating the deserializer
1 parent fb39a53 commit a510a23

File tree

1 file changed

+9
-2
lines changed

1 file changed

+9
-2
lines changed

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScanExec.scala

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.serde.serdeConstants
2727
import org.apache.hadoop.hive.serde2.objectinspector._
2828
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption
2929
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils
30+
import org.apache.hadoop.hive.serde2.Deserializer
3031

3132
import org.apache.spark.rdd.RDD
3233
import org.apache.spark.sql.SparkSession
@@ -122,12 +123,18 @@ case class HiveTableScanExec(
122123
HiveShim.appendReadColumns(hiveConf, neededColumnIDs, output.map(_.name))
123124

124125
val currentState = SessionState.get()
125-
if (currentState != null) {
126+
val deserializer = if (currentState != null) {
127+
val originClassLoader = currentState.getConf.getClassLoader
126128
currentState.getConf.setClassLoader(Thread.currentThread().getContextClassLoader)
129+
val instance = tableDesc.getDeserializerClass.getConstructor().newInstance()
130+
currentState.getConf.setClassLoader(originClassLoader)
131+
instance
132+
} else {
133+
tableDesc.getDeserializerClass.getConstructor().newInstance()
127134
}
128-
val deserializer = tableDesc.getDeserializerClass.getConstructor().newInstance()
129135
deserializer.initialize(hiveConf, tableDesc.getProperties)
130136

137+
131138
// Specifies types and object inspectors of columns to be scanned.
132139
val structOI = ObjectInspectorUtils
133140
.getStandardObjectInspector(

0 commit comments

Comments (0)