Skip to content

Commit afc7da5

Browse files
committed
make getConvertedOI compatible between 0.12.0 and 0.13.1
1 parent b1527d5 commit afc7da5

File tree

3 files changed

+15
-5
lines changed

3 files changed

+15
-5
lines changed

sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -285,9 +285,9 @@ private[hive] object HadoopTableReader extends HiveInspectors {
285285
deserializer.getObjectInspector().asInstanceOf[StructObjectInspector]
286286
}
287287
else {
288-
ObjectInspectorConverters.getConvertedOI(
289-
deserializer.getObjectInspector(), convert.getObjectInspector(),
290-
new java.lang.Boolean(true)).asInstanceOf[StructObjectInspector]
288+
HiveShim.getConvertedOI(
289+
deserializer.getObjectInspector(),
290+
convert.getObjectInspector()).asInstanceOf[StructObjectInspector]
291291
}
292292
case None =>
293293
deserializer.getObjectInspector().asInstanceOf[StructObjectInspector]

sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim12.scala

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ import org.apache.hadoop.hive.ql.plan.{CreateTableDesc, FileSinkDesc, TableDesc}
3434
import org.apache.hadoop.hive.ql.processors._
3535
import org.apache.hadoop.hive.ql.stats.StatsSetupConst
3636
import org.apache.hadoop.hive.serde2.{ColumnProjectionUtils, Deserializer, io => hiveIo}
37-
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, PrimitiveObjectInspector}
37+
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspectorConverters, ObjectInspector, PrimitiveObjectInspector}
3838
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory
3939
import org.apache.hadoop.hive.serde2.objectinspector.primitive.{HiveDecimalObjectInspector, PrimitiveObjectInspectorFactory}
4040
import org.apache.hadoop.hive.serde2.typeinfo.{TypeInfo, TypeInfoFactory}
@@ -241,6 +241,11 @@ private[hive] object HiveShim {
241241
Decimal(hdoi.getPrimitiveJavaObject(data).bigDecimalValue())
242242
}
243243
}
244+
245+
// make getConvertedOI compatible between 0.12.0 and 0.13.1
246+
def getConvertedOI(inputOI: ObjectInspector, outputOI: ObjectInspector): ObjectInspector = {
247+
ObjectInspectorConverters.getConvertedOI(inputOI, outputOI, new java.lang.Boolean(true))
248+
}
244249
}
245250

246251
class ShimFileSinkDesc(var dir: String, var tableInfo: TableDesc, var compressed: Boolean)

sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim13.scala

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ import org.apache.hadoop.hive.ql.plan.{CreateTableDesc, FileSinkDesc, TableDesc}
3636
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory
3737
import org.apache.hadoop.hive.serde2.typeinfo.{TypeInfo, DecimalTypeInfo, TypeInfoFactory}
3838
import org.apache.hadoop.hive.serde2.objectinspector.primitive.{HiveDecimalObjectInspector, PrimitiveObjectInspectorFactory}
39-
import org.apache.hadoop.hive.serde2.objectinspector.{PrimitiveObjectInspector, ObjectInspector}
39+
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspectorConverters, PrimitiveObjectInspector, ObjectInspector}
4040
import org.apache.hadoop.hive.serde2.{Deserializer, ColumnProjectionUtils}
4141
import org.apache.hadoop.hive.serde2.{io => hiveIo}
4242
import org.apache.hadoop.{io => hadoopIo}
@@ -395,6 +395,11 @@ private[hive] object HiveShim {
395395
Decimal(hdoi.getPrimitiveJavaObject(data).bigDecimalValue(), hdoi.precision(), hdoi.scale())
396396
}
397397
}
398+
399+
// make getConvertedOI compatible between 0.12.0 and 0.13.1
400+
def getConvertedOI(inputOI: ObjectInspector, outputOI: ObjectInspector): ObjectInspector = {
401+
ObjectInspectorConverters.getConvertedOI(inputOI, outputOI)
402+
}
398403
}
399404

400405
/*

0 commit comments

Comments (0)