/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.hive.parquet

import java.util.Properties

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.serde2.{SerDe, SerDeStats}
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category
import org.apache.hadoop.io.Writable

/**
 * A placeholder that allows SparkSQL users to create metastore tables that are stored as
 * parquet files. It is only intended to pass the checks that the serde is valid and exists
 * when a CREATE TABLE is run. The actual work of decoding will be done by ParquetTableScan
 * when "spark.sql.hive.convertMetastoreParquet" is set to true.
 */
@deprecated("No code should depend on FakeParquetHiveSerDe as it is only intended as a " +
  "placeholder in the Hive MetaStore", "1.2.0")
class FakeParquetSerDe extends SerDe {
  // Advertises a minimal primitive "string" schema so that metastore validation passes.
  // This inspector is never used to actually inspect row data.
  override def getObjectInspector: ObjectInspector = new ObjectInspector {
    override def getCategory: Category = Category.PRIMITIVE

    override def getTypeName: String = "string"
  }

  // All data-path methods fail loudly: this serde must never (de)serialize real rows.
  override def deserialize(p1: Writable): AnyRef = throwError

  // Intentional no-op: a placeholder serde has nothing to configure.
  override def initialize(p1: Configuration, p2: Properties): Unit = {}

  override def getSerializedClass: Class[_ <: Writable] = throwError

  override def getSerDeStats: SerDeStats = throwError

  override def serialize(p1: scala.Any, p2: ObjectInspector): Writable = throwError

  // Shared failure path. Typed as Nothing to make the always-throws contract explicit;
  // the message points users at the configuration flag that bypasses this serde.
  private def throwError: Nothing =
    sys.error(
      "spark.sql.hive.convertMetastoreParquet must be set to true to use FakeParquetSerDe")
}