
Commit 4324f70

Fokko and iemejia committed
HIVE-21737: Make Avro use in Hive compatible with Avro 1.9.x
Co-Authored-By: Ismaël Mejía <iemejia@gmail.com>
1 parent a4c5ddd commit 4324f70

5 files changed: +17 additions, -14 deletions

pom.xml

Lines changed: 0 additions & 1 deletion

@@ -502,7 +502,6 @@
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro-mapred</artifactId>
-      <classifier>hadoop2</classifier>
       <version>${avro.version}</version>
       <exclusions>
         <exclusion>

ql/pom.xml

Lines changed: 1 addition & 1 deletion

@@ -220,7 +220,7 @@
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro-mapred</artifactId>
-      <classifier>hadoop2</classifier>
+      <version>${avro.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.ant</groupId>
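
Note: both POM changes drop the hadoop2 classifier from the avro-mapred dependency. Avro 1.9.x no longer publishes a hadoop2-classified avro-mapred jar (Hadoop 1 support was removed), so the unclassified artifact is used with ${avro.version} instead.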

serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java

Lines changed: 3 additions & 3 deletions

@@ -278,7 +278,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
 
       int scale = 0;
       try {
-        scale = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_SCALE).asInt();
+        scale = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_SCALE);
       } catch(Exception ex) {
         throw new AvroSerdeException("Failed to obtain scale value from file schema: " + fileSchema, ex);
       }
@@ -294,7 +294,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
 
       int maxLength = 0;
       try {
-        maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+        maxLength = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
       } catch (Exception ex) {
         throw new AvroSerdeException("Failed to obtain maxLength value for char field from file schema: " + fileSchema, ex);
       }
@@ -309,7 +309,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
 
       maxLength = 0;
       try {
-        maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+        maxLength = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
       } catch (Exception ex) {
         throw new AvroSerdeException("Failed to obtain maxLength value for varchar field from file schema: " + fileSchema, ex);
       }
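
For context, a minimal sketch (not part of the commit) of the property-access change these hunks make. It assumes a schema carrying an int-valued "scale" property, the way Hive's Avro SerDe writes AvroSerDe.AVRO_PROP_SCALE; the class name is made up for illustration:

import org.apache.avro.Schema;

// Sketch: reading an int-valued schema property the way the patched code does.
public class GetObjectPropSketch {
  public static void main(String[] args) {
    // A bytes schema carrying a custom "scale" property.
    Schema schema = new Schema.Parser().parse("{\"type\":\"bytes\",\"scale\":2}");

    // Avro 1.8.x exposed properties as Jackson nodes: schema.getJsonProp("scale").asInt().
    // getJsonProp is gone in Avro 1.9.x; getObjectProp returns a plain Object
    // (an Integer for a numeric JSON property), hence the (int) casts in the patch.
    int scale = (int) schema.getObjectProp("scale");
    System.out.println("scale = " + scale); // prints: scale = 2
  }
}

getObjectProp also exists in the Avro 1.8.x line, which is what lets the same call compile against both versions.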

serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java

Lines changed: 4 additions & 4 deletions

@@ -136,8 +136,8 @@ public static TypeInfo generateTypeInfo(Schema schema,
       int precision = 0;
       int scale = 0;
       try {
-        precision = schema.getJsonProp(AvroSerDe.AVRO_PROP_PRECISION).getIntValue();
-        scale = schema.getJsonProp(AvroSerDe.AVRO_PROP_SCALE).getIntValue();
+        precision = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_PRECISION);
+        scale = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_SCALE);
       } catch (Exception ex) {
         throw new AvroSerdeException("Failed to obtain scale value from file schema: " + schema, ex);
       }
@@ -155,7 +155,7 @@ public static TypeInfo generateTypeInfo(Schema schema,
         AvroSerDe.CHAR_TYPE_NAME.equalsIgnoreCase(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
       int maxLength = 0;
       try {
-        maxLength = schema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+        maxLength = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
       } catch (Exception ex) {
         throw new AvroSerdeException("Failed to obtain maxLength value from file schema: " + schema, ex);
       }
@@ -166,7 +166,7 @@ public static TypeInfo generateTypeInfo(Schema schema,
           .equalsIgnoreCase(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
       int maxLength = 0;
       try {
-        maxLength = schema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+        maxLength = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
       } catch (Exception ex) {
         throw new AvroSerdeException("Failed to obtain maxLength value from file schema: " + schema, ex);
       }

serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java

Lines changed: 9 additions & 5 deletions

@@ -28,8 +28,6 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.node.JsonNodeFactory;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -42,6 +40,13 @@ public class TypeInfoToSchema {
 
   private long recordCounter = 0;
 
+  /**
+   * This constant value must be equal to org.apache.avro.Schema.Field.NULL_DEFAULT_VALUE
+   * starting with Avro 1.9.x, but it is not available in Avro 1.8.x so we define it here
+   * for backwards compatibility.
+   */
+  private static final Object NULL_DEFAULT_VALUE = new Object();
+
   /**
    * Converts Hive schema to avro schema
    *
@@ -235,14 +240,13 @@ private Schema createAvroArray(TypeInfo typeInfo) {
   private List<Schema.Field> getFields(Schema.Field schemaField) {
     List<Schema.Field> fields = new ArrayList<Schema.Field>();
 
-    JsonNode nullDefault = JsonNodeFactory.instance.nullNode();
     if (schemaField.schema().getType() == Schema.Type.RECORD) {
       for (Schema.Field field : schemaField.schema().getFields()) {
-        fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), nullDefault));
+        fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), NULL_DEFAULT_VALUE));
       }
     } else {
       fields.add(new Schema.Field(schemaField.name(), schemaField.schema(), schemaField.doc(),
-          nullDefault));
+          NULL_DEFAULT_VALUE));
     }
 
     return fields;
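
For reference, a minimal sketch (not part of the commit, and it compiles only against Avro 1.9.x) of the null-default idiom that replaces the Jackson NullNode; the record and field names are made up for illustration:

import java.util.Collections;

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class NullDefaultSketch {
  public static void main(String[] args) {
    // A nullable column type, modelled as the union ["null", "string"].
    Schema nullableString = SchemaBuilder.unionOf().nullType().and().stringType().endUnion();

    // Avro 1.8.x: new Schema.Field(name, schema, doc, JsonNodeFactory.instance.nullNode())
    // Avro 1.9.x: JsonNode defaults are gone; the sentinel Schema.Field.NULL_DEFAULT_VALUE
    // marks an explicit JSON null default instead.
    Schema.Field field = new Schema.Field("col1", nullableString, "illustrative column",
        Schema.Field.NULL_DEFAULT_VALUE);

    Schema record = Schema.createRecord("example_record", null, "org.example", false,
        Collections.singletonList(field));
    System.out.println(record.toString(true)); // the field is printed with "default" : null
  }
}

Because Schema.Field.NULL_DEFAULT_VALUE does not exist in Avro 1.8.x, the patch defines its own NULL_DEFAULT_VALUE stand-in in TypeInfoToSchema rather than referencing the library constant directly.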
