 HAS_FAST = False
 try:
-    from fastavro import schemaless_reader
+    from fastavro import schemaless_reader, schemaless_writer
 
     HAS_FAST = True
 except ImportError:
@@ -75,9 +75,13 @@ def __init__(self, registry_client, reader_key_schema=None, reader_value_schema=
         self.reader_key_schema = reader_key_schema
         self.reader_value_schema = reader_value_schema
 
-    '''
-
-    '''
+    # Encoder support
+    def _get_encoder_func(self, writer_schema):
+        if HAS_FAST:
+            schema = writer_schema.to_json()
+            return lambda record, fp: schemaless_writer(fp, schema, record)
+        writer = avro.io.DatumWriter(writer_schema)
+        return lambda record, fp: writer.write(record, avro.io.BinaryEncoder(fp))
 
     def encode_record_with_schema(self, topic, schema, record, is_key=False):
         """
@@ -103,7 +107,7 @@ def encode_record_with_schema(self, topic, schema, record, is_key=False):
             raise serialize_err(message)
 
         # cache writer
-        self.id_to_writers[schema_id] = avro.io.DatumWriter(schema)
+        self.id_to_writers[schema_id] = self._get_encoder_func(schema)
 
         return self.encode_record_with_schema_id(schema_id, record, is_key=is_key)
 
@@ -126,7 +130,7 @@ def encode_record_with_schema_id(self, schema_id, record, is_key=False):
                 schema = self.registry_client.get_by_id(schema_id)
                 if not schema:
                     raise serialize_err("Schema does not exist")
-                self.id_to_writers[schema_id] = avro.io.DatumWriter(schema)
+                self.id_to_writers[schema_id] = self._get_encoder_func(schema)
             except ClientError:
                 exc_type, exc_value, exc_traceback = sys.exc_info()
                 raise serialize_err(repr(traceback.format_exception(exc_type, exc_value, exc_traceback)))
@@ -136,19 +140,14 @@ def encode_record_with_schema_id(self, schema_id, record, is_key=False):
         with ContextStringIO() as outf:
             # write the header
             # magic byte
-
             outf.write(struct.pack('b', MAGIC_BYTE))
 
             # write the schema ID in network byte order (big end)
-
             outf.write(struct.pack('>I', schema_id))
 
-            # write the record to the rest of it
+            # write the record to the rest of the buffer
             # Create an encoder that we'll write to
-            encoder = avro.io.BinaryEncoder(outf)
-            # write the magic byte
-            # write the object in 'obj' as Avro to the fake file...
-            writer.write(record, encoder)
+            writer(record, outf)
 
 
             return outf.getvalue()
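
Note (not part of the patch): the header written above follows the Confluent wire format, a 1-byte magic marker followed by the 4-byte schema ID in big-endian (network) byte order, with the Avro-encoded record filling the rest of the buffer. A small sketch of packing and unpacking that 5-byte header, using a made-up schema ID:

    import struct

    MAGIC_BYTE = 0   # 0 in the Confluent wire format
    schema_id = 42   # illustrative value

    # The same two writes as above, concatenated.
    header = struct.pack('b', MAGIC_BYTE) + struct.pack('>I', schema_id)

    # A consumer can recover both fields in one call; '>' selects big-endian
    # standard sizes, so '>bI' consumes exactly 5 bytes.
    magic, recovered_id = struct.unpack('>bI', header)
    assert (magic, recovered_id) == (0, 42)
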