@@ -22,10 +22,12 @@ class Kafka(unittest.TestCase):
     MULTIPLE_KAFKA_TIMESTAMP_1 = "2020-06-20T05:06:25.945Z"
     MULTIPLE_KAFKA_DATA_0 = '{"Offset":62,"Partition":1,"Topic":"message",' \
                             '"Timestamp":"2020-06-20T05:06:25.139Z","Value":"a", ' \
-                            '"Headers":[{"Key":"test","Value":"1"}]}'
+                            '"Headers":[{"Key":"test","Value":"1"}], ' \
+                            '"Key": "1"}'
     MULTIPLE_KAFKA_DATA_1 = '{"Offset":63,"Partition":1,"Topic":"message",' \
                             '"Timestamp":"2020-06-20T05:06:25.945Z","Value":"a", ' \
-                            '"Headers":[{"Key":"test2","Value":"2"}]}'
+                            '"Headers":[{"Key":"test2","Value":"2"}], ' \
+                            '"Key": "2"}'

     def test_kafka_input_type(self):
         check_input_type = (
@@ -238,10 +240,10 @@ def _generate_single_kafka_datum(self, datum_type='string'):
     def _generate_multiple_kafka_data(self, data_type='json'):
         data = '[{"Offset":62,"Partition":1,"Topic":"message",' \
                '"Timestamp":"2020-06-20T05:06:25.139Z","Value":"a",' \
-               ' "Headers":[{"Key":"test","Value":"1"}]},' \
+               ' "Headers":[{"Key":"test","Value":"1"}], "Key": "1"},' \
                ' {"Offset":63,"Partition":1,"Topic":"message",' \
                '"Timestamp":"2020-06-20T05:06:25.945Z","Value":"a", ' \
-               '"Headers":[{"Key":"test2","Value":"2"}]}]'
+               '"Headers":[{"Key":"test2","Value":"2"}], "Key": "2"}]'
         if data_type == 'collection_bytes':
             data = list(
                 map(lambda x: json.dumps(x).encode('utf-8'),
@@ -282,7 +284,7 @@ def _generate_single_trigger_metadatum(self):
         }

     def _generate_multiple_trigger_metadata(self):
-        key_array = [None, None]
+        key_array = ["1", "2"]
         partition_array = [1, 2]
         timestamp_array = ["2020-06-20T05:06:25.139Z",
                            "2020-06-20T05:06:25.945Z"]
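For reference, a minimal sketch (illustrative only, not part of this change) of how the new "Key" fields in the multi-event payload line up with the updated key_array expectation in _generate_multiple_trigger_metadata. The payload literal is copied from _generate_multiple_kafka_data above; the rest is assumed for illustration.

import json

# Multi-event payload as built by _generate_multiple_kafka_data(data_type='json')
data = '[{"Offset":62,"Partition":1,"Topic":"message",' \
       '"Timestamp":"2020-06-20T05:06:25.139Z","Value":"a",' \
       ' "Headers":[{"Key":"test","Value":"1"}], "Key": "1"},' \
       ' {"Offset":63,"Partition":1,"Topic":"message",' \
       '"Timestamp":"2020-06-20T05:06:25.945Z","Value":"a", ' \
       '"Headers":[{"Key":"test2","Value":"2"}], "Key": "2"}]'

events = json.loads(data)
keys = [event["Key"] for event in events]
assert keys == ["1", "2"]  # same values now expected in key_array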