-import type { StandardSchemaV1 } from '@standard-schema/spec';
-import type { StreamRecord } from 'aws-lambda';
 import { BasePartialBatchProcessor } from './BasePartialBatchProcessor.js';
-import { EventType, SchemaVendor } from './constants.js';
 import { BatchProcessingError } from './errors.js';
-import type {
-  BaseRecord,
-  EventSourceDataClassTypes,
-  FailureResponse,
-  SuccessResponse,
-} from './types.js';
+import type { BaseRecord, FailureResponse, SuccessResponse } from './types.js';

 /**
  * Process records in a batch asynchronously and handle partial failure cases.
@@ -108,13 +100,16 @@ class BatchProcessor extends BasePartialBatchProcessor {
     record: BaseRecord
   ): Promise<SuccessResponse | FailureResponse> {
     try {
-      const recordToProcess =
-        this.schema == null
-          ? record
-          : await this.#parseRecord(record, this.eventType, this.schema);
+      const recordToProcess = this.parserConfig?.parser
+        ? await this.parserConfig.parser(
+            record,
+            this.eventType,
+            this.logger,
+            this.parserConfig
+          )
+        : record;
       const data = this.toBatchType(recordToProcess, this.eventType);
       const result = await this.handler(data, this.options?.context);
-
       return this.successHandler(record, result);
     } catch (error) {
       return this.failureHandler(record, error as Error);
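Note on the new code path above: parsing is now delegated to a caller-supplied function invoked as parser(record, eventType, logger, parserConfig); when no parser is configured, the record passes through unchanged. The sketch below shows one way such a function could look. It is illustrative only: the ParserConfig shape, the EventTypeKey alias, the logger interface, and the parseSqsBody name are assumptions, not part of the package's published API.

// Illustrative sketch only: ParserConfig, EventTypeKey, and parseSqsBody are
// assumed names, not the library's published API.
import { z } from 'zod';

type EventTypeKey = 'SQS' | 'KinesisDataStreams' | 'DynamoDBStreams';

interface ParserConfig {
  schema: z.ZodTypeAny;
  parser?: (
    record: unknown,
    eventType: EventTypeKey,
    logger: { warn: (message: string) => void },
    config: ParserConfig
  ) => Promise<unknown>;
}

// Validates an SQS record body against the configured schema; throwing here
// lets asyncProcessRecord() route the record to failureHandler().
const parseSqsBody: NonNullable<ParserConfig['parser']> = async (
  record,
  eventType,
  logger,
  config
) => {
  if (eventType !== 'SQS') return record;
  const { body, ...rest } = record as { body: string };
  const parsed = config.schema.safeParse(JSON.parse(body));
  if (!parsed.success) {
    logger.warn(`Failed to parse record body: ${parsed.error.message}`);
    throw new Error('Failed to parse record');
  }
  return { ...rest, body: parsed.data };
};

A Kinesis or DynamoDB variant would decode or unmarshall the payload first, mirroring what the removed helpers below did.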
@@ -133,163 +128,6 @@ class BatchProcessor extends BasePartialBatchProcessor {
       'Not implemented. Use asyncProcess() instead.'
     );
   }
-
-  /**
-   * Extend the schema according to the event type passed.
-   *
-   * If useTransformers is true, extend using opinionated transformers.
-   * Otherwise, extend without any transformers.
-   *
-   * @param eventType - The type of event to process (SQS, Kinesis, DynamoDB)
-   * @param schema - The StandardSchema to be used for parsing
-   * @param useTransformers - Whether to use transformers for parsing
-   */
-  async #createExtendedSchema(options: {
-    eventType: keyof typeof EventType;
-    schema: StandardSchemaV1;
-    useTransformers: boolean;
-  }) {
-    const { eventType, schema, useTransformers } = options;
-    switch (eventType) {
-      case EventType.SQS: {
-        if (useTransformers) {
-          const [{ JSONStringified }, { SqsRecordSchema }] = await Promise.all([
-            import('@aws-lambda-powertools/parser/helpers'),
-            import('@aws-lambda-powertools/parser/schemas/sqs'),
-          ]);
-          return SqsRecordSchema.extend({
-            body: JSONStringified(schema as any),
-          });
-        }
-        const { SqsRecordSchema } = await import(
-          '@aws-lambda-powertools/parser/schemas/sqs'
-        );
-        return SqsRecordSchema.extend({ body: schema });
-      }
-
-      case EventType.KinesisDataStreams: {
-        if (useTransformers) {
-          const [
-            { Base64Encoded },
-            { KinesisDataStreamRecord, KinesisDataStreamRecordPayload },
-          ] = await Promise.all([
-            import('@aws-lambda-powertools/parser/helpers'),
-            import('@aws-lambda-powertools/parser/schemas/kinesis'),
-          ]);
-          return KinesisDataStreamRecord.extend({
-            kinesis: KinesisDataStreamRecordPayload.extend({
-              data: Base64Encoded(schema as any),
-            }),
-          });
-        }
-        const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } =
-          await import('@aws-lambda-powertools/parser/schemas/kinesis');
-        return KinesisDataStreamRecord.extend({
-          kinesis: KinesisDataStreamRecordPayload.extend({ data: schema }),
-        });
-      }
-
-      case EventType.DynamoDBStreams: {
-        if (useTransformers) {
-          const [
-            { DynamoDBMarshalled },
-            { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase },
-          ] = await Promise.all([
-            import('@aws-lambda-powertools/parser/helpers/dynamodb'),
-            import('@aws-lambda-powertools/parser/schemas/dynamodb'),
-          ]);
-          return DynamoDBStreamRecord.extend({
-            dynamodb: DynamoDBStreamChangeRecordBase.extend({
-              OldImage: DynamoDBMarshalled<StreamRecord['OldImage']>(
-                schema as any
-              ).optional(),
-              NewImage: DynamoDBMarshalled<StreamRecord['NewImage']>(
-                schema as any
-              ).optional(),
-            }),
-          });
-        }
-        const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } =
-          await import('@aws-lambda-powertools/parser/schemas/dynamodb');
-        return DynamoDBStreamRecord.extend({
-          dynamodb: DynamoDBStreamChangeRecordBase.extend({
-            OldImage: (schema as any).optional(),
-            NewImage: (schema as any).optional(),
-          }),
-        });
-      }
-
-      default: {
-        console.warn(
-          `The event type provided is not supported. Supported events: ${Object.values(EventType).join(',')}`
-        );
-        throw new Error('Unsupported event type');
-      }
-    }
-  }
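For reference, the removed #createExtendedSchema above wrapped the user-provided schema inside the built-in envelope schema for the event source, with or without transformers. A standalone sketch of the SQS branch, using the same imports the removed code used, looks like this (userPayloadSchema is a placeholder for the caller-supplied schema):

import { z } from 'zod';
import { JSONStringified } from '@aws-lambda-powertools/parser/helpers';
import { SqsRecordSchema } from '@aws-lambda-powertools/parser/schemas/sqs';

// Placeholder standing in for the caller-supplied schema.
const userPayloadSchema = z.object({ orderId: z.string(), total: z.number() });

// Without transformers: the body field must already satisfy the schema.
const plainSqsRecordSchema = SqsRecordSchema.extend({ body: userPayloadSchema });

// With transformers: JSONStringified() first parses the stringified JSON body,
// then validates the result against the payload schema.
const transformedSqsRecordSchema = SqsRecordSchema.extend({
  body: JSONStringified(userPayloadSchema),
});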
-
-  /**
-   * Parse the record according to the schema and event type passed.
-   *
-   * If the passed schema is already an extended schema,
-   * use the schema directly to parse the record.
-   *
-   * Only Zod Schemas are supported for schema extension.
-   *
-   * @param record - The record to be parsed
-   * @param eventType - The type of event to process
-   * @param schema - The StandardSchema to be used for parsing
-   */
-  async #parseRecord(
-    record: EventSourceDataClassTypes,
-    eventType: keyof typeof EventType,
-    schema: StandardSchemaV1
-  ): Promise<EventSourceDataClassTypes> {
-    const { parse } = await import('@aws-lambda-powertools/parser');
-    // Try parsing with the original schema first
-    const extendedSchemaParsing = parse(record, undefined, schema, true);
-    if (extendedSchemaParsing.success) {
-      return extendedSchemaParsing.data as EventSourceDataClassTypes;
-    }
-    // Only proceed with schema extension if it's a Zod schema
-    if (schema['~standard'].vendor !== SchemaVendor.Zod) {
-      console.warn(
-        'The schema provided is not supported. Only Zod schemas are supported for extension.'
-      );
-      throw new Error('Unsupported schema type');
-    }
-    // Handle schema extension based on event type
-    // Try without transformers first, then with transformers
-    const schemaWithoutTransformers = await this.#createExtendedSchema({
-      eventType,
-      schema,
-      useTransformers: false,
-    });
-    const schemaWithoutTransformersParsing = parse(
-      record,
-      undefined,
-      schemaWithoutTransformers,
-      true
-    );
-    if (schemaWithoutTransformersParsing.success) {
-      return schemaWithoutTransformersParsing.data as EventSourceDataClassTypes;
-    }
-    const schemaWithTransformers = await this.#createExtendedSchema({
-      eventType,
-      schema,
-      useTransformers: true,
-    });
-    const schemaWithTransformersParsing = parse(
-      record,
-      undefined,
-      schemaWithTransformers,
-      true
-    );
-    if (schemaWithTransformersParsing.success) {
-      return schemaWithTransformersParsing.data as EventSourceDataClassTypes;
-    }
-    throw new Error('Failed to parse record');
-  }
 }

 export { BatchProcessor };
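For context on how this class is consumed, a typical handler looks like the sketch below, based on the package's documented usage; it does not exercise the new parserConfig path, whose public wiring is outside this diff.

import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import type { Context, SQSEvent, SQSRecord } from 'aws-lambda';

const processor = new BatchProcessor(EventType.SQS);

// Per-record handler: throwing marks only this record as failed, so its ID is
// returned in batchItemFailures for retry while the rest of the batch succeeds.
const recordHandler = async (record: SQSRecord): Promise<void> => {
  const payload = JSON.parse(record.body);
  console.log(payload);
};

export const handler = async (event: SQSEvent, context: Context) =>
  processPartialResponse(event, recordHandler, processor, { context });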