@@ -152,15 +152,15 @@ def aten_ops_fmod(
     return impl.elementwise.fmod(network, target, SourceIR.ATEN, name, args[0], args[1])


-@dynamo_tensorrt_converter(torch.ops.aten.gelu.default)  # type: ignore[misc]
-def aten_ops_gelu(
+@dynamo_tensorrt_converter(torch.ops.aten.relu.default)
+def aten_ops_relu(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.activation.gelu(
+    return impl.activation.relu(
         network,
         target,
         SourceIR.ATEN,
@@ -169,55 +169,165 @@ def aten_ops_gelu(
     )


-@dynamo_tensorrt_converter(torch.ops.aten.matmul)  # type: ignore[misc]
-@dynamo_tensorrt_converter(torch.ops.aten.mm.default)  # type: ignore[misc]
-def aten_ops_matmul(
+@dynamo_tensorrt_converter(torch.ops.aten.sigmoid.default)
+def aten_ops_sigmoid(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.matmul.matrix_multiply(
-        network, target, SourceIR.ATEN, name, args[0], args[1]
+    return impl.activation.sigmoid(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
     )


-@dynamo_tensorrt_converter(torch.ops.aten.layer_norm.default)  # type: ignore[misc]
-def aten_ops_layernorm(
+@dynamo_tensorrt_converter(torch.ops.aten.tanh.default)
+def aten_ops_tanh(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.normalization.layer_norm(
+    return impl.activation.tanh(
         network,
         target,
         SourceIR.ATEN,
         name,
         args[0],
-        args[1],
-        args[2],
-        args[3],
-        args[4],
     )


-@dynamo_tensorrt_converter(torch.ops.aten.relu.default)  # type: ignore[misc]
-def aten_ops_relu(
+@dynamo_tensorrt_converter(torch.ops.aten.leaky_relu.default)
+def aten_ops_leaky_relu(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.activation.relu(
+    return impl.activation.leaky_relu(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        args_bounds_check(args, 1, 0.01),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.elu.default)
+def aten_ops_elu(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.elu(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        alpha=args_bounds_check(args, 1, 1.0),
+        beta=args_bounds_check(args, 2, None),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.softplus.default)
+def aten_ops_softplus(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.softplus(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        beta=args_bounds_check(args, 1, 1),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.clip.default)
+def aten_ops_clip(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.clip(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        alpha=args_bounds_check(args, 1),
+        beta=args_bounds_check(args, 2),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.hardsigmoid.default)
+def aten_ops_hard_sigmoid(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.hard_sigmoid(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        alpha=args_bounds_check(args, 1, 1 / 6),
+        beta=args_bounds_check(args, 2, 1 / 2),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.matmul)  # type: ignore[misc]
+@dynamo_tensorrt_converter(torch.ops.aten.mm.default)  # type: ignore[misc]
+def aten_ops_matmul(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.matmul.matrix_multiply(
+        network, target, SourceIR.ATEN, name, args[0], args[1]
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.layer_norm.default)  # type: ignore[misc]
+def aten_ops_layernorm(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.normalization.layer_norm(
         network,
         target,
         SourceIR.ATEN,
         name,
         args[0],
+        args[1],
+        args[2],
+        args[3],
+        args[4],
     )

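Note on the new converters: optional schema arguments (leaky_relu's negative slope, elu's alpha, clip's min/max, and so on) are read through args_bounds_check rather than by indexing args directly, so graphs that omit an optional argument still convert with the operator's default. The following is a minimal sketch of how such a helper behaves; the function body and example values are illustrative assumptions, not necessarily the repository's exact implementation.

from typing import Any, Optional, Sequence


def args_bounds_check(
    args: Sequence[Any], i: int, replacement: Optional[Any] = None
) -> Any:
    # Presumed behavior: return the i-th positional argument if the FX node
    # provided it, otherwise fall back to the schema default `replacement`.
    return args[i] if len(args) > i else replacement


# aten.leaky_relu without an explicit negative slope falls back to 0.01,
# mirroring args_bounds_check(args, 1, 0.01) in aten_ops_leaky_relu above.
assert args_bounds_check(("input_tensor",), 1, 0.01) == 0.01
assert args_bounds_check(("input_tensor", 0.2), 1, 0.01) == 0.2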