Moving permute core to impl - permute(FX Converter Refactor [22/N]) <Target: converter_reorg_elementwise> #1999

Closed
535 changes: 384 additions & 151 deletions py/torch_tensorrt/fx/converters/acc_ops_converters.py

Large diffs are not rendered by default.

93 changes: 90 additions & 3 deletions py/torch_tensorrt/fx/converters/aten_ops_converters.py
@@ -23,6 +23,9 @@
from .converter_utils import * # noqa: F403
import torch_tensorrt.fx.tracer.acc_tracer.acc_utils as acc_utils
from torch_tensorrt.fx.converters.impl import activation
from torch_tensorrt.fx.converters.impl import permute
from torch_tensorrt.fx.converters.impl.elementwise import trunc_div
from torch_tensorrt.fx.converters.impl.elementwise import rsqrt

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -161,9 +164,7 @@ def aten_ops_div(
network, target, None, kwargs_new, name
)
elif rounding_mode == "trunc":
return acc_ops_converters.acc_ops_trunc_div(
network, target, None, kwargs_new, name
)
return trunc_div(network, target, SourceIR.ATEN, name, args[0], args[1])
else:
raise RuntimeError(
f"Target {target} does not support rounding mode {rounding_mode}"
@@ -335,6 +336,24 @@ def aten_ops_mul(
return acc_ops_converters.acc_ops_mul(network, target, None, kwargs_new, name)


@tensorrt_converter(torch.ops.aten.permute.default)
def aten_ops_permute_default(
network: TRTNetwork,
target: Target,
args: Tuple[Argument, ...],
kwargs: Dict[str, Argument],
name: str,
) -> Union[TRTTensor, Sequence[TRTTensor]]:
return permute.permute(
network,
target,
SourceIR.ATEN,
name=name,
input_val=args[0],
index=args[1:],
)
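The impl-side `permute.permute` itself is not visible in this hunk. A minimal sketch of what such a helper could look like, assuming it lowers to TensorRT's shuffle layer; the `permute_sketch` name, exact signature, and layer naming are illustrative, not the PR's actual code:

```python
import tensorrt as trt

def permute_sketch(network: trt.INetworkDefinition, name: str,
                   input_val: trt.ITensor, index) -> trt.ITensor:
    ndim = len(input_val.shape)
    # Normalize negative dims (e.g. -1 -> ndim - 1)
    permutation = [int(i) % ndim for i in index]
    # A pure transpose maps onto a shuffle layer's transpose field
    layer = network.add_shuffle(input_val)
    layer.second_transpose = tuple(permutation)
    layer.name = name
    return layer.get_output(0)
```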


@tensorrt_converter(torch.ops.aten.pow.Tensor_Scalar)
@tensorrt_converter(torch.ops.aten.pow.Tensor_Tensor)
def aten_ops_pow(
@@ -369,6 +388,42 @@ def aten_ops_relu(
)


@tensorrt_converter(torch.ops.aten.relu.default)
def aten_ops_relu(
network: TRTNetwork,
target: Target,
args: Tuple[Argument, ...],
kwargs: Dict[str, Argument],
name: str,
) -> Union[TRTTensor, Sequence[TRTTensor]]:

return activation.relu(
network,
target,
SourceIR.ATEN,
name,
args[0],
)
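`activation.relu` was moved into the impl module earlier in this refactor series. For orientation only, a sketch of the core of such a helper, assuming it wraps TensorRT's activation layer (the `relu_sketch` name and signature are illustrative):

```python
import tensorrt as trt

def relu_sketch(network: trt.INetworkDefinition, name: str,
                input_val: trt.ITensor) -> trt.ITensor:
    # ReLU is a built-in TensorRT activation type
    layer = network.add_activation(input_val, trt.ActivationType.RELU)
    layer.name = name
    return layer.get_output(0)
```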


@tensorrt_converter(torch.ops.aten.rsqrt.default)
def aten_ops_rsqrt(
network: TRTNetwork,
target: Target,
args: Tuple[Argument, ...],
kwargs: Dict[str, Argument],
name: str,
) -> Union[TRTTensor, Sequence[TRTTensor]]:

return rsqrt(
network,
target,
SourceIR.ATEN,
name,
args[0],
)
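The impl-side `rsqrt` is added elsewhere in this PR. One plausible construction, shown purely as a sketch, chains a SQRT unary with a reciprocal (the helper name and layer naming are hypothetical):

```python
import tensorrt as trt

def rsqrt_sketch(network: trt.INetworkDefinition, name: str,
                 input_val: trt.ITensor) -> trt.ITensor:
    # 1 / sqrt(x) as two chained unary layers
    sqrt_layer = network.add_unary(input_val, trt.UnaryOperation.SQRT)
    sqrt_layer.name = f"{name}_sqrt"
    recip_layer = network.add_unary(sqrt_layer.get_output(0),
                                    trt.UnaryOperation.RECIP)
    recip_layer.name = f"{name}_recip"
    return recip_layer.get_output(0)
```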


@tensorrt_converter(torch.ops.aten.sub.Tensor)
def aten_ops_sub(
network: TRTNetwork,
@@ -384,6 +439,38 @@ def aten_ops_sub(
return acc_ops_converters.acc_ops_sub(network, target, None, kwargs_new, name)


@tensorrt_converter(torch.ops.aten.transpose.int)
def aten_ops_transpose_int(
network: TRTNetwork,
target: Target,
args: Tuple[Argument, ...],
kwargs: Dict[str, Argument],
name: str,
) -> Union[TRTTensor, Sequence[TRTTensor]]:
input_val = args[0]
ndim = len(input_val.shape)
if len(args) == 1:
# default is to reverse dimensions
new_order = torch.arange(ndim - 1, -1, -1)
else:
assert (
len(args) == 3
), f"Wrong number of arguments to transpose(): {len(args)-1}"
new_order = torch.arange(ndim)
dim0 = args[1]
if args[1] < 0:
dim0 = dim0 + ndim
dim1 = args[2]
if args[2] < 0:
dim1 = dim1 + ndim
new_order[dim0] = dim1
new_order[dim1] = dim0
print("New order: ", new_order)
return permute.permute(
network, target, SourceIR.ATEN, name=name, input_val=input_val, index=new_order
)
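For reference, the swap above reduces `transpose(dim0, dim1)` to a full permutation, matching PyTorch's own equivalence:

```python
import torch

x = torch.randn(2, 3, 4, 5)
# transpose(0, 2) swaps dims 0 and 2 -> permutation [2, 1, 0, 3]
assert torch.equal(x.transpose(0, 2), x.permute(2, 1, 0, 3))
```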


@tensorrt_converter(torch.ops.aten.view.default)
def aten_ops_reshape(
network: TRTNetwork,