|
@@ -17,7 +17,6 @@
 # pylint: disable=invalid-name, import-self, len-as-condition, unused-argument, too-many-lines
 # pylint: disable=import-outside-toplevel
 """ONNX: Open Neural Network Exchange frontend for Relay."""
-import logging
 import numpy as np
 import tvm
 from tvm.ir import IRModule
@@ -34,8 +33,6 @@
 from .common import infer_type, get_name
 
 
-logger = logging.getLogger("onnx_frontend")
-
 __all__ = ["from_onnx"]
 
 
@@ -257,11 +254,8 @@ def _impl_v1(cls, inputs, attr, params):
                     pad_tuple = tuple([val for pair in zip(*pad_tuple) for val in pair])
                     attr["pads"] = pad_tuple
                 else:
-                    warning = (
-                        "Performing dynamic autopadding on Pool. "
-                        + "Pool kernels don't currently support dynamic shapes."
-                    )
-                    logger.warning(warning)
+                    # Warning: Pool does not yet support dynamic shapes;
+                    # one will need to run dynamic_to_static on this model after import.
                     data = autopad(data, attr["strides"], attr["kernel_shape"], [1] * ndim, ndim)
             elif attr["auto_pad"] == "VALID":
                 attr["pads"] = tuple([0 for i in range(ndim - 2)])
@@ -382,11 +376,8 @@ def _impl_v1(cls, inputs, attr, params):
         if "auto_pad" in attr:
             attr["auto_pad"] = attr["auto_pad"].decode("utf-8")
             if attr["auto_pad"] in ("SAME_UPPER", "SAME_LOWER"):
-                warning = (
-                    "Performing dynamic autopadding on Conv. "
-                    + "Conv kernels don't currently support dynamic shapes."
-                )
-                logger.warning(warning)
+                # Warning: Convolution does not yet support dynamic shapes;
+                # one will need to run dynamic_to_static on this model after import.
                 data = autopad(data, attr["strides"], attr["kernel_shape"], attr["dilations"], ndim)
             elif attr["auto_pad"] == "VALID":
                 attr["pads"] = tuple([0 for i in range(ndim - 2)])
@@ -442,11 +433,8 @@ def _impl_v1(cls, inputs, attr, params):
         if "auto_pad" in attr:
             attr["auto_pad"] = attr["auto_pad"].decode("utf-8")
             if attr["auto_pad"] in ("SAME_UPPER", "SAME_LOWER"):
-                warning = (
-                    "Performing dynamic autopadding on ConvTranspose. "
-                    + "ConvTranspose kernels don't currently support dynamic shapes."
-                )
-                logger.warning(warning)
+                # Warning: ConvTranspose does not yet support dynamic shapes;
+                # one will need to run dynamic_to_static on this model after import.
                 data = autopad(
                     data,
                     attr["strides"],
@@ -616,11 +604,8 @@ def _impl_v1(cls, inputs, attr, params):
         if "auto_pad" in attr:
             attr["auto_pad"] = attr["auto_pad"].decode("utf-8")
             if attr["auto_pad"] in ("SAME_UPPER", "SAME_LOWER"):
-                warning = (
-                    "Performing dynamic autopadding on LpPool. "
-                    + "LpPool kernels don't currently support dynamic shapes."
-                )
-                logger.warning(warning)
+                # Warning: LpPool does not yet support dynamic shapes;
+                # one will need to run dynamic_to_static on this model after import.
                 data = autopad(data, attr["strides"], attr["kernel_shape"], [1] * ndim, ndim)
             elif attr["auto_pad"] == "VALID":
                 attr["pads"] = tuple([0 for i in range(ndim - 2)])
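
For reference, the comments added above point the user to dynamic_to_static. Below is a minimal sketch of that post-import step, not part of this change; the file name "model.onnx", the input name "input", and its shape are placeholders chosen for illustration only.

import onnx
from tvm import relay

# Hypothetical model file and input signature, for illustration only.
onnx_model = onnx.load("model.onnx")
shape_dict = {"input": (1, 3, 224, 224)}

# Importing a model that uses auto_pad=SAME_UPPER/SAME_LOWER goes through
# autopad(), which emits shape-dependent (dynamic) padding ops.
mod, params = relay.frontend.from_onnx(onnx_model, shape_dict)

# Convert those dynamic ops back to static ones before compiling,
# as the new comments recommend.
mod = relay.transform.DynamicToStatic()(mod)

Applying the pass instance directly to the module is enough for this purpose; it can equally be placed in a tvm.transform.Sequential alongside the rest of a compilation pipeline.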
|