Bump ruff to 0.3.2 and black to 24 #19878

Merged: 8 commits, merged Mar 13, 2024
Changes from 1 commit
More fixes
justinchuby committed Mar 13, 2024
commit 408178459f21f20712f3ebe202a6f0b3333932e1
6 changes: 2 additions & 4 deletions onnxruntime/python/tools/symbolic_shape_infer.py
@@ -2661,9 +2661,7 @@ def get_prereq(node):
if self.verbose_ > 2:
logger.debug(node.op_type + ": " + node.name) # noqa: G003
for i, name in enumerate(node.input):
-logger.debug(
-    " Input {}: {} {}".format(i, name, "initializer" if name in self.initializers_ else "")
-)
+logger.debug(" Input %s: %s %s", i, name, "initializer" if name in self.initializers_ else "")

# onnx automatically merge dims with value, i.e. Mul(['aaa', 'bbb'], [1000, 1]) -> [1000, 'bbb']
# symbolic shape inference needs to apply merge of 'aaa' -> 1000 in this case
@@ -2843,7 +2841,7 @@ def get_prereq(node):
continue # continue the inference after guess, no need to stop as no merge is needed

if self.verbose_ > 0 or not self.auto_merge_ or out_type_undefined:
logger.debug("Stopping at incomplete shape inference at " + node.op_type + ": " + node.name)
logger.debug("Stopping at incomplete shape inference at %s: %s", node.op_type, node.name)
logger.debug("node inputs:")
for i in node.input:
if i in self.known_vi_:
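The pattern applied throughout this commit replaces eager string building (concatenation or str.format) inside logging calls with %-style placeholder arguments, so the message is only formatted when the log record is actually emitted; this is what ruff's logging rules (e.g. G003 for "+" concatenation) flag. A minimal, self-contained sketch of the before/after; the logger name and function below are illustrative, not from the repository:

import logging

logger = logging.getLogger("example")  # illustrative logger name

def report(node_type: str, node_name: str) -> None:
    # Eager: the string is built even when DEBUG logging is disabled.
    logger.debug("Stopping at incomplete shape inference at " + node_type + ": " + node_name)
    # Lazy: the logging module formats the message only if the record is emitted.
    logger.debug("Stopping at incomplete shape inference at %s: %s", node_type, node_name)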
@@ -741,7 +741,7 @@ def onnxruntime_inference_with_binded_io(
latency.append(time.time() - start)

average_latency = sum(latency) * 1000 / len(latency)
logger.debug("OnnxRuntime with IO binding inference time = {} ms".format(format(average_latency, ".2f")))
logger.debug("OnnxRuntime with IO binding inference time = %.2f ms", average_latency)

return ort_outputs, average_latency

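The same lazy-logging rewrite also folds the numeric formatting into the placeholder: %.2f replaces the nested format(value, ".2f") call. A small sketch under those assumptions; the timing loop and names are illustrative:

import logging
import time

logger = logging.getLogger("example")

def timed_runs(run, repeats: int = 10) -> float:
    # Collect per-run latency in seconds, then report the average in milliseconds.
    latency = []
    for _ in range(repeats):
        start = time.time()
        run()
        latency.append(time.time() - start)
    average_latency = sum(latency) * 1000 / len(latency)
    # %.2f rounds the float to two decimals only when the record is emitted.
    logger.debug("inference time = %.2f ms", average_latency)
    return average_latency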
4 changes: 2 additions & 2 deletions orttraining/orttraining/python/training/__init__.py
@@ -23,9 +23,9 @@

try:
if is_ortmodule_available():
-from .ortmodule import ORTModule # noqa: F401
+from .ortmodule import ORTModule

-__all__.append("ORTModule")
+__all__ += ["ORTModule"]
except ImportError:
# That is OK iff this is not a ORTModule training package
pass
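This __init__.py change switches the conditional re-export from __all__.append(...) to __all__ += [...], and the # noqa: F401 on the import is dropped, presumably because the updated ruff treats names listed in __all__ as intentional re-exports. A self-contained sketch of the same pattern; the module and symbol names here are made up for illustration:

# Hypothetical package __init__.py demonstrating an optional re-export.
__all__ = ["core_feature"]

def core_feature() -> str:
    return "always available"

try:
    # Stand-in for an optional import such as `from .ortmodule import ORTModule`.
    from importlib import metadata as optional_extra

    # Adding the name to __all__ marks it as an intentional re-export.
    __all__ += ["optional_extra"]
except ImportError:
    # Fine when the optional dependency is not installed.
    pass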
2 changes: 1 addition & 1 deletion orttraining/orttraining/test/python/_test_commons.py
@@ -25,5 +25,5 @@ def run_subprocess(args, cwd=None, capture=False, dll_path=None, shell=False, en
completed_process = subprocess.run(args, cwd=cwd, check=True, stdout=stdout, stderr=stderr, env=my_env, shell=shell)

if log:
-log.debug("Subprocess completed. Return code=" + str(completed_process.returncode))
+log.debug("Subprocess completed. Return code=%s", completed_process.returncode)
return completed_process
@@ -53,7 +53,7 @@ def quantize_static(input_model_dir, output_model_dir):
logging.info(
"Invoking onnxruntime.quantization.quantize_static with AddQDQPairToWeight=True and QuantizeBias=False.."
)
logging.info("Quantized model will be saved to %s." % output_model_dir)
logging.info("Quantized model will be saved to %s.", output_model_dir)
quantization.quantize_static(
input_model_dir,
output_model_dir,
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -81,6 +81,8 @@ ignore = [
"NPY002", # np.random.Generator may not always fit our use cases
"PERF203", # "try-except-in-loop" only affects Python <3.11, and the improvement is minor; can have false positives
"PERF401", # List comprehensions are not always readable
"PYI041", # May create confusion
"PYI024", # May create confusion
"SIM102", # We don't perfer always combining if branches
"SIM108", # We don't encourage ternary operators
"SIM114", # Don't combine if branches for debugability
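PYI041 and PYI024 are flake8-pyi rules that ruff also applies to regular .py files; as I understand them, PYI024 suggests typing.NamedTuple over collections.namedtuple, and PYI041 flags numeric unions like Union[int, float] where float alone would do. With both added to the project-wide ignore list, the per-line # noqa: PYI024 comments removed later in this commit are no longer needed. A hedged sketch of what the rules target; the names are illustrative:

import collections
from typing import NamedTuple, Union

# What the repository keeps (PYI024 now ignored project-wide).
ImageInfo = collections.namedtuple("ImageInfo", ["repository", "digest"])

# What PYI024 would otherwise suggest: a typed NamedTuple class.
class ImageInfoTyped(NamedTuple):
    repository: str
    digest: str

# What PYI041 would flag: int is redundant next to float in a numeric union,
# since type checkers already accept int where float is expected.
def scale(value: Union[int, float]) -> float:
    return float(value) * 2.0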
2 changes: 1 addition & 1 deletion setup.py
@@ -257,7 +257,7 @@ def run(self):
auditwheel_cmd = ["auditwheel", "-v", "repair", "-w", self.dist_dir, file]
for i in cuda_dependencies + rocm_dependencies + tensorrt_dependencies:
auditwheel_cmd += ["--exclude", i]
logger.info("Running {}".format(" ".join([shlex.quote(arg) for arg in auditwheel_cmd])))
logger.info("Running %s", " ".join([shlex.quote(arg) for arg in auditwheel_cmd]))
try:
subprocess.run(auditwheel_cmd, check=True, stdout=subprocess.PIPE)
finally:
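Here the lazy-logging rewrite keeps the shlex.quote join so the logged command line stays shell-safe; on Python 3.8+ shlex.join gives the same result more directly, though the commit keeps the explicit join. A short sketch under those assumptions; the command contents are illustrative:

import logging
import shlex

logger = logging.getLogger("example")

def log_command(cmd):
    # shlex.quote escapes each argument so the logged line can be pasted into a shell.
    logger.info("Running %s", " ".join(shlex.quote(arg) for arg in cmd))
    # Equivalent on Python 3.8+:
    logger.info("Running %s", shlex.join(cmd))

log_command(["auditwheel", "-v", "repair", "-w", "dist", "my wheel.whl"])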
2 changes: 1 addition & 1 deletion tools/ci_build/build.py
@@ -116,7 +116,7 @@ def invalid_hetero_build():
print("pick the build type for specific Hardware Device from following options: ", choices)
print("(or) from the following options with graph partitioning disabled: ", choices1)
print("\n")
-if not (device_read.startswith("HETERO") or device_read.startswith("MULTI") or device_read.startswith("AUTO")):
+if not (device_read.startswith(("HETERO", "MULTI", "AUTO"))):
invalid_hetero_build()
sys.exit("Wrong Build Type selected")

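str.startswith (and str.endswith) accepts a tuple of prefixes, so the chained or-conditions collapse into a single call; that is the simplification applied here. A minimal sketch, with an illustrative helper name and sample inputs:

def is_hetero_like(device_read: str) -> bool:
    # A tuple of prefixes matches if any one of them matches.
    return device_read.startswith(("HETERO", "MULTI", "AUTO"))

assert is_hetero_like("HETERO:CPU,GPU")
assert not is_hetero_like("CPU_FP32")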
2 changes: 1 addition & 1 deletion tools/ci_build/clean_docker_image_cache.py
@@ -83,7 +83,7 @@ def download_logs(storage_account, container, log_path_pattern, target_dir, az_p
return [os.path.join(target_dir, log_path) for log_path in log_paths]


-ImageInfo = collections.namedtuple("ImageInfo", ["repository", "digest"]) # noqa: PYI024
+ImageInfo = collections.namedtuple("ImageInfo", ["repository", "digest"])


def get_image_name(image_info):
2 changes: 1 addition & 1 deletion tools/python/util/android/android.py
@@ -18,7 +18,7 @@
_log = get_logger("util.android")


-SdkToolPaths = collections.namedtuple("SdkToolPaths", ["emulator", "adb", "sdkmanager", "avdmanager"]) # noqa: PYI024
+SdkToolPaths = collections.namedtuple("SdkToolPaths", ["emulator", "adb", "sdkmanager", "avdmanager"])


def get_sdk_tool_paths(sdk_root: str):