
Commit b88ab17

Merge branch 'main' into justinchu/op-signature

2 parents ff1aaa5 + 2b60939


53 files changed (+2086 -1149 lines)

.github/workflows/main.yaml (+5 -26)

@@ -71,7 +71,7 @@ jobs:
       - name: Pull Test Data
         run: git lfs pull
       - name: Run tests
-        run: nox -t ${{ matrix.nox-tag }} --forcecolor -- --cov=onnxscript --cov-report=xml --cov-append --cov-branch -n=auto --junit-xml pytest.xml
+        run: nox -t ${{ matrix.nox-tag }} --forcecolor -- --cov=onnxscript --cov-report=xml --cov-append --cov-branch -n=auto --junitxml junit.xml
        env:
          CATCH_ORT_SEGFAULT: "${{ matrix.os == 'ubuntu-latest' && '1' || '0' }}"
          CREATE_REPRODUCTION_REPORT: "${{ matrix.os == 'ubuntu-latest' && '1' || '0' }}"
@@ -80,12 +80,11 @@ jobs:
        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
-      - name: Upload Test Results
-        if: always()
-        uses: actions/upload-artifact@v3
+      - name: Upload test results to Codecov
+        if: ${{ !cancelled() }}
+        uses: codecov/test-results-action@v1
        with:
-          name: Test Results (${{ matrix.name }}-${{ matrix.os }})
-          path: pytest.xml
+          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload torchlib error reports
        if: always()
        uses: actions/upload-artifact@v3
@@ -161,23 +160,3 @@ jobs:
            echo "Update readme by running `python docs/update_readme.py`"
            exit 1
          fi
-
-  publish-test-results:
-    name: "Publish Tests Results to Github"
-    needs: test
-    runs-on: ubuntu-latest
-    permissions:
-      checks: write
-      # only needed unless run with comment_mode: off
-      pull-requests: write
-    if: always()
-    steps:
-      - name: Download Artifacts
-        uses: actions/download-artifact@v3
-        with:
-          path: artifacts
-
-      - name: Publish Test Results
-        uses: EnricoMi/publish-unit-test-result-action@v2
-        with:
-          files: "artifacts/**/*.xml"

.lintrunner.toml (+2 -3)

@@ -46,12 +46,11 @@ exclude_patterns = [
     'onnxscript/onnx_types.py',
     'onnxscript/**/*_test.py', # Skip linting test files for speed
     'onnxscript/function_libs/torch_lib/ops/**', # Operators typing do not play well with mypy
-    'onnxscript/optimizer/evaluator.py', # FIXME
-    'onnxscript/optimizer/constant_folding.py', # FIXME
+    'onnxscript/optimizer/_legacy/evaluator.py', # FIXME
+    'onnxscript/optimizer/_legacy/constant_folding.py', # FIXME
     'onnxscript/rewriter/onnxruntime/transformers/fastgelu.py', # FIXME
     'onnxscript/rewriter/onnxruntime/instance_to_group_normalization.py', # FIXME
     'onnxscript/_legacy_ir/irbuilder.py', # FIXME
-    'onnxscript/optimizer/fold_constants_v0.py', # FIXME
     'onnxscript/rewriter/onnxruntime/transformers/multihead_attention.py', # FIXME
     'onnxscript/tools/function_unittest_producer.py', # FIXME
     'onnxscript/_legacy_ir/visitor.py', # FIXME
docs/tutorial/rewriter/examples/allow_other_attributes.py (+67)

@@ -0,0 +1,67 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+"""Onnx Pattern Rewriting with attributes
+
+This script shows how to define a rewriting rule based on patterns that
+are dependent on the attributes of the nodes.
+"""
+
+import onnx
+
+import onnxscript
+from onnxscript import FLOAT, opset18, script
+from onnxscript.rewriter import pattern
+
+
+@script()
+def original_model(A: FLOAT[2, 2], B: FLOAT[2, 2]) -> FLOAT[2, 2]:
+    add = opset18.Add(A, B)
+    result = opset18.Dropout(add, training_mode=False)
+    return result
+
+
+_model = original_model.to_model_proto()
+onnx.checker.check_model(_model)
+
+
+####################################
+# The target pattern
+# =====================
+
+
+def add_pattern(op, input):
+    return op.Dropout(input, training_mode=False, _allow_other_attributes=True)
+
+
+####################################
+# The replacement pattern
+# =====================
+
+
+def add_replacement(op, input, **_):
+    return op.Identity(input)
+
+
+####################################
+# Create Rewrite Rule and Apply to Model
+# =====================
+
+
+def apply_rewrite(model):
+    # Create rewrite rules
+    add_rule = pattern.RewriteRule(
+        add_pattern,  # target pattern
+        add_replacement,  # replacement pattern
+    )
+    # Create a Rewrite Rule Set
+    rewrite_rule_set = pattern.RewriteRuleSet([add_rule])
+    # Apply rewrite while passing match_condition
+    model_with_rewrite = onnxscript.rewriter.rewrite(
+        model,
+        pattern_rewrite_rules=rewrite_rule_set,
+    )
+    return model_with_rewrite
+
+
+_model_with_rewrite = apply_rewrite(_model)
+onnx.checker.check_model(_model_with_rewrite)
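A minimal sketch (not part of the commit) of how this example's effect could be verified: it assumes `original_model` and `apply_rewrite` from the file above are in scope and checks that the Dropout node has been replaced by Identity.

```python
# Hedged verification sketch; reuses original_model and apply_rewrite from
# docs/tutorial/rewriter/examples/allow_other_attributes.py.
import onnx

_model = original_model.to_model_proto()
_rewritten = apply_rewrite(_model)

op_types_before = [node.op_type for node in _model.graph.node]
op_types_after = [node.op_type for node in _rewritten.graph.node]

assert "Dropout" in op_types_before  # the original graph contains a Dropout node
assert "Dropout" not in op_types_after  # the pattern matched and Dropout was removed
assert "Identity" in op_types_after  # ...and was replaced by an Identity node
onnx.checker.check_model(_rewritten)
```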

docs/tutorial/rewriter/examples/broadcast_matmul.py (+1 -3)

@@ -15,7 +15,7 @@

 import onnxscript
 from onnxscript import FLOAT, ir, opset18, script
-from onnxscript.rewriter import _ir_utils, pattern
+from onnxscript.rewriter import pattern

 logger = logging.getLogger(__name__)

@@ -83,8 +83,6 @@ def check_if_not_need_reshape(

     input_a_shape = input_a.shape
     input_b_shape = input_b.shape
-    # TODO: Get a helper func to get const_value
-    _ir_utils.propagate_const_value(shape_c)
     shape_c_tensor = shape_c.const_value
     if shape_c_tensor is None:
         logger.info("The value 'shape_c' is not statically known.")

docs/tutorial/rewriter/rewrite_patterns.md (+24)

@@ -84,6 +84,29 @@ The graph (on the left) consists of the target pattern before the rewrite rule i

 ![target_pattern](examples/img/erfgelu_01.png) ![replacement_pattern](examples/img/erfgelu_02.png)

+## Specifying attributes in the pattern
+
+This section demonstrates the use of attribute values in pattern-based rewriting.
+First, write a target pattern and replacement pattern in a similar way to the previous examples.
+The example pattern below will match successfully only against Dropout nodes with the
+attribute value `training_mode` set to `False`.
+The `_allow_other_attributes` option allows the pattern to match nodes that have additional attributes
+not specified in the pattern. If it is set to `False`, then the node must have only the specified
+attribute values, and no other attributes, for a successful match. The default value for this
+option is `True`.
+
+```{literalinclude} examples/allow_other_attributes.py
+:pyobject: add_pattern
+```
+
+```{literalinclude} examples/allow_other_attributes.py
+:pyobject: add_replacement
+```
+
+```{literalinclude} examples/allow_other_attributes.py
+:pyobject: apply_rewrite
+```
+

 (heading-target-commute)=
 ## Utilizing `commute` parameter for pattern-matching
@@ -196,3 +219,4 @@ With all the necessary components in place, the pattern rewrite rule with the `m
 The final graph with the applied rewrite looks as follows:

 ![broadcast_rewrite](examples/img/broadcast_02.png){align=center}
+

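Complementing the documentation added above, here is a hedged sketch of the stricter variant it describes: with `_allow_other_attributes=False`, the rule matches only Dropout nodes that carry no attributes beyond `training_mode`. The names `strict_dropout_pattern` and `strict_dropout_replacement` are illustrative and not part of the commit.

```python
# Illustrative only; built the same way as add_pattern/add_replacement above.
from onnxscript.rewriter import pattern


def strict_dropout_pattern(op, input):
    # _allow_other_attributes=False: a Dropout node carrying any attribute
    # other than training_mode will not match this pattern.
    return op.Dropout(input, training_mode=False, _allow_other_attributes=False)


def strict_dropout_replacement(op, input, **_):
    return op.Identity(input)


strict_rule = pattern.RewriteRule(strict_dropout_pattern, strict_dropout_replacement)
```
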
onnxscript/_framework_apis/torch_2_5.py (+31 -49)

@@ -17,17 +17,10 @@
 import pathlib
 from typing import Callable

-import onnx
-
-from onnxscript import ir
+from onnxscript import ir, optimizer
 from onnxscript.function_libs.torch_lib import registration
 from onnxscript.ir import _external_data

-# Internal flag. Will go away.
-_TORCH_ONNX_SAVE_EXTERNAL_DATA_WITH_IR = (
-    os.getenv("TORCH_ONNX_OFFLOAD_EXTERNAL_DATA_WITH_IR") == "1"
-)
-

 @dataclasses.dataclass(frozen=True)
 class _OnnxFunctionMeta:
@@ -49,8 +42,10 @@ class _OnnxFunctionMeta:

 def optimize(model: ir.Model) -> ir.Model:
     """Optimize the model."""
-
-    # TODO(justinchuby): Use the optimizer
+    # Internal flag. Will go away.
+    enabled = os.getenv("TORCH_ONNX_ENABLE_OPTIMIZATION") == "1"
+    if enabled:
+        optimizer.optimize_ir(model)
     return model


@@ -81,45 +76,32 @@ def save_model_with_external_data(model: ir.Model, model_path: str | os.PathLike
     """Save the model with external data. The model is unchanged after saving."""

     # TODO(#1835): Decide if we want to externalize large attributes as well
-    if _TORCH_ONNX_SAVE_EXTERNAL_DATA_WITH_IR:
-        initializer_values = tuple(model.graph.initializers.values())
-        tensors = [v.const_value for v in initializer_values]
-        for tensor in tensors:
-            if tensor is None:
-                raise ValueError(
-                    "The model contains uninitialized initializer values. "
-                    "Please make sure all initializer values are initialized."
-                )
-        destination_path = pathlib.Path(model_path)
-        base_dir = destination_path.parent
-        data_path = f"{destination_path.name}.data"
-
-        external_tensors = _external_data.convert_tensors_to_external(
-            tensors,  # type: ignore[arg-type]
-            base_dir,
-            data_path,
-        )
-
-        # Replace the initializer values with external tensors and save the model
-        for initializer, external_tensor in zip(initializer_values, external_tensors):
-            initializer.const_value = external_tensor
-        ir.save(model, model_path)
-
-        # Restore the original initializer values so the model is unchanged
-        for initializer, tensor in zip(initializer_values, tensors):
-            initializer.const_value = tensor
-
-    else:
-        destination_path = pathlib.Path(model_path)
-        # Create the directory if it does not exist
-        data_path = f"{destination_path.name}.data"
-        proto = ir.serde.serialize_model(model)
-        onnx.save_model(
-            proto,
-            model_path,
-            save_as_external_data=True,
-            location=data_path,
-        )
+    initializer_values = tuple(model.graph.initializers.values())
+    tensors = [v.const_value for v in initializer_values]
+    for tensor in tensors:
+        if tensor is None:
+            raise ValueError(
+                "The model contains uninitialized initializer values. "
+                "Please make sure all initializer values are initialized."
+            )
+    destination_path = pathlib.Path(model_path)
+    base_dir = destination_path.parent
+    data_path = f"{destination_path.name}.data"
+
+    external_tensors = _external_data.convert_tensors_to_external(
+        tensors,  # type: ignore[arg-type]
+        base_dir,
+        data_path,
+    )
+
+    # Replace the initializer values with external tensors and save the model
+    for initializer, external_tensor in zip(initializer_values, external_tensors):
+        initializer.const_value = external_tensor
+    ir.save(model, model_path)
+
+    # Restore the original initializer values so the model is unchanged
+    for initializer, tensor in zip(initializer_values, tensors):
+        initializer.const_value = tensor


 def get_torchlib_ops() -> list[_OnnxFunctionMeta]:
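A hedged end-to-end usage sketch of the two APIs touched by this diff. The tiny Add model and the use of `ir.serde.deserialize_model` (assumed to be the inverse of the `serialize_model` call removed above) are illustrative assumptions: `optimize` only runs the optimizer when the internal `TORCH_ONNX_ENABLE_OPTIMIZATION` flag is set, and `save_model_with_external_data` offloads initializers through the IR path and restores them afterwards.

```python
import os

from onnx import TensorProto, helper

from onnxscript import ir
from onnxscript._framework_apis import torch_2_5

# Tiny model with one initializer, purely to exercise the APIs above (illustrative).
weight = helper.make_tensor("W", TensorProto.FLOAT, [2, 2], [1.0, 2.0, 3.0, 4.0])
graph = helper.make_graph(
    [helper.make_node("Add", ["X", "W"], ["Y"])],
    "g",
    [helper.make_tensor_value_info("X", TensorProto.FLOAT, [2, 2])],
    [helper.make_tensor_value_info("Y", TensorProto.FLOAT, [2, 2])],
    initializer=[weight],
)
proto = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 18)])
model = ir.serde.deserialize_model(proto)  # assumed counterpart of serialize_model

# Internal flag from the diff above; optimize() is a no-op unless it is "1".
os.environ["TORCH_ONNX_ENABLE_OPTIMIZATION"] = "1"
model = torch_2_5.optimize(model)

# Initializer data is written to "model.onnx.data" next to the model file,
# then the in-memory tensors are restored so the model is unchanged.
torch_2_5.save_model_with_external_data(model, "model.onnx")
```
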
onnxscript/_framework_apis/torch_2_6.py (+26)

@@ -0,0 +1,26 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+"""Stable APIs for PyTorch 2.6."""
+
+from __future__ import annotations
+
+__all__ = [
+    "check_model",
+    "convert_version",
+    "get_torchlib_ops",
+    "optimize",
+    "save_model_with_external_data",
+]
+from onnxscript import ir, optimizer
+from onnxscript._framework_apis.torch_2_5 import (
+    check_model,
+    convert_version,
+    get_torchlib_ops,
+    save_model_with_external_data,
+)
+
+
+def optimize(model: ir.Model) -> ir.Model:
+    """Optimize the model."""
+    optimizer.optimize_ir(model)
+    return model
