Merge branch 'main' into justinchu/op-signature
justinchuby authored Oct 23, 2024
2 parents ff1aaa5 + 2b60939 commit b88ab17
Showing 53 changed files with 2,086 additions and 1,149 deletions.
31 changes: 5 additions & 26 deletions .github/workflows/main.yaml
@@ -71,7 +71,7 @@ jobs:
- name: Pull Test Data
run: git lfs pull
- name: Run tests
run: nox -t ${{ matrix.nox-tag }} --forcecolor -- --cov=onnxscript --cov-report=xml --cov-append --cov-branch -n=auto --junit-xml pytest.xml
run: nox -t ${{ matrix.nox-tag }} --forcecolor -- --cov=onnxscript --cov-report=xml --cov-append --cov-branch -n=auto --junitxml junit.xml
env:
CATCH_ORT_SEGFAULT: "${{ matrix.os == 'ubuntu-latest' && '1' || '0' }}"
CREATE_REPRODUCTION_REPORT: "${{ matrix.os == 'ubuntu-latest' && '1' || '0' }}"
@@ -80,12 +80,11 @@ jobs:
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload Test Results
if: always()
uses: actions/upload-artifact@v3
- name: Upload test results to Codecov
if: ${{ !cancelled() }}
uses: codecov/test-results-action@v1
with:
name: Test Results (${{ matrix.name }}-${{ matrix.os }})
path: pytest.xml
token: ${{ secrets.CODECOV_TOKEN }}
- name: Upload torchlib error reports
if: always()
uses: actions/upload-artifact@v3
@@ -161,23 +160,3 @@ jobs:
echo "Update readme by running `python docs/update_readme.py`"
exit 1
fi
publish-test-results:
name: "Publish Tests Results to Github"
needs: test
runs-on: ubuntu-latest
permissions:
checks: write
# only needed unless run with comment_mode: off
pull-requests: write
if: always()
steps:
- name: Download Artifacts
uses: actions/download-artifact@v3
with:
path: artifacts

- name: Publish Test Results
uses: EnricoMi/publish-unit-test-result-action@v2
with:
files: "artifacts/**/*.xml"
5 changes: 2 additions & 3 deletions .lintrunner.toml
@@ -46,12 +46,11 @@ exclude_patterns = [
'onnxscript/onnx_types.py',
'onnxscript/**/*_test.py', # Skip linting test files for speed
'onnxscript/function_libs/torch_lib/ops/**', # Operators typing do not play well with mypy
'onnxscript/optimizer/evaluator.py', # FIXME
'onnxscript/optimizer/constant_folding.py', # FIXME
'onnxscript/optimizer/_legacy/evaluator.py', # FIXME
'onnxscript/optimizer/_legacy/constant_folding.py', # FIXME
'onnxscript/rewriter/onnxruntime/transformers/fastgelu.py', # FIXME
'onnxscript/rewriter/onnxruntime/instance_to_group_normalization.py', # FIXME
'onnxscript/_legacy_ir/irbuilder.py', # FIXME
'onnxscript/optimizer/fold_constants_v0.py', # FIXME
'onnxscript/rewriter/onnxruntime/transformers/multihead_attention.py', # FIXME
'onnxscript/tools/function_unittest_producer.py', # FIXME
'onnxscript/_legacy_ir/visitor.py', # FIXME
67 changes: 67 additions & 0 deletions docs/tutorial/rewriter/examples/allow_other_attributes.py
@@ -0,0 +1,67 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
"""Onnx Pattern Rewriting with attributes
This script shows how to define a rewriting rule based on patterns that
are dependent on the attributes of the nodes.
"""

import onnx

import onnxscript
from onnxscript import FLOAT, opset18, script
from onnxscript.rewriter import pattern


@script()
def original_model(A: FLOAT[2, 2], B: FLOAT[2, 2]) -> FLOAT[2, 2]:
add = opset18.Add(A, B)
result = opset18.Dropout(add, training_mode=False)
return result


_model = original_model.to_model_proto()
onnx.checker.check_model(_model)


####################################
# The target pattern
# =====================


def add_pattern(op, input):
return op.Dropout(input, training_mode=False, _allow_other_attributes=True)


####################################
# The replacement pattern
# =====================


def add_replacement(op, input, **_):
return op.Identity(input)


####################################
# Create Rewrite Rule and Apply to Model
# =====================


def apply_rewrite(model):
# Create rewrite rules
add_rule = pattern.RewriteRule(
add_pattern, # target pattern
add_replacement, # replacement pattern
)
# Create a Rewrite Rule Set
rewrite_rule_set = pattern.RewriteRuleSet([add_rule])
# Apply rewrite while passing match_condition
model_with_rewrite = onnxscript.rewriter.rewrite(
model,
pattern_rewrite_rules=rewrite_rule_set,
)
return model_with_rewrite


_model_with_rewrite = apply_rewrite(_model)
onnx.checker.check_model(_model_with_rewrite)
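Not part of the committed example, but as a quick sanity check one could confirm that the rewrite replaced the `Dropout` node with an `Identity` node. A minimal sketch, assuming `_model_with_rewrite` is the `ModelProto` produced above:

```python
# Collect the operator types of the rewritten graph and verify the swap.
op_types = [node.op_type for node in _model_with_rewrite.graph.node]
assert "Dropout" not in op_types
assert "Identity" in op_types
```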
4 changes: 1 addition & 3 deletions docs/tutorial/rewriter/examples/broadcast_matmul.py
@@ -15,7 +15,7 @@

import onnxscript
from onnxscript import FLOAT, ir, opset18, script
from onnxscript.rewriter import _ir_utils, pattern
from onnxscript.rewriter import pattern

logger = logging.getLogger(__name__)

@@ -83,8 +83,6 @@ def check_if_not_need_reshape(

input_a_shape = input_a.shape
input_b_shape = input_b.shape
# TODO: Get a helper func to get const_value
_ir_utils.propagate_const_value(shape_c)
shape_c_tensor = shape_c.const_value
if shape_c_tensor is None:
logger.info("The value 'shape_c' is not statically known.")
24 changes: 24 additions & 0 deletions docs/tutorial/rewriter/rewrite_patterns.md
@@ -84,6 +84,29 @@ The graph (on the left) consists of the target pattern before the rewrite rule i

![target_pattern](examples/img/erfgelu_01.png) ![replacement_pattern](examples/img/erfgelu_02.png)

## Specifying attributes in the pattern

This section demonstrates the use of attribute values in pattern-based rewriting.
First, write a target pattern and replacement pattern in a similar way to the previous examples.
The example pattern below will match successfully only against Dropout nodes with the
attribute value `training_mode` set to `False`.
The `_allow_other_attributes` option allows the pattern to match nodes that have additional attributes
not specified in the pattern. If it is set to `False`, then the node must have only the specified
attribute values, and no other attributes, for a successful match. The default value for this
option is `True`.

```{literalinclude} examples/allow_other_attributes.py
:pyobject: add_pattern
```

```{literalinclude} examples/allow_other_attributes.py
:pyobject: add_replacement
```

```{literalinclude} examples/allow_other_attributes.py
:pyobject: apply_rewrite
```
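For comparison, a stricter variant of the target pattern would set `_allow_other_attributes=False`, so that a `Dropout` node carrying any attribute beyond those listed no longer matches. A minimal sketch (not part of this change; the function name is illustrative):

```python
def add_pattern_strict(op, input):
    # Hypothetical variant for illustration: match only Dropout nodes whose
    # sole attribute is training_mode=False; any extra attribute blocks the match.
    return op.Dropout(input, training_mode=False, _allow_other_attributes=False)
```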


(heading-target-commute)=
## Utilizing `commute` parameter for pattern-matching
@@ -196,3 +219,4 @@ With all the necessary components in place, the pattern rewrite rule with the `m
The final graph with the applied rewrite looks as follows:

![broadcast_rewrite](examples/img/broadcast_02.png){align=center}

80 changes: 31 additions & 49 deletions onnxscript/_framework_apis/torch_2_5.py
@@ -17,17 +17,10 @@
import pathlib
from typing import Callable

import onnx

from onnxscript import ir
from onnxscript import ir, optimizer
from onnxscript.function_libs.torch_lib import registration
from onnxscript.ir import _external_data

# Internal flag. Will go away.
_TORCH_ONNX_SAVE_EXTERNAL_DATA_WITH_IR = (
os.getenv("TORCH_ONNX_OFFLOAD_EXTERNAL_DATA_WITH_IR") == "1"
)


@dataclasses.dataclass(frozen=True)
class _OnnxFunctionMeta:
@@ -49,8 +42,10 @@ class _OnnxFunctionMeta:

def optimize(model: ir.Model) -> ir.Model:
"""Optimize the model."""

# TODO(justinchuby): Use the optimizer
# Internal flag. Will go away.
enabled = os.getenv("TORCH_ONNX_ENABLE_OPTIMIZATION") == "1"
if enabled:
optimizer.optimize_ir(model)
return model


@@ -81,45 +76,32 @@ def save_model_with_external_data(model: ir.Model, model_path: str | os.PathLike
"""Save the model with external data. The model is unchanged after saving."""

# TODO(#1835): Decide if we want to externalize large attributes as well
if _TORCH_ONNX_SAVE_EXTERNAL_DATA_WITH_IR:
initializer_values = tuple(model.graph.initializers.values())
tensors = [v.const_value for v in initializer_values]
for tensor in tensors:
if tensor is None:
raise ValueError(
"The model contains uninitialized initializer values. "
"Please make sure all initializer values are initialized."
)
destination_path = pathlib.Path(model_path)
base_dir = destination_path.parent
data_path = f"{destination_path.name}.data"

external_tensors = _external_data.convert_tensors_to_external(
tensors, # type: ignore[arg-type]
base_dir,
data_path,
)

# Replace the initializer values with external tensors and save the model
for initializer, external_tensor in zip(initializer_values, external_tensors):
initializer.const_value = external_tensor
ir.save(model, model_path)

# Restore the original initializer values so the model is unchanged
for initializer, tensor in zip(initializer_values, tensors):
initializer.const_value = tensor

else:
destination_path = pathlib.Path(model_path)
# Create the directory if it does not exist
data_path = f"{destination_path.name}.data"
proto = ir.serde.serialize_model(model)
onnx.save_model(
proto,
model_path,
save_as_external_data=True,
location=data_path,
)
initializer_values = tuple(model.graph.initializers.values())
tensors = [v.const_value for v in initializer_values]
for tensor in tensors:
if tensor is None:
raise ValueError(
"The model contains uninitialized initializer values. "
"Please make sure all initializer values are initialized."
)
destination_path = pathlib.Path(model_path)
base_dir = destination_path.parent
data_path = f"{destination_path.name}.data"

external_tensors = _external_data.convert_tensors_to_external(
tensors, # type: ignore[arg-type]
base_dir,
data_path,
)

# Replace the initializer values with external tensors and save the model
for initializer, external_tensor in zip(initializer_values, external_tensors):
initializer.const_value = external_tensor
ir.save(model, model_path)

# Restore the original initializer values so the model is unchanged
for initializer, tensor in zip(initializer_values, tensors):
initializer.const_value = tensor


def get_torchlib_ops() -> list[_OnnxFunctionMeta]:
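A minimal usage sketch of the now-unconditional external-data path (the model object and output path are assumptions for illustration):

```python
from onnxscript._framework_apis.torch_2_5 import save_model_with_external_data

# `model` is assumed to be an ir.Model whose initializers all have const_value set;
# the tensor data is offloaded to "model.onnx.data" next to the model file, and the
# in-memory model is restored to its original state afterwards.
save_model_with_external_data(model, "exported/model.onnx")
```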
26 changes: 26 additions & 0 deletions onnxscript/_framework_apis/torch_2_6.py
@@ -0,0 +1,26 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
"""Stable APIs for PyTorch 2.6."""

from __future__ import annotations

__all__ = [
"check_model",
"convert_version",
"get_torchlib_ops",
"optimize",
"save_model_with_external_data",
]
from onnxscript import ir, optimizer
from onnxscript._framework_apis.torch_2_5 import (
check_model,
convert_version,
get_torchlib_ops,
save_model_with_external_data,
)


def optimize(model: ir.Model) -> ir.Model:
"""Optimize the model."""
optimizer.optimize_ir(model)
return model
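A usage sketch for the new module (file path and call sequence are assumptions for illustration); unlike `torch_2_5`, `optimize` here always runs the IR optimizer instead of gating it behind an environment flag:

```python
import onnx

from onnxscript import ir
from onnxscript._framework_apis import torch_2_6

# Load a serialized model, convert it to the IR, optimize it, and validate the result.
proto = onnx.load("exported/model.onnx")
model = ir.serde.deserialize_model(proto)
model = torch_2_6.optimize(model)
torch_2_6.check_model(model)
```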