Skip to content

Commit fb621e6

Browse files
committed
Update on "[ET-VK] Removing descriptor pool initialization from DescriptorPool ctor."
The descriptor pool is initialized with the proper capacity while the ComputeGraph is built, in the ComputeGraph::prepare function. Thus, initializing the descriptor pool in the constructor just adds throwaway work. Differential Revision: [D74347030](https://our.internmc.facebook.com/intern/diff/D74347030/) [ghstack-poisoned]
2 parents f96d162 + 45baccc commit fb621e6

File tree

104 files changed

+6290
-7493
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

104 files changed

+6290
-7493
lines changed

.github/workflows/build-presets.yml

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,3 +11,20 @@ on:
1111
concurrency:
1212
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}-${{ github.event_name == 'workflow_dispatch' }}-${{ github.event_name == 'schedule' }}
1313
cancel-in-progress: true
14+
15+
jobs:
16+
apple:
17+
uses: pytorch/test-infra/.github/workflows/macos_job.yml@main
18+
strategy:
19+
matrix:
20+
preset: [macos-arm64]
21+
with:
22+
job-name: build
23+
runner: macos-latest-xlarge
24+
python-version: 3.12
25+
submodules: recursive
26+
script: |
27+
set -eux
28+
${CONDA_RUN} ./install_requirements.sh > /dev/null
29+
${CONDA_RUN} cmake --preset ${{ matrix.preset }}
30+
${CONDA_RUN} cmake --build cmake-out --parallel

CMakeLists.txt

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -48,8 +48,13 @@ project(executorch)
4848
# MARK: - Start EXECUTORCH_H12025_BUILD_MIGRATION --------------------------------------------------
4949

5050
include(${PROJECT_SOURCE_DIR}/tools/cmake/common/preset.cmake)
51+
52+
load_build_preset()
5153
include(${PROJECT_SOURCE_DIR}/tools/cmake/preset/default.cmake)
5254

55+
# Print all the configs that were called with announce_configured_options.
56+
print_configured_options()
57+
5358
# MARK: - End EXECUTORCH_H12025_BUILD_MIGRATION ----------------------------------------------------
5459

5560
include(tools/cmake/Utils.cmake)
@@ -175,8 +180,6 @@ option(EXECUTORCH_BUILD_ARM_BAREMETAL
175180
"Build the Arm Baremetal flow for Cortex-M and Ethos-U" OFF
176181
)
177182

178-
option(EXECUTORCH_BUILD_COREML "Build the Core ML backend" OFF)
179-
180183
option(EXECUTORCH_BUILD_KERNELS_CUSTOM "Build the custom kernels" OFF)
181184

182185
option(EXECUTORCH_BUILD_KERNELS_CUSTOM_AOT "Build the custom ops lib for AOT"

CMakePresets.json

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
{
2+
"version": 10,
3+
"cmakeMinimumRequired": {
4+
"major": 3,
5+
"minor": 31,
6+
"patch": 0
7+
},
8+
"$comment": "On-device AI across mobile, embedded and edge for PyTorch.",
9+
"configurePresets": [
10+
{
11+
"name": "common",
12+
"hidden": true,
13+
"binaryDir": "${sourceDir}/cmake-out",
14+
"generator": "Unix Makefiles"
15+
},
16+
{
17+
"name": "macos-arm64",
18+
"inherits": ["common"],
19+
"generator": "Xcode",
20+
"cacheVariables": {
21+
"CMAKE_TOOLCHAIN_FILE": "${sourceDir}/third-party/ios-cmake/ios.toolchain.cmake",
22+
"EXECUTORCH_BUILD_PRESET_FILE": "${sourceDir}/tools/cmake/preset/macos-arm64.cmake",
23+
"PLATFORM": "MAC_ARM64",
24+
"DEPLOYMENT_TARGET": "10.15"
25+
},
26+
"condition": {
27+
"lhs": "${hostSystemName}",
28+
"type": "equals",
29+
"rhs": "Darwin"
30+
}
31+
}
32+
]
33+
}

backends/arm/operators/op_permute.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -46,24 +46,26 @@ def permutation_matrix_to_vector(permutation_matrix: torch.Tensor) -> list[int]:
4646
(1,0,2)
4747
"""
4848
N = len(permutation_matrix)
49-
assert N == len(
50-
permutation_matrix[0]
51-
), f"A permutation matrix must be square, got shape {permutation_matrix.shape}"
49+
if N != len(permutation_matrix[0]):
50+
raise ValueError(
51+
f"A permutation matrix must be square, got shape {permutation_matrix.shape}"
52+
)
5253

5354
p = [0] * N
5455
for row_index, row in enumerate(permutation_matrix):
5556
saw_one = False
5657
for col_index, value in enumerate(row):
5758
if value == 1:
58-
assert (
59-
not saw_one
60-
), f"A permutation matrix can only have one 1 per row, got row {row}."
59+
if saw_one:
60+
raise ValueError(
61+
f"A permutation matrix can only have one 1 per row, got {row=}"
62+
)
6163
p[row_index] = col_index
6264
saw_one = True
63-
else:
64-
assert (
65-
value == 0
66-
), f"A permutation matrix only contains 1's and 0's, got value {value}."
65+
elif value != 0:
66+
raise ValueError(
67+
f"A permutation matrix only contains 1's and 0's, got {value=}"
68+
)
6769
return p
6870

6971

backends/arm/operators/op_where.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -69,8 +69,6 @@ def define_node(
6969
) -> None:
7070
import tosa_tools.v0_80.serializer.tosa_serializer as ts # type: ignore
7171

72-
validate_num_inputs(self.target, inputs, 3)
73-
7472
bi_supported_dtypes = [
7573
ts.DType.INT8,
7674
ts.DType.INT16,
@@ -99,8 +97,6 @@ def define_node(
9997
) -> None:
10098
import tosa_tools.v0_80.serializer.tosa_serializer as ts # type: ignore
10199

102-
validate_num_inputs(self.target, inputs, 3)
103-
104100
mi_supported_dtypes = [
105101
ts.DType.FP16,
106102
ts.DType.FP32,
@@ -163,8 +159,6 @@ def define_node(
163159
) -> None:
164160
import serializer.tosa_serializer as ts
165161

166-
validate_num_inputs(self.target, inputs, 3)
167-
168162
bi_supported_dtypes = [
169163
ts.DType.INT8,
170164
ts.DType.INT16,
@@ -193,8 +187,6 @@ def define_node(
193187
) -> None:
194188
import serializer.tosa_serializer as ts
195189

196-
validate_num_inputs(self.target, inputs, 3)
197-
198190
mi_supported_dtypes = [
199191
ts.DType.FP16,
200192
ts.DType.FP32,

backends/arm/scripts/parse_test_names.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,15 @@
55
from executorch.exir.dialects.edge.spec.utils import SAMPLE_INPUT
66

77
# Add edge ops which we lower but which are not included in exir/dialects/edge/edge.yaml here.
8-
CUSTOM_EDGE_OPS = ["linspace.default", "eye.default"]
8+
CUSTOM_EDGE_OPS = [
9+
"linspace.default",
10+
"eye.default",
11+
"hardsigmoid.default",
12+
"hardswish.default",
13+
"linear.default",
14+
"maximum.default",
15+
"adaptive_avg_pool2d.default",
16+
]
917
ALL_EDGE_OPS = SAMPLE_INPUT.keys() | CUSTOM_EDGE_OPS
1018

1119
# Add all targets and TOSA profiles we support here.

backends/arm/test/common.py

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -259,17 +259,15 @@ def decorator_func(func):
259259
raise RuntimeError(
260260
"xfail info needs to be str, or tuple[str, type[Exception]]"
261261
)
262-
pytest_param = pytest.param(
263-
test_parameters,
264-
id=id,
265-
marks=pytest.mark.xfail(
266-
reason=reason, raises=raises, strict=strict
267-
),
262+
# Set up our fail marker
263+
marker = (
264+
pytest.mark.xfail(reason=reason, raises=raises, strict=strict),
268265
)
269266
else:
270-
pytest_param = pytest.param(test_parameters, id=id)
271-
pytest_testsuite.append(pytest_param)
267+
marker = ()
272268

269+
pytest_param = pytest.param(test_parameters, id=id, marks=marker)
270+
pytest_testsuite.append(pytest_param)
273271
return pytest.mark.parametrize(arg_name, pytest_testsuite)(func)
274272

275273
return decorator_func

backends/arm/test/conftest.py

Lines changed: 6 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,6 @@
1212

1313
import pytest
1414

15-
try:
16-
import tosa_tools.v0_80.tosa_reference_model as tosa_reference_model
17-
except ImportError:
18-
logging.warning("tosa_reference_model not found, can't run reference model tests")
19-
tosa_reference_model = None
20-
2115
"""
2216
This file contains the pytest hooks, fixtures etc. for the Arm test suite.
2317
"""
@@ -50,10 +44,11 @@ def pytest_configure(config):
5044
if getattr(config.option, "fast_fvp", False):
5145
pytest._test_options["fast_fvp"] = config.option.fast_fvp # type: ignore[attr-defined]
5246

53-
# TODO: remove this flag once we have a way to run the reference model tests with Buck
54-
pytest._test_options["tosa_ref_model"] = False # type: ignore[attr-defined]
55-
if tosa_reference_model is not None:
56-
pytest._test_options["tosa_ref_model"] = True # type: ignore[attr-defined]
47+
if config.option.arm_run_tosa_version:
48+
pytest._test_options["tosa_version"] = config.option.arm_run_tosa_version
49+
50+
pytest._test_options["tosa_ref_model"] = True # type: ignore[attr-defined]
51+
5752
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
5853

5954

@@ -76,6 +71,7 @@ def try_addoption(*args, **kwargs):
7671
nargs="+",
7772
help="List of two files. Firstly .pt file. Secondly .json",
7873
)
74+
try_addoption("--arm_run_tosa_version", action="store", default="0.80")
7975

8076

8177
def pytest_sessionstart(session):

backends/arm/test/models/test_llama.py

Lines changed: 19 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -33,27 +33,35 @@
3333
class TestLlama(unittest.TestCase):
3434
"""
3535
Test class of Llama models. Type of Llama model depends on command line parameters:
36-
--llama_inputs <path to .pt file> <path to json file>
37-
Example: --llama_inputs stories110M/stories110M.pt stories110M/params.json
36+
--llama_inputs <path to .pt file> <path to json file> <name of model variant>
37+
Example: --llama_inputs stories110M/stories110M.pt stories110M/params.json stories110m
38+
For more examples and info see examples/models/llama/README.md.
3839
"""
3940

4041
def prepare_model(self):
4142

4243
checkpoint = None
4344
params_file = None
45+
usage = "To run use --llama_inputs <.pt/.pth> <.json> <name>"
46+
4447
if conftest.is_option_enabled("llama_inputs"):
4548
param_list = conftest.get_option("llama_inputs")
46-
assert (
47-
isinstance(param_list, list) and len(param_list) == 2
48-
), "invalid number of inputs for --llama_inputs"
49+
50+
if not isinstance(param_list, list) or len(param_list) != 3:
51+
raise RuntimeError(
52+
f"Invalid number of inputs for --llama_inputs. {usage}"
53+
)
54+
if not all(isinstance(param, str) for param in param_list):
55+
raise RuntimeError(
56+
f"All --llama_inputs are expected to be strings. {usage}"
57+
)
58+
4959
checkpoint = param_list[0]
5060
params_file = param_list[1]
51-
assert isinstance(checkpoint, str) and isinstance(
52-
params_file, str
53-
), "invalid input for --llama_inputs"
61+
model_name = param_list[2]
5462
else:
5563
logger.warning(
56-
"Skipping Llama test because of lack of input. To run use --llama_inputs <.pt> <.json>"
64+
"Skipping Llama tests because of missing --llama_inputs. {usage}"
5765
)
5866
return None, None, None
5967

@@ -71,7 +79,7 @@ def prepare_model(self):
7179
"-p",
7280
params_file,
7381
"--model",
74-
"stories110m",
82+
model_name,
7583
]
7684
parser = build_args_parser()
7785
args = parser.parse_args(args)
@@ -122,6 +130,7 @@ def test_llama_tosa_BI(self):
122130
.quantize()
123131
.export()
124132
.to_edge_transform_and_lower()
133+
.check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
125134
.to_executorch()
126135
.run_method_and_compare_outputs(
127136
inputs=llama_inputs,

0 commit comments

Comments
 (0)