Skip to content

Commit 5e83d26

Browse files
Rename `optimized` to `optimize` to match the numpy/jax signature
1 parent 1f33753 commit 5e83d26

File tree

2 files changed

+8
-8
lines changed

2 files changed

+8
-8
lines changed

pytensor/tensor/einsum.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -35,12 +35,12 @@ class Einsum(OpFromGraph):
3535
Wrapper Op for Einsum graphs
3636
"""
3737

38-
__props__ = ("subscripts", "path", "optimized")
38+
__props__ = ("subscripts", "path", "optimize")
3939

40-
def __init__(self, *args, subscripts: str, path: str, optimized: bool, **kwargs):
40+
def __init__(self, *args, subscripts: str, path: str, optimize: bool, **kwargs):
4141
self.subscripts = subscripts
4242
self.path = path
43-
self.optimized = optimized
43+
self.optimize = optimize
4444
super().__init__(*args, **kwargs, strict=True)
4545

4646

@@ -223,7 +223,7 @@ def einsum(subscripts: str, *operands: "TensorLike") -> TensorVariable:
223223
shapes = [operand.type.shape for operand in operands]
224224

225225
if None in itertools.chain.from_iterable(shapes):
226-
# We mark optimized = False, even in cases where there is no ordering optimization to be done
226+
# We mark optimize = False, even in cases where there is no ordering optimization to be done
227227
# because the inner graph may have to accommodate dynamic shapes.
228228
# If those shapes become known later we will likely want to rebuild the Op (unless we inline it)
229229
if len(operands) == 1:
@@ -234,7 +234,7 @@ def einsum(subscripts: str, *operands: "TensorLike") -> TensorVariable:
234234
# We use (1,0) and not (0,1) because that's what opt_einsum tends to prefer, and so the Op signatures will match more often
235235
path = [(1, 0) for i in range(len(operands) - 1)]
236236
contraction_list = contraction_list_from_path(subscripts, operands, path)
237-
optimized = (
237+
optimize = (
238238
len(operands) <= 2
239239
) # If there are only 1 or 2 operands, there is no optimization to be done?
240240
else:
@@ -247,7 +247,7 @@ def einsum(subscripts: str, *operands: "TensorLike") -> TensorVariable:
247247
shapes=True,
248248
)
249249
path = [contraction[0] for contraction in contraction_list]
250-
optimized = True
250+
optimize = True
251251

252252
def sum_uniques(
253253
operand: TensorVariable, names: str, uniques: list[str]
@@ -412,6 +412,6 @@ def sum_repeats(
412412
inputs=list(operands),
413413
outputs=[einsum_result],
414414
path=tuple(path),
415-
optimized=optimized,
415+
optimize=optimize,
416416
)(*operands)
417417
return cast(TensorVariable, out)

tests/tensor/test_einsum.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -127,7 +127,7 @@ def test_einsum_signatures(static_shape_known, signature):
127127
for name, static_shape in zip(ascii_lowercase, static_shapes)
128128
]
129129
out = pt.einsum(signature, *operands)
130-
assert out.owner.op.optimized == static_shape_known or len(operands) <= 2
130+
assert out.owner.op.optimize == static_shape_known or len(operands) <= 2
131131

132132
rng = np.random.default_rng(37)
133133
test_values = [rng.normal(size=shape) for shape in shapes]

0 commit comments

Comments (0)