Don't clone inputs if using fake tensor (pytorch#88208)
Not sure that this will really reduce memory use, but it is an extraneous copy in our stack right now.
Pull Request resolved: pytorch#88208
Approved by: https://github.com/anijain2305
eellison authored and pytorchmergebot committed Nov 3, 2022
1 parent 192e806 commit 60925fc
Showing 1 changed file with 4 additions and 4 deletions.
torch/_dynamo/optimizations/analysis.py (4 additions, 4 deletions)

@@ -121,10 +121,6 @@ def has_mutation(gm, example_inputs, inputs_only=False):
     true, we only check for mutation of inputs"""
     # TODO - moco gives bad accuracy with Aliasing. gm is getting mutated in a bad way.
 
-    # Clone the inputs such that intermediate tensors (not leaf tensors) with
-    # requires_grad to True are now converted to False to avoid Runtime Error
-    # like "leaf variable that requires grad is inplace modified"
-    example_inputs = clone_inputs(example_inputs)
     if fake_tensors_available and config.fake_tensor_propagation:
         with FakeTensorMode() as fake_mode:
             pass
@@ -134,6 +130,10 @@ def has_mutation(gm, example_inputs, inputs_only=False):
         with fake_mode.restore() if hasattr(fake_mode, "restore") else fake_mode:
             ShapeAliasingAndMutationProp(new_gm).run(*example_inputs)
     else:
+        # Clone the inputs such that intermediate tensors (not leaf tensors) with
+        # requires_grad to True are now converted to False to avoid Runtime Error
+        # like "leaf variable that requires grad is inplace modified"
+        example_inputs = clone_inputs(example_inputs)
         new_gm = copy.deepcopy(gm)
         example_inputs = copy.deepcopy(example_inputs)
         ShapeAliasingAndMutationProp(new_gm).run(*example_inputs)
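
Below is a minimal sketch (not part of this commit) of why the defensive clone can be dropped on the fake-tensor path: wrapping the example inputs in fake tensors already yields fresh, storage-less tensors, so the mutation-analysis pass never touches the caller's real inputs. It assumes FakeTensorMode/FakeTensor from torch._subclasses.fake_tensor and its from_tensor conversion; the variable names are illustrative.

import torch
from torch._subclasses.fake_tensor import FakeTensor, FakeTensorMode

# Real inputs as a user would pass them, including a leaf that requires grad.
real_inputs = [torch.randn(4, requires_grad=True)]

# Convert to fake tensors: these mirror metadata only and are distinct
# objects, so running the mutation analysis on them cannot modify the
# originals. No clone_inputs is needed on this path.
fake_mode = FakeTensorMode()
fake_inputs = [fake_mode.from_tensor(t) for t in real_inputs]

assert all(isinstance(t, FakeTensor) for t in fake_inputs)
assert fake_inputs[0] is not real_inputs[0]

# The eager fallback (the `else` branch in has_mutation) still runs the pass
# on real tensors, so it keeps clone_inputs to avoid the
# "leaf variable that requires grad is inplace modified" RuntimeError.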
