python/paddle/distributed/fleet/meta_parallel/pp_utils (1 file changed, +3 −0 lines)
@@ -20,6 +20,7 @@
 from paddle import _legacy_C_ops
 from paddle.distributed.parallel import _split_tensors
 from paddle.fluid import core
+from paddle.framework import base as imperative_base
 
 __all__ = []
 
@@ -165,6 +166,7 @@ def add_grad(self, param):
         if self._all_params_checked_in:
             self._fused_allreduce_grads()
 
+    @imperative_base.no_grad
     def _fused_allreduce_grads(self):
         assert self._all_params_checked_in
         flattened_vars = []
@@ -188,6 +190,7 @@ def _fused_allreduce_grads(self):
             )
         )
 
+    @imperative_base.no_grad
     def scale_and_split_grads(self):
         for task in self._tasks:
             task.wait()
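
For context: the three added lines import Paddle's internal imperative_base module and apply its no_grad decorator to _fused_allreduce_grads and scale_and_split_grads, presumably so that the in-place gradient fusion, allreduce, scaling, and splitting work is not traced by the dygraph autograd engine. Below is a minimal, hypothetical sketch of the same pattern using the public paddle.no_grad API; the scale_grads helper and the 1/8 scale factor are illustrative and not from this PR.

import paddle

linear = paddle.nn.Linear(4, 4)
loss = linear(paddle.randn([2, 4])).sum()
loss.backward()  # populates .grad on the parameters

@paddle.no_grad()  # public analogue of the internal imperative_base.no_grad
def scale_grads(params, scale):
    # In-place scaling of already-computed gradients, e.g. dividing by the
    # data-parallel world size after an allreduce. With no_grad active,
    # these ops record nothing in the autograd graph.
    for p in params:
        if p.grad is not None:
            p.grad.scale_(scale)  # scale_ is assumed available (Paddle 2.x in-place op)

scale_grads(linear.parameters(), 1.0 / 8)

Decorating the whole method, as the diff does, keeps the no_grad guard in one place instead of wrapping each individual gradient operation in a with-block.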