Commit c75c614

Fix bug for backward vlog (#75590)
1 parent 74f65f4 commit c75c614

File tree

1 file changed: +10 -6 lines changed

paddle/fluid/eager/backward.cc

Lines changed: 10 additions & 6 deletions
@@ -508,7 +508,8 @@ std::vector<paddle::Tensor> RunBackward(
       auto* next_node = next_node_shared.get();
 
       // Construct backward graph for debug
-      if (need_debug_backward_graph) {
+      if (need_debug_backward_graph && grad_output_tensor.defined() &&
+          grad_output_tensor.has_allocation()) {
         std::string dot_next_node_label = CreateNodeLabelInDot(next_node);
         if (!dot.ContainsNode(dot_next_node_label)) {
           if (next_node->name() == "GradNodeAccumulation") {
@@ -539,11 +540,14 @@ std::vector<paddle::Tensor> RunBackward(
 
       VLOG(7) << "RunBackward: Sum or Move grad inputs for edge slot: "
               << edge_rank.first << ", rank: " << edge_rank.second;
-      VLOG(6) << "RunBackward: Add grad_output_tensor to GradTensorHolder, "
-                 "grad_output_tensor info "
-              << grad_output_tensor.place() << ","
-              << grad_output_tensor.dtype() << ", ("
-              << grad_output_tensor.dims() << ")";
+      VLOG_IF(6,
+              grad_output_tensor.defined() &&
+                  grad_output_tensor.has_allocation())
+          << "RunBackward: Add grad_output_tensor to GradTensorHolder, "
+          << "grad_output_tensor info " << grad_output_tensor.place() << ","
+          << grad_output_tensor.dtype() << ", ("
+          << grad_output_tensor.dims() << ")";
+
       node_input_buffers_dict[next_node]->add(edge_rank.first,
                                               edge_rank.second,
                                               grad_output_tensor,
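Why both hunks add the same guard: streaming grad_output_tensor into the log evaluates place(), dtype(), and dims() on a tensor that may be undefined or have no backing allocation, which is the failure mode the commit title points at. glog's VLOG_IF(level, condition) short-circuits, so when the condition is false the whole stream expression is skipped and the accessors never run; the first hunk applies the same defined()/has_allocation() check before building the debug backward graph. The sketch below is a minimal, self-contained illustration of that short-circuit pattern, not Paddle code: FakeTensor and its members are hypothetical stand-ins for paddle::Tensor, and only the VLOG_IF behavior is the point.

// Minimal sketch (assumed setup, not Paddle code) of the guarded-logging
// pattern this commit adopts. FakeTensor is a hypothetical stand-in for
// paddle::Tensor.
#include <glog/logging.h>

#include <cassert>
#include <string>

struct FakeTensor {
  bool defined() const { return defined_; }
  bool has_allocation() const { return allocated_; }
  // On the real tensor these can fail for an undefined/unallocated tensor;
  // here they assert so an unguarded call is visible immediately.
  std::string place() const { assert(allocated_); return "Place(cpu)"; }
  std::string dtype() const { assert(allocated_); return "float32"; }
  std::string dims() const { assert(allocated_); return "[2, 3]"; }
  bool defined_ = false;
  bool allocated_ = false;
};

int main(int, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_v = 6;  // enable VLOG(6) so the guard, not the verbosity, decides

  FakeTensor grad_output_tensor;  // undefined: the case that used to break

  // VLOG_IF short-circuits: with a false condition the stream expression is
  // never evaluated, so place()/dtype()/dims() are not called here.
  VLOG_IF(6,
          grad_output_tensor.defined() && grad_output_tensor.has_allocation())
      << "grad_output_tensor info " << grad_output_tensor.place() << ", "
      << grad_output_tensor.dtype() << ", (" << grad_output_tensor.dims()
      << ")";
  return 0;
}

The alternative of wrapping the VLOG in a plain if would work too; VLOG_IF keeps the guard and the message in one statement, which is presumably why the commit prefers it.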
