@@ -707,7 +707,9 @@ bool FindContractionWithBiasAddAndAdd(const RemapperContext& ctx,
 bool SharedInputWithMatMul(const RemapperContext& ctx, int node_index,
                            int node_dz) {
   const auto* node_view = ctx.graph_view.GetNode(node_index);
+  if (node_view == nullptr) return false;
   const auto* shared_input = node_view->GetRegularFanin(0).node_view();
+  if (shared_input == nullptr) return false;
   if (shared_input->node_index() == node_dz) {
     shared_input = node_view->GetRegularFanin(1).node_view();
   }
@@ -733,6 +735,7 @@ bool SharedInputWithMatMul(const RemapperContext& ctx, int node_index,
 bool FindContractionWithBiasAddGrad(const RemapperContext& ctx, int node_index,
                                     ContractionWithBiasAddGrad* matched) {
   const auto* node_view = ctx.graph_view.GetNode(node_index);
+  if (node_view == nullptr) return false;
   // TODO(lyandy): Forward controls for patterns with control dependencies.
   if (HasControlFaninOrFanout(*node_view)) return false;
 
@@ -764,6 +767,7 @@ bool FindContractionWithBiasAddGrad(const RemapperContext& ctx, int node_index,
   // the forward input with index 0.
 
   const auto* dz = node_view->GetRegularFanin(0).node_view();
+  if (dz == nullptr) return false;
   // The node index for MatMulGradFilter if found.
   int matmul_grad_filter_idx = -1;
 
@@ -1838,6 +1842,7 @@ Status AddBatchNormNodes(RemapperContext* ctx, const FusedBatchNorm& matched) {
 #ifdef INTEL_MKL
 bool IsConv2DWithAdd(const RemapperContext& ctx, int node_index) {
   const auto* node_view = ctx.graph_view.GetNode(node_index);
+  if (node_view == nullptr) return false;
   const auto* node_def = node_view->node();
 
   // Candidate for Conv2D + Add or Conv2D + BiasAdd + Add fusion.
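
Every hunk above applies the same defensive pattern: a graph-view lookup can return a null pointer, so the matcher bails out before dereferencing it. Below is a minimal standalone sketch of that pattern; the GraphView/NodeView types, the MatchesPattern helper, and the "Conv2D" check are hypothetical stand-ins for illustration, not TensorFlow's actual grappler API.

#include <iostream>
#include <string>
#include <unordered_map>

// Hypothetical stand-in for a graph node; illustrative only.
struct NodeView {
  std::string op;
};

// Hypothetical stand-in for a graph view whose lookup can fail.
struct GraphView {
  std::unordered_map<int, NodeView> nodes;

  // Returns nullptr when node_index does not refer to a node, mirroring the
  // failure case the patch guards against.
  const NodeView* GetNode(int node_index) const {
    auto it = nodes.find(node_index);
    return it == nodes.end() ? nullptr : &it->second;
  }
};

// Illustrative matcher: check the null result before dereferencing,
// exactly as the added `if (node_view == nullptr) return false;` lines do.
bool MatchesPattern(const GraphView& graph_view, int node_index) {
  const NodeView* node_view = graph_view.GetNode(node_index);
  if (node_view == nullptr) return false;
  return node_view->op == "Conv2D";
}

int main() {
  GraphView gv;
  gv.nodes[0] = {"Conv2D"};
  std::cout << MatchesPattern(gv, 0) << "\n";   // 1: node exists and matches
  std::cout << MatchesPattern(gv, 42) << "\n";  // 0: missing node, no crash
  return 0;
}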