Searched refs:activation_grad (Results 1 – 4 of 4) sorted by relevance
186  Output activation_grad;                                                   in RunFusedBatchNorm() local
188  activation_grad = ops::internal::ReluGrad(                                in RunFusedBatchNorm()
191  activation_grad =                                                         in RunFusedBatchNorm()
196  root.WithOpName("fused_batch_norm_grad"), activation_grad, input, scale,  in RunFusedBatchNorm()
282  NodeDef activation_grad;                                                  in RunFusedBatchNormEx() local
311  .Finalize(&activation_grad));                                             in RunFusedBatchNormEx()
316  .Finalize(&activation_grad));                                             in RunFusedBatchNormEx()
320  .Input({activation_grad.name(), 0, t_dtype})                              in RunFusedBatchNormEx()
332  add_nodes = {&fused_batch_norm_ex, &activation_grad,                      in RunFusedBatchNormEx()

128   int activation_grad = kMissingIndex;                                     member
1864  matched->activation_grad = regular_fanin_0.node_index();                 in FindFusedBatchNormGradEx()
1893  matched->activation_grad = regular_fanin_0.node_index();                 in FindFusedBatchNormGradEx()
2840  const NodeDef& activation_grad = graph->node(matched.activation_grad);   in AddFusedBatchNormGradExNode() local
2844  VLOG(2) << "Fuse FusedBatchNormGrad with " << activation_grad.op() << ": "  in AddFusedBatchNormGradExNode()
2850  << " activation=" << activation_grad.name()                              in AddFusedBatchNormGradExNode()
2858  fused_op.add_input(activation_grad.input(0));  // 0: y_backprop          in AddFusedBatchNormGradExNode()
2865  fused_op.add_input(activation_grad.input(1));  // 7: y                   in AddFusedBatchNormGradExNode()
2881  identity_op.set_name(activation_grad.name());                            in AddFusedBatchNormGradExNode()
2898  (*invalidated_nodes)[matched.activation_grad] = true;                     in AddFusedBatchNormGradExNode()
[all …]
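
The hits above all revolve around one Grappler remapper pattern: a ReluGrad node (the `activation_grad` in these excerpts) feeding the y_backprop input of a FusedBatchNormGradV3 node. Below is a minimal Python sketch of that unfused backward pattern, not taken from these files; it assumes TF2 eager execution and the public raw ops, and the shapes are made up for illustration.

import tensorflow as tf

# Hypothetical shapes; illustration only.
n, h, w, c = 2, 8, 8, 16
x = tf.random.normal([n, h, w, c])
scale = tf.ones([c])
offset = tf.zeros([c])

# Forward: fused batch norm followed by ReLU.
y, batch_mean, batch_var, r1, r2, r3 = tf.raw_ops.FusedBatchNormV3(
    x=x, scale=scale, offset=offset,
    mean=tf.zeros([c]), variance=tf.ones([c]), is_training=True)
act = tf.nn.relu(y)

# Backward: ReluGrad feeding FusedBatchNormGradV3 -- the two-node
# pattern matched in FindFusedBatchNormGradEx(); `activation_grad`
# is the ReluGrad node.
dy = tf.random.normal([n, h, w, c])
activation_grad = tf.raw_ops.ReluGrad(gradients=dy, features=act)
dx, dscale, doffset, _, _ = tf.raw_ops.FusedBatchNormGradV3(
    y_backprop=activation_grad, x=x, scale=scale,
    reserve_space_1=r1, reserve_space_2=r2, reserve_space_3=r3,
    is_training=True)

Under the conditions the remapper checks (device and data-type constraints not visible in these excerpts), AddFusedBatchNormGradExNode() collapses this pair into a single _FusedBatchNormGradEx node and replaces the ReluGrad with an Identity that reuses its name, as the identity_op.set_name(activation_grad.name()) line shows.
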
580  activation_grad = (
583  return dy * activation_grad

319  activation_grad = (
321  return grad * activation_grad
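
Both Python hits follow the same hand-written gradient shape: compute the activation's local derivative into `activation_grad`, then multiply it into the incoming gradient (`dy` or `grad`). A minimal sketch of that shape using tf.custom_gradient, assuming a sigmoid activation (the actual activations in those files are not shown in these results):

import tensorflow as tf

@tf.custom_gradient
def sigmoid_with_manual_grad(x):
  """Illustrative only: sigmoid with a hand-written backward function."""
  y = tf.sigmoid(x)

  def grad(dy):
    # Local derivative of the activation, mirroring the matched lines:
    # compute activation_grad, then scale the incoming gradient by it.
    activation_grad = (
        y * (1.0 - y))  # d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x))
    return dy * activation_grad

  return y, grad

# Usage: gradients flow through the custom backward function.
x = tf.constant([0.0, 1.0, -2.0])
with tf.GradientTape() as tape:
  tape.watch(x)
  y = sigmoid_with_manual_grad(x)
print(tape.gradient(y, x))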