Searched refs:has_scaling (Results 1 – 2 of 2) sorted by relevance
/external/tensorflow/tensorflow/contrib/quantize/python/
fold_batch_norms_test.py
   74    def _TestFoldConv2d(self, relu, relu_op_name, with_bypass, has_scaling,   [argument]
  115        scale=has_scaling,
  131        scale=has_scaling, fused=fused_batch_norm),
  145        self._BatchNormMultiplierName(conv_name, has_scaling, fused_batch_norm)
  175        has_scaling=True,   [argument]
  211        layer1, scale=has_scaling, fused=fused_batch_norm, scope='layer1')
  222        scale=has_scaling, fused=fused_batch_norm),
  226        layer2, scale=has_scaling, fused=fused_batch_norm, scope='layer2')
  237        self._BatchNormMultiplierName(scope, has_scaling, fused_batch_norm)
  266        has_scaling,   [argument]
  [all …]
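The test-side hits all thread has_scaling through to the batch norm's scale argument when building the layers under test, so each case runs with and without a gamma multiplier. A minimal sketch of that kind of graph, assuming TF 1.x with tf.contrib.layers (build_conv_bn and its concrete argument values are illustrative stand-ins, not the test's actual helpers):

    import tensorflow as tf
    from tensorflow.contrib import layers

    def build_conv_bn(inputs, has_scaling, fused_batch_norm):
      # scale=has_scaling toggles whether batch norm learns a gamma; this is
      # the knob the test sweeps (scale=has_scaling, fused=fused_batch_norm).
      return layers.conv2d(
          inputs,
          num_outputs=32,
          kernel_size=[3, 3],
          activation_fn=tf.nn.relu6,
          normalizer_fn=layers.batch_norm,
          normalizer_params={'scale': has_scaling, 'fused': fused_batch_norm},
          scope='conv1')

    g = tf.Graph()
    with g.as_default():
      inputs = tf.placeholder(tf.float32, [1, 28, 28, 3])
      _ = build_conv_bn(inputs, has_scaling=True, fused_batch_norm=False)

With scale=False the batch norm has no gamma and one fewer Mul op in its subgraph, which appears to be why _BatchNormMultiplierName takes has_scaling when computing the expected multiplier op name.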
fold_batch_norms.py
  520        has_scaling = _HasScaling(graph, input_to_ops_map, bn)
  529        has_scaling=has_scaling,
  613    def _GetBatchNormParams(graph, context, has_scaling):   [argument]
  692        if has_scaling:
  695        if not has_scaling:
  716    def _CreateFoldedOp(graph, context, has_scaling, freeze_batch_norm_delay,   [argument]
  742        mul_scale_name = 'mul_1' if has_scaling else 'mul'
  753        graph=graph, context=context, has_scaling=has_scaling)
  767        scale_name = 'mul' if has_scaling else 'Rsqrt'
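On the folding side, has_scaling is detected from the graph (line 520) and then selects the op names the pass rewrites: 'mul_1' vs 'mul' for the folded multiplier (line 742) and 'mul' vs 'Rsqrt' for the scale tensor (line 767). A sketch of the check _HasScaling plausibly performs, assuming an unfused batch norm with gamma multiplies the Rsqrt output by gamma while one without gamma consumes the Rsqrt output directly; has_scaling_sketch, consumer_ops, and the '/BatchNorm/batchnorm/Rsqrt' path are assumptions, not the file's actual API:

    def has_scaling_sketch(graph, consumer_ops, bn_scope):
      # Locate the Rsqrt op of the unfused batch norm; the op path here is
      # an assumed layout, not copied from fold_batch_norms.py.
      rsqrt = graph.get_operation_by_name(
          bn_scope + '/BatchNorm/batchnorm/Rsqrt')
      # With gamma present, exactly one Mul consumes the Rsqrt output
      # (gamma * rsqrt(variance + epsilon)); without gamma there is none.
      return sum(1 for op in consumer_ops(rsqrt) if op.type == 'Mul') == 1

That extra Mul shifts TensorFlow's auto-generated op names by one, which would explain why the multiplier is 'mul_1' with scaling but 'mul' without, and why the scale tensor comes from a 'mul' output with scaling but straight from 'Rsqrt' without.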