/external/XNNPACK/tools/

generate-vbinary-test.py
    57: activation_type = match.group(4)
    58: if activation_type is None:
    59: activation_type = "LINEAR"
    61: activation_type = activation_type.upper()
    64: return op_type, activation_type, batch_tile, arch, isa
   257: def generate_test_cases(ukernel, op_type, activation_type, tester, batch_tile, isa):  (argument)
   287: "ACTIVATION_TYPE": activation_type,
   336: op_type, activation_type, batch_tile, arch, isa = split_ukernel_name(name)
   341: test_case = generate_test_cases(name, op_type, activation_type,

/external/tensorflow/tensorflow/lite/delegates/nnapi/

nnapi_delegate_nnapi_failure_handling_test.cc
    57: ActivationFunctionType activation_type,  (in AddSubOpsAcceleratedModel(), argument)
    66: Init(input1, input2, input3, output, activation_type,  (in AddSubOpsAcceleratedModel())
    88: ActivationFunctionType activation_type,  (in Init(), argument)
    96: CreateAddOptions(builder_, activation_type).Union(),  (in Init())
    99: CreateSubOptions(builder_, activation_type).Union(),  (in Init())

nnapi_delegate_errno_test.cc
    54: ActivationFunctionType activation_type,  (in FloatAddOpModel(), argument)
    57: Init(input1, input2, output, activation_type, allow_fp32_relax_to_fp16);  (in FloatAddOpModel())
    73: const TensorData& output, ActivationFunctionType activation_type,  (in Init(), argument)
    79: CreateAddOptions(builder_, activation_type).Union());  (in Init())

nnapi_delegate_device_selection_test.cc
    46: const TensorData& output, ActivationFunctionType activation_type,  (in Init(), argument)
    55: CreateAddOptions(builder_, activation_type).Union());  (in Init())
   376: ActivationFunctionType activation_type,  (in AddSubOpsAcceleratedModel(), argument)
   382: Init(input1, input2, input3, output, activation_type,  (in AddSubOpsAcceleratedModel())
   402: ActivationFunctionType activation_type,  (in Init(), argument)
   410: CreateAddOptions(builder_, activation_type).Union(),  (in Init())
   413: CreateSubOptions(builder_, activation_type).Union(),  (in Init())
   564: ActivationFunctionType activation_type,  (in HardSwishAddOpsAcceleratedModel(), argument)
   570: Init(input1, input2, output, activation_type, allow_fp32_relax_to_fp16);  (in HardSwishAddOpsAcceleratedModel())
   586: const TensorData& output, ActivationFunctionType activation_type,  (in Init(), argument)
   [all …]

nnapi_delegate_test.cc
   156: ActivationFunctionType activation_type,  (in FloatAddOpModel(), argument)
   158: Init(input1, input2, output, activation_type, allow_fp32_relax_to_fp16);  (in FloatAddOpModel())
   164: ActivationFunctionType activation_type,  (in FloatAddOpModel(), argument)
   167: Init(input1, input2, output, activation_type, allow_fp32_relax_to_fp16);  (in FloatAddOpModel())
   183: const TensorData& output, ActivationFunctionType activation_type,  (in Init(), argument)
   189: CreateAddOptions(builder_, activation_type).Union());  (in Init())
   492: ActivationFunctionType activation_type) {  (in FloatMulOpModel(), argument)
   497: CreateMulOptions(builder_, activation_type).Union());  (in FloatMulOpModel())
  1512: ActivationFunctionType activation_type) {  (in L2NormOpModel(), argument)
  1516: CreateL2NormOptions(builder_, activation_type).Union());  (in L2NormOpModel())
   [all …]
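
All of the NNAPI delegate test files above share one construction: the fused activation is not added as a separate op, but serialized into the builtin op's FlatBuffers options table (CreateAddOptions, CreateSubOptions, CreateMulOptions, ...). Below is a condensed sketch of that pattern, assuming the TFLite SingleOpModel test harness from tensorflow/lite/kernels/test_util.h; the class is simplified for illustration and is not a verbatim copy of any one file.

#include "tensorflow/lite/kernels/test_util.h"
#include "tensorflow/lite/schema/schema_generated.h"

namespace tflite {

// Simplified model builder: two float inputs, one ADD op whose fused
// activation comes from the activation_type constructor argument.
class FloatAddOpModel : public SingleOpModel {
 public:
  FloatAddOpModel(const TensorData& input1, const TensorData& input2,
                  const TensorData& output,
                  ActivationFunctionType activation_type) {
    input1_ = AddInput(input1);
    input2_ = AddInput(input2);
    output_ = AddOutput(output);
    // activation_type (e.g. ActivationFunctionType_RELU) is baked into the
    // AddOptions table of the serialized model, so the kernel clamps its own
    // output; no standalone activation op is created.
    SetBuiltinOp(BuiltinOperator_ADD, BuiltinOptions_AddOptions,
                 CreateAddOptions(builder_, activation_type).Union());
    BuildInterpreter({GetShape(input1_), GetShape(input2_)});
  }

  int input1() { return input1_; }
  int input2() { return input2_; }

 private:
  int input1_, input2_, output_;
};

}  // namespace tflite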

/external/tensorflow/tensorflow/core/kernels/

conv_ops_test.cc
   635: const Tensor& bias_data, const string& activation_type,  (in RunConv2DWithBiasAndActivation(), argument)
   651: if (activation_type == "Relu") {  (in RunConv2DWithBiasAndActivation())
   653: } else if (activation_type == "Relu6") {  (in RunConv2DWithBiasAndActivation())
   655: } else if (activation_type == "Elu") {  (in RunConv2DWithBiasAndActivation())
   657: } else if (activation_type == "LeakyRelu") {  (in RunConv2DWithBiasAndActivation())
   699: const string& activation_type, const std::string& padding,  (in RunConv2DWithBatchNormAndActivation(), argument)
   722: if (activation_type == "Relu") {  (in RunConv2DWithBatchNormAndActivation())
   724: } else if (activation_type == "Relu6") {  (in RunConv2DWithBatchNormAndActivation())
   726: } else if (activation_type == "Elu") {  (in RunConv2DWithBatchNormAndActivation())
   728: } else if (activation_type == "LeakyRelu") {  (in RunConv2DWithBatchNormAndActivation())

matmul_op_test.cc
   121: bool transpose_a, bool transpose_b, const string& activation_type,  (in RunMatMulWithBiasAndActivation(), argument)
   135: if (activation_type == "Relu") {  (in RunMatMulWithBiasAndActivation())
   137: } else if (activation_type == "Relu6") {  (in RunMatMulWithBiasAndActivation())
   139: } else if (activation_type == "Elu") {  (in RunMatMulWithBiasAndActivation())
   141: } else if (activation_type == "LeakyRelu") {  (in RunMatMulWithBiasAndActivation())

conv_ops_benchmark_test.cc
   135: int out_depth, const string& activation_type,  (in Conv2DWithBiasAndActivation(), argument)
   146: TF_CHECK_OK(NodeBuilder(graph->NewName("activation"), activation_type)  (in Conv2DWithBiasAndActivation())
   195: int out_depth, const string& activation_type,  (in Conv2DWithBatchNormAndActivation(), argument)
   206: TF_CHECK_OK(NodeBuilder(graph->NewName("activation"), activation_type)  (in Conv2DWithBatchNormAndActivation())
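
conv_ops_test.cc and matmul_op_test.cc pick the reference activation node through the same string-keyed if/else chain shown above. A minimal sketch of that dispatch against the TensorFlow C++ ops API follows; the free function ApplyActivation is hypothetical (the real tests inline the chain into their Run* helpers), and LeakyRelu's default alpha is used where the tests pass an explicit attr.

#include <string>

#include "tensorflow/cc/framework/ops.h"
#include "tensorflow/cc/framework/scope.h"
#include "tensorflow/cc/ops/nn_ops.h"
#include "tensorflow/cc/ops/nn_ops_internal.h"

using tensorflow::Output;
using tensorflow::Scope;
namespace ops = tensorflow::ops;

// Hypothetical helper mirroring the dispatch in the tests above: map the
// activation name onto the matching reference op in the graph under 'scope'.
Output ApplyActivation(const Scope& scope, Output input,
                       const std::string& activation_type) {
  if (activation_type == "Relu") {
    return ops::Relu(scope, input);
  } else if (activation_type == "Relu6") {
    return ops::Relu6(scope, input);
  } else if (activation_type == "Elu") {
    return ops::Elu(scope, input);
  } else if (activation_type == "LeakyRelu") {
    // LeakyRelu's C++ wrapper lives in nn_ops_internal.h; the tests set
    // the Alpha attr explicitly, omitted here.
    return ops::internal::LeakyRelu(scope, input);
  }
  return input;  // unknown name: pass through unchanged
}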

/external/tensorflow/tensorflow/lite/kernels/

l2norm_test.cc
    35: const ActivationFunctionType activation_type) {  (in L2NormOpModel(), argument)
    50: CreateL2NormOptions(builder_, activation_type).Union());  (in L2NormOpModel())

add_test.cc
    34: ActivationFunctionType activation_type) {  (in BaseAddOpModel(), argument)
    39: CreateAddOptions(builder_, activation_type).Union());  (in BaseAddOpModel())
    69: ActivationFunctionType activation_type)  (in QuantizedAddOpModel(), argument)
    73: activation_type) {}  (in QuantizedAddOpModel())

sub_test.cc
    34: ActivationFunctionType activation_type) {  (in BaseSubOpModel(), argument)
    39: CreateSubOptions(builder_, activation_type).Union());  (in BaseSubOpModel())
    76: ActivationFunctionType activation_type)  (in QuantizedSubOpModel(), argument)
    80: activation_type) {}  (in QuantizedSubOpModel())

div_test.cc
    34: ActivationFunctionType activation_type) {  (in BaseDivOpModel(), argument)
    39: CreateDivOptions(builder_, activation_type).Union());  (in BaseDivOpModel())

mul_test.cc
    35: ActivationFunctionType activation_type) {  (in BaseMulOpModel(), argument)
    40: CreateMulOptions(builder_, activation_type).Union());  (in BaseMulOpModel())

/external/tensorflow/tensorflow/lite/micro/kernels/

div_test.cc
    32: ActivationFunctionType activation_type) {  (in BaseDivOpModel(), argument)
    37: CreateDivOptions(builder_, activation_type).Union());  (in BaseDivOpModel())

/external/tensorflow/tensorflow/lite/kernels/internal/

types.h
   769: FusedActivationFunctionType activation_type;  (member)

/external/tensorflow/tensorflow/lite/micro/kernels/ceva/

types.h
   918: FusedActivationFunctionType activation_type;
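
The two types.h hits differ from the rest: activation_type there is a struct member, i.e. a params struct in TFLite's internal types carries the fused activation alongside the op's other static parameters. A hypothetical illustration only; the actual enclosing struct at types.h:769 (and its ceva copy at line 918) is not visible in this listing.

#include <cstdint>

// Mirrors the enum from tensorflow/lite/kernels/internal/types.h,
// redeclared here only so the snippet is self-contained.
enum class FusedActivationFunctionType : std::uint8_t {
  kNone,
  kRelu6,
  kRelu1,
  kRelu,
};

// Struct name and sibling fields are assumptions for illustration; only
// the activation_type member itself appears in the hits above.
struct SomeOpParams {
  FusedActivationFunctionType activation_type;
  // ... other per-op parameters (shapes, quantization, etc.) elided ...
};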

/external/tensorflow/tensorflow/compiler/tf2tensorrt/convert/

convert_nodes.cc
  1797: auto activation_type = static_cast<const nvinfer1::IActivationLayer*>(layer)  (in IsClipOrRelu(), local)
  1800: return activation_type == nvinfer1::ActivationType::kRELU ||  (in IsClipOrRelu())
  1801: activation_type == nvinfer1::ActivationType::kCLIP;  (in IsClipOrRelu())
  1803: return activation_type == nvinfer1::ActivationType::kRELU;  (in IsClipOrRelu())
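
The convert_nodes.cc fragments read the activation kind back from a built TensorRT layer. Below is a reconstruction of IsClipOrRelu() from the lines above, assuming TF-TRT's IS_TRT_VERSION_GE macro and a guard on the layer type; ActivationType::kCLIP only exists in newer TensorRT, hence the two return paths.

#include "NvInfer.h"

// IS_TRT_VERSION_GE is TF-TRT's compile-time version guard; the exact
// version threshold below is assumed for illustration.
bool IsClipOrRelu(const nvinfer1::ILayer* layer) {
  // Only activation layers carry an ActivationType (guard assumed).
  if (layer->getType() != nvinfer1::LayerType::kACTIVATION) return false;
  auto activation_type =
      static_cast<const nvinfer1::IActivationLayer*>(layer)
          ->getActivationType();
#if IS_TRT_VERSION_GE(5, 1, 2, 0)
  return activation_type == nvinfer1::ActivationType::kRELU ||
         activation_type == nvinfer1::ActivationType::kCLIP;
#else
  return activation_type == nvinfer1::ActivationType::kRELU;
#endif
}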