
Searched refs:activation_type (Results 1 – 17 of 17) sorted by relevance

/external/XNNPACK/tools/
generate-vbinary-test.py
   57  activation_type = match.group(4)
   58  if activation_type is None:
   59    activation_type = "LINEAR"
   61  activation_type = activation_type.upper()
   64  return op_type, activation_type, batch_tile, arch, isa
  257  def generate_test_cases(ukernel, op_type, activation_type, tester, batch_tile, isa):
  287  "ACTIVATION_TYPE": activation_type,
  336  op_type, activation_type, batch_tile, arch, isa = split_ukernel_name(name)
  341  test_case = generate_test_cases(name, op_type, activation_type,
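The hits above are in XNNPACK's test generator, which parses the activation suffix out of a vbinary micro-kernel name and falls back to "LINEAR" when the name carries none. The generator itself is Python; the sketch below mirrors that splitting logic in C++ for consistency with the rest of this page, and its regex is an illustrative assumption, not the exact pattern the script compiles:

    #include <cctype>
    #include <iostream>
    #include <regex>
    #include <string>
    #include <utility>

    // Illustrative sketch of split_ukernel_name(): pull the op type and the
    // optional activation suffix out of a name such as
    // "xnn_f32_vadd_minmax_ukernel__neon_x8".
    std::pair<std::string, std::string> SplitUkernelName(const std::string& name) {
      static const std::regex kPattern(
          "xnn_f32_v(add|sub|mul|div)(?:_(minmax|relu))?_ukernel__.*");
      std::smatch match;
      std::string op_type;
      std::string activation_type = "LINEAR";  // default when no suffix is present
      if (std::regex_match(name, match, kPattern)) {
        op_type = match[1].str();
        if (match[2].matched) {
          activation_type = match[2].str();
          for (char& c : activation_type) {
            c = static_cast<char>(std::toupper(c));  // "minmax" -> "MINMAX"
          }
        }
      }
      return {op_type, activation_type};
    }

    int main() {
      auto [op, act] = SplitUkernelName("xnn_f32_vadd_minmax_ukernel__neon_x8");
      std::cout << op << " " << act << "\n";  // prints: add MINMAX
    }

The generator then injects the result into its test template as the ACTIVATION_TYPE substitution (line 287 above).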
/external/tensorflow/tensorflow/lite/delegates/nnapi/
nnapi_delegate_nnapi_failure_handling_test.cc
   57  ActivationFunctionType activation_type,   // in AddSubOpsAcceleratedModel()
   66  Init(input1, input2, input3, output, activation_type,   // in AddSubOpsAcceleratedModel()
   88  ActivationFunctionType activation_type,   // in Init()
   96  CreateAddOptions(builder_, activation_type).Union(),   // in Init()
   99  CreateSubOptions(builder_, activation_type).Union(),   // in Init()
nnapi_delegate_errno_test.cc
   54  ActivationFunctionType activation_type,   // in FloatAddOpModel()
   57  Init(input1, input2, output, activation_type, allow_fp32_relax_to_fp16);   // in FloatAddOpModel()
   73  const TensorData& output, ActivationFunctionType activation_type,   // in Init()
   79  CreateAddOptions(builder_, activation_type).Union());   // in Init()
nnapi_delegate_device_selection_test.cc
   46  const TensorData& output, ActivationFunctionType activation_type,   // in Init()
   55  CreateAddOptions(builder_, activation_type).Union());   // in Init()
  376  ActivationFunctionType activation_type,   // in AddSubOpsAcceleratedModel()
  382  Init(input1, input2, input3, output, activation_type,   // in AddSubOpsAcceleratedModel()
  402  ActivationFunctionType activation_type,   // in Init()
  410  CreateAddOptions(builder_, activation_type).Union(),   // in Init()
  413  CreateSubOptions(builder_, activation_type).Union(),   // in Init()
  564  ActivationFunctionType activation_type,   // in HardSwishAddOpsAcceleratedModel()
  570  Init(input1, input2, output, activation_type, allow_fp32_relax_to_fp16);   // in HardSwishAddOpsAcceleratedModel()
  586  const TensorData& output, ActivationFunctionType activation_type,   // in Init()
  [all …]
nnapi_delegate_test.cc
  156  ActivationFunctionType activation_type,   // in FloatAddOpModel()
  158  Init(input1, input2, output, activation_type, allow_fp32_relax_to_fp16);   // in FloatAddOpModel()
  164  ActivationFunctionType activation_type,   // in FloatAddOpModel()
  167  Init(input1, input2, output, activation_type, allow_fp32_relax_to_fp16);   // in FloatAddOpModel()
  183  const TensorData& output, ActivationFunctionType activation_type,   // in Init()
  189  CreateAddOptions(builder_, activation_type).Union());   // in Init()
  492  ActivationFunctionType activation_type) {   // in FloatMulOpModel()
  497  CreateMulOptions(builder_, activation_type).Union());   // in FloatMulOpModel()
 1512  ActivationFunctionType activation_type) {   // in L2NormOpModel()
 1516  CreateL2NormOptions(builder_, activation_type).Union());   // in L2NormOpModel()
  [all …]
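All four delegate tests above share one pattern: the fused activation is serialized into each op's FlatBuffer options table before the model is handed to the NNAPI delegate. A minimal sketch of just that step, assuming the TFLite schema header; the surrounding test-model plumbing is elided:

    #include <utility>

    #include "flatbuffers/flatbuffers.h"
    #include "tensorflow/lite/schema/schema_generated.h"

    // Build the option tables for the ADD and SUB ops of a model like
    // AddSubOpsAcceleratedModel, threading the same fused activation into both.
    std::pair<flatbuffers::Offset<void>, flatbuffers::Offset<void>>
    BuildAddSubOptions(flatbuffers::FlatBufferBuilder& builder,
                       tflite::ActivationFunctionType activation_type) {
      return {tflite::CreateAddOptions(builder, activation_type).Union(),
              tflite::CreateSubOptions(builder, activation_type).Union()};
    }

Fusing the activation into the op's options is what lets the delegate map an op together with its activation onto a single NNAPI operation, since NNAPI's ADD and SUB take a fused-activation operand.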
/external/tensorflow/tensorflow/core/kernels/
conv_ops_test.cc
  635  const Tensor& bias_data, const string& activation_type,   // in RunConv2DWithBiasAndActivation()
  651  if (activation_type == "Relu") {   // in RunConv2DWithBiasAndActivation()
  653  } else if (activation_type == "Relu6") {   // in RunConv2DWithBiasAndActivation()
  655  } else if (activation_type == "Elu") {   // in RunConv2DWithBiasAndActivation()
  657  } else if (activation_type == "LeakyRelu") {   // in RunConv2DWithBiasAndActivation()
  699  const string& activation_type, const std::string& padding,   // in RunConv2DWithBatchNormAndActivation()
  722  if (activation_type == "Relu") {   // in RunConv2DWithBatchNormAndActivation()
  724  } else if (activation_type == "Relu6") {   // in RunConv2DWithBatchNormAndActivation()
  726  } else if (activation_type == "Elu") {   // in RunConv2DWithBatchNormAndActivation()
  728  } else if (activation_type == "LeakyRelu") {   // in RunConv2DWithBatchNormAndActivation()
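Both Run*WithBiasAndActivation helpers choose the activation by comparing the activation_type string. As a compact reference for what each branch computes, here is a hedged scalar sketch; ApplyActivation is a hypothetical helper, not a TensorFlow API, and the real tests build Relu/Relu6/Elu/LeakyRelu graph nodes instead:

    #include <algorithm>
    #include <cmath>
    #include <stdexcept>
    #include <string>

    // Scalar reference version of the string dispatch above.
    float ApplyActivation(const std::string& activation_type, float x) {
      if (activation_type == "Relu") return std::max(x, 0.0f);
      if (activation_type == "Relu6") return std::min(std::max(x, 0.0f), 6.0f);
      if (activation_type == "Elu") return x >= 0.0f ? x : std::expm1(x);
      // TensorFlow's LeakyRelu op defaults to alpha = 0.2.
      if (activation_type == "LeakyRelu") return x >= 0.0f ? x : 0.2f * x;
      throw std::invalid_argument("unsupported activation: " + activation_type);
    }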
matmul_op_test.cc
  121  bool transpose_a, bool transpose_b, const string& activation_type,   // in RunMatMulWithBiasAndActivation()
  135  if (activation_type == "Relu") {   // in RunMatMulWithBiasAndActivation()
  137  } else if (activation_type == "Relu6") {   // in RunMatMulWithBiasAndActivation()
  139  } else if (activation_type == "Elu") {   // in RunMatMulWithBiasAndActivation()
  141  } else if (activation_type == "LeakyRelu") {   // in RunMatMulWithBiasAndActivation()
conv_ops_benchmark_test.cc
  135  int out_depth, const string& activation_type,   // in Conv2DWithBiasAndActivation()
  146  TF_CHECK_OK(NodeBuilder(graph->NewName("activation"), activation_type)   // in Conv2DWithBiasAndActivation()
  195  int out_depth, const string& activation_type,   // in Conv2DWithBatchNormAndActivation()
  206  TF_CHECK_OK(NodeBuilder(graph->NewName("activation"), activation_type)   // in Conv2DWithBatchNormAndActivation()
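The benchmarks take a different route from the tests above: instead of branching on the string, they pass activation_type straight to NodeBuilder as the op name, so "Relu", "Relu6", "Elu", or "LeakyRelu" selects the node type directly. A minimal sketch of that step, assuming a preceding node to feed in:

    #include <string>

    #include "tensorflow/core/graph/graph.h"
    #include "tensorflow/core/graph/node_builder.h"
    #include "tensorflow/core/lib/core/status.h"

    // Append an activation node whose op type *is* the activation_type string,
    // as Conv2DWithBiasAndActivation does above. `input` is assumed to be the
    // node producing the tensor to activate.
    tensorflow::Node* AddActivationNode(tensorflow::Graph* graph,
                                        tensorflow::Node* input,
                                        const std::string& activation_type) {
      tensorflow::Node* activation = nullptr;
      TF_CHECK_OK(
          tensorflow::NodeBuilder(graph->NewName("activation"), activation_type)
              .Input(input)
              .Finalize(graph, &activation));
      return activation;
    }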
/external/tensorflow/tensorflow/lite/kernels/
l2norm_test.cc
   35  const ActivationFunctionType activation_type) {   // in L2NormOpModel()
   50  CreateL2NormOptions(builder_, activation_type).Union());   // in L2NormOpModel()
add_test.cc
   34  ActivationFunctionType activation_type) {   // in BaseAddOpModel()
   39  CreateAddOptions(builder_, activation_type).Union());   // in BaseAddOpModel()
   69  ActivationFunctionType activation_type)   // in QuantizedAddOpModel()
   73  activation_type) {}   // in QuantizedAddOpModel()
sub_test.cc
   34  ActivationFunctionType activation_type) {   // in BaseSubOpModel()
   39  CreateSubOptions(builder_, activation_type).Union());   // in BaseSubOpModel()
   76  ActivationFunctionType activation_type)   // in QuantizedSubOpModel()
   80  activation_type) {}   // in QuantizedSubOpModel()
div_test.cc
   34  ActivationFunctionType activation_type) {   // in BaseDivOpModel()
   39  CreateDivOptions(builder_, activation_type).Union());   // in BaseDivOpModel()
mul_test.cc
   35  ActivationFunctionType activation_type) {   // in BaseMulOpModel()
   40  CreateMulOptions(builder_, activation_type).Union());   // in BaseMulOpModel()
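Every binary-op test in this directory follows the same shape: a Base*OpModel wrapping SingleOpModel that threads the fused activation into the op's options table. A condensed sketch based on the add_test.cc hits above, with the tensor bookkeeping simplified:

    #include "tensorflow/lite/kernels/test_util.h"
    #include "tensorflow/lite/schema/schema_generated.h"

    // Condensed version of BaseAddOpModel: the activation_type argument ends
    // up in the AddOptions table via CreateAddOptions(...).Union().
    class BaseAddOpModel : public tflite::SingleOpModel {
     public:
      BaseAddOpModel(const tflite::TensorData& input1,
                     const tflite::TensorData& input2,
                     const tflite::TensorData& output,
                     tflite::ActivationFunctionType activation_type) {
        input1_ = AddInput(input1);
        input2_ = AddInput(input2);
        output_ = AddOutput(output);
        SetBuiltinOp(tflite::BuiltinOperator_ADD,
                     tflite::BuiltinOptions_AddOptions,
                     tflite::CreateAddOptions(builder_, activation_type).Union());
        BuildInterpreter({GetShape(input1_), GetShape(input2_)});
      }

     protected:
      int input1_, input2_, output_;
    };

sub_test.cc, div_test.cc, and mul_test.cc differ only in the builtin operator and the corresponding CreateSubOptions/CreateDivOptions/CreateMulOptions call.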
/external/tensorflow/tensorflow/lite/micro/kernels/
div_test.cc
   32  ActivationFunctionType activation_type) {   // in BaseDivOpModel()
   37  CreateDivOptions(builder_, activation_type).Union());   // in BaseDivOpModel()
/external/tensorflow/tensorflow/lite/kernels/internal/
types.h
  769  FusedActivationFunctionType activation_type;   // struct member
/external/tensorflow/tensorflow/lite/micro/kernels/ceva/
types.h
  918  FusedActivationFunctionType activation_type;   // struct member
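Unlike the string and FlatBuffer-enum forms above, these two types.h hits (the CEVA copy mirrors the main header) store the fused activation as a plain struct member. A sketch of that shape; the enum is modeled on the one in tensorflow/lite/kernels/internal/types.h, while the struct name here is hypothetical, since the listing does not show which params struct line 769 belongs to:

    #include <cstdint>

    // Modeled on the enum in tensorflow/lite/kernels/internal/types.h.
    enum class FusedActivationFunctionType : uint8_t { kNone, kRelu6, kRelu1, kRelu };

    // Hypothetical params struct carrying the member shown at line 769 above.
    struct BinaryOpParams {
      FusedActivationFunctionType activation_type;
      // ... remaining kernel parameters elided ...
    };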
/external/tensorflow/tensorflow/compiler/tf2tensorrt/convert/
convert_nodes.cc
 1797  auto activation_type = static_cast<const nvinfer1::IActivationLayer*>(layer)   // in IsClipOrRelu(), local
 1800  return activation_type == nvinfer1::ActivationType::kRELU ||   // in IsClipOrRelu()
 1801  activation_type == nvinfer1::ActivationType::kCLIP;   // in IsClipOrRelu()
 1803  return activation_type == nvinfer1::ActivationType::kRELU;   // in IsClipOrRelu()
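The final hit checks whether a TensorRT layer is a ReLU, or a Clip on builds where kCLIP exists. A reconstruction of IsClipOrRelu from the fragments above; the version guard is an assumption inferred from the two return statements, with IS_TRT_VERSION_GE taken from tf2tensorrt's utilities:

    #include "NvInfer.h"

    // Reconstructed sketch: true if `layer` is an activation layer computing
    // ReLU (or Clip, when the linked TensorRT is new enough to define kCLIP).
    bool IsClipOrRelu(const nvinfer1::ILayer* layer) {
      if (layer->getType() != nvinfer1::LayerType::kACTIVATION) return false;
      auto activation_type =
          static_cast<const nvinfer1::IActivationLayer*>(layer)->getActivationType();
    #if IS_TRT_VERSION_GE(5, 1, 2, 0)
      return activation_type == nvinfer1::ActivationType::kRELU ||
             activation_type == nvinfer1::ActivationType::kCLIP;
    #else
      return activation_type == nvinfer1::ActivationType::kRELU;
    #endif
    }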