
Searched refs:RELU (Results 1 – 25 of 62) sorted by relevance

/external/libaom/libaom/av1/encoder/
tx_prune_model_weights.h
65 RELU, // activation
129 RELU, // activation
195 RELU, // activation
279 RELU, // activation
366 RELU, // activation
430 RELU, // activation
516 RELU, // activation
600 RELU, // activation
686 RELU, // activation
770 RELU, // activation
[all …]
partition_cnn_weights.h
1023 RELU, // activation
1043 RELU, // activation
1063 RELU, // activation
1083 RELU, // activation
1103 RELU, // activation
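
The two libaom headers above are generated weight tables for the small neural networks used in the encoder's transform-type pruning and partition decisions; each hit tags a layer's activation as RELU. For reference, RELU computes f(x) = max(0, x). A minimal C sketch (the function name is illustrative, not taken from libaom):

    #include <stddef.h>

    /* RELU activation, f(x) = max(0, x), applied in place to a layer's
       n outputs. Illustrative only; the name is not from libaom. */
    static void apply_relu(float *out, size_t n) {
      for (size_t i = 0; i < n; ++i) {
        if (out[i] < 0.0f) out[i] = 0.0f;
      }
    }
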
/external/tensorflow/tensorflow/lite/delegates/gpu/gl/kernels/
relu_test.cc
46 OperationType op_type = OperationType::RELU; in TEST_F()
59 OperationType op_type = OperationType::RELU; in TEST_F()
72 OperationType op_type = OperationType::RELU; in TEST_F()
85 OperationType op_type = OperationType::RELU; in TEST_F()
/external/XNNPACK/src/f32-vbinary/
vopc-scalar.c.in
9 $assert ACTIVATION in ["LINEAR", "MINMAX", "RELU"]
30 $SUFFIX = {"LINEAR": "", "RELU": "_relu", "MINMAX": "_minmax"}[ACTIVATION]
31 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
69 $elif ACTIVATION == "RELU":
87 $elif ACTIVATION == "RELU":
100 $elif ACTIVATION == "RELU":
113 $elif ACTIVATION == "RELU":
vop-scalar.c.in
9 $assert ACTIVATION in ["LINEAR", "MINMAX", "RELU"]
28 $SUFFIX = {"LINEAR": "", "RELU": "_relu", "MINMAX": "_minmax"}[ACTIVATION]
29 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
70 $elif ACTIVATION == "RELU":
89 $elif ACTIVATION == "RELU":
103 $elif ACTIVATION == "RELU":
117 $elif ACTIVATION == "RELU":
vopc-wasmsimd.c.in
10 $assert ACTIVATION in ["LINEAR", "MINMAX", "RELU"]
30 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
31 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] and OP not in ["MIN", "MAX"] else "_x86" if X…
33 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
50 $elif ACTIVATION == "RELU":
96 $elif ACTIVATION == "RELU":
130 $elif ACTIVATION == "RELU":
159 $elif ACTIVATION == "RELU":
vop-wasmsimd.c.in
10 $assert ACTIVATION in ["LINEAR", "MINMAX", "RELU"]
28 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] and OP not in ["MIN", "MAX"] else "_x86" if X…
30 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
47 $elif ACTIVATION == "RELU":
98 $elif ACTIVATION == "RELU":
135 $elif ACTIVATION == "RELU":
165 $elif ACTIVATION == "RELU":
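
The four .c.in templates above are expanded by a Python generator; the ACTIVATION parameter selects a LINEAR (no clamping), RELU, or MINMAX variant of the same elementwise binary kernel, with $SUFFIX and $PARAMS choosing the generated function name and parameter struct. A hedged sketch of what the RELU branch amounts to for a scalar add kernel (the name is invented, not a generated XNNPACK symbol):

    #include <stddef.h>

    /* RELU-fused elementwise add with a batch tile of 1:
       acc = a[i] + b[i], then clamp below at zero. */
    static void vadd_relu_scalar(size_t n, const float *a, const float *b,
                                 float *y) {
      for (size_t i = 0; i < n; ++i) {
        const float acc = a[i] + b[i];
        y[i] = acc > 0.0f ? acc : 0.0f;
      }
    }
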
/external/XNNPACK/scripts/
generate-f32-vbinary.sh
26 …ary/vop-scalar.c.in -D OP=ADD -D BATCH_TILE=1 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
27 …ary/vop-scalar.c.in -D OP=ADD -D BATCH_TILE=2 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
28 …ary/vop-scalar.c.in -D OP=ADD -D BATCH_TILE=4 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
29 …ary/vop-scalar.c.in -D OP=ADD -D BATCH_TILE=8 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
30 …ary/vop-scalar.c.in -D OP=DIV -D BATCH_TILE=1 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
31 …ary/vop-scalar.c.in -D OP=DIV -D BATCH_TILE=2 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
32 …ary/vop-scalar.c.in -D OP=DIV -D BATCH_TILE=4 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
33 …ary/vop-scalar.c.in -D OP=DIV -D BATCH_TILE=8 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
34 …ary/vop-scalar.c.in -D OP=MUL -D BATCH_TILE=1 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
35 …ary/vop-scalar.c.in -D OP=MUL -D BATCH_TILE=2 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
[all …]
/external/tensorflow/tensorflow/compiler/mlir/lite/tests/
raise-custom-ops.mlir
8 …%2 = "tf.MyCustomOp"(%1, %0) {fused_activation_function = "RELU", int_attr = 2 : i32} : (tensor<4…
10 …"tf.MyCustomOp"(%1, %0) {fused_activation_function = "RELU", int_attr = 2 : i32} : (tensor<4xf32>…
16 …:.*]] = "tf.MyCustomOp"(%[[MUL]], %[[CST]]) {fused_activation_function = "RELU", int_attr = 2 : i3…
/external/XNNPACK/src/f32-dwconv/
up-wasmsimd.c.in
17 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
19 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
20 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
39 $elif ACTIVATION == "RELU":
96 $elif ACTIVATION == "RELU":
140 $elif ACTIVATION == "RELU":
175 $elif ACTIVATION == "RELU":
/external/tensorflow/tensorflow/compiler/mlir/lite/tests/flatbuffer2mlir/
basic_lstm.mlir
6 …tm"({{.*}}) {cell_clip = 1.000000e+00 : f32, fused_activation_function = "RELU", kernel_type = "BA…
8 …%0:4 = "tfl.basic_lstm"(%arg0, %arg1, %arg2, %arg3, %arg4) {fused_activation_function = "RELU", ce…
/external/tensorflow/tensorflow/lite/delegates/xnnpack/
README.md
156 * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
163 * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
175 * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
188 * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
194 * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
206 * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
229 * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
253 * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
274 ### `RELU`
324 * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
[all …]
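
The README hits above enumerate which fused activations each XNNPACK-delegated operator accepts. The three RELU variants differ only in their clamp ranges; as a quick reference (a sketch, not the delegate's code):

    #include <math.h>

    /* Clamp ranges of the three fused RELU variants. */
    static float relu(float x)         { return fmaxf(x, 0.0f); }              /* [0, inf) */
    static float relu_n1_to_1(float x) { return fminf(fmaxf(x, -1.0f), 1.0f); } /* [-1, 1] */
    static float relu6(float x)        { return fminf(fmaxf(x, 0.0f), 6.0f); }  /* [0, 6]  */
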
/external/XNNPACK/src/f32-gemm/
scalar.c.in
12 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
16 $SUFFIX = {"LINEAR": "", "RELU": "_relu", "MINMAX": "_minmax"}[ACTIVATION]
17 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
105 $elif ACTIVATION == "RELU":
MRx2c4-wasmsimd.c.in
15 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
17 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
18 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
133 $elif ACTIVATION == "RELU":
wasmsimd-loadsplat.c.in
15 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
17 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
18 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
119 $elif ACTIVATION == "RELU":
wasmsimd-splat.c.in
15 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
17 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
18 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
140 $elif ACTIVATION == "RELU":
wasmsimd-s4.c.in
15 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
17 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
18 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
142 $elif ACTIVATION == "RELU":
/external/XNNPACK/src/f32-igemm/
scalar.c.in
12 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
15 $SUFFIX = {"LINEAR": "", "RELU": "_relu", "MINMAX": "_minmax"}[ACTIVATION]
16 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
106 $elif ACTIVATION == "RELU":
MRx2c4-wasmsimd.c.in
15 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
17 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
18 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
144 $elif ACTIVATION == "RELU":
wasmsimd-loadsplat.c.in
15 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
17 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
18 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
122 $elif ACTIVATION == "RELU":
wasmsimd-splat.c.in
15 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
17 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
18 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
142 $elif ACTIVATION == "RELU":
wasmsimd-s4.c.in
15 $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
17 $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
18 $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
145 $elif ACTIVATION == "RELU":
/external/tensorflow/tensorflow/core/util/
activation_mode.h
33 RELU = 2, enumerator
activation_mode.cc
30 *value = RELU; in GetActivationModeFromString()
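
In tensorflow/core/util, RELU is value 2 of an activation-mode enum, and GetActivationModeFromString() maps a string name onto it. A minimal sketch of that enum-plus-parser pattern (only RELU = 2 and the function's role are confirmed by the hits above; the other names and the "Relu" spelling are assumptions):

    #include <string.h>

    enum ActivationMode { ACTIVATION_NONE = 0, ACTIVATION_RELU = 2 };

    /* Returns 0 on success, -1 for an unrecognized activation name.
       The "Relu" spelling is assumed, not confirmed by the hits. */
    static int activation_mode_from_string(const char *s,
                                           enum ActivationMode *value) {
      if (strcmp(s, "Relu") == 0) { *value = ACTIVATION_RELU; return 0; }
      return -1;
    }
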
/external/tensorflow/tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/
svdf.mlir
63 // CHECK-NEXT: fused_activation_function: RELU
96 …%1 = "tfl.svdf"(%arg0, %arg1, %arg2, %arg3, %0) {fused_activation_function = "RELU", rank = 2 : i3…
