/external/libaom/libaom/av1/encoder/
  tx_prune_model_weights.h
    65:   RELU,  // activation
    129:  RELU,  // activation
    195:  RELU,  // activation
    279:  RELU,  // activation
    366:  RELU,  // activation
    430:  RELU,  // activation
    516:  RELU,  // activation
    600:  RELU,  // activation
    686:  RELU,  // activation
    770:  RELU,  // activation
    [all …]
  partition_cnn_weights.h
    1023: RELU,  // activation
    1043: RELU,  // activation
    1063: RELU,  // activation
    1083: RELU,  // activation
    1103: RELU,  // activation

/external/tensorflow/tensorflow/lite/delegates/gpu/gl/kernels/
  relu_test.cc
    46: OperationType op_type = OperationType::RELU;  (in TEST_F())
    59: OperationType op_type = OperationType::RELU;  (in TEST_F())
    72: OperationType op_type = OperationType::RELU;  (in TEST_F())
    85: OperationType op_type = OperationType::RELU;  (in TEST_F())

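The four TEST_F cases above all exercise OperationType::RELU, i.e. elementwise max(x, 0). For reference, a minimal scalar sketch of that operation in C (illustrative only; the GPU delegate executes it as a compiled shader, not as C):

```c
#include <stddef.h>
#include <math.h>

/* Reference elementwise RELU: y[i] = max(x[i], 0).
   Sketch of the operation under test, not the delegate's code path. */
static void relu_f32(const float* x, float* y, size_t n) {
  for (size_t i = 0; i < n; i++) {
    y[i] = fmaxf(x[i], 0.0f);
  }
}
```
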
/external/XNNPACK/src/f32-vbinary/
  vopc-scalar.c.in
    9:   $assert ACTIVATION in ["LINEAR", "MINMAX", "RELU"]
    30:  $SUFFIX = {"LINEAR": "", "RELU": "_relu", "MINMAX": "_minmax"}[ACTIVATION]
    31:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    69, 87, 100, 113:  $elif ACTIVATION == "RELU":
  vop-scalar.c.in
    9:   $assert ACTIVATION in ["LINEAR", "MINMAX", "RELU"]
    28:  $SUFFIX = {"LINEAR": "", "RELU": "_relu", "MINMAX": "_minmax"}[ACTIVATION]
    29:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    70, 89, 103, 117:  $elif ACTIVATION == "RELU":
  vopc-wasmsimd.c.in
    10:  $assert ACTIVATION in ["LINEAR", "MINMAX", "RELU"]
    30:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    31:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] and OP not in ["MIN", "MAX"] else "_x86" if X…
    33:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    50, 96, 130, 159:  $elif ACTIVATION == "RELU":
  vop-wasmsimd.c.in
    10:  $assert ACTIVATION in ["LINEAR", "MINMAX", "RELU"]
    28:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] and OP not in ["MIN", "MAX"] else "_x86" if X…
    30:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    47, 98, 135, 165:  $elif ACTIVATION == "RELU":

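In these templates, $SUFFIX and $PARAMS pick the generated function's name suffix (_relu) and parameter struct (xnn_f32_relu_params) per ACTIVATION, and each $elif ACTIVATION == "RELU": branch emits a clamp at zero after the binary op. A minimal sketch of what one generated variant roughly reduces to, assuming OP=ADD and BATCH_TILE=1 (name and signature simplified for illustration; real XNNPACK microkernels also take a params struct and unroll by BATCH_TILE):

```c
#include <stddef.h>
#include <math.h>

/* Rough shape of a generated scalar RELU vbinary kernel (hypothetical name). */
static void f32_vadd_relu_scalar_x1(size_t n, const float* a, const float* b, float* y) {
  for (size_t i = 0; i < n; i++) {
    float vacc = a[i] + b[i];
    vacc = fmaxf(vacc, 0.0f);  /* emitted by the $elif ACTIVATION == "RELU" branch */
    y[i] = vacc;
  }
}
```
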
/external/XNNPACK/scripts/
  generate-f32-vbinary.sh
    26:  …ary/vop-scalar.c.in -D OP=ADD -D BATCH_TILE=1 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    27:  …ary/vop-scalar.c.in -D OP=ADD -D BATCH_TILE=2 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    28:  …ary/vop-scalar.c.in -D OP=ADD -D BATCH_TILE=4 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    29:  …ary/vop-scalar.c.in -D OP=ADD -D BATCH_TILE=8 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    30:  …ary/vop-scalar.c.in -D OP=DIV -D BATCH_TILE=1 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    31:  …ary/vop-scalar.c.in -D OP=DIV -D BATCH_TILE=2 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    32:  …ary/vop-scalar.c.in -D OP=DIV -D BATCH_TILE=4 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    33:  …ary/vop-scalar.c.in -D OP=DIV -D BATCH_TILE=8 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    34:  …ary/vop-scalar.c.in -D OP=MUL -D BATCH_TILE=1 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    35:  …ary/vop-scalar.c.in -D OP=MUL -D BATCH_TILE=2 -D WASM=0 -D ACTIVATION=RELU -o src/f32-vbinary/…
    [all …]

/external/tensorflow/tensorflow/compiler/mlir/lite/tests/
  raise-custom-ops.mlir
    8:   …%2 = "tf.MyCustomOp"(%1, %0) {fused_activation_function = "RELU", int_attr = 2 : i32} : (tensor<4…
    10:  …"tf.MyCustomOp"(%1, %0) {fused_activation_function = "RELU", int_attr = 2 : i32} : (tensor<4xf32>…
    16:  …:.*]] = "tf.MyCustomOp"(%[[MUL]], %[[CST]]) {fused_activation_function = "RELU", int_attr = 2 : i3…

/external/XNNPACK/src/f32-dwconv/
  up-wasmsimd.c.in
    17:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    19:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
    20:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    39, 96, 140, 175:  $elif ACTIVATION == "RELU":

/external/tensorflow/tensorflow/compiler/mlir/lite/tests/flatbuffer2mlir/
  basic_lstm.mlir
    6:  …tm"({{.*}}) {cell_clip = 1.000000e+00 : f32, fused_activation_function = "RELU", kernel_type = "BA…
    8:  …%0:4 = "tfl.basic_lstm"(%arg0, %arg1, %arg2, %arg3, %arg4) {fused_activation_function = "RELU", ce…

/external/tensorflow/tensorflow/lite/delegates/xnnpack/
  README.md
    156, 163, 175, 188, 194, 206, 229, 253:  * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
    274:  ### `RELU`
    324:  * Fused `NONE`, `RELU`, `RELU_N1_TO_1`, and `RELU6` activations are supported,
    [all …]

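The README entries above repeat one rule: most delegated operators accept a fused NONE, RELU, RELU_N1_TO_1, or RELU6 activation. A common way to lower such a fused activation is to turn it into output clamp bounds; a hedged sketch, with illustrative enum and function names rather than the TFLite identifiers:

```c
#include <math.h>

/* Map a fused activation to [min, max] output clamp bounds:
   RELU -> [0, +inf), RELU_N1_TO_1 -> [-1, 1], RELU6 -> [0, 6]. */
typedef enum { ACT_NONE, ACT_RELU, ACT_RELU_N1_TO_1, ACT_RELU6 } fused_activation;

static void activation_bounds(fused_activation act, float* out_min, float* out_max) {
  switch (act) {
    case ACT_RELU:         *out_min = 0.0f;      *out_max = INFINITY; break;
    case ACT_RELU_N1_TO_1: *out_min = -1.0f;     *out_max = 1.0f;     break;
    case ACT_RELU6:        *out_min = 0.0f;      *out_max = 6.0f;     break;
    default:               *out_min = -INFINITY; *out_max = INFINITY; break;
  }
}
```
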
/external/XNNPACK/src/f32-gemm/
  scalar.c.in
    12:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    16:  $SUFFIX = {"LINEAR": "", "RELU": "_relu", "MINMAX": "_minmax"}[ACTIVATION]
    17:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    105: $elif ACTIVATION == "RELU":
  MRx2c4-wasmsimd.c.in
    15:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    17:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
    18:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    133: $elif ACTIVATION == "RELU":
  wasmsimd-loadsplat.c.in
    15:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    17:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
    18:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    119: $elif ACTIVATION == "RELU":
  wasmsimd-splat.c.in
    15:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    17:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
    18:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    140: $elif ACTIVATION == "RELU":
  wasmsimd-s4.c.in
    15:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    17:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
    18:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    142: $elif ACTIVATION == "RELU":

/external/XNNPACK/src/f32-igemm/
  scalar.c.in
    12:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    15:  $SUFFIX = {"LINEAR": "", "RELU": "_relu", "MINMAX": "_minmax"}[ACTIVATION]
    16:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    106: $elif ACTIVATION == "RELU":
  MRx2c4-wasmsimd.c.in
    15:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    17:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
    18:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    144: $elif ACTIVATION == "RELU":
  wasmsimd-loadsplat.c.in
    15:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    17:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
    18:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    122: $elif ACTIVATION == "RELU":
  wasmsimd-splat.c.in
    15:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    17:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
    18:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    142: $elif ACTIVATION == "RELU":
  wasmsimd-s4.c.in
    15:  $assert ACTIVATION in ["LINEAR", "RELU", "MINMAX"]
    17:  $ARCH_SUFFIX = "" if ACTIVATION in ["LINEAR", "RELU"] else "_x86" if X86 else "_arm"
    18:  $PARAMS = {"LINEAR": "xnn_f32_default_params", "RELU": "xnn_f32_relu_params", "MINMAX": "xnn_f32_mi…
    145: $elif ACTIVATION == "RELU":

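Across these f32-gemm and f32-igemm templates, the $ARCH_SUFFIX line shows that LINEAR and RELU variants need no _arm/_x86 split (only MINMAX does), and each $elif ACTIVATION == "RELU": branch clamps the accumulators at zero. A sketch of that epilogue with wasm intrinsics, assuming a WebAssembly SIMD target (names illustrative, not the generated code):

```c
#include <wasm_simd128.h>

/* RELU epilogue on a SIMD accumulator: max(vacc, 0) lane-wise.
   Compiles only when targeting WebAssembly with SIMD enabled. */
static inline v128_t relu_epilogue(v128_t vacc) {
  const v128_t vzero = wasm_f32x4_const_splat(0.0f);
  return wasm_f32x4_max(vacc, vzero);
}
```
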
/external/tensorflow/tensorflow/core/util/
  activation_mode.h
    33:  RELU = 2,  (enumerator)
  activation_mode.cc
    30:  *value = RELU;  (in GetActivationModeFromString())

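activation_mode.h defines RELU = 2, and GetActivationModeFromString() in activation_mode.cc assigns it when the matching name is parsed. A minimal C sketch of that lookup; only RELU = 2 is confirmed by the hits above, and the string spelling "Relu", the other enum members, and the return convention are assumptions (the real function covers the full enum and returns a Status):

```c
#include <string.h>

typedef enum { ACTIVATION_RELU = 2 } activation_mode_t;  /* other members omitted */

/* Sketch of the string-to-enum mapping; 0 = success, -1 = unknown name. */
static int activation_mode_from_string(const char* s, activation_mode_t* value) {
  if (strcmp(s, "Relu") == 0) {  /* spelling assumed */
    *value = ACTIVATION_RELU;
    return 0;
  }
  return -1;
}
```
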
/external/tensorflow/tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/
  svdf.mlir
    63:  // CHECK-NEXT: fused_activation_function: RELU
    96:  …%1 = "tfl.svdf"(%arg0, %arg1, %arg2, %arg3, %0) {fused_activation_function = "RELU", rank = 2 : i3…