
Searched refs:vacc30 (Results 1 – 22 of 22) sorted by relevance

/external/XNNPACK/src/f32-gemm/gen/
4x2-minmax-wasm.c
68 float vacc30 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__wasm() local
88 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
100 vacc30 = __builtin_wasm_max_f32(vacc30, vmin); in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
109 vacc30 = __builtin_wasm_min_f32(vacc30, vmax); in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
113 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
134 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
4x2-minmax-scalar.c
68 float vacc30 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__scalar() local
88 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
100 vacc30 = math_max_f32(vacc30, vmin); in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
109 vacc30 = math_min_f32(vacc30, vmax); in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
113 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
134 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
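
The -wasm and -scalar files above hit the same pattern; the only difference is how the clamp is spelled: __builtin_wasm_max_f32/__builtin_wasm_min_f32 (Clang's WebAssembly builtins) versus XNNPACK's scalar math_max_f32/math_min_f32 helpers. A minimal sketch of how such a choice could be abstracted behind one macro pair, assuming standard fmaxf/fminf as a stand-in for the scalar helpers (note the wasm builtins follow the f32.max/f32.min instruction semantics, so NaN handling is not identical):

#if defined(__wasm__)
  /* wasm builds lower the clamp straight to the f32.max/f32.min instructions */
  #define CLAMP_MAX_F32(x, y) __builtin_wasm_max_f32((x), (y))
  #define CLAMP_MIN_F32(x, y) __builtin_wasm_min_f32((x), (y))
#else
  #include <math.h>
  /* stand-in for XNNPACK's scalar math_max_f32/math_min_f32 helpers */
  #define CLAMP_MAX_F32(x, y) fmaxf((x), (y))
  #define CLAMP_MIN_F32(x, y) fminf((x), (y))
#endif

/* usage, matching the clamp hits in the files above:
   vacc30 = CLAMP_MAX_F32(vacc30, vmin);
   vacc30 = CLAMP_MIN_F32(vacc30, vmax);                                    */
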
4x4-minmax-wasm.c
74 float vacc30 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__wasm() local
104 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
124 vacc30 = __builtin_wasm_max_f32(vacc30, vmin); in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
141 vacc30 = __builtin_wasm_min_f32(vacc30, vmax); in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
147 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
176 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
178 vacc30 = vacc32; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
194 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
4x4-minmax-scalar.c
74 float vacc30 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__scalar() local
104 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
124 vacc30 = math_max_f32(vacc30, vmin); in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
141 vacc30 = math_min_f32(vacc30, vmax); in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
147 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
176 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
178 vacc30 = vacc32; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
194 c3[0] = vacc30; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
4x4-relu-wasm.c
72 float vacc30 = vacc00; in xnn_f32_gemm_relu_ukernel_4x4__wasm() local
102 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_relu_ukernel_4x4__wasm()
122 vacc30 = __builtin_wasm_max_f32(vacc30, 0.0f); in xnn_f32_gemm_relu_ukernel_4x4__wasm()
128 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
157 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
159 vacc30 = vacc32; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
175 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
4x4-relu-scalar.c
72 float vacc30 = vacc00; in xnn_f32_gemm_relu_ukernel_4x4__scalar() local
102 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_relu_ukernel_4x4__scalar()
122 vacc30 = math_max_f32(vacc30, 0.0f); in xnn_f32_gemm_relu_ukernel_4x4__scalar()
128 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
157 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
159 vacc30 = vacc32; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
175 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
4x2-relu-scalar.c
66 float vacc30 = vacc00; in xnn_f32_gemm_relu_ukernel_4x2__scalar() local
86 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
98 vacc30 = math_max_f32(vacc30, 0.0f); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
102 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x2__scalar()
123 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x2__scalar()
4x2-relu-wasm.c
66 float vacc30 = vacc00; in xnn_f32_gemm_relu_ukernel_4x2__wasm() local
86 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
98 vacc30 = __builtin_wasm_max_f32(vacc30, 0.0f); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
102 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x2__wasm()
123 c3[0] = vacc30; in xnn_f32_gemm_relu_ukernel_4x2__wasm()
4x4-scalar.c
72 float vacc30 = vacc00; in xnn_f32_gemm_ukernel_4x4__scalar() local
102 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_ukernel_4x4__scalar()
112 c3[0] = vacc30; in xnn_f32_gemm_ukernel_4x4__scalar()
141 c3[0] = vacc30; in xnn_f32_gemm_ukernel_4x4__scalar()
143 vacc30 = vacc32; in xnn_f32_gemm_ukernel_4x4__scalar()
159 c3[0] = vacc30; in xnn_f32_gemm_ukernel_4x4__scalar()
4x2-scalar.c
66 float vacc30 = vacc00; in xnn_f32_gemm_ukernel_4x2__scalar() local
86 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemm_ukernel_4x2__scalar()
94 c3[0] = vacc30; in xnn_f32_gemm_ukernel_4x2__scalar()
115 c3[0] = vacc30; in xnn_f32_gemm_ukernel_4x2__scalar()
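
Taken together, the hits above trace the life cycle of vacc30, the accumulator for output row 3, column 0: it is declared from the row-0 bias copy (float vacc30 = vacc00;), updated once per K element with math_muladd_f32, optionally clamped (minmax) or rectified (relu), and finally stored to c3[0]. A minimal, self-contained sketch of a scalar 4x2 min/max tile in the same naming scheme, with standard fmaf/fmaxf/fminf standing in for XNNPACK's math_* helpers; this is illustrative, not the XNNPACK source, and weight packing, byte strides, and remainder handling are simplified:

#include <math.h>
#include <stddef.h>

/* Simplified scalar 4x2 min/max GEMM tile; vaccRC = accumulator for
   output row R, column C, as in the search hits.                           */
void gemm_minmax_4x2_sketch(
    size_t kc,                        /* K elements per row of A            */
    const float* a, size_t a_stride,  /* 4 rows of A, stride in bytes       */
    const float* w,                   /* packed weights: 2 biases, then kc pairs */
    float* c, size_t c_stride,        /* 4x2 output tile, stride in bytes   */
    float vmin, float vmax)
{
  const float* a0 = a;
  const float* a1 = (const float*) ((const char*) a0 + a_stride);
  const float* a2 = (const float*) ((const char*) a1 + a_stride);
  const float* a3 = (const float*) ((const char*) a2 + a_stride);
  float* c0 = c;
  float* c1 = (float*) ((char*) c0 + c_stride);
  float* c2 = (float*) ((char*) c1 + c_stride);
  float* c3 = (float*) ((char*) c2 + c_stride);

  /* Row 0 loads the packed biases; rows 1-3 copy them, which is the
     "float vacc30 = vacc00;" declaration the search finds in every file.   */
  float vacc00 = w[0], vacc01 = w[1];
  float vacc10 = vacc00, vacc11 = vacc01;
  float vacc20 = vacc00, vacc21 = vacc01;
  float vacc30 = vacc00, vacc31 = vacc01;
  w += 2;

  for (size_t k = 0; k < kc; k++) {
    const float va0 = *a0++, va1 = *a1++, va2 = *a2++, va3 = *a3++;
    const float vb0 = w[0], vb1 = w[1];
    w += 2;
    vacc00 = fmaf(va0, vb0, vacc00);  vacc01 = fmaf(va0, vb1, vacc01);
    vacc10 = fmaf(va1, vb0, vacc10);  vacc11 = fmaf(va1, vb1, vacc11);
    vacc20 = fmaf(va2, vb0, vacc20);  vacc21 = fmaf(va2, vb1, vacc21);
    vacc30 = fmaf(va3, vb0, vacc30);  vacc31 = fmaf(va3, vb1, vacc31);
  }

  /* minmax variants clamp to [vmin, vmax]; relu variants use max(x, 0.0f),
     and the plain variants skip this step entirely.                        */
  vacc00 = fminf(fmaxf(vacc00, vmin), vmax);
  vacc01 = fminf(fmaxf(vacc01, vmin), vmax);
  vacc10 = fminf(fmaxf(vacc10, vmin), vmax);
  vacc11 = fminf(fmaxf(vacc11, vmin), vmax);
  vacc20 = fminf(fmaxf(vacc20, vmin), vmax);
  vacc21 = fminf(fmaxf(vacc21, vmin), vmax);
  vacc30 = fminf(fmaxf(vacc30, vmin), vmax);
  vacc31 = fminf(fmaxf(vacc31, vmin), vmax);

  /* Store the 4x2 tile; "c3[0] = vacc30;" is the store the search finds.   */
  c0[0] = vacc00;  c0[1] = vacc01;
  c1[0] = vacc10;  c1[1] = vacc11;
  c2[0] = vacc20;  c2[1] = vacc21;
  c3[0] = vacc30;  c3[1] = vacc31;
}

In the 4x4 files, the extra "vacc30 = vacc32;" hits appear to come from column-remainder handling: when fewer than four output columns are left, the kernel stores the low columns and shifts the upper accumulators down before storing again.
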
/external/XNNPACK/src/f32-igemm/gen/
4x2-minmax-scalar.c
65 float vacc30 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__scalar() local
110 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
124 vacc30 = math_max_f32(vacc30, vmin); in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
133 vacc30 = math_min_f32(vacc30, vmax); in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
137 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
154 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
4x2-minmax-wasm.c
65 float vacc30 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__wasm() local
110 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
124 vacc30 = __builtin_wasm_max_f32(vacc30, vmin); in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
133 vacc30 = __builtin_wasm_min_f32(vacc30, vmax); in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
137 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
154 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
4x4-minmax-wasm.c
71 float vacc30 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x4__wasm() local
126 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
148 vacc30 = __builtin_wasm_max_f32(vacc30, vmin); in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
165 vacc30 = __builtin_wasm_min_f32(vacc30, vmax); in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
171 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
196 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
198 vacc30 = vacc32; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
214 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
4x4-minmax-scalar.c
71 float vacc30 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x4__scalar() local
126 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
148 vacc30 = math_max_f32(vacc30, vmin); in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
165 vacc30 = math_min_f32(vacc30, vmax); in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
171 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
196 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
198 vacc30 = vacc32; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
214 c3[0] = vacc30; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
4x4-relu-wasm.c
69 float vacc30 = vacc00; in xnn_f32_igemm_relu_ukernel_4x4__wasm() local
124 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_relu_ukernel_4x4__wasm()
146 vacc30 = __builtin_wasm_max_f32(vacc30, 0.0f); in xnn_f32_igemm_relu_ukernel_4x4__wasm()
152 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x4__wasm()
177 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x4__wasm()
179 vacc30 = vacc32; in xnn_f32_igemm_relu_ukernel_4x4__wasm()
195 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x4__wasm()
4x4-relu-scalar.c
69 float vacc30 = vacc00; in xnn_f32_igemm_relu_ukernel_4x4__scalar() local
124 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_relu_ukernel_4x4__scalar()
146 vacc30 = math_max_f32(vacc30, 0.0f); in xnn_f32_igemm_relu_ukernel_4x4__scalar()
152 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x4__scalar()
177 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x4__scalar()
179 vacc30 = vacc32; in xnn_f32_igemm_relu_ukernel_4x4__scalar()
195 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x4__scalar()
4x2-relu-scalar.c
63 float vacc30 = vacc00; in xnn_f32_igemm_relu_ukernel_4x2__scalar() local
108 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_relu_ukernel_4x2__scalar()
122 vacc30 = math_max_f32(vacc30, 0.0f); in xnn_f32_igemm_relu_ukernel_4x2__scalar()
126 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x2__scalar()
143 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x2__scalar()
4x2-relu-wasm.c
63 float vacc30 = vacc00; in xnn_f32_igemm_relu_ukernel_4x2__wasm() local
108 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_relu_ukernel_4x2__wasm()
122 vacc30 = __builtin_wasm_max_f32(vacc30, 0.0f); in xnn_f32_igemm_relu_ukernel_4x2__wasm()
126 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x2__wasm()
143 c3[0] = vacc30; in xnn_f32_igemm_relu_ukernel_4x2__wasm()
4x4-scalar.c
69 float vacc30 = vacc00; in xnn_f32_igemm_ukernel_4x4__scalar() local
124 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_ukernel_4x4__scalar()
136 c3[0] = vacc30; in xnn_f32_igemm_ukernel_4x4__scalar()
161 c3[0] = vacc30; in xnn_f32_igemm_ukernel_4x4__scalar()
163 vacc30 = vacc32; in xnn_f32_igemm_ukernel_4x4__scalar()
179 c3[0] = vacc30; in xnn_f32_igemm_ukernel_4x4__scalar()
4x2-scalar.c
63 float vacc30 = vacc00; in xnn_f32_igemm_ukernel_4x2__scalar() local
108 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_igemm_ukernel_4x2__scalar()
118 c3[0] = vacc30; in xnn_f32_igemm_ukernel_4x2__scalar()
135 c3[0] = vacc30; in xnn_f32_igemm_ukernel_4x2__scalar()
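
The igemm hits follow the same accumulator pattern, but the declaration and the muladd sit further apart (e.g. line 65 versus 110) because the indirect-GEMM kernels first load a fresh set of four row pointers from an indirection buffer on every step of an outer loop; in convolution each step would point at a different spatial tap of the input, which is why these kernels take pointer sets rather than a dense A matrix. A small, self-contained sketch of that input layout, accumulating only the row-3/column-0 value to mirror vacc30 (all names and sizes here are illustrative, not the XNNPACK source):

#include <stdio.h>

#define MR 4   /* rows per tile           */
#define KS 2   /* indirection steps        */
#define KC 3   /* floats per row per step  */

int main(void) {
  /* Two sets of 4 input rows; an indirection buffer holds MR pointers per step. */
  float rows[KS][MR][KC] = {
    {{1, 2, 3}, {4, 5, 6}, {7, 8, 9}, {10, 11, 12}},
    {{1, 1, 1}, {2, 2, 2}, {3, 3, 3}, {4, 4, 4}},
  };
  const float* a[KS * MR];
  for (int s = 0; s < KS; s++)
    for (int m = 0; m < MR; m++)
      a[s * MR + m] = rows[s][m];

  float b[KS * KC];                       /* one weight column (column 0) */
  for (int i = 0; i < KS * KC; i++) b[i] = 1.0f;

  /* Accumulator for row 3, column 0 -- the vacc30 of the igemm hits. */
  float vacc30 = 0.0f;
  const float* const* ap = a;
  const float* w = b;
  for (int s = 0; s < KS; s++) {
    const float* a3 = ap[3];              /* row-3 pointer for this step */
    ap += MR;
    for (int k = 0; k < KC; k++) {
      vacc30 += a3[k] * w[k];
    }
    w += KC;
  }
  printf("vacc30 = %f\n", vacc30);        /* (10+11+12) + (4+4+4) = 45 */
  return 0;
}
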
/external/XNNPACK/src/f32-gemm/gen-inc/
4x4inc-minmax-wasm.c
75 float vacc30 = acc[12]; in xnn_f32_gemminc_minmax_ukernel_4x4__wasm() local
106 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemminc_minmax_ukernel_4x4__wasm()
126 vacc30 = __builtin_wasm_max_f32(vacc30, vmin); in xnn_f32_gemminc_minmax_ukernel_4x4__wasm()
143 vacc30 = __builtin_wasm_min_f32(vacc30, vmax); in xnn_f32_gemminc_minmax_ukernel_4x4__wasm()
149 c3[0] = vacc30; in xnn_f32_gemminc_minmax_ukernel_4x4__wasm()
178 c3[0] = vacc30; in xnn_f32_gemminc_minmax_ukernel_4x4__wasm()
180 vacc30 = vacc32; in xnn_f32_gemminc_minmax_ukernel_4x4__wasm()
196 c3[0] = vacc30; in xnn_f32_gemminc_minmax_ukernel_4x4__wasm()
4x4inc-minmax-scalar.c
75 float vacc30 = acc[12]; in xnn_f32_gemminc_minmax_ukernel_4x4__scalar() local
106 vacc30 = math_muladd_f32(va3, vb0, vacc30); in xnn_f32_gemminc_minmax_ukernel_4x4__scalar()
126 vacc30 = math_max_f32(vacc30, vmin); in xnn_f32_gemminc_minmax_ukernel_4x4__scalar()
143 vacc30 = math_min_f32(vacc30, vmax); in xnn_f32_gemminc_minmax_ukernel_4x4__scalar()
149 c3[0] = vacc30; in xnn_f32_gemminc_minmax_ukernel_4x4__scalar()
178 c3[0] = vacc30; in xnn_f32_gemminc_minmax_ukernel_4x4__scalar()
180 vacc30 = vacc32; in xnn_f32_gemminc_minmax_ukernel_4x4__scalar()
196 c3[0] = vacc30; in xnn_f32_gemminc_minmax_ukernel_4x4__scalar()
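
Unlike the other variants, the gen-inc kernels initialize the accumulators from a partial-sum buffer instead of the bias (float vacc30 = acc[12]; rather than = vacc00), which lets the reduction over K be split across multiple passes. The index 12 is consistent with a row-major 4x4 tile (row 3, column 0 -> 3 * 4 + 0); a tiny sketch of that indexing, with the layout inferred from the hit above rather than taken from the source:

#include <assert.h>

#define MR 4
#define NR 4

/* Row-major lookup into the MR x NR partial-sum tile. */
static inline float load_acc(const float* acc, int row, int col) {
  return acc[row * NR + col];
}

int main(void) {
  float acc[MR * NR];
  for (int i = 0; i < MR * NR; i++) acc[i] = (float) i;
  float vacc30 = load_acc(acc, 3, 0);
  assert(vacc30 == 12.0f);   /* the same element the gen-inc kernels read */
  return 0;
}
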