Searched refs:vacc00 (Results 1 – 25 of 55) sorted by relevance

/external/XNNPACK/src/f32-gemm/gen/
1x4-minmax-wasm.c:43 float vacc00 = w[0]; in xnn_f32_gemm_minmax_ukernel_1x4__wasm() local
59 vacc00 += va0 * vb0; in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
67 vacc00 = __builtin_wasm_max_f32(vacc00, vmin); in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
72 vacc00 = __builtin_wasm_min_f32(vacc00, vmax); in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
78 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
89 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
91 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
95 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
1x4-minmax-scalar.c:43 float vacc00 = w[0]; in xnn_f32_gemm_minmax_ukernel_1x4__scalar() local
59 vacc00 += va0 * vb0; in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
67 vacc00 = math_max_f32(vacc00, vmin); in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
72 vacc00 = math_min_f32(vacc00, vmax); in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
78 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
89 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
91 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
95 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
2x4-minmax-wasm.c:49 float vacc00 = w[0]; in xnn_f32_gemm_minmax_ukernel_2x4__wasm() local
54 float vacc10 = vacc00; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
70 vacc00 += va0 * vb0; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
82 vacc00 = __builtin_wasm_max_f32(vacc00, vmin); in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
91 vacc00 = __builtin_wasm_min_f32(vacc00, vmax); in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
106 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
122 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
124 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
129 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
2x4-minmax-scalar.c:49 float vacc00 = w[0]; in xnn_f32_gemm_minmax_ukernel_2x4__scalar() local
54 float vacc10 = vacc00; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
70 vacc00 += va0 * vb0; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
82 vacc00 = math_max_f32(vacc00, vmin); in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
91 vacc00 = math_min_f32(vacc00, vmax); in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
106 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
122 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
124 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
129 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
1x4-relu-scalar.c:41 float vacc00 = w[0]; in xnn_f32_gemm_relu_ukernel_1x4__scalar() local
57 vacc00 += va0 * vb0; in xnn_f32_gemm_relu_ukernel_1x4__scalar()
65 vacc00 = math_max_f32(vacc00, 0.0f); in xnn_f32_gemm_relu_ukernel_1x4__scalar()
71 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_1x4__scalar()
82 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_1x4__scalar()
84 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_1x4__scalar()
88 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_1x4__scalar()
1x4-relu-wasm.c:41 float vacc00 = w[0]; in xnn_f32_gemm_relu_ukernel_1x4__wasm() local
57 vacc00 += va0 * vb0; in xnn_f32_gemm_relu_ukernel_1x4__wasm()
65 vacc00 = __builtin_wasm_max_f32(vacc00, 0.0f); in xnn_f32_gemm_relu_ukernel_1x4__wasm()
71 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_1x4__wasm()
82 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_1x4__wasm()
84 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_1x4__wasm()
88 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_1x4__wasm()
4x2-minmax-scalar.c:61 float vacc00 = w[0]; in xnn_f32_gemm_minmax_ukernel_4x2__scalar() local
64 float vacc10 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
66 float vacc20 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
68 float vacc30 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
82 vacc00 += va0 * vb0; in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
94 vacc00 = math_max_f32(vacc00, vmin); in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
103 vacc00 = math_min_f32(vacc00, vmax); in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
122 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
137 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
4x2-minmax-wasm.c:61 float vacc00 = w[0]; in xnn_f32_gemm_minmax_ukernel_4x2__wasm() local
64 float vacc10 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
66 float vacc20 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
68 float vacc30 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
82 vacc00 += va0 * vb0; in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
94 vacc00 = __builtin_wasm_max_f32(vacc00, vmin); in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
103 vacc00 = __builtin_wasm_min_f32(vacc00, vmax); in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
122 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
137 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
2x4-relu-scalar.c:47 float vacc00 = w[0]; in xnn_f32_gemm_relu_ukernel_2x4__scalar() local
52 float vacc10 = vacc00; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
68 vacc00 += va0 * vb0; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
80 vacc00 = math_max_f32(vacc00, 0.0f); in xnn_f32_gemm_relu_ukernel_2x4__scalar()
95 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
111 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
113 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
118 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
2x4-relu-wasm.c:47 float vacc00 = w[0]; in xnn_f32_gemm_relu_ukernel_2x4__wasm() local
52 float vacc10 = vacc00; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
68 vacc00 += va0 * vb0; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
80 vacc00 = __builtin_wasm_max_f32(vacc00, 0.0f); in xnn_f32_gemm_relu_ukernel_2x4__wasm()
95 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
111 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
113 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
118 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
4x4-minmax-scalar.c:61 float vacc00 = w[0]; in xnn_f32_gemm_minmax_ukernel_4x4__scalar() local
66 float vacc10 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
70 float vacc20 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
74 float vacc30 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
92 vacc00 += va0 * vb0; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
112 vacc00 = math_max_f32(vacc00, vmin); in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
129 vacc00 = math_min_f32(vacc00, vmax); in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
162 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
188 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
190 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
[all …]
4x4-minmax-wasm.c:61 float vacc00 = w[0]; in xnn_f32_gemm_minmax_ukernel_4x4__wasm() local
66 float vacc10 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
70 float vacc20 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
74 float vacc30 = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
92 vacc00 += va0 * vb0; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
112 vacc00 = __builtin_wasm_max_f32(vacc00, vmin); in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
129 vacc00 = __builtin_wasm_min_f32(vacc00, vmax); in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
162 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
188 c0[0] = vacc00; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
190 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
[all …]
4x4-relu-wasm.c:59 float vacc00 = w[0]; in xnn_f32_gemm_relu_ukernel_4x4__wasm() local
64 float vacc10 = vacc00; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
68 float vacc20 = vacc00; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
72 float vacc30 = vacc00; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
90 vacc00 += va0 * vb0; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
110 vacc00 = __builtin_wasm_max_f32(vacc00, 0.0f); in xnn_f32_gemm_relu_ukernel_4x4__wasm()
143 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
169 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
171 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
178 c0[0] = vacc00; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
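
The /external/XNNPACK/src/f32-gemm/gen/ hits above all follow the same accumulator pattern: vacc00 is initialized from the packed bias in w[0], updated with vacc00 += va0 * vb0 across the K dimension, clamped (min/max for the minmax kernels, max against 0.0f for the relu kernels), and finally stored to c0[0]. Below is a minimal standalone sketch of that structure for an MR=1, NR=4 scalar kernel; it is not the XNNPACK source, and the signature, weight layout, and names are simplifying assumptions for illustration only.

#include <stddef.h>

static inline float clamp_f32(float x, float lo, float hi) {
  return x < lo ? lo : (x > hi ? hi : x);
}

/* Computes one output row (MR = 1) and four output columns (NR = 4):
   c0[j] = clamp(bias[j] + sum_k a0[k] * b[k][j], vmin, vmax).
   w points at packed weights: 4 bias values followed by kc groups of 4 B values. */
void gemm_1x4_minmax_sketch(size_t kc, const float* a0, const float* w,
                            float* c0, float vmin, float vmax) {
  /* Accumulators start from the packed bias, like "float vacc00 = w[0];" above. */
  float vacc00 = w[0];
  float vacc01 = w[1];
  float vacc02 = w[2];
  float vacc03 = w[3];
  w += 4;
  for (size_t k = 0; k < kc; k++) {
    const float va0 = a0[k];
    /* Multiply-accumulate against one packed row of B, like "vacc00 += va0 * vb0;". */
    vacc00 += va0 * w[0];
    vacc01 += va0 * w[1];
    vacc02 += va0 * w[2];
    vacc03 += va0 * w[3];
    w += 4;
  }
  /* The minmax variants clamp to [vmin, vmax] before storing; the relu variants
     only apply max(x, 0.0f). */
  c0[0] = clamp_f32(vacc00, vmin, vmax);
  c0[1] = clamp_f32(vacc01, vmin, vmax);
  c0[2] = clamp_f32(vacc02, vmin, vmax);
  c0[3] = clamp_f32(vacc03, vmin, vmax);
}

The wasm variants in the listing differ only in using __builtin_wasm_max_f32/__builtin_wasm_min_f32 for the clamp; the scalar variants use math_max_f32/math_min_f32.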
/external/XNNPACK/src/f32-gemm/gen-inc/
1x4inc-minmax-wasm.c:45 float vacc00 = acc[0]; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm() local
61 vacc00 += va0 * vb0; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
69 vacc00 = __builtin_wasm_max_f32(vacc00, vmin); in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
74 vacc00 = __builtin_wasm_min_f32(vacc00, vmax); in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
80 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
91 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
93 vacc00 = vacc02; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
97 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
1x4inc-minmax-scalar.c:45 float vacc00 = acc[0]; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar() local
61 vacc00 += va0 * vb0; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
69 vacc00 = math_max_f32(vacc00, vmin); in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
74 vacc00 = math_min_f32(vacc00, vmax); in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
80 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
91 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
93 vacc00 = vacc02; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
97 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
2x4inc-minmax-scalar.c:51 float vacc00 = acc[0]; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar() local
72 vacc00 += va0 * vb0; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
84 vacc00 = math_max_f32(vacc00, vmin); in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
93 vacc00 = math_min_f32(vacc00, vmax); in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
108 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
124 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
126 vacc00 = vacc02; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
131 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
2x4inc-minmax-wasm.c:51 float vacc00 = acc[0]; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm() local
72 vacc00 += va0 * vb0; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
84 vacc00 = __builtin_wasm_max_f32(vacc00, vmin); in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
93 vacc00 = __builtin_wasm_min_f32(vacc00, vmax); in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
108 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
124 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
126 vacc00 = vacc02; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
131 c0[0] = vacc00; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
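
The gen-inc hits differ from the gen ones only in initialization: the gemminc kernels load vacc00 from a caller-provided accumulator buffer (acc[0]) rather than from the packed bias (w[0]), so a partial sum produced by an earlier call can be continued. A tiny illustrative helper follows; the name and layout are hypothetical, not the XNNPACK API.

/* Contrast of the two initializations seen in the hits: plain GEMM kernels
   start each accumulator from the packed bias in w[], while the gemminc
   kernels resume from a caller-provided partial-sum buffer acc[]. */
static void init_acc_1x4_sketch(const float* w, const float* acc,
                                int resume, float vacc[4]) {
  for (int j = 0; j < 4; j++) {
    vacc[j] = resume ? acc[j] : w[j];  /* "vacc00 = acc[0]" vs. "vacc00 = w[0]" */
  }
}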
/external/XNNPACK/src/f32-igemm/gen/
1x4-minmax-scalar.c:47 float vacc00 = w[0]; in xnn_f32_igemm_minmax_ukernel_1x4__scalar() local
72 vacc00 += va0 * vb0; in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
82 vacc00 = math_max_f32(vacc00, vmin); in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
87 vacc00 = math_min_f32(vacc00, vmax); in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
93 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
103 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
105 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
109 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
1x4-minmax-wasm.c:47 float vacc00 = w[0]; in xnn_f32_igemm_minmax_ukernel_1x4__wasm() local
72 vacc00 += va0 * vb0; in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
82 vacc00 = __builtin_wasm_max_f32(vacc00, vmin); in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
87 vacc00 = __builtin_wasm_min_f32(vacc00, vmax); in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
93 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
103 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
105 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
109 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
2x4-minmax-wasm.c:51 float vacc00 = w[0]; in xnn_f32_igemm_minmax_ukernel_2x4__wasm() local
55 float vacc10 = vacc00; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
86 vacc00 += va0 * vb0; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
100 vacc00 = __builtin_wasm_max_f32(vacc00, vmin); in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
109 vacc00 = __builtin_wasm_min_f32(vacc00, vmax); in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
124 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
138 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
140 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
145 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
2x4-minmax-scalar.c:51 float vacc00 = w[0]; in xnn_f32_igemm_minmax_ukernel_2x4__scalar() local
55 float vacc10 = vacc00; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
86 vacc00 += va0 * vb0; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
100 vacc00 = math_max_f32(vacc00, vmin); in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
109 vacc00 = math_min_f32(vacc00, vmax); in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
124 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
138 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
140 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
145 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
4x2-minmax-wasm.c:59 float vacc00 = w[0]; in xnn_f32_igemm_minmax_ukernel_4x2__wasm() local
61 float vacc10 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
63 float vacc20 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
65 float vacc30 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
104 vacc00 += va0 * vb0; in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
118 vacc00 = __builtin_wasm_max_f32(vacc00, vmin); in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
127 vacc00 = __builtin_wasm_min_f32(vacc00, vmax); in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
146 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
157 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__wasm()
4x2-minmax-scalar.c:59 float vacc00 = w[0]; in xnn_f32_igemm_minmax_ukernel_4x2__scalar() local
61 float vacc10 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
63 float vacc20 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
65 float vacc30 = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
104 vacc00 += va0 * vb0; in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
118 vacc00 = math_max_f32(vacc00, vmin); in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
127 vacc00 = math_min_f32(vacc00, vmax); in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
146 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
157 c0[0] = vacc00; in xnn_f32_igemm_minmax_ukernel_4x2__scalar()
1x4-relu-wasm.c:45 float vacc00 = w[0]; in xnn_f32_igemm_relu_ukernel_1x4__wasm() local
70 vacc00 += va0 * vb0; in xnn_f32_igemm_relu_ukernel_1x4__wasm()
80 vacc00 = __builtin_wasm_max_f32(vacc00, 0.0f); in xnn_f32_igemm_relu_ukernel_1x4__wasm()
86 c0[0] = vacc00; in xnn_f32_igemm_relu_ukernel_1x4__wasm()
96 c0[0] = vacc00; in xnn_f32_igemm_relu_ukernel_1x4__wasm()
98 vacc00 = vacc02; in xnn_f32_igemm_relu_ukernel_1x4__wasm()
102 c0[0] = vacc00; in xnn_f32_igemm_relu_ukernel_1x4__wasm()
1x4-relu-scalar.c:45 float vacc00 = w[0]; in xnn_f32_igemm_relu_ukernel_1x4__scalar() local
70 vacc00 += va0 * vb0; in xnn_f32_igemm_relu_ukernel_1x4__scalar()
80 vacc00 = math_max_f32(vacc00, 0.0f); in xnn_f32_igemm_relu_ukernel_1x4__scalar()
86 c0[0] = vacc00; in xnn_f32_igemm_relu_ukernel_1x4__scalar()
96 c0[0] = vacc00; in xnn_f32_igemm_relu_ukernel_1x4__scalar()
98 vacc00 = vacc02; in xnn_f32_igemm_relu_ukernel_1x4__scalar()
102 c0[0] = vacc00; in xnn_f32_igemm_relu_ukernel_1x4__scalar()
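
The f32-igemm hits show the same init/accumulate/clamp/store pattern for vacc00. In XNNPACK, the indirect GEMM (igemm) kernels differ from the plain GEMM kernels mainly in how the A matrix is addressed, through an indirection buffer of input-row pointers, which is not visible in these vacc00 matches. A small sketch of that indirect-access idea for a single accumulator follows; the names and weight layout are hypothetical, not the XNNPACK signature.

#include <stddef.h>

/* Indirect A access: the input rows are reached through a pointer table (a),
   so the same kernel body can walk convolution taps without building an
   explicit im2col buffer. */
static float igemm_dot_sketch(size_t ks, size_t kc,
                              const float** a, const float* w) {
  float vacc00 = w[0];               /* bias, like "float vacc00 = w[0];" above */
  w += 1;
  for (size_t p = 0; p < ks; p++) {  /* walk the indirection buffer */
    const float* a0 = a[p];          /* one input "row" / convolution tap */
    for (size_t k = 0; k < kc; k++) {
      vacc00 += a0[k] * w[k];        /* "vacc00 += va0 * vb0;" */
    }
    w += kc;
  }
  return vacc00;
}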
