
Lines Matching refs:vmlaq_f32

107 vacc0x0123 = vmlaq_f32(vacc0x0123, va0c0, vb0123c0);  in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
108 vacc1x0123 = vmlaq_f32(vacc1x0123, va1c0, vb0123c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
109 vacc2x0123 = vmlaq_f32(vacc2x0123, va2c0, vb0123c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
110 vacc3x0123 = vmlaq_f32(vacc3x0123, va3c0, vb0123c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
111 vacc4x0123 = vmlaq_f32(vacc4x0123, va4c0, vb0123c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
112 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c0, vb0123c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
113 vacc0x4567 = vmlaq_f32(vacc0x4567, va0c0, vb4567c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
114 vacc1x4567 = vmlaq_f32(vacc1x4567, va1c0, vb4567c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
115 vacc2x4567 = vmlaq_f32(vacc2x4567, va2c0, vb4567c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
116 vacc3x4567 = vmlaq_f32(vacc3x4567, va3c0, vb4567c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
117 vacc4x4567 = vmlaq_f32(vacc4x4567, va4c0, vb4567c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
118 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c0, vb4567c0); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
129 vacc0x0123 = vmlaq_f32(vacc0x0123, va0c1, vb0123c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
130 vacc1x0123 = vmlaq_f32(vacc1x0123, va1c1, vb0123c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
131 vacc2x0123 = vmlaq_f32(vacc2x0123, va2c1, vb0123c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
132 vacc3x0123 = vmlaq_f32(vacc3x0123, va3c1, vb0123c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
133 vacc4x0123 = vmlaq_f32(vacc4x0123, va4c1, vb0123c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
134 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c1, vb0123c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
135 vacc0x4567 = vmlaq_f32(vacc0x4567, va0c1, vb4567c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
136 vacc1x4567 = vmlaq_f32(vacc1x4567, va1c1, vb4567c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
137 vacc2x4567 = vmlaq_f32(vacc2x4567, va2c1, vb4567c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
138 vacc3x4567 = vmlaq_f32(vacc3x4567, va3c1, vb4567c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
139 vacc4x4567 = vmlaq_f32(vacc4x4567, va4c1, vb4567c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
140 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c1, vb4567c1); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
151 vacc0x0123 = vmlaq_f32(vacc0x0123, va0c2, vb0123c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
152 vacc1x0123 = vmlaq_f32(vacc1x0123, va1c2, vb0123c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
153 vacc2x0123 = vmlaq_f32(vacc2x0123, va2c2, vb0123c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
154 vacc3x0123 = vmlaq_f32(vacc3x0123, va3c2, vb0123c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
155 vacc4x0123 = vmlaq_f32(vacc4x0123, va4c2, vb0123c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
156 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c2, vb0123c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
157 vacc0x4567 = vmlaq_f32(vacc0x4567, va0c2, vb4567c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
158 vacc1x4567 = vmlaq_f32(vacc1x4567, va1c2, vb4567c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
159 vacc2x4567 = vmlaq_f32(vacc2x4567, va2c2, vb4567c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
160 vacc3x4567 = vmlaq_f32(vacc3x4567, va3c2, vb4567c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
161 vacc4x4567 = vmlaq_f32(vacc4x4567, va4c2, vb4567c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
162 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c2, vb4567c2); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
173 vacc0x0123 = vmlaq_f32(vacc0x0123, va0c3, vb0123c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
174 vacc1x0123 = vmlaq_f32(vacc1x0123, va1c3, vb0123c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
175 vacc2x0123 = vmlaq_f32(vacc2x0123, va2c3, vb0123c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
176 vacc3x0123 = vmlaq_f32(vacc3x0123, va3c3, vb0123c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
177 vacc4x0123 = vmlaq_f32(vacc4x0123, va4c3, vb0123c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
178 vacc5x0123 = vmlaq_f32(vacc5x0123, va5c3, vb0123c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
179 vacc0x4567 = vmlaq_f32(vacc0x4567, va0c3, vb4567c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
180 vacc1x4567 = vmlaq_f32(vacc1x4567, va1c3, vb4567c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
181 vacc2x4567 = vmlaq_f32(vacc2x4567, va2c3, vb4567c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
182 vacc3x4567 = vmlaq_f32(vacc3x4567, va3c3, vb4567c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
183 vacc4x4567 = vmlaq_f32(vacc4x4567, va4c3, vb4567c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
184 vacc5x4567 = vmlaq_f32(vacc5x4567, va5c3, vb4567c3); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
198 vacc0x0123 = vmlaq_f32(vacc0x0123, va0, vb0123); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
199 vacc1x0123 = vmlaq_f32(vacc1x0123, va1, vb0123); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
200 vacc2x0123 = vmlaq_f32(vacc2x0123, va2, vb0123); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
201 vacc3x0123 = vmlaq_f32(vacc3x0123, va3, vb0123); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
202 vacc4x0123 = vmlaq_f32(vacc4x0123, va4, vb0123); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
203 vacc5x0123 = vmlaq_f32(vacc5x0123, va5, vb0123); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
204 vacc0x4567 = vmlaq_f32(vacc0x4567, va0, vb4567); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
205 vacc1x4567 = vmlaq_f32(vacc1x4567, va1, vb4567); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
206 vacc2x4567 = vmlaq_f32(vacc2x4567, va2, vb4567); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
207 vacc3x4567 = vmlaq_f32(vacc3x4567, va3, vb4567); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
208 vacc4x4567 = vmlaq_f32(vacc4x4567, va4, vb4567); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
209 vacc5x4567 = vmlaq_f32(vacc5x4567, va5, vb4567); in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128()
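
The matches above form the multiply-accumulate core of XNNPACK's xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128() microkernel: for each of the 6 rows of A, every broadcast lane (c0..c3) of a 128-bit A load is fused into two 4-wide accumulators (columns 0123 and 4567) with vmlaq_f32, and the final group (lines 198-209) handles the leftover k iterations. The sketch below illustrates the same "dup" broadcast + vmlaq_f32 pattern for a single output row and four columns. It is a minimal, self-contained illustration under an assumed row-major packing of B, not the XNNPACK kernel itself; the function name gemm_row_1x4_neon and its parameters are hypothetical.

#include <arm_neon.h>
#include <stddef.h>

/* Hypothetical helper (not part of XNNPACK): accumulates one 1x4 output tile,
 * c[0..3] += sum over k of a[k] * b[k*4 + 0..3], using the same "dup"
 * broadcast + vmlaq_f32 multiply-accumulate pattern as the listing above. */
static void gemm_row_1x4_neon(const float* a, const float* b, float* c, size_t kc)
{
  float32x4_t vacc0x0123 = vld1q_f32(c);   /* running accumulator, as in vacc0x0123 above */

  size_t k = kc;
  for (; k >= 4; k -= 4) {
    const float32x4_t va0 = vld1q_f32(a); a += 4;  /* 128-bit load of 4 A values ("ld128") */

    /* Broadcast each lane of va0 ("dup")... */
    const float32x4_t va0c0 = vdupq_lane_f32(vget_low_f32(va0), 0);
    const float32x4_t va0c1 = vdupq_lane_f32(vget_low_f32(va0), 1);
    const float32x4_t va0c2 = vdupq_lane_f32(vget_high_f32(va0), 0);
    const float32x4_t va0c3 = vdupq_lane_f32(vget_high_f32(va0), 1);

    /* ...and fuse a multiply-accumulate against one row of B per lane,
     * mirroring the vmlaq_f32 calls for c0..c3 in the listing. */
    vacc0x0123 = vmlaq_f32(vacc0x0123, va0c0, vld1q_f32(b +  0));
    vacc0x0123 = vmlaq_f32(vacc0x0123, va0c1, vld1q_f32(b +  4));
    vacc0x0123 = vmlaq_f32(vacc0x0123, va0c2, vld1q_f32(b +  8));
    vacc0x0123 = vmlaq_f32(vacc0x0123, va0c3, vld1q_f32(b + 12));
    b += 16;
  }

  for (; k != 0; k--) {
    /* Remainder: broadcast one scalar of A and accumulate one B row,
     * like the trailing non-dup vmlaq_f32 calls (lines 198-209). */
    const float32x4_t va0 = vdupq_n_f32(*a); a += 1;
    vacc0x0123 = vmlaq_f32(vacc0x0123, va0, vld1q_f32(b)); b += 4;
  }

  vst1q_f32(c, vacc0x0123);
}

The real 6x8 kernel simply replicates this pattern across six accumulator rows (vacc0..vacc5) and two column groups (0123 and 4567), which is why each broadcast lane appears in twelve consecutive vmlaq_f32 calls in the listing.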