
Searched refs: _mm_movehl_ps (Results 1 – 25 of 656) sorted by relevance


/external/speex/libspeexdsp/
resample_sse.h
50 sum = _mm_add_ps(sum, _mm_movehl_ps(sum, sum)); in inner_product_single()
68 sum = _mm_add_ps(sum, _mm_movehl_ps(sum, sum)); in interpolate_product_single()
88 sum = _mm_add_pd(sum, _mm_cvtps_pd(_mm_movehl_ps(t, t))); in inner_product_double()
92 sum = _mm_add_pd(sum, _mm_cvtps_pd(_mm_movehl_ps(t, t))); in inner_product_double()
108 __m128d f2 = _mm_cvtps_pd(_mm_movehl_ps(f,f)); in interpolate_product_double()
114 sum2 = _mm_add_pd(sum2, _mm_cvtps_pd(_mm_movehl_ps(t, t))); in interpolate_product_double()
118 sum2 = _mm_add_pd(sum2, _mm_cvtps_pd(_mm_movehl_ps(t, t))); in interpolate_product_double()
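All of the resample_sse.h hits use the same horizontal-reduction idiom: _mm_movehl_ps(sum, sum) copies the upper two lanes of the accumulator over the lower two, so a single _mm_add_ps folds four partial sums down to two. A minimal sketch of that fold, not taken from the speex sources (the helper name hsum_ps is hypothetical):

    #include <xmmintrin.h>

    /* Hypothetical helper: horizontal sum of a 4-lane float vector using the
     * movehl-based fold seen in the resample_sse.h hits above. */
    static float hsum_ps(__m128 v)
    {
        /* {a,b,c,d} + {c,d,c,d} -> {a+c, b+d, 2c, 2d} */
        v = _mm_add_ps(v, _mm_movehl_ps(v, v));
        /* add lane 1 (b+d) into lane 0 (a+c): lane 0 now holds a+b+c+d */
        v = _mm_add_ss(v, _mm_shuffle_ps(v, v, 0x55));
        return _mm_cvtss_f32(v);
    }

In the double-precision hits (inner_product_double, interpolate_product_double) the same move appears to serve a related purpose: _mm_cvtps_pd converts only the low two float lanes, so _mm_movehl_ps(t, t) is what brings the upper half within reach of the double accumulator.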
/external/XNNPACK/src/f32-igemm/gen/
8x8-minmax-fma3-broadcast.c
250 vacc7x0123 = _mm_movehl_ps(vacc7x0123, vacc7x0123); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
251 vacc6x0123 = _mm_movehl_ps(vacc6x0123, vacc6x0123); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
252 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
253 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
254 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
255 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
256 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
257 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
7x8-minmax-fma3-broadcast.c
228 vacc6x0123 = _mm_movehl_ps(vacc6x0123, vacc6x0123); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
229 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
230 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
231 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
232 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
233 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
234 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
7x8-minmax-avx-broadcast.c
228 vacc6x0123 = _mm_movehl_ps(vacc6x0123, vacc6x0123); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
229 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
230 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
231 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
232 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
233 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
234 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
6x8-minmax-avx-broadcast.c
206 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_igemm_minmax_ukernel_6x8__avx_broadcast()
207 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_igemm_minmax_ukernel_6x8__avx_broadcast()
208 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_igemm_minmax_ukernel_6x8__avx_broadcast()
209 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_igemm_minmax_ukernel_6x8__avx_broadcast()
210 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_igemm_minmax_ukernel_6x8__avx_broadcast()
211 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_igemm_minmax_ukernel_6x8__avx_broadcast()
6x8-minmax-fma3-broadcast.c
206 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_igemm_minmax_ukernel_6x8__fma3_broadcast()
207 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_igemm_minmax_ukernel_6x8__fma3_broadcast()
208 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_igemm_minmax_ukernel_6x8__fma3_broadcast()
209 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_igemm_minmax_ukernel_6x8__fma3_broadcast()
210 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_igemm_minmax_ukernel_6x8__fma3_broadcast()
211 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_igemm_minmax_ukernel_6x8__fma3_broadcast()
5x8-minmax-fma3-broadcast.c
184 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_igemm_minmax_ukernel_5x8__fma3_broadcast()
185 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_igemm_minmax_ukernel_5x8__fma3_broadcast()
186 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_igemm_minmax_ukernel_5x8__fma3_broadcast()
187 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_igemm_minmax_ukernel_5x8__fma3_broadcast()
188 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_igemm_minmax_ukernel_5x8__fma3_broadcast()
5x8-minmax-avx-broadcast.c
184 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_igemm_minmax_ukernel_5x8__avx_broadcast()
185 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_igemm_minmax_ukernel_5x8__avx_broadcast()
186 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_igemm_minmax_ukernel_5x8__avx_broadcast()
187 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_igemm_minmax_ukernel_5x8__avx_broadcast()
188 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_igemm_minmax_ukernel_5x8__avx_broadcast()
/external/XNNPACK/src/f32-gemm/gen-inc/
8x8inc-minmax-fma3-broadcast.c
225 vacc7x0123 = _mm_movehl_ps(vacc7x0123, vacc7x0123); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
226 vacc6x0123 = _mm_movehl_ps(vacc6x0123, vacc6x0123); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
227 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
228 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
229 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
230 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
231 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
232 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
7x8inc-minmax-avx-broadcast.c
205 vacc6x0123 = _mm_movehl_ps(vacc6x0123, vacc6x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
206 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
207 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
208 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
209 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
210 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
211 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
7x8inc-minmax-fma3-broadcast.c
205 vacc6x0123 = _mm_movehl_ps(vacc6x0123, vacc6x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
206 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
207 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
208 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
209 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
210 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
211 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
6x8inc-minmax-avx-broadcast.c
185 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__avx_broadcast()
186 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__avx_broadcast()
187 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__avx_broadcast()
188 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__avx_broadcast()
189 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__avx_broadcast()
190 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__avx_broadcast()
6x8inc-minmax-fma3-broadcast.c
185 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__fma3_broadcast()
186 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__fma3_broadcast()
187 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__fma3_broadcast()
188 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__fma3_broadcast()
189 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__fma3_broadcast()
190 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemminc_minmax_ukernel_6x8__fma3_broadcast()
5x8inc-minmax-fma3-broadcast.c
165 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__fma3_broadcast()
166 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__fma3_broadcast()
167 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__fma3_broadcast()
168 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__fma3_broadcast()
169 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__fma3_broadcast()
5x8inc-minmax-avx-broadcast.c
165 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__avx_broadcast()
166 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__avx_broadcast()
167 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__avx_broadcast()
168 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__avx_broadcast()
169 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__avx_broadcast()
5x8inc-minmax-sse-load1.c
186 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__sse_load1()
187 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__sse_load1()
188 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__sse_load1()
189 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__sse_load1()
190 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemminc_minmax_ukernel_5x8__sse_load1()
/external/XNNPACK/src/f32-gemm/gen/
8x8-minmax-fma3-broadcast.c
223 vacc7x0123 = _mm_movehl_ps(vacc7x0123, vacc7x0123); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
224 vacc6x0123 = _mm_movehl_ps(vacc6x0123, vacc6x0123); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
225 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
226 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
227 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
228 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
229 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
230 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
7x8-minmax-fma3-broadcast.c
203 vacc6x0123 = _mm_movehl_ps(vacc6x0123, vacc6x0123); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
204 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
205 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
206 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
207 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
208 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
209 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
7x8-minmax-avx-broadcast.c
203 vacc6x0123 = _mm_movehl_ps(vacc6x0123, vacc6x0123); in xnn_f32_gemm_minmax_ukernel_7x8__avx_broadcast()
204 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemm_minmax_ukernel_7x8__avx_broadcast()
205 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemm_minmax_ukernel_7x8__avx_broadcast()
206 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemm_minmax_ukernel_7x8__avx_broadcast()
207 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemm_minmax_ukernel_7x8__avx_broadcast()
208 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemm_minmax_ukernel_7x8__avx_broadcast()
209 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemm_minmax_ukernel_7x8__avx_broadcast()
6x8-minmax-avx-broadcast.c
183 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemm_minmax_ukernel_6x8__avx_broadcast()
184 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemm_minmax_ukernel_6x8__avx_broadcast()
185 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemm_minmax_ukernel_6x8__avx_broadcast()
186 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemm_minmax_ukernel_6x8__avx_broadcast()
187 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemm_minmax_ukernel_6x8__avx_broadcast()
188 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemm_minmax_ukernel_6x8__avx_broadcast()
6x8-minmax-fma3-broadcast.c
183 vacc5x0123 = _mm_movehl_ps(vacc5x0123, vacc5x0123); in xnn_f32_gemm_minmax_ukernel_6x8__fma3_broadcast()
184 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemm_minmax_ukernel_6x8__fma3_broadcast()
185 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemm_minmax_ukernel_6x8__fma3_broadcast()
186 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemm_minmax_ukernel_6x8__fma3_broadcast()
187 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemm_minmax_ukernel_6x8__fma3_broadcast()
188 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemm_minmax_ukernel_6x8__fma3_broadcast()
5x8-minmax-fma3-broadcast.c
163 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemm_minmax_ukernel_5x8__fma3_broadcast()
164 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemm_minmax_ukernel_5x8__fma3_broadcast()
165 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemm_minmax_ukernel_5x8__fma3_broadcast()
166 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemm_minmax_ukernel_5x8__fma3_broadcast()
167 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemm_minmax_ukernel_5x8__fma3_broadcast()
5x8-minmax-avx-broadcast.c
163 vacc4x0123 = _mm_movehl_ps(vacc4x0123, vacc4x0123); in xnn_f32_gemm_minmax_ukernel_5x8__avx_broadcast()
164 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemm_minmax_ukernel_5x8__avx_broadcast()
165 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemm_minmax_ukernel_5x8__avx_broadcast()
166 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemm_minmax_ukernel_5x8__avx_broadcast()
167 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemm_minmax_ukernel_5x8__avx_broadcast()
4x8-minmax-avx-broadcast.c
143 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemm_minmax_ukernel_4x8__avx_broadcast()
144 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemm_minmax_ukernel_4x8__avx_broadcast()
145 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemm_minmax_ukernel_4x8__avx_broadcast()
146 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemm_minmax_ukernel_4x8__avx_broadcast()
4x8-minmax-fma3-broadcast.c
143 vacc3x0123 = _mm_movehl_ps(vacc3x0123, vacc3x0123); in xnn_f32_gemm_minmax_ukernel_4x8__fma3_broadcast()
144 vacc2x0123 = _mm_movehl_ps(vacc2x0123, vacc2x0123); in xnn_f32_gemm_minmax_ukernel_4x8__fma3_broadcast()
145 vacc1x0123 = _mm_movehl_ps(vacc1x0123, vacc1x0123); in xnn_f32_gemm_minmax_ukernel_4x8__fma3_broadcast()
146 vacc0x0123 = _mm_movehl_ps(vacc0x0123, vacc0x0123); in xnn_f32_gemm_minmax_ukernel_4x8__fma3_broadcast()
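The XNNPACK GEMM/IGEMM hits above all appear to sit in the column-remainder path of the SSE/AVX/FMA3 microkernels: once fewer than four output columns remain, the kernel stores two floats with _mm_storel_pi and then uses _mm_movehl_ps(vacc, vacc) to shift lanes 2-3 down so a final single-float store can reach them. A hedged sketch of that tail-store pattern; store_partial_row, c, and nc are illustrative names, not XNNPACK's actual code:

    #include <stddef.h>
    #include <xmmintrin.h>

    /* Hypothetical sketch of the remainder-store idiom shared by the hits above:
     * write the low two lanes, move the high two lanes down, then write one more
     * float if an odd column is left. */
    static void store_partial_row(float *c, size_t nc, __m128 vacc)
    {
        if (nc & 2) {
            _mm_storel_pi((__m64 *) c, vacc);   /* write lanes 0-1 */
            vacc = _mm_movehl_ps(vacc, vacc);   /* bring lanes 2-3 into lanes 0-1 */
            c += 2;
        }
        if (nc & 1) {
            _mm_store_ss(c, vacc);              /* write what is now lane 0 */
        }
    }

Reusing _mm_movehl_ps here keeps the whole tail in SSE registers instead of spilling the accumulator to memory and reloading the upper half.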
