Searched refs: va0c3333 (Results 1–24 of 24), sorted by relevance

/external/XNNPACK/src/f32-gemm/gen-inc/
1x8inc-minmax-sse-dup.c  (in xnn_f32_gemminc_minmax_ukernel_1x8__sse_dup)
  :78   const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :83   vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :84   vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
1x8inc-minmax-sse2-dup.c  (in xnn_f32_gemminc_minmax_ukernel_1x8__sse2_dup)
  :78   const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :83   vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :84   vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
3x8inc-minmax-sse-dup.c  (in xnn_f32_gemminc_minmax_ukernel_3x8__sse_dup)
  :116  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :123  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :126  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
3x8inc-minmax-sse2-dup.c  (in xnn_f32_gemminc_minmax_ukernel_3x8__sse2_dup)
  :116  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :123  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :126  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
4x8inc-minmax-sse-dup.c  (in xnn_f32_gemminc_minmax_ukernel_4x8__sse_dup)
  :135  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :143  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :147  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
4x8inc-minmax-sse2-dup.c  (in xnn_f32_gemminc_minmax_ukernel_4x8__sse2_dup)
  :135  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :143  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :147  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
5x8inc-minmax-sse2-dup.c  (in xnn_f32_gemminc_minmax_ukernel_5x8__sse2_dup)
  :154  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :163  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :168  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
5x8inc-minmax-sse-dup.c  (in xnn_f32_gemminc_minmax_ukernel_5x8__sse_dup)
  :154  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :163  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :168  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
/external/XNNPACK/src/f32-gemm/gen/
1x8-minmax-sse-dup.c  (in xnn_f32_gemm_minmax_ukernel_1x8__sse_dup)
  :76   const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :81   vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :82   vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
1x8-minmax-sse2-dup.c  (in xnn_f32_gemm_minmax_ukernel_1x8__sse2_dup)
  :76   const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :81   vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :82   vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
3x8-minmax-sse-dup.c  (in xnn_f32_gemm_minmax_ukernel_3x8__sse_dup)
  :114  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :121  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :124  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
3x8-minmax-sse2-dup.c  (in xnn_f32_gemm_minmax_ukernel_3x8__sse2_dup)
  :114  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :121  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :124  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
4x8-minmax-sse2-dup.c  (in xnn_f32_gemm_minmax_ukernel_4x8__sse2_dup)
  :133  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :141  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :145  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
4x8-minmax-sse-dup.c  (in xnn_f32_gemm_minmax_ukernel_4x8__sse_dup)
  :133  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :141  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :145  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
5x8-minmax-sse-dup.c  (in xnn_f32_gemm_minmax_ukernel_5x8__sse_dup)
  :152  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :161  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :166  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
5x8-minmax-sse2-dup.c  (in xnn_f32_gemm_minmax_ukernel_5x8__sse2_dup)
  :152  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :161  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :166  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
/external/XNNPACK/src/f32-igemm/gen/
1x8-minmax-sse-dup.c  (in xnn_f32_igemm_minmax_ukernel_1x8__sse_dup)
  :89   const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :94   vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :95   vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
1x8-minmax-sse2-dup.c  (in xnn_f32_igemm_minmax_ukernel_1x8__sse2_dup)
  :89   const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :94   vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :95   vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
3x8-minmax-sse-dup.c  (in xnn_f32_igemm_minmax_ukernel_3x8__sse_dup)
  :133  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :140  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :143  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
3x8-minmax-sse2-dup.c  (in xnn_f32_igemm_minmax_ukernel_3x8__sse2_dup)
  :133  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :140  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :143  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
4x8-minmax-sse-dup.c  (in xnn_f32_igemm_minmax_ukernel_4x8__sse_dup)
  :155  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :163  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :167  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
4x8-minmax-sse2-dup.c  (in xnn_f32_igemm_minmax_ukernel_4x8__sse2_dup)
  :155  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :163  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :167  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
5x8-minmax-sse-dup.c  (in xnn_f32_igemm_minmax_ukernel_5x8__sse_dup)
  :177  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :186  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :191  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
5x8-minmax-sse2-dup.c  (in xnn_f32_igemm_minmax_ukernel_5x8__sse2_dup)
  :177  const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));  [local]
  :186  vacc0x0123 = _mm_add_ps(vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
  :191  vacc0x4567 = _mm_add_ps(vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
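
Note: every hit above is the same three-instruction shape. The "dup" flavor of these F32 GEMM/IGEMM microkernels broadcasts element 3 of the row-0 A register (va0) into all four lanes with _mm_shuffle_ps, producing va0c3333, then multiply-accumulates that broadcast against the two 4-wide halves of the B panel for that step (vb0123c3, vb4567c3) into the row-0 accumulators. The C sketch below isolates just that step under stated assumptions: the helper name f32_gemm_dup_step_c3 and the main() driver are illustrative only and do not exist in XNNPACK, and the real kernels additionally unroll over more A rows and steps, advance the A/B pointers, and clamp the accumulators with min/max parameters, none of which is shown here.

    #include <stdio.h>
    #include <xmmintrin.h>  /* SSE: __m128, _mm_shuffle_ps, _mm_mul_ps, _mm_add_ps */

    /* One "dup" update step, isolated from the kernels above (sketch only):
     * broadcast element 3 of A row 0 and accumulate it against both B halves. */
    static inline void f32_gemm_dup_step_c3(
        __m128 va0,          /* 4 consecutive f32 values from row 0 of A      */
        __m128 vb0123c3,     /* B values for output columns 0..3 at this step */
        __m128 vb4567c3,     /* B values for output columns 4..7 at this step */
        __m128* vacc0x0123,  /* running accumulator, row 0, columns 0..3      */
        __m128* vacc0x4567)  /* running accumulator, row 0, columns 4..7      */
    {
      /* va0c3333 = { va0[3], va0[3], va0[3], va0[3] } */
      const __m128 va0c3333 = _mm_shuffle_ps(va0, va0, _MM_SHUFFLE(3, 3, 3, 3));

      /* Multiply the broadcast value against both halves of the B panel
       * and add into the row-0 accumulators, as in the hits above. */
      *vacc0x0123 = _mm_add_ps(*vacc0x0123, _mm_mul_ps(va0c3333, vb0123c3));
      *vacc0x4567 = _mm_add_ps(*vacc0x4567, _mm_mul_ps(va0c3333, vb4567c3));
    }

    int main(void) {
      /* Toy inputs: va0 lanes are {1, 2, 3, 4}, so the broadcast value is 4. */
      const __m128 va0      = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f);
      const __m128 vb0123c3 = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f);
      const __m128 vb4567c3 = _mm_set_ps(8.0f, 7.0f, 6.0f, 5.0f);
      __m128 vacc0x0123 = _mm_setzero_ps();
      __m128 vacc0x4567 = _mm_setzero_ps();

      f32_gemm_dup_step_c3(va0, vb0123c3, vb4567c3, &vacc0x0123, &vacc0x4567);

      float lo[4], hi[4];
      _mm_storeu_ps(lo, vacc0x0123);
      _mm_storeu_ps(hi, vacc0x4567);
      printf("c0..3: %g %g %g %g\n", lo[0], lo[1], lo[2], lo[3]);  /* 4 8 12 16  */
      printf("c4..7: %g %g %g %g\n", hi[0], hi[1], hi[2], hi[3]);  /* 20 24 28 32 */
      return 0;
    }

The va0c3333 name follows from the shuffle itself: A row 0 with element 3 duplicated into all four lanes. The wider kernels in these results (3x8, 4x8, 5x8) presumably repeat the same pattern for their additional A rows, which simply do not match this search term.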