
Searched refs: vb5x1 (results 1 – 16 of 16), sorted by relevance

/external/XNNPACK/src/qs8-gemm/gen/
3x8c8-minmax-neon-mlal-padal.c  (in xnn_qs8_gemm_minmax_ukernel_3x8c8__neon_mlal_padal)
  150  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  154  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  155  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);
  156  vprod2x5 = vmlal_s8(vprod2x5, vb5x1, va2x1);

4x8c8-minmax-neon-mlal-padal.c  (in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mlal_padal)
  181  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  186  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  187  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);
  188  vprod2x5 = vmlal_s8(vprod2x5, vb5x1, va2x1);
  189  vprod3x5 = vmlal_s8(vprod3x5, vb5x1, va3x1);

2x8c8-minmax-neon-mlal-padal.c  (in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mlal_padal)
  119  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  122  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  123  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);

1x8c8-minmax-neon-mlal-padal.c  (in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mlal_padal)
  88   const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  90   vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);

1x16c8-minmax-neon-mlal-padal.c  (in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mlal_padal)
  104  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  106  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);

2x16c8-minmax-neon-mlal-padal.c  (in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mlal_padal)
  143  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  146  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  147  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);

3x16c8-minmax-neon-mlal-padal.c  (in xnn_qs8_gemm_minmax_ukernel_3x16c8__neon_mlal_padal)
  182  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  186  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  187  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);
  188  vprod2x5 = vmlal_s8(vprod2x5, vb5x1, va2x1);

4x16c8-minmax-neon-mlal-padal.c  (in xnn_qs8_gemm_minmax_ukernel_4x16c8__neon_mlal_padal)
  221  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  226  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  227  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);
  228  vprod2x5 = vmlal_s8(vprod2x5, vb5x1, va2x1);
  229  vprod3x5 = vmlal_s8(vprod3x5, vb5x1, va3x1);
/external/XNNPACK/src/qs8-igemm/gen/
4x8c8-minmax-neon-mlal-padal.c  (in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mlal_padal)
  198  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  203  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  204  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);
  205  vprod2x5 = vmlal_s8(vprod2x5, vb5x1, va2x1);
  206  vprod3x5 = vmlal_s8(vprod3x5, vb5x1, va3x1);

2x8c8-minmax-neon-mlal-padal.c  (in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mlal_padal)
  132  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  135  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  136  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);

3x8c8-minmax-neon-mlal-padal.c  (in xnn_qs8_igemm_minmax_ukernel_3x8c8__neon_mlal_padal)
  165  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  169  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  170  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);
  171  vprod2x5 = vmlal_s8(vprod2x5, vb5x1, va2x1);

1x8c8-minmax-neon-mlal-padal.c  (in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mlal_padal)
  99   const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  101  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);

1x16c8-minmax-neon-mlal-padal.c  (in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mlal_padal)
  115  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  117  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);

2x16c8-minmax-neon-mlal-padal.c  (in xnn_qs8_igemm_minmax_ukernel_2x16c8__neon_mlal_padal)
  156  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  159  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  160  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);

4x16c8-minmax-neon-mlal-padal.c  (in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal)
  238  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  243  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  244  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);
  245  vprod2x5 = vmlal_s8(vprod2x5, vb5x1, va2x1);
  246  vprod3x5 = vmlal_s8(vprod3x5, vb5x1, va3x1);

3x16c8-minmax-neon-mlal-padal.c  (in xnn_qs8_igemm_minmax_ukernel_3x16c8__neon_mlal_padal)
  197  const int8x8_t vb5x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  201  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);
  202  vprod1x5 = vmlal_s8(vprod1x5, vb5x1, va1x1);
  203  vprod2x5 = vmlal_s8(vprod2x5, vb5x1, va2x1);
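All 16 matches show the same inner-loop step in the second ("x1") half of these doubled-load MLAL kernels: vb5x1 holds 8 signed 8-bit weights loaded with vld1_s8, and one vmlal_s8 per output row widens and accumulates vb5x1 against that row's va*x1 slice into 16-bit products; the "padal" in the kernel names refers to the later step that pairwise-adds those 16-bit lanes into 32-bit accumulators. A minimal standalone sketch of this pattern for a single row follows. The helper name accumulate_row_vb5x1 and the explicit vpadalq_s16 fold are illustrative assumptions, not lines copied from XNNPACK.

#include <arm_neon.h>
#include <stdint.h>

// Illustrative sketch (not XNNPACK source): load one 8-byte weight vector,
// widen-multiply-accumulate it against one row's activation slice, then fold
// the int16 products into that row's int32 accumulator (the "padal" step).
static inline int32x4_t accumulate_row_vb5x1(const int8_t* w,     // packed weights
                                             int8x8_t va0x1,      // activation slice (second half of the doubled load)
                                             int16x8_t vprod0x5,  // running int16 products for column group 5
                                             int32x4_t vacc0x5) { // int32 accumulator for column group 5
  const int8x8_t vb5x1 = vld1_s8(w);            // 8 signed 8-bit weights
  vprod0x5 = vmlal_s8(vprod0x5, vb5x1, va0x1);  // vprod0x5 += vb5x1 * va0x1, widened to int16
  return vpadalq_s16(vacc0x5, vprod0x5);        // pairwise-add int16 lanes into the int32 accumulator
}

In the kernels listed above this logic is fully unrolled rather than factored into a helper: each M-row variant (1x, 2x, 3x, 4x) loads vb5x1 once and issues one vmlal_s8 per output row (va0x1 through va3x1).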