
Searched refs:vb10x1 (Results 1 – 8 of 8) sorted by relevance

/external/XNNPACK/src/qs8-igemm/gen/
3x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal)
  247  … const int8x8_t vb10x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  251  vprod0x10 = vmlal_s8(vprod0x10, vb10x1, va0x1);
  252  vprod1x10 = vmlal_s8(vprod1x10, vb10x1, va1x1);
  253  vprod2x10 = vmlal_s8(vprod2x10, vb10x1, va2x1);
2x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mlal)
  191  … const int8x8_t vb10x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  194  vprod0x10 = vmlal_s8(vprod0x10, vb10x1, va0x1);
  195  vprod1x10 = vmlal_s8(vprod1x10, vb10x1, va1x1);
1x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c8__neon_mlal)
  135  … const int8x8_t vb10x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  137  vprod0x10 = vmlal_s8(vprod0x10, vb10x1, va0x1);
4x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mlal)
  303  … const int8x8_t vb10x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  308  vprod0x10 = vmlal_s8(vprod0x10, vb10x1, va0x1);
  309  vprod1x10 = vmlal_s8(vprod1x10, vb10x1, va1x1);
  310  vprod2x10 = vmlal_s8(vprod2x10, vb10x1, va2x1);
  311  vprod3x10 = vmlal_s8(vprod3x10, vb10x1, va3x1);
/external/XNNPACK/src/qs8-gemm/gen/
3x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal)
  232  const int8x8_t vb10x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  236  vprod0x10 = vmlal_s8(vprod0x10, vb10x1, va0x1);
  237  vprod1x10 = vmlal_s8(vprod1x10, vb10x1, va1x1);
  238  vprod2x10 = vmlal_s8(vprod2x10, vb10x1, va2x1);
2x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mlal)
  178  const int8x8_t vb10x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  181  vprod0x10 = vmlal_s8(vprod0x10, vb10x1, va0x1);
  182  vprod1x10 = vmlal_s8(vprod1x10, vb10x1, va1x1);
1x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c8__neon_mlal)
  124  const int8x8_t vb10x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  126  vprod0x10 = vmlal_s8(vprod0x10, vb10x1, va0x1);
4x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mlal)
  286  const int8x8_t vb10x1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  291  vprod0x10 = vmlal_s8(vprod0x10, vb10x1, va0x1);
  292  vprod1x10 = vmlal_s8(vprod1x10, vb10x1, va1x1);
  293  vprod2x10 = vmlal_s8(vprod2x10, vb10x1, va2x1);
  294  vprod3x10 = vmlal_s8(vprod3x10, vb10x1, va3x1);
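
All eight hits show the same inner-loop step of the generated NEON MLAL micro-kernels: vb10x1 holds the second 8-byte half of a packed weight block, and vmlal_s8 widen-multiply-accumulates it against the second half of each row's activations (va0x1, va1x1, ...) into int16x8_t product vectors, which the surrounding code then folds into the 32-bit accumulators. The sketch below shows that load/multiply-accumulate pattern in isolation as a plain int8 dot product; it is not the XNNPACK kernel itself, the function and variable names are illustrative, and the extreme case where two (-128)*(-128) terms in one lane overflow int16 is ignored.

  #include <arm_neon.h>
  #include <assert.h>
  #include <stddef.h>
  #include <stdint.h>

  // Dot product of two signed 8-bit vectors of length k (k a multiple of 16),
  // using the "double-pumped" MLAL pattern from the matched lines.
  int32_t dot_s8_mlal(const int8_t* a, const int8_t* b, size_t k) {
    assert(k % 16 == 0);
    int32x4_t vacc = vdupq_n_s32(0);
    for (size_t i = 0; i < k; i += 16) {
      const int8x8_t va0 = vld1_s8(a + i);        // first 8-byte half (the "x0" registers)
      const int8x8_t vb0 = vld1_s8(b + i);
      int16x8_t vprod = vmull_s8(vb0, va0);       // widening multiply into int16 lanes
      const int8x8_t va1 = vld1_s8(a + i + 8);    // second 8-byte half (the "x1" registers,
      const int8x8_t vb1 = vld1_s8(b + i + 8);    // e.g. vb10x1 / va0x1 above)
      vprod = vmlal_s8(vprod, vb1, va1);          // accumulate into the same int16 products
      vacc = vpadalq_s16(vacc, vprod);            // pairwise-widen into the int32 accumulator
    }
    // Horizontal reduction of the four 32-bit lanes.
    int32x2_t vsum = vadd_s32(vget_low_s32(vacc), vget_high_s32(vacc));
    vsum = vpadd_s32(vsum, vsum);
    return vget_lane_s32(vsum, 0);
  }

Compile with NEON enabled (e.g. -mfpu=neon on 32-bit ARM; AArch64 toolchains enable it by default). Accumulating two products per int16 lane before widening is what lets these kernels issue one vmlal_s8 per 8-byte half instead of widening each product individually.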