
Searched refs:vb14x0 (Results 1 – 8 of 8) sorted by relevance

/external/XNNPACK/src/qs8-igemm/gen/
3x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal)
  144  … const int8x8_t vb14x0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  288  int16x8_t vprod0x14 = vmull_s8(vb14x0, va0x0);
  289  int16x8_t vprod1x14 = vmull_s8(vb14x0, va1x0);
  290  int16x8_t vprod2x14 = vmull_s8(vb14x0, va2x0);

2x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mlal)
  118  … const int8x8_t vb14x0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  220  int16x8_t vprod0x14 = vmull_s8(vb14x0, va0x0);
  221  int16x8_t vprod1x14 = vmull_s8(vb14x0, va1x0);

1x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c8__neon_mlal)
   92  … const int8x8_t vb14x0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  152  int16x8_t vprod0x14 = vmull_s8(vb14x0, va0x0);

4x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mlal)
  170  … const int8x8_t vb14x0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  356  int16x8_t vprod0x14 = vmull_s8(vb14x0, va0x0);
  357  int16x8_t vprod1x14 = vmull_s8(vb14x0, va1x0);
  358  int16x8_t vprod2x14 = vmull_s8(vb14x0, va2x0);
  359  int16x8_t vprod3x14 = vmull_s8(vb14x0, va3x0);
/external/XNNPACK/src/qs8-gemm/gen/
3x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal)
  129  const int8x8_t vb14x0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  273  int16x8_t vprod0x14 = vmull_s8(vb14x0, va0x0);
  274  int16x8_t vprod1x14 = vmull_s8(vb14x0, va1x0);
  275  int16x8_t vprod2x14 = vmull_s8(vb14x0, va2x0);

2x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mlal)
  105  const int8x8_t vb14x0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  207  int16x8_t vprod0x14 = vmull_s8(vb14x0, va0x0);
  208  int16x8_t vprod1x14 = vmull_s8(vb14x0, va1x0);

1x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c8__neon_mlal)
   81  const int8x8_t vb14x0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  141  int16x8_t vprod0x14 = vmull_s8(vb14x0, va0x0);

4x16c8-minmax-rndnu-neon-mlal.c  (in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mlal)
  153  const int8x8_t vb14x0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));  [local]
  339  int16x8_t vprod0x14 = vmull_s8(vb14x0, va0x0);
  340  int16x8_t vprod1x14 = vmull_s8(vb14x0, va1x0);
  341  int16x8_t vprod2x14 = vmull_s8(vb14x0, va2x0);
  342  int16x8_t vprod3x14 = vmull_s8(vb14x0, va3x0);
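All eight hits follow the same inner-loop pattern of these generated MLAL kernels: `vb14x0` is one 8-byte slice of packed int8 weights loaded with `vld1_s8`, the packed-weight pointer `w` is advanced by 8 bytes, and the slice is multiplied against each row's activation vector (`va0x0` … `va3x0`) with the widening multiply `vmull_s8`. The sketch below illustrates that load-and-widening-multiply step in isolation; it is not XNNPACK code (the array names and the `main` driver are made up for illustration) and assumes an ARM target with NEON available.

```c
#include <arm_neon.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  // One 8-byte slice of packed int8 weights (stands in for "vb14x0") and one
  // row of int8 activations (stands in for "va0x0"). Values are arbitrary.
  const int8_t weights[8]     = {1, -2, 3, -4, 5, -6, 7, -8};
  const int8_t activations[8] = {10, 20, 30, 40, 50, 60, 70, 80};

  const void* w = weights;

  // Load 8 signed 8-bit weights, then advance the packed-weight pointer by
  // 8 bytes, mirroring the declaration sites found above:
  //   vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  const int8x8_t vb = vld1_s8((const int8_t*) w);
  w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
  (void) w;  // w now points at the next 8-byte weight slice

  const int8x8_t va = vld1_s8(activations);

  // Widening multiply: 8 x (int8 * int8) -> 8 x int16, as in
  // vmull_s8(vb14x0, va0x0) at the use sites above.
  const int16x8_t vprod = vmull_s8(vb, va);

  // Pairwise long add of the 16-bit products into 32-bit lanes; the generated
  // kernels fold these products into per-output-channel int32 accumulators.
  const int32x4_t vacc = vpaddlq_s16(vprod);

  int32_t out[4];
  vst1q_s32(out, vacc);
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);
  return 0;
}
```

The only difference between the 1x/2x/3x/4x variants visible in these hits is how many activation rows the single `vb14x0` slice is multiplied against, which is why the number of `vprodNx14` lines per file grows with the row count of the kernel.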