Home
last modified time | relevance | path

Search results for references to `vb0c13` (results 1–24 of 24), sorted by relevance

/external/XNNPACK/src/qc8-gemm/gen/
D2x1c4-minmax-fp32-armsimd32.c69 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qc8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32() local
70 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qc8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
71 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qc8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D2x2c4-minmax-fp32-armsimd32.c71 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qc8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32() local
72 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qc8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
73 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qc8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c58 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qc8_gemm_minmax_fp32_ukernel_1x1c4__armsimd32() local
59 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qc8_gemm_minmax_fp32_ukernel_1x1c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c59 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qc8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32() local
60 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qc8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32()
/external/XNNPACK/src/qs8-gemm/gen/
D2x1c4-minmax-fp32-armsimd32.c70 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qs8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32() local
71 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qs8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
72 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qs8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D2x2c4-minmax-fp32-armsimd32.c72 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qs8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32() local
73 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qs8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
74 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qs8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c59 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qs8_gemm_minmax_fp32_ukernel_1x1c4__armsimd32() local
60 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qs8_gemm_minmax_fp32_ukernel_1x1c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c60 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qs8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32() local
61 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qs8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32()
/external/XNNPACK/src/qu8-gemm/gen/
D2x1c4-minmax-fp32-armsimd32.c71 const int16x2_t vb0c13 = __uxtab16(vb_minus_zero_point, __ror(vb0, 8)); in xnn_qu8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32() local
72 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qu8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
73 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qu8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D2x2c4-minmax-fp32-armsimd32.c73 const int16x2_t vb0c13 = __uxtab16(vb_minus_zero_point, __ror(vb0, 8)); in xnn_qu8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32() local
74 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qu8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
75 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qu8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c60 const int16x2_t vb0c13 = __uxtab16(vb_minus_zero_point, __ror(vb0, 8)); in xnn_qu8_gemm_minmax_fp32_ukernel_1x1c4__armsimd32() local
61 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qu8_gemm_minmax_fp32_ukernel_1x1c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c61 const int16x2_t vb0c13 = __uxtab16(vb_minus_zero_point, __ror(vb0, 8)); in xnn_qu8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32() local
62 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qu8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32()
/external/XNNPACK/src/qs8-igemm/gen/
D2x1c4-minmax-fp32-armsimd32.c88 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qs8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32() local
89 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
90 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D2x2c4-minmax-fp32-armsimd32.c90 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qs8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32() local
91 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
92 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c74 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qs8_igemm_minmax_fp32_ukernel_1x1c4__armsimd32() local
75 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_1x1c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c75 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qs8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32() local
76 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32()
/external/XNNPACK/src/qc8-igemm/gen/
D2x1c4-minmax-fp32-armsimd32.c87 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qc8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32() local
88 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qc8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
89 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qc8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D2x2c4-minmax-fp32-armsimd32.c89 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qc8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32() local
90 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qc8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
91 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qc8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c73 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qc8_igemm_minmax_fp32_ukernel_1x1c4__armsimd32() local
74 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qc8_igemm_minmax_fp32_ukernel_1x1c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c74 const int16x2_t vb0c13 = __sxtb16(__ror(vb0, 8)); in xnn_qc8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32() local
75 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qc8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32()
/external/XNNPACK/src/qu8-igemm/gen/
D2x1c4-minmax-fp32-armsimd32.c89 const int16x2_t vb0c13 = __uxtab16(vb_minus_zero_point, __ror(vb0, 8)); in xnn_qu8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32() local
90 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qu8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
91 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qu8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D2x2c4-minmax-fp32-armsimd32.c91 const int16x2_t vb0c13 = __uxtab16(vb_minus_zero_point, __ror(vb0, 8)); in xnn_qu8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32() local
92 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qu8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
93 vacc1x0 = __smlad(va1c13, vb0c13, vacc1x0); in xnn_qu8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c75 const int16x2_t vb0c13 = __uxtab16(vb_minus_zero_point, __ror(vb0, 8)); in xnn_qu8_igemm_minmax_fp32_ukernel_1x1c4__armsimd32() local
76 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qu8_igemm_minmax_fp32_ukernel_1x1c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c76 const int16x2_t vb0c13 = __uxtab16(vb_minus_zero_point, __ror(vb0, 8)); in xnn_qu8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32() local
77 vacc0x0 = __smlad(va0c13, vb0c13, vacc0x0); in xnn_qu8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32()