/external/XNNPACK/src/qs8-gemm/gen/ |
D | 1x8c8-minmax-rndnu-neon-mull.c |
     97  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c8__neon_mull() local
    100  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c8__neon_mull()
|
D | 1x8c16-minmax-rndnu-neon-mlal.c |
    104  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c16__neon_mlal() local
    107  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c16__neon_mlal()
|
D | 1x8c8-minmax-rndnu-neon-mlal.c |
    146  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c8__neon_mlal() local
    149  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c8__neon_mlal()
|
D | 1x8c8-minmax-fp32-neon-mlal.c |
    146  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_fp32_ukernel_1x8c8__neon_mlal() local
    149  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_fp32_ukernel_1x8c8__neon_mlal()
|
D | 1x8c8-minmax-fp32-neonv8-mlal.c |
    147  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_fp32_ukernel_1x8c8__neonv8_mlal() local
    150  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_fp32_ukernel_1x8c8__neonv8_mlal()
|
D | 2x8c8-minmax-rndnu-neon-mull.c |
    134  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c8__neon_mull() local
    137  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c8__neon_mull()
|
D | 1x16c8-minmax-rndnu-neon-mull.c |
    135  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c8__neon_mull() local
    138  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c8__neon_mull()
|
D | 2x8c16-minmax-rndnu-neon-mlal.c |
    149  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c16__neon_mlal() local
    152  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c16__neon_mlal()
|
D | 3x8c8-minmax-rndnu-neon-mull.c |
    171  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c8__neon_mull() local
    174  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c8__neon_mull()
|
D | 1x16c16-minmax-rndnu-neon-mlal.c |
    150  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c16__neon_mlal() local
    153  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c16__neon_mlal()
|
D | 2x8c8-minmax-fp32-neonv8-mlal.c |
    210  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_gemm_minmax_fp32_ukernel_2x8c8__neonv8_mlal() local
    213  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_gemm_minmax_fp32_ukernel_2x8c8__neonv8_mlal()
|
/external/XNNPACK/src/qs8-igemm/gen/ |
D | 1x8c8-minmax-rndnu-neon-mull.c |
    111  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c8__neon_mull() local
    114  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c8__neon_mull()
|
D | 1x8c16-minmax-rndnu-neon-mlal.c |
    118  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c16__neon_mlal() local
    121  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c16__neon_mlal()
|
D | 1x8c8-minmax-fp32-neonv8-mlal.c |
    161  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_fp32_ukernel_1x8c8__neonv8_mlal() local
    164  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_fp32_ukernel_1x8c8__neonv8_mlal()
|
D | 1x8c8-minmax-rndnu-neon-mlal.c |
    160  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c8__neon_mlal() local
    163  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c8__neon_mlal()
|
D | 1x8c8-minmax-fp32-neon-mlal.c |
    160  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_fp32_ukernel_1x8c8__neon_mlal() local
    163  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_fp32_ukernel_1x8c8__neon_mlal()
|
D | 2x8c8-minmax-rndnu-neon-mull.c |
    150  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c8__neon_mull() local
    153  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c8__neon_mull()
|
D | 1x16c8-minmax-rndnu-neon-mull.c |
    149  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c8__neon_mull() local
    152  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c8__neon_mull()
|
D | 2x8c16-minmax-rndnu-neon-mlal.c |
    165  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c16__neon_mlal() local
    168  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c16__neon_mlal()
|
D | 1x16c16-minmax-rndnu-neon-mlal.c |
    164  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c16__neon_mlal() local
    167  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c16__neon_mlal()
|
D | 2x8c8-minmax-rndnu-neon-mlal.c |
    225  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c8__neon_mlal() local
    228  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c8__neon_mlal()
|
/external/XNNPACK/src/qc8-gemm/gen/ |
D | 1x8c8-minmax-fp32-neon-mlal.c |
    146  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qc8_gemm_minmax_fp32_ukernel_1x8c8__neon_mlal() local
    149  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qc8_gemm_minmax_fp32_ukernel_1x8c8__neon_mlal()
|
D | 1x8c8-minmax-fp32-neonv8-mlal.c |
    147  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qc8_gemm_minmax_fp32_ukernel_1x8c8__neonv8_mlal() local
    150  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qc8_gemm_minmax_fp32_ukernel_1x8c8__neonv8_mlal()
|
/external/XNNPACK/src/qc8-igemm/gen/ |
D | 1x8c8-minmax-fp32-neon-mlal.c |
    160  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qc8_igemm_minmax_fp32_ukernel_1x8c8__neon_mlal() local
    163  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qc8_igemm_minmax_fp32_ukernel_1x8c8__neon_mlal()
|
D | 1x8c8-minmax-fp32-neonv8-mlal.c |
    161  const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));  in xnn_qc8_igemm_minmax_fp32_ukernel_1x8c8__neonv8_mlal() local
    164  const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);  in xnn_qc8_igemm_minmax_fp32_ukernel_1x8c8__neonv8_mlal()
|
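All of the references above show the same NEON horizontal-reduction idiom: each int32x4_t accumulator is collapsed by adding its low and high halves (vget_low_s32/vget_high_s32 with vadd_s32), and two such half-sums are then combined with a pairwise add (vpadd_s32) so that each lane of the result holds the complete sum for one output channel. The standalone sketch below reproduces that pattern outside XNNPACK; the accumulator contents, the acc0/acc1 arrays, and the main() harness are illustrative assumptions, not code taken from the kernels.

    /* Minimal sketch of the reduction pattern used by the listed kernels. */
    #include <arm_neon.h>
    #include <stdio.h>

    int main(void) {
      /* Two 4-lane accumulators, one per output channel (made-up values). */
      const int32_t acc0[4] = { 1,  2,  3,  4};    /* channel 0 partial sums */
      const int32_t acc1[4] = {10, 20, 30, 40};    /* channel 1 partial sums */
      const int32x4_t vacc0x0 = vld1q_s32(acc0);
      const int32x4_t vacc0x1 = vld1q_s32(acc1);

      /* Step 1: add the high and low halves of each accumulator (4 lanes -> 2 lanes). */
      const int32x2_t vpsum0x0 = vadd_s32(vget_low_s32(vacc0x0), vget_high_s32(vacc0x0));
      const int32x2_t vpsum0x1 = vadd_s32(vget_low_s32(vacc0x1), vget_high_s32(vacc0x1));

      /* Step 2: pairwise add; lane 0 = total for channel 0, lane 1 = total for channel 1. */
      const int32x2_t vsum0x01 = vpadd_s32(vpsum0x0, vpsum0x1);

      printf("channel 0 sum = %d\n", (int) vget_lane_s32(vsum0x01, 0));  /* 10  */
      printf("channel 1 sum = %d\n", (int) vget_lane_s32(vsum0x01, 1));  /* 100 */
      return 0;
    }

In the kernels themselves this reduction is repeated for the remaining output channels, the resulting lanes are gathered back into int32x4_t vectors, and only then requantized (rndnu or fp32 variant) and clamped to the output min/max; the sketch stops after the reduction step.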