/external/XNNPACK/src/qs8-gemm/gen/

D | 1x8-minmax-rndnu-neon-mull-addw-dup.c
     52  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x8__neon_mull_addw_dup() local
     98  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x8__neon_mull_addw_dup() local

D | 2x8-minmax-rndnu-neon-mull-addw-dup.c
     61  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x8__neon_mull_addw_dup() local
    132  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x8__neon_mull_addw_dup() local

D | 1x16-minmax-rndnu-neon-mull-addw-dup.c
     54  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16__neon_mull_addw_dup() local
    141  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16__neon_mull_addw_dup() local

D | 3x8-minmax-rndnu-neon-mull-addw-dup.c
     70  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8__neon_mull_addw_dup() local
    166  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8__neon_mull_addw_dup() local

D | 4x8-minmax-rndnu-neon-mull-addw-dup.c
     79  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8__neon_mull_addw_dup() local
    200  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8__neon_mull_addw_dup() local

D | 2x16-minmax-rndnu-neon-mull-addw-dup.c
     65  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16__neon_mull_addw_dup() local
    201  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16__neon_mull_addw_dup() local

D | 3x16-minmax-rndnu-neon-mull-addw-dup.c
     76  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16__neon_mull_addw_dup() local
    261  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16__neon_mull_addw_dup() local

D | 4x16-minmax-rndnu-neon-mull-addw-dup.c
     87  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16__neon_mull_addw_dup() local
    321  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16__neon_mull_addw_dup() local
/external/XNNPACK/src/qs8-igemm/gen/ |
D | 1x8-minmax-rndnu-neon-mull-addw-dup.c
     63  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x8__neon_mull_addw_dup() local
    109  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x8__neon_mull_addw_dup() local

D | 2x8-minmax-rndnu-neon-mull-addw-dup.c
     74  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x8__neon_mull_addw_dup() local
    145  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x8__neon_mull_addw_dup() local

D | 1x16-minmax-rndnu-neon-mull-addw-dup.c
     65  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16__neon_mull_addw_dup() local
    152  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16__neon_mull_addw_dup() local

D | 3x8-minmax-rndnu-neon-mull-addw-dup.c
     85  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8__neon_mull_addw_dup() local
    181  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8__neon_mull_addw_dup() local

D | 4x8-minmax-rndnu-neon-mull-addw-dup.c
     96  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8__neon_mull_addw_dup() local
    217  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8__neon_mull_addw_dup() local

D | 2x16-minmax-rndnu-neon-mull-addw-dup.c
     78  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16__neon_mull_addw_dup() local
    214  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16__neon_mull_addw_dup() local

D | 3x16-minmax-rndnu-neon-mull-addw-dup.c
     91  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16__neon_mull_addw_dup() local
    276  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16__neon_mull_addw_dup() local

D | 4x16-minmax-rndnu-neon-mull-addw-dup.c
    104  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16__neon_mull_addw_dup() local
    338  const int16x8_t vprod0x01234567c0 = vmull_s8(vb01234567c0, vdup_lane_s8(va0, 0));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16__neon_mull_addw_dup() local
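Every match above is the same statement: the lane-0 step of the "mull-addw-dup" inner loop these generated kernels share. vdup_lane_s8(va0, 0) broadcasts one int8 activation lane ("dup"), vmull_s8 multiplies it against eight int8 weights with widening to int16 ("mull"), and the surrounding kernel code folds each product into int32 accumulators with widening adds via vaddw_s16 ("addw"). The two hits per file are consistent with the kernels' structure: one in the main loop that consumes eight input channels per iteration, and one in the remainder path for the leftover channels. The sketch below condenses the pattern into a single-row, 8-output-channel helper; the function name, parameter names, and the assumption that kc is a multiple of 8 are illustrative, not part of XNNPACK's API.

#include <arm_neon.h>
#include <stddef.h>
#include <stdint.h>

/*
 * Hypothetical helper (not an XNNPACK entry point): accumulate a 1x8 tile of
 * int32 dot-product sums over kc input channels using the mull-addw-dup
 * pattern. Assumes kc is a multiple of 8 and that w holds int8 weights packed
 * 8 output channels per input channel, as in the generated kernels above.
 */
static void qs8_dot_1x8_mull_addw_dup(
    const int8_t* a,   /* one row of int8 activations, kc elements */
    const int8_t* w,   /* packed int8 weights, kc * 8 elements */
    size_t kc,         /* input channels; a multiple of 8 in this sketch */
    int32_t acc[8])    /* running int32 accumulators, one per output channel */
{
  int32x4_t vacc0123 = vld1q_s32(acc);
  int32x4_t vacc4567 = vld1q_s32(acc + 4);

  for (; kc >= 8; kc -= 8) {
    /* Load 8 activations once; each step below reuses one broadcast lane. */
    const int8x8_t va0 = vld1_s8(a); a += 8;

    /* One step per input channel: widening 8-bit multiply of 8 weights by a
     * duplicated activation lane, then widening adds of the int16 products
     * into the int32 accumulators. The lane index must be a compile-time
     * constant for vdup_lane_s8, hence the macro rather than a loop. */
    #define MULL_ADDW_DUP_STEP(LANE)                                    \
      do {                                                              \
        const int8x8_t vb = vld1_s8(w); w += 8;                         \
        const int16x8_t vprod = vmull_s8(vb, vdup_lane_s8(va0, LANE));  \
        vacc0123 = vaddw_s16(vacc0123, vget_low_s16(vprod));            \
        vacc4567 = vaddw_s16(vacc4567, vget_high_s16(vprod));           \
      } while (0)

    MULL_ADDW_DUP_STEP(0); MULL_ADDW_DUP_STEP(1);
    MULL_ADDW_DUP_STEP(2); MULL_ADDW_DUP_STEP(3);
    MULL_ADDW_DUP_STEP(4); MULL_ADDW_DUP_STEP(5);
    MULL_ADDW_DUP_STEP(6); MULL_ADDW_DUP_STEP(7);
    #undef MULL_ADDW_DUP_STEP
  }

  vst1q_s32(acc, vacc0123);
  vst1q_s32(acc + 4, vacc4567);
}

Loading va0 once and reusing it through eight lane broadcasts amortizes one activation load across eight widening multiplies, which is the point of the "dup" scheme. These kernels target baseline NEON; XNNPACK's neondot variants replace this three-instruction sequence with a single dot-product instruction on CPUs that support that extension.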