/external/XNNPACK/src/f32-gemm/gen/ |
D | 6x8-neonfma-dup-ld64.c | in xnn_f32_gemm_ukernel_6x8__neonfma_dup_ld64():
      123  const float32x4_t va4c1 = vdupq_lane_f32(va4, 1);                 [local]
      129  vacc4x0123 = vfmaq_f32(vacc4x0123, va4c1, vb0123c1);
      135  vacc4x4567 = vfmaq_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-neon-dup-ld64.c | in xnn_f32_gemm_ukernel_6x8__neon_dup_ld64():
      123  const float32x4_t va4c1 = vdupq_lane_f32(va4, 1);                 [local]
      129  vacc4x0123 = vmlaq_f32(vacc4x0123, va4c1, vb0123c1);
      135  vacc4x4567 = vmlaq_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-psimd-splat.c | in xnn_f32_gemm_ukernel_6x8__psimd_splat():
      127  const psimd_f32 va4c1 = psimd_splat1_f32(va4);                    [local]
      137  vacc4x0123 = psimd_qfma_f32(vacc4x0123, va4c1, vb0123c1);
      143  vacc4x4567 = psimd_qfma_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-neonfma-dup-ld128.c | in xnn_f32_gemm_ukernel_6x8__neonfma_dup_ld128():
      125  const float32x4_t va4c1 = vdupq_lane_f32(vget_low_f32(va4), 1);   [local]
      131  vacc4x0123 = vfmaq_f32(vacc4x0123, va4c1, vb0123c1);
      137  vacc4x4567 = vfmaq_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-neon-dup-ld128.c | in xnn_f32_gemm_ukernel_6x8__neon_dup_ld128():
      125  const float32x4_t va4c1 = vdupq_lane_f32(vget_low_f32(va4), 1);   [local]
      131  vacc4x0123 = vmlaq_f32(vacc4x0123, va4c1, vb0123c1);
      137  vacc4x4567 = vmlaq_f32(vacc4x4567, va4c1, vb4567c1);
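All of these GEMM matches follow the same dup-then-multiply-accumulate pattern: one A value is broadcast (duplicated) across a 4-wide register and accumulated against a packed B column; va4c1 is the broadcast of row 4's second A value. The sketch below is not XNNPACK source; the helper name and pointer layout are illustrative, and it shows only the ld64 step. The _neonfma_ kernels use vfmaq_f32 (fused), the plain _neon_ kernels the same shape with vmlaq_f32.

#include <arm_neon.h>

// Hypothetical helper; the real microkernels inline this step in their k-loop.
static inline void gemm_dup_ld64_step(
    const float* a,           // 2 consecutive A values for one row
    const float* w,           // packed B: 4 columns for step k, then 4 for step k+1
    float32x4_t* vacc0123)    // running accumulator for output columns 0..3
{
  const float32x2_t va = vld1_f32(a);               // { a[k], a[k+1] }
  const float32x4_t vb0123c0 = vld1q_f32(w);        // B[k],   columns 0..3
  const float32x4_t vb0123c1 = vld1q_f32(w + 4);    // B[k+1], columns 0..3

  const float32x4_t vac0 = vdupq_lane_f32(va, 0);   // broadcast a[k]
  const float32x4_t vac1 = vdupq_lane_f32(va, 1);   // broadcast a[k+1] -- the "va4c1" pattern

  *vacc0123 = vfmaq_f32(*vacc0123, vac0, vb0123c0); // acc += a[k]   * B[k][0..3]
  *vacc0123 = vfmaq_f32(*vacc0123, vac1, vb0123c1); // acc += a[k+1] * B[k+1][0..3]
}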
|
/external/XNNPACK/src/f32-igemm/gen/ |
D | 6x8-neonfma-dup-ld64.c | in xnn_f32_igemm_ukernel_6x8__neonfma_dup_ld64():
      151  const float32x4_t va4c1 = vdupq_lane_f32(va4, 1);                 [local]
      157  vacc4x0123 = vfmaq_f32(vacc4x0123, va4c1, vb0123c1);
      163  vacc4x4567 = vfmaq_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-neon-dup-ld64.c | in xnn_f32_igemm_ukernel_6x8__neon_dup_ld64():
      151  const float32x4_t va4c1 = vdupq_lane_f32(va4, 1);                 [local]
      157  vacc4x0123 = vmlaq_f32(vacc4x0123, va4c1, vb0123c1);
      163  vacc4x4567 = vmlaq_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-neonfma-dup-ld128.c | in xnn_f32_igemm_ukernel_6x8__neonfma_dup_ld128():
      153  const float32x4_t va4c1 = vdupq_lane_f32(vget_low_f32(va4), 1);   [local]
      159  vacc4x0123 = vfmaq_f32(vacc4x0123, va4c1, vb0123c1);
      165  vacc4x4567 = vfmaq_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-psimd-splat.c | in xnn_f32_igemm_ukernel_6x8__psimd_splat():
      155  const psimd_f32 va4c1 = psimd_splat1_f32(va4);                    [local]
      165  vacc4x0123 = psimd_qfma_f32(vacc4x0123, va4c1, vb0123c1);
      171  vacc4x4567 = psimd_qfma_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-neon-dup-ld128.c | in xnn_f32_igemm_ukernel_6x8__neon_dup_ld128():
      153  const float32x4_t va4c1 = vdupq_lane_f32(vget_low_f32(va4), 1);   [local]
      159  vacc4x0123 = vmlaq_f32(vacc4x0123, va4c1, vb0123c1);
      165  vacc4x4567 = vmlaq_f32(vacc4x4567, va4c1, vb4567c1);
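The dup-ld128 matches differ from the ld64 ones only in the load width: four A values are loaded into one q-register, and each lane is splatted via vget_low_f32/vget_high_f32 plus vdupq_lane_f32; va4c1 is the splat of lane 1 of the low half. A standalone sketch of that step follows (names and the plain-pointer interface are illustrative, not the IGEMM code itself; in the real IGEMM kernels the A pointers come from an indirection buffer, but the lane handling is the same).

#include <arm_neon.h>

// Hypothetical helper showing the ld128 lane-broadcast step for one row.
static inline float32x4_t igemm_dup_ld128_step(
    float32x4_t vacc,         // accumulator for 4 output columns
    const float* a,           // 4 consecutive A values for one row
    const float* w)           // packed B: 4 columns per k step, 4 steps
{
  const float32x4_t va = vld1q_f32(a);
  const float32x4_t vac0 = vdupq_lane_f32(vget_low_f32(va), 0);   // broadcast a[k]
  const float32x4_t vac1 = vdupq_lane_f32(vget_low_f32(va), 1);   // broadcast a[k+1] -- "va4c1"
  const float32x4_t vac2 = vdupq_lane_f32(vget_high_f32(va), 0);  // broadcast a[k+2]
  const float32x4_t vac3 = vdupq_lane_f32(vget_high_f32(va), 1);  // broadcast a[k+3]

  vacc = vfmaq_f32(vacc, vac0, vld1q_f32(w + 0));   // acc += a[k]   * B[k]
  vacc = vfmaq_f32(vacc, vac1, vld1q_f32(w + 4));   // acc += a[k+1] * B[k+1]
  vacc = vfmaq_f32(vacc, vac2, vld1q_f32(w + 8));   // acc += a[k+2] * B[k+2]
  vacc = vfmaq_f32(vacc, vac3, vld1q_f32(w + 12));  // acc += a[k+3] * B[k+3]
  return vacc;
}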
|
/external/XNNPACK/src/f32-gemm/gen-inc/ |
D | 6x8-neon-dup-ld64.c | in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld64():
      125  const float32x4_t va4c1 = vdupq_lane_f32(va4, 1);                 [local]
      131  vacc4x0123 = vmlaq_f32(vacc4x0123, va4c1, vb0123c1);
      137  vacc4x4567 = vmlaq_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-neonfma-dup-ld64.c | in xnn_f32_gemminc_ukernel_6x8__neonfma_dup_ld64():
      125  const float32x4_t va4c1 = vdupq_lane_f32(va4, 1);                 [local]
      131  vacc4x0123 = vfmaq_f32(vacc4x0123, va4c1, vb0123c1);
      137  vacc4x4567 = vfmaq_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-psimd-splat.c | in xnn_f32_gemminc_ukernel_6x8__psimd_splat():
      129  const psimd_f32 va4c1 = psimd_splat1_f32(va4);                    [local]
      139  vacc4x0123 = psimd_qfma_f32(vacc4x0123, va4c1, vb0123c1);
      145  vacc4x4567 = psimd_qfma_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-neon-dup-ld128.c | in xnn_f32_gemminc_ukernel_6x8__neon_dup_ld128():
      127  const float32x4_t va4c1 = vdupq_lane_f32(vget_low_f32(va4), 1);   [local]
      133  vacc4x0123 = vmlaq_f32(vacc4x0123, va4c1, vb0123c1);
      139  vacc4x4567 = vmlaq_f32(vacc4x4567, va4c1, vb4567c1);

D | 6x8-neonfma-dup-ld128.c | in xnn_f32_gemminc_ukernel_6x8__neonfma_dup_ld128():
      127  const float32x4_t va4c1 = vdupq_lane_f32(vget_low_f32(va4), 1);   [local]
      133  vacc4x0123 = vfmaq_f32(vacc4x0123, va4c1, vb0123c1);
      139  vacc4x4567 = vfmaq_f32(vacc4x4567, va4c1, vb4567c1);
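The 6x8-psimd-splat.c matches in each group use the portable psimd path instead of NEON intrinsics: psimd_splat1_f32() broadcasts element 1 of a 4-wide vector and psimd_qfma_f32() is a quasi-fused a + b*c. Below is a minimal sketch of one such step, assuming psimd.h and psimd_load_f32() for the loads; the helper name is hypothetical. In these gen-inc (gemminc) kernels the accumulators start from caller-supplied partial sums rather than bias, but the inner splat/qfma step is the same as in the plain GEMM.

#include <psimd.h>

// Hypothetical helper: one splat + quasi-FMA step of the psimd-splat kernels.
static inline psimd_f32 gemminc_splat_step(
    psimd_f32 vacc,           // accumulator carried in (initialized from partial sums)
    const float* a,           // 4 consecutive A values for one row
    const float* w)           // packed B row for step k+1, 4 columns
{
  const psimd_f32 va = psimd_load_f32(a);
  const psimd_f32 vac1 = psimd_splat1_f32(va);       // broadcast element 1 -- the "va4c1" pattern
  const psimd_f32 vb0123c1 = psimd_load_f32(w);
  return psimd_qfma_f32(vacc, vac1, vb0123c1);       // vacc + vac1 * vb0123c1
}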
|
/external/XNNPACK/src/f16-gemm/gen/ |
D | 6x8-neonfp16arith-ld64.c | in xnn_f16_gemm_ukernel_6x8__neonfp16arith_ld64():
      127  const float16x8_t va4c1 = vdupq_lane_f16(va4, 1);                 [local]
      134  vacc4x01234567 = vfmaq_f16(vacc4x01234567, va4c1, vb01234567c1);

D | 8x8-neonfp16arith-ld64.c | in xnn_f16_gemm_ukernel_8x8__neonfp16arith_ld64():
      151  const float16x8_t va4c1 = vdupq_lane_f16(va4, 1);                 [local]
      160  vacc4x01234567 = vfmaq_f16(vacc4x01234567, va4c1, vb01234567c1);
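The f16 matches are the half-precision form of the same pattern: one fp16 A value is broadcast across an 8-wide register with vdupq_lane_f16 and accumulated with vfmaq_f16, which requires the ARMv8.2-A FP16 arithmetic extension (the "neonfp16arith" suffix). A sketch of that step, with illustrative names only:

#include <arm_neon.h>

// Hypothetical helper; compile with an FP16-arithmetic target (e.g. -march=armv8.2-a+fp16).
static inline float16x8_t f16_gemm_dup_step(
    float16x8_t vacc,             // accumulator for 8 output columns
    const float16_t* a,           // 4 consecutive fp16 A values for one row
    const float16_t* w)           // packed fp16 B row for step k+1, 8 columns
{
  const float16x4_t va = vld1_f16(a);                // 64-bit load of 4 A values
  const float16x8_t vac1 = vdupq_lane_f16(va, 1);    // broadcast lane 1 -- the "va4c1" pattern
  const float16x8_t vb01234567c1 = vld1q_f16(w);
  return vfmaq_f16(vacc, vac1, vb01234567c1);        // vacc += a[k+1] * B[k+1][0..7]
}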
|