
Searched defs:vb0o (Results 1 – 10 of 10) sorted by relevance

/external/XNNPACK/src/bf16-gemm/gen/
1x4c8-minmax-neonfma-zip.c
     73  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_1x4c8__neonfma_zip() local
    116  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_1x4c8__neonfma_zip() local

1x4c8-minmax-neonfma-shland.c
     73  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_1x4c8__neonfma_shland() local
    116  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_1x4c8__neonfma_shland() local

2x4c8-minmax-neonfma-shland.c
     90  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_2x4c8__neonfma_shland() local
    150  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_2x4c8__neonfma_shland() local

2x4c8-minmax-neonfma-zip.c
     90  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_2x4c8__neonfma_zip() local
    150  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_2x4c8__neonfma_zip() local

3x4c8-minmax-neonfma-zip.c
    107  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_3x4c8__neonfma_zip() local
    184  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_3x4c8__neonfma_zip() local

3x4c8-minmax-neonfma-shland.c
    107  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_3x4c8__neonfma_shland() local
    184  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_3x4c8__neonfma_shland() local

4x4c8-minmax-neonfma-shland.c
    124  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_4x4c8__neonfma_shland() local
    218  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_4x4c8__neonfma_shland() local

4x4c8-minmax-neonfma-zip.c
    124  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_4x4c8__neonfma_zip() local
    218  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_4x4c8__neonfma_zip() local

5x4c8-minmax-neonfma-zip.c
    141  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_5x4c8__neonfma_zip() local
    252  const float32x4_t vb0o = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0)); in xnn_bf16_gemm_minmax_ukernel_5x4c8__neonfma_zip() local

5x4c8-minmax-neonfma-shland.c
    141  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_5x4c8__neonfma_shland() local
    252  const float32x4_t vb0o = vreinterpretq_f32_u16(vandq_u16(vb0, vmask)); in xnn_bf16_gemm_minmax_ukernel_5x4c8__neonfma_shland() local
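All ten hits are the same bf16-to-f32 widening step, written two ways in these NEON-FMA bf16 GEMM microkernels: the zip kernels interleave a zero vector with the packed bf16 data (vzip2q_u16), so each bf16 word lands in the upper 16 bits of a zeroed 32-bit lane, while the shland kernels mask away alternate 16-bit lanes (vandq_u16) to the same effect. Reinterpreting the result as float32x4_t then gives the f32 values directly, because a bf16 number is an f32 with the low 16 mantissa bits dropped. The standalone sketch below illustrates both tricks outside the kernels; the sample values, the 0xFFFF0000 mask constant, and the names vb0o_zip and vb0o_and are illustrative assumptions for this demo, not code taken from the files above.

#include <arm_neon.h>
#include <stdint.h>
#include <stdio.h>

// Demo of the two bf16 -> f32 widening tricks seen in the search hits.
// A bf16 value v has the same meaning as the f32 whose bit pattern is
// ((uint32_t) v) << 16, so placing the 16 bf16 bits in the upper half of a
// zeroed 32-bit lane and reinterpreting as float32x4_t performs the widening.
int main(void) {
  // Eight bf16 values packed as raw 16-bit words (1.0 ... 8.0 as bf16).
  const uint16_t bf16_words[8] = {
    0x3F80, 0x4000, 0x4040, 0x4080,  // 1.0, 2.0, 3.0, 4.0
    0x40A0, 0x40C0, 0x40E0, 0x4100,  // 5.0, 6.0, 7.0, 8.0
  };
  const uint16x8_t vb0 = vld1q_u16(bf16_words);

  // "zip" variant: interleave zeros with the bf16 words; zip2 picks the
  // upper four bf16 lanes, each now sitting in the high half of a 32-bit lane.
  const uint16x8_t vzero = vmovq_n_u16(0);
  const float32x4_t vb0o_zip = vreinterpretq_f32_u16(vzip2q_u16(vzero, vb0));

  // "shland" variant: a per-32-bit-lane mask of 0xFFFF0000 keeps only the
  // odd-indexed bf16 words, which already occupy the high halves of the lanes.
  const uint16x8_t vmask = vreinterpretq_u16_u32(vmovq_n_u32(UINT32_C(0xFFFF0000)));
  const float32x4_t vb0o_and = vreinterpretq_f32_u16(vandq_u16(vb0, vmask));

  float out_zip[4], out_and[4];
  vst1q_f32(out_zip, vb0o_zip);
  vst1q_f32(out_and, vb0o_and);

  printf("zip2   -> %g %g %g %g\n", out_zip[0], out_zip[1], out_zip[2], out_zip[3]);
  printf("shland -> %g %g %g %g\n", out_and[0], out_and[1], out_and[2], out_and[3]);
  return 0;
}

Built with an AArch64 toolchain, this should print 5 6 7 8 for the zip2 path (the upper four bf16 lanes) and 2 4 6 8 for the masked path (the odd-indexed lanes); the two kernel families simply pair these widened groups with correspondingly prepared a-side values.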