Searched refs:vmaxq_f16 (Results 1 – 25 of 129) sorted by relevance
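For reference, vmaxq_f16 is the Arm NEON intrinsic (ARMv8.2-A half-precision arithmetic, declared in <arm_neon.h> and gated on the fp16 arithmetic extension) that returns the lane-wise maximum of two float16x8_t vectors. Most of the hits below pair it with vminq_f16 to clamp microkernel outputs into a [min, max] range. A minimal sketch of that clamp idiom, with illustrative names not taken from any file listed here:

#include <arm_neon.h>  /* build with something like -march=armv8.2-a+fp16 */

/* Clamp every half-precision lane of x into [lo, hi]: max(min(x, hi), lo). */
static inline float16x8_t clamp_f16x8(float16x8_t x, float16x8_t lo, float16x8_t hi) {
  return vmaxq_f16(vminq_f16(x, hi), lo);
}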


/external/XNNPACK/src/f16-maxpool/
9p8x-minmax-neonfp16arith-c8.c
88 const float16x8_t vmax018 = vmaxq_f16(vmaxq_f16(vi0, vi1), vi8); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
89 const float16x8_t vmax23 = vmaxq_f16(vi2, vi3); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
90 const float16x8_t vmax45 = vmaxq_f16(vi4, vi5); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
91 const float16x8_t vmax67 = vmaxq_f16(vi6, vi7); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
93 const float16x8_t vmax2345 = vmaxq_f16(vmax23, vmax45); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
94 const float16x8_t vmax01678 = vmaxq_f16(vmax018, vmax67); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
95 const float16x8_t vmax = vmaxq_f16(vmax2345, vmax01678); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
96 const float16x8_t vout = vmaxq_f16(vminq_f16(vmax, voutput_max), voutput_min); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
111 const float16x8_t vmax018 = vmaxq_f16(vmaxq_f16(vi0, vi1), vi8); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
112 const float16x8_t vmax23 = vmaxq_f16(vi2, vi3); in xnn_f16_maxpool_minmax_ukernel_9p8x__neonfp16arith_c8()
[all …]
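The maxpool hits above reduce nine input vectors with a balanced tree of vmaxq_f16 calls and then clamp the result. A hedged sketch of the same shape, trimmed to a single vector of eight fp16 lanes per input (the helper name and signature are illustrative, not XNNPACK's):

#include <arm_neon.h>

/* Lane-wise maximum of 9 pooling inputs, clamped to [output_min, output_max],
 * mirroring the reduction tree shown in the 9p8x maxpool hits above. */
static inline float16x8_t maxpool9_f16x8(const float16x8_t vi[9],
                                         float16x8_t output_min,
                                         float16x8_t output_max) {
  const float16x8_t vmax018 = vmaxq_f16(vmaxq_f16(vi[0], vi[1]), vi[8]);
  const float16x8_t vmax23 = vmaxq_f16(vi[2], vi[3]);
  const float16x8_t vmax45 = vmaxq_f16(vi[4], vi[5]);
  const float16x8_t vmax67 = vmaxq_f16(vi[6], vi[7]);
  const float16x8_t vmax2345 = vmaxq_f16(vmax23, vmax45);
  const float16x8_t vmax01678 = vmaxq_f16(vmax018, vmax67);
  const float16x8_t vmax = vmaxq_f16(vmax2345, vmax01678);
  return vmaxq_f16(vminq_f16(vmax, output_max), output_min);
}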
/external/XNNPACK/src/f16-rmax/
neonfp16arith.c
34 vmax0 = vmaxq_f16(vmax0, vx0); in xnn_f16_rmax_ukernel__neonfp16arith()
35 vmax1 = vmaxq_f16(vmax1, vx1); in xnn_f16_rmax_ukernel__neonfp16arith()
36 vmax2 = vmaxq_f16(vmax2, vx2); in xnn_f16_rmax_ukernel__neonfp16arith()
37 vmax3 = vmaxq_f16(vmax3, vx3); in xnn_f16_rmax_ukernel__neonfp16arith()
39 float16x8_t vmax = vmaxq_f16(vmaxq_f16(vmax0, vmax1), vmaxq_f16(vmax2, vmax3)); in xnn_f16_rmax_ukernel__neonfp16arith()
42 vmax = vmaxq_f16(vmax, vx); in xnn_f16_rmax_ukernel__neonfp16arith()
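The f16-rmax hits accumulate a running vector maximum and only reduce across lanes at the end. A simplified sketch of that pattern under the assumption that a single accumulator is enough (the real kernel keeps four and handles tails differently); the function name is mine:

#include <arm_neon.h>
#include <stddef.h>

/* Maximum of n half-precision values: vector accumulation with vmaxq_f16,
 * then a horizontal reduction with vmaxvq_f16, then a scalar tail. */
static __fp16 rmax_f16(const __fp16* x, size_t n) {
  float16x8_t vmax = vdupq_n_f16(x[0]);  /* assumes n >= 1 */
  size_t i = 0;
  for (; i + 8 <= n; i += 8) {
    vmax = vmaxq_f16(vmax, vld1q_f16(x + i));
  }
  __fp16 max = vmaxvq_f16(vmax);
  for (; i < n; i++) {
    if (x[i] > max) max = x[i];
  }
  return max;
}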
/external/XNNPACK/src/f16-vmulcaddc/gen/
c16-minmax-neonfp16arith-2x.c
67 vacc0x01234567 = vmaxq_f16(vacc0x01234567, vmin); in xnn_f16_vmulcaddc_minmax_ukernel_c16__neonfp16arith_2x()
68 vacc0x89ABCDEF = vmaxq_f16(vacc0x89ABCDEF, vmin); in xnn_f16_vmulcaddc_minmax_ukernel_c16__neonfp16arith_2x()
69 vacc1x01234567 = vmaxq_f16(vacc1x01234567, vmin); in xnn_f16_vmulcaddc_minmax_ukernel_c16__neonfp16arith_2x()
70 vacc1x89ABCDEF = vmaxq_f16(vacc1x89ABCDEF, vmin); in xnn_f16_vmulcaddc_minmax_ukernel_c16__neonfp16arith_2x()
94 vacc0x01234567 = vmaxq_f16(vacc0x01234567, vmin); in xnn_f16_vmulcaddc_minmax_ukernel_c16__neonfp16arith_2x()
95 vacc1x01234567 = vmaxq_f16(vacc1x01234567, vmin); in xnn_f16_vmulcaddc_minmax_ukernel_c16__neonfp16arith_2x()
114 vacc0x01234567 = vmaxq_f16(vacc0x01234567, vmin); in xnn_f16_vmulcaddc_minmax_ukernel_c16__neonfp16arith_2x()
115 vacc1x01234567 = vmaxq_f16(vacc1x01234567, vmin); in xnn_f16_vmulcaddc_minmax_ukernel_c16__neonfp16arith_2x()
/external/XNNPACK/src/f16-vbinary/gen/
vmaxc-neonfp16arith-x16.c
41 float16x8_t vy01234567 = vmaxq_f16(va01234567, vb); in xnn_f16_vmaxc_ukernel__neonfp16arith_x16()
42 float16x8_t vy456789AB = vmaxq_f16(va456789AB, vb); in xnn_f16_vmaxc_ukernel__neonfp16arith_x16()
52 float16x8_t vy01234567 = vmaxq_f16(va01234567, vb); in xnn_f16_vmaxc_ukernel__neonfp16arith_x16()
58 float16x8_t vy01234567 = vmaxq_f16(va01234567, vb); in xnn_f16_vmaxc_ukernel__neonfp16arith_x16()
vmax-neonfp16arith-x16.c
42 float16x8_t vy01234567 = vmaxq_f16(va01234567, vb01234567); in xnn_f16_vmax_ukernel__neonfp16arith_x16()
43 float16x8_t vy456789AB = vmaxq_f16(va456789AB, vb456789AB); in xnn_f16_vmax_ukernel__neonfp16arith_x16()
54 float16x8_t vy01234567 = vmaxq_f16(va01234567, vb01234567); in xnn_f16_vmax_ukernel__neonfp16arith_x16()
61 float16x8_t vy01234567 = vmaxq_f16(va01234567, vb01234567); in xnn_f16_vmax_ukernel__neonfp16arith_x16()
vmulc-minmax-neonfp16arith-x16.c
47 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vmulc_minmax_ukernel__neonfp16arith_x16()
48 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vmulc_minmax_ukernel__neonfp16arith_x16()
60 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vmulc_minmax_ukernel__neonfp16arith_x16()
68 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vmulc_minmax_ukernel__neonfp16arith_x16()
vdivc-minmax-neonfp16arith-x16.c
47 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vdivc_minmax_ukernel__neonfp16arith_x16()
48 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vdivc_minmax_ukernel__neonfp16arith_x16()
60 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vdivc_minmax_ukernel__neonfp16arith_x16()
68 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vdivc_minmax_ukernel__neonfp16arith_x16()
vrdivc-minmax-neonfp16arith-x16.c
47 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vrdivc_minmax_ukernel__neonfp16arith_x16()
48 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vrdivc_minmax_ukernel__neonfp16arith_x16()
60 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vrdivc_minmax_ukernel__neonfp16arith_x16()
68 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vrdivc_minmax_ukernel__neonfp16arith_x16()
vrsubc-minmax-neonfp16arith-x16.c
47 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vrsubc_minmax_ukernel__neonfp16arith_x16()
48 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vrsubc_minmax_ukernel__neonfp16arith_x16()
60 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vrsubc_minmax_ukernel__neonfp16arith_x16()
68 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vrsubc_minmax_ukernel__neonfp16arith_x16()
vsubc-minmax-neonfp16arith-x16.c
47 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vsubc_minmax_ukernel__neonfp16arith_x16()
48 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vsubc_minmax_ukernel__neonfp16arith_x16()
60 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vsubc_minmax_ukernel__neonfp16arith_x16()
68 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vsubc_minmax_ukernel__neonfp16arith_x16()
vaddc-minmax-neonfp16arith-x16.c
47 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vaddc_minmax_ukernel__neonfp16arith_x16()
48 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vaddc_minmax_ukernel__neonfp16arith_x16()
60 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vaddc_minmax_ukernel__neonfp16arith_x16()
68 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vaddc_minmax_ukernel__neonfp16arith_x16()
vdiv-minmax-neonfp16arith-x16.c
48 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vdiv_minmax_ukernel__neonfp16arith_x16()
49 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vdiv_minmax_ukernel__neonfp16arith_x16()
62 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vdiv_minmax_ukernel__neonfp16arith_x16()
71 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vdiv_minmax_ukernel__neonfp16arith_x16()
vsub-minmax-neonfp16arith-x16.c
48 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vsub_minmax_ukernel__neonfp16arith_x16()
49 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vsub_minmax_ukernel__neonfp16arith_x16()
62 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vsub_minmax_ukernel__neonfp16arith_x16()
71 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vsub_minmax_ukernel__neonfp16arith_x16()
vadd-minmax-neonfp16arith-x16.c
48 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vadd_minmax_ukernel__neonfp16arith_x16()
49 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vadd_minmax_ukernel__neonfp16arith_x16()
62 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vadd_minmax_ukernel__neonfp16arith_x16()
71 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vadd_minmax_ukernel__neonfp16arith_x16()
vmul-minmax-neonfp16arith-x16.c
48 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vmul_minmax_ukernel__neonfp16arith_x16()
49 vy456789AB = vmaxq_f16(vy456789AB, vy_min); in xnn_f16_vmul_minmax_ukernel__neonfp16arith_x16()
62 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vmul_minmax_ukernel__neonfp16arith_x16()
71 vy01234567 = vmaxq_f16(vy01234567, vy_min); in xnn_f16_vmul_minmax_ukernel__neonfp16arith_x16()
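In this f16-vbinary directory, vmaxq_f16 appears in two roles: as the operation itself in the vmax/vmaxc kernels, and as the lower clamp (against vy_min) after add/sub/mul/div in the *-minmax kernels. A bare-bones element-wise maximum over two fp16 arrays, in the spirit of the vmax kernel but not copied from it:

#include <arm_neon.h>
#include <stddef.h>

/* y[i] = max(a[i], b[i]) for n half-precision elements, 8 lanes at a time. */
static void vmax_f16(const __fp16* a, const __fp16* b, __fp16* y, size_t n) {
  size_t i = 0;
  for (; i + 8 <= n; i += 8) {
    const float16x8_t va = vld1q_f16(a + i);
    const float16x8_t vb = vld1q_f16(b + i);
    vst1q_f16(y + i, vmaxq_f16(va, vb));
  }
  for (; i < n; i++) {  /* scalar tail */
    y[i] = a[i] > b[i] ? a[i] : b[i];
  }
}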
/external/XNNPACK/src/f16-vclamp/gen/
vclamp-neonfp16arith-x16.c
39 vacc01234567 = vmaxq_f16(vacc01234567, vy_min); in xnn_f16_vclamp_ukernel__neonfp16arith_x16()
40 vacc89ABCDEF = vmaxq_f16(vacc89ABCDEF, vy_min); in xnn_f16_vclamp_ukernel__neonfp16arith_x16()
50 vacc = vmaxq_f16(vacc, vy_min); in xnn_f16_vclamp_ukernel__neonfp16arith_x16()
56 vacc = vmaxq_f16(vacc, vy_min); in xnn_f16_vclamp_ukernel__neonfp16arith_x16()
/external/XNNPACK/src/f16-gemm/gen/
8x16-minmax-neonfp16arith-ld64.c
361 vacc0x01234567 = vmaxq_f16(vacc0x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
362 vacc1x01234567 = vmaxq_f16(vacc1x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
363 vacc2x01234567 = vmaxq_f16(vacc2x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
364 vacc3x01234567 = vmaxq_f16(vacc3x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
365 vacc4x01234567 = vmaxq_f16(vacc4x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
366 vacc5x01234567 = vmaxq_f16(vacc5x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
367 vacc6x01234567 = vmaxq_f16(vacc6x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
368 vacc7x01234567 = vmaxq_f16(vacc7x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
369 vacc0x89ABCDEF = vmaxq_f16(vacc0x89ABCDEF, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
370 vacc1x89ABCDEF = vmaxq_f16(vacc1x89ABCDEF, vmin); in xnn_f16_gemm_minmax_ukernel_8x16__neonfp16arith_ld64()
[all …]
6x16-minmax-neonfp16arith-ld64.c
293 vacc0x01234567 = vmaxq_f16(vacc0x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
294 vacc1x01234567 = vmaxq_f16(vacc1x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
295 vacc2x01234567 = vmaxq_f16(vacc2x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
296 vacc3x01234567 = vmaxq_f16(vacc3x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
297 vacc4x01234567 = vmaxq_f16(vacc4x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
298 vacc5x01234567 = vmaxq_f16(vacc5x01234567, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
299 vacc0x89ABCDEF = vmaxq_f16(vacc0x89ABCDEF, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
300 vacc1x89ABCDEF = vmaxq_f16(vacc1x89ABCDEF, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
301 vacc2x89ABCDEF = vmaxq_f16(vacc2x89ABCDEF, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
302 vacc3x89ABCDEF = vmaxq_f16(vacc3x89ABCDEF, vmin); in xnn_f16_gemm_minmax_ukernel_6x16__neonfp16arith_ld64()
[all …]
/external/XNNPACK/src/f16-igemm/gen/
8x16-minmax-neonfp16arith-ld64.c
393 vacc0x01234567 = vmaxq_f16(vacc0x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
394 vacc1x01234567 = vmaxq_f16(vacc1x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
395 vacc2x01234567 = vmaxq_f16(vacc2x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
396 vacc3x01234567 = vmaxq_f16(vacc3x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
397 vacc4x01234567 = vmaxq_f16(vacc4x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
398 vacc5x01234567 = vmaxq_f16(vacc5x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
399 vacc6x01234567 = vmaxq_f16(vacc6x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
400 vacc7x01234567 = vmaxq_f16(vacc7x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
401 vacc0x89ABCDEF = vmaxq_f16(vacc0x89ABCDEF, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
402 vacc1x89ABCDEF = vmaxq_f16(vacc1x89ABCDEF, vmin); in xnn_f16_igemm_minmax_ukernel_8x16__neonfp16arith_ld64()
[all …]
6x16-minmax-neonfp16arith-ld64.c
319 vacc0x01234567 = vmaxq_f16(vacc0x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
320 vacc1x01234567 = vmaxq_f16(vacc1x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
321 vacc2x01234567 = vmaxq_f16(vacc2x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
322 vacc3x01234567 = vmaxq_f16(vacc3x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
323 vacc4x01234567 = vmaxq_f16(vacc4x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
324 vacc5x01234567 = vmaxq_f16(vacc5x01234567, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
325 vacc0x89ABCDEF = vmaxq_f16(vacc0x89ABCDEF, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
326 vacc1x89ABCDEF = vmaxq_f16(vacc1x89ABCDEF, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
327 vacc2x89ABCDEF = vmaxq_f16(vacc2x89ABCDEF, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
328 vacc3x89ABCDEF = vmaxq_f16(vacc3x89ABCDEF, vmin); in xnn_f16_igemm_minmax_ukernel_6x16__neonfp16arith_ld64()
[all …]
/external/XNNPACK/src/f16-dwconv/gen/
up32x3-minmax-neonfp16arith.c
103 float16x8_t vacc01234567 = vmaxq_f16(vacc01234567p0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith()
104 float16x8_t vacc89ABCDEF = vmaxq_f16(vacc89ABCDEFp0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith()
105 float16x8_t vaccGHIJKLMN = vmaxq_f16(vaccGHIJKLMNp0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith()
106 float16x8_t vaccOPQRSTUV = vmaxq_f16(vaccOPQRSTUVp0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith()
134 float16x8_t vacc01234567 = vmaxq_f16(vacc01234567p0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith()
156 float16x8_t vacc01234567 = vmaxq_f16(vacc01234567p0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith()
up32x3-minmax-neonfp16arith-acc2.c
108 float16x8_t vacc01234567 = vmaxq_f16(vacc01234567p0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith_acc2()
109 float16x8_t vacc89ABCDEF = vmaxq_f16(vacc89ABCDEFp0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith_acc2()
110 float16x8_t vaccGHIJKLMN = vmaxq_f16(vaccGHIJKLMNp0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith_acc2()
111 float16x8_t vaccOPQRSTUV = vmaxq_f16(vaccOPQRSTUVp0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith_acc2()
141 float16x8_t vacc01234567 = vmaxq_f16(vacc01234567p0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith_acc2()
165 float16x8_t vacc01234567 = vmaxq_f16(vacc01234567p0, vmin); in xnn_f16_dwconv_minmax_ukernel_up32x3__neonfp16arith_acc2()
/external/XNNPACK/src/f16-spmm/gen/
32x1-minmax-neonfp16arith.c
69 vout01234567 = vmaxq_f16(vout01234567, vmin); in xnn_f16_spmm_minmax_ukernel_32x1__neonfp16arith()
70 vout89ABCDEF = vmaxq_f16(vout89ABCDEF, vmin); in xnn_f16_spmm_minmax_ukernel_32x1__neonfp16arith()
71 voutGHIJKLMN = vmaxq_f16(voutGHIJKLMN, vmin); in xnn_f16_spmm_minmax_ukernel_32x1__neonfp16arith()
72 voutOPQRSTUV = vmaxq_f16(voutOPQRSTUV, vmin); in xnn_f16_spmm_minmax_ukernel_32x1__neonfp16arith()
107 vout01234567 = vmaxq_f16(vout01234567, vmin); in xnn_f16_spmm_minmax_ukernel_32x1__neonfp16arith()
108 vout89ABCDEF = vmaxq_f16(vout89ABCDEF, vmin); in xnn_f16_spmm_minmax_ukernel_32x1__neonfp16arith()
135 vout01234567 = vmaxq_f16(vout01234567, vmin); in xnn_f16_spmm_minmax_ukernel_32x1__neonfp16arith()
/external/XNNPACK/src/f16-gemm/gen-inc/
8x16inc-minmax-neonfp16arith-ld64.c
363 vacc0x01234567 = vmaxq_f16(vacc0x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
364 vacc1x01234567 = vmaxq_f16(vacc1x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
365 vacc2x01234567 = vmaxq_f16(vacc2x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
366 vacc3x01234567 = vmaxq_f16(vacc3x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
367 vacc4x01234567 = vmaxq_f16(vacc4x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
368 vacc5x01234567 = vmaxq_f16(vacc5x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
369 vacc6x01234567 = vmaxq_f16(vacc6x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
370 vacc7x01234567 = vmaxq_f16(vacc7x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
371 vacc0x89ABCDEF = vmaxq_f16(vacc0x89ABCDEF, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
372 vacc1x89ABCDEF = vmaxq_f16(vacc1x89ABCDEF, vmin); in xnn_f16_gemminc_minmax_ukernel_8x16__neonfp16arith_ld64()
[all …]
6x16inc-minmax-neonfp16arith-ld64.c
295 vacc0x01234567 = vmaxq_f16(vacc0x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
296 vacc1x01234567 = vmaxq_f16(vacc1x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
297 vacc2x01234567 = vmaxq_f16(vacc2x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
298 vacc3x01234567 = vmaxq_f16(vacc3x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
299 vacc4x01234567 = vmaxq_f16(vacc4x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
300 vacc5x01234567 = vmaxq_f16(vacc5x01234567, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
301 vacc0x89ABCDEF = vmaxq_f16(vacc0x89ABCDEF, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
302 vacc1x89ABCDEF = vmaxq_f16(vacc1x89ABCDEF, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
303 vacc2x89ABCDEF = vmaxq_f16(vacc2x89ABCDEF, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
304 vacc3x89ABCDEF = vmaxq_f16(vacc3x89ABCDEF, vmin); in xnn_f16_gemminc_minmax_ukernel_6x16__neonfp16arith_ld64()
[all …]
