/external/XNNPACK/src/f16-vsigmoid/gen/ |
D | vsigmoid-neonfp16arith-rr2-p2-div-x64.c |
       47  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
       48  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
       49  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
       50  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
       51  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
       52  const float16x8_t vz5 = vabsq_f16(vx5);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
       53  const float16x8_t vz6 = vabsq_f16(vx6);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
       54  const float16x8_t vz7 = vabsq_f16(vx7);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
      185  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
      209  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x64()
|
D | vsigmoid-neonfp16arith-rr2-p2-div-x56.c |
       46  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x56()
       47  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x56()
       48  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x56()
       49  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x56()
       50  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x56()
       51  const float16x8_t vz5 = vabsq_f16(vx5);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x56()
       52  const float16x8_t vz6 = vabsq_f16(vx6);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x56()
      169  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x56()
      193  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x56()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1fma-x64.c |
       47  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
       48  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
       49  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
       50  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
       51  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
       52  const float16x8_t vz5 = vabsq_f16(vx5);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
       53  const float16x8_t vz6 = vabsq_f16(vx6);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
       54  const float16x8_t vz7 = vabsq_f16(vx7);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
      212  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
      240  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x64()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x64.c |
       47  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
       48  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
       49  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
       50  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
       51  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
       52  const float16x8_t vz5 = vabsq_f16(vx5);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
       53  const float16x8_t vz6 = vabsq_f16(vx6);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
       54  const float16x8_t vz7 = vabsq_f16(vx7);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
      212  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
      240  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x64()
|
D | vsigmoid-neonfp16arith-rr2-p2-div-x48.c |
       45  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x48()
       46  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x48()
       47  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x48()
       48  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x48()
       49  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x48()
       50  const float16x8_t vz5 = vabsq_f16(vx5);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x48()
      153  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x48()
      177  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x48()
|
D | vsigmoid-neonfp16arith-rr2-p2-div-x40.c |
       44  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x40()
       45  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x40()
       46  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x40()
       47  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x40()
       48  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x40()
      137  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x40()
      161  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x40()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x56.c |
       46  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x56()
       47  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x56()
       48  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x56()
       49  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x56()
       50  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x56()
       51  const float16x8_t vz5 = vabsq_f16(vx5);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x56()
       52  const float16x8_t vz6 = vabsq_f16(vx6);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x56()
      193  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x56()
      221  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x56()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1fma-x56.c |
       46  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x56()
       47  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x56()
       48  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x56()
       49  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x56()
       50  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x56()
       51  const float16x8_t vz5 = vabsq_f16(vx5);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x56()
       52  const float16x8_t vz6 = vabsq_f16(vx6);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x56()
      193  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x56()
      221  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x56()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1fma-x48.c |
       45  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x48()
       46  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x48()
       47  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x48()
       48  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x48()
       49  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x48()
       50  const float16x8_t vz5 = vabsq_f16(vx5);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x48()
      174  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x48()
      202  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x48()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x48.c |
       45  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x48()
       46  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x48()
       47  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x48()
       48  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x48()
       49  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x48()
       50  const float16x8_t vz5 = vabsq_f16(vx5);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x48()
      174  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x48()
      202  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x48()
|
D | vsigmoid-neonfp16arith-rr2-p2-div-x32.c |
       43  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x32()
       44  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x32()
       45  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x32()
       46  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x32()
      121  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x32()
      145  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x32()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x40.c |
       44  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x40()
       45  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x40()
       46  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x40()
       47  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x40()
       48  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x40()
      155  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x40()
      183  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x40()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1fma-x40.c |
       44  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x40()
       45  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x40()
       46  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x40()
       47  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x40()
       48  const float16x8_t vz4 = vabsq_f16(vx4);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x40()
      155  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x40()
      183  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x40()
|
D | vsigmoid-neonfp16arith-rr2-p2-div-x24.c |
       42  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x24()
       43  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x24()
       44  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x24()
      105  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x24()
      129  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x24()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1fma-x32.c |
       43  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x32()
       44  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x32()
       45  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x32()
       46  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x32()
      136  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x32()
      164  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x32()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x32.c |
       43  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x32()
       44  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x32()
       45  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x32()
       46  const float16x8_t vz3 = vabsq_f16(vx3);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x32()
      136  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x32()
      164  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x32()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x24.c |
       42  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x24()
       43  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x24()
       44  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x24()
      117  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x24()
      145  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x24()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1fma-x24.c |
       42  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x24()
       43  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x24()
       44  const float16x8_t vz2 = vabsq_f16(vx2);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x24()
      117  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x24()
      145  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x24()
|
D | vsigmoid-neonfp16arith-rr2-p2-div-x16.c |
       41  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x16()
       42  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x16()
       89  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x16()
      113  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x16()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x16.c |
       41  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x16()
       42  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x16()
       98  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x16()
      126  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x16()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1fma-x16.c |
       41  const float16x8_t vz0 = vabsq_f16(vx0);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x16()
       42  const float16x8_t vz1 = vabsq_f16(vx1);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x16()
       98  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x16()
      126  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x16()
|
D | vsigmoid-neonfp16arith-rr2-p2-div-x8.c |
       40  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x8()
       64  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_div_x8()
|
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x8.c |
       40  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x8()
       68  const float16x8_t vz = vabsq_f16(vx);  in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x8()
|
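Every f16-vsigmoid match above is the same step of the kernel: sigmoid is evaluated through z = |x| (the vabsq_f16 call the index points at), so the exponential that follows only ever sees a non-positive argument and cannot overflow. The scalar C sketch below illustrates the rr2-p2 structure under stated assumptions: the function name is hypothetical, and the coefficients are plain-float Taylor/ln(2)-split stand-ins rather than the half-precision constants the generated kernels load. The div suffix means the final step is a true division; the nr1fma and nr1recps variants instead refine a reciprocal estimate with one Newton-Raphson step (using FMA or VRECPS, respectively).

/* sigmoid_rr2_p2_div_sketch: hypothetical scalar model of the scheme the
 * kernels vectorize 8 half-precision lanes at a time. */
#include <math.h>
#include <stdio.h>

static float sigmoid_rr2_p2_div_sketch(float x) {
  /* The vabsq_f16 step from the listing: z = |x|, so exp(-z) <= 1. */
  const float z = fabsf(x);

  /* "rr2": reduce z = n*ln(2) + r with ln(2) split into hi/lo parts,
     so exp(-z) = 2^-n * exp(-r) with |r| small. */
  const float n = roundf(z * 1.4426950f);   /* z / ln(2) */
  float r = z - n * 0.6933594f;             /* ln2_hi (illustrative) */
  r -= n * -2.1219444e-4f;                  /* ln2_lo (illustrative) */

  /* "p2": degree-2 polynomial for exp(-r); Taylor stand-in here. */
  const float p = 1.0f - r + 0.5f * r * r;
  const float e = ldexpf(p, (int) -n);      /* 2^-n * exp(-r) */

  /* "div": sigmoid(-z) = exp(-z) / (1 + exp(-z)). */
  const float f = e / (1.0f + e);

  /* Undo the |x|: sigmoid(x) = 1 - sigmoid(-x) when x > 0. */
  return x > 0.0f ? 1.0f - f : f;
}

int main(void) {
  for (int i = -4; i <= 4; i += 2) {
    const float x = (float) i;
    printf("sigmoid(%+d) ~ %.6f (libm: %.6f)\n",
           i, sigmoid_rr2_p2_div_sketch(x), 1.0f / (1.0f + expf(-x)));
  }
  return 0;
}

This also explains the match pattern within each file: the vz0..vzN hits at the top come from the unrolled main loop, while the two later single-vz hits are the one-vector remainder loop and the final partial-vector tail.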
/external/XNNPACK/src/f16-vunary/gen/ |
D | vabs-neonfp16arith-x8.c |
       33  vacc = vabsq_f16(vacc);  in xnn_f16_vabs_ukernel__neonfp16arith_x8()
       38  vacc = vabsq_f16(vacc);  in xnn_f16_vabs_ukernel__neonfp16arith_x8()
|
D | vabs-neonfp16arith-x16.c |
       33  vacc = vabsq_f16(vacc);  in xnn_f16_vabs_ukernel__neonfp16arith_x16()
       38  vacc = vabsq_f16(vacc);  in xnn_f16_vabs_ukernel__neonfp16arith_x16()
|
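The two f16-vunary matches per file are simpler: vabsq_f16 is the whole operation, applied once in the vectorized main loop (line 33) and once more in the tail (line 38). The sketch below shows the rough shape of the x8 kernel, assuming an armv8.2-a+fp16 toolchain; the function name and signature are illustrative, and where the generated kernel handles the tail with partial vector loads/stores (hence its second vabsq_f16), this sketch falls back to scalar code.

#include <arm_neon.h>   /* needs -march=armv8.2-a+fp16 or equivalent */
#include <math.h>
#include <stddef.h>

/* Hypothetical stand-in for the shape of
 * xnn_f16_vabs_ukernel__neonfp16arith_x8 (the real kernel also takes a
 * params argument). batch is in elements here. */
void f16_vabs_x8_sketch(size_t batch, const float16_t* input,
                        float16_t* output) {
  /* Main loop: one 8-lane half-precision vector per iteration. */
  for (; batch >= 8; batch -= 8) {
    float16x8_t vacc = vld1q_f16(input); input += 8;
    vacc = vabsq_f16(vacc);
    vst1q_f16(output, vacc); output += 8;
  }
  /* Simplified scalar tail for the remaining 0..7 elements. */
  for (; batch != 0; batch -= 1) {
    const float x = (float) *input++;
    *output++ = (float16_t) fabsf(x);
  }
}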