/external/XNNPACK/src/qs8-gemm/gen/ |
D | 1x4-minmax-fp32-scalar-imagic.c | in xnn_qs8_gemm_minmax_fp32_ukernel_1x4__scalar_imagic():
     80  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
     86  vout0x2 = math_max_s32(vout0x2, vmagic_min);
     92  vout0x2 = math_min_s32(vout0x2, vmagic_max);
     98  vout0x2 -= vmagic_bias_less_zero_point;
    104  c0[2] = (int8_t) vout0x2;
    116  vout0x0 = vout0x2;
|
D | 2x4-minmax-fp32-scalar-imagic.c | in xnn_qs8_gemm_minmax_fp32_ukernel_2x4__scalar_imagic():
    107  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    117  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    127  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    137  vout0x2 -= vmagic_bias_less_zero_point;
    147  c0[2] = (int8_t) vout0x2;
    165  vout0x0 = vout0x2;
|
D | 3x4-minmax-fp32-scalar-imagic.c | in xnn_qs8_gemm_minmax_fp32_ukernel_3x4__scalar_imagic():
    134  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    148  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    162  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    176  vout0x2 -= vmagic_bias_less_zero_point;
    190  c0[2] = (int8_t) vout0x2;
    214  vout0x0 = vout0x2;
|
D | 4x4-minmax-fp32-scalar-imagic.c | in xnn_qs8_gemm_minmax_fp32_ukernel_4x4__scalar_imagic():
    161  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    179  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    197  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    215  vout0x2 -= vmagic_bias_less_zero_point;
    233  c0[2] = (int8_t) vout0x2;
    263  vout0x0 = vout0x2;
|
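All of the qs8/qc8/qu8 GEMM and IGEMM hits in this listing follow the same scalar fp32 "imagic" requantization sequence: the int32 accumulator is scaled in fp32, a magic bias pushes the integer part into the float's mantissa, the bits are reinterpreted as int32, clamped against pre-biased min/max constants, and the bias combined with the output zero point is subtracted before the narrowing store. The standalone C sketch below reconstructs that sequence from the listed lines; the helper functions only mimic the assumed semantics of float_as_uint32 / math_max_s32 / math_min_s32, and the scale, zero_point, qmin, and qmax values are illustrative assumptions, not values taken from XNNPACK's parameter structs.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Stand-ins for XNNPACK's helpers (assumed semantics: fp32 bit-cast and s32 clamp). */
static uint32_t float_as_uint32(float f) {
  uint32_t u;
  memcpy(&u, &f, sizeof(u));
  return u;
}
static int32_t math_max_s32(int32_t a, int32_t b) { return a > b ? a : b; }
static int32_t math_min_s32(int32_t a, int32_t b) { return a < b ? a : b; }

int main(void) {
  /* Illustrative quantization parameters (hypothetical, not from any real model). */
  const float   scale      = 0.02f;  /* requantization scale    */
  const int32_t zero_point = -5;     /* output zero point       */
  const int32_t qmin       = -128;   /* output min for int8     */
  const int32_t qmax       = 127;    /* output max for int8     */

  /* Magic constants: 12582912.0f == 0x1.8p+23f; adding any value in roughly
   * [-2^22, 2^22) to it leaves that value's integer part in the mantissa bits. */
  const float   vmagic_bias = 12582912.0f;
  const int32_t vmagic_min  = (int32_t) float_as_uint32(vmagic_bias + (float) (qmin - zero_point));
  const int32_t vmagic_max  = (int32_t) float_as_uint32(vmagic_bias + (float) (qmax - zero_point));
  const int32_t vmagic_bias_less_zero_point =
      (int32_t) float_as_uint32(vmagic_bias) - zero_point;

  const int32_t vacc = 1234;               /* example int32 accumulator          */
  float vfpacc = (float) vacc * scale;     /* scale in fp32                      */
  vfpacc += vmagic_bias;                   /* push the integer into the mantissa */

  int32_t vout = (int32_t) float_as_uint32(vfpacc);  /* reinterpret bits          */
  vout = math_max_s32(vout, vmagic_min);             /* clamp to output min       */
  vout = math_min_s32(vout, vmagic_max);             /* clamp to output max       */
  vout -= vmagic_bias_less_zero_point;               /* drop bias, add zero point */

  const int8_t q = (int8_t) vout;          /* narrowing store, like c0[2] = ...  */
  printf("quantized: %d\n", q);
  return 0;
}

With these made-up values, an accumulator of 1234 quantizes to round(1234 * 0.02) + zero_point = 25 - 5 = 20, which the clamp against qmin/qmax leaves unchanged; the qu8 variants below differ only in casting the final value to uint8_t.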
/external/XNNPACK/src/qc8-gemm/gen/ |
D | 1x4-minmax-fp32-scalar-imagic.c | in xnn_qc8_gemm_minmax_fp32_ukernel_1x4__scalar_imagic():
     84  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
     90  vout0x2 = math_max_s32(vout0x2, vmagic_min);
     96  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    102  vout0x2 -= vmagic_bias_less_zero_point;
    108  c0[2] = (int8_t) vout0x2;
    120  vout0x0 = vout0x2;
|
D | 2x4-minmax-fp32-scalar-imagic.c | in xnn_qc8_gemm_minmax_fp32_ukernel_2x4__scalar_imagic():
    111  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    121  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    131  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    141  vout0x2 -= vmagic_bias_less_zero_point;
    151  c0[2] = (int8_t) vout0x2;
    169  vout0x0 = vout0x2;
|
D | 3x4-minmax-fp32-scalar-imagic.c | in xnn_qc8_gemm_minmax_fp32_ukernel_3x4__scalar_imagic():
    138  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    152  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    166  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    180  vout0x2 -= vmagic_bias_less_zero_point;
    194  c0[2] = (int8_t) vout0x2;
    218  vout0x0 = vout0x2;
|
D | 4x4-minmax-fp32-scalar-imagic.c | in xnn_qc8_gemm_minmax_fp32_ukernel_4x4__scalar_imagic():
    165  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    183  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    201  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    219  vout0x2 -= vmagic_bias_less_zero_point;
    237  c0[2] = (int8_t) vout0x2;
    267  vout0x0 = vout0x2;
|
/external/XNNPACK/src/qu8-gemm/gen/ |
D | 1x4-minmax-fp32-scalar-imagic.c | in xnn_qu8_gemm_minmax_fp32_ukernel_1x4__scalar_imagic():
     81  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
     87  vout0x2 = math_max_s32(vout0x2, vmagic_min);
     93  vout0x2 = math_min_s32(vout0x2, vmagic_max);
     99  vout0x2 -= vmagic_bias_less_zero_point;
    105  c0[2] = (uint8_t) vout0x2;
    117  vout0x0 = vout0x2;
|
D | 2x4-minmax-fp32-scalar-imagic.c | in xnn_qu8_gemm_minmax_fp32_ukernel_2x4__scalar_imagic():
    108  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    118  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    128  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    138  vout0x2 -= vmagic_bias_less_zero_point;
    148  c0[2] = (uint8_t) vout0x2;
    166  vout0x0 = vout0x2;
|
D | 3x4-minmax-fp32-scalar-imagic.c | in xnn_qu8_gemm_minmax_fp32_ukernel_3x4__scalar_imagic():
    135  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    149  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    163  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    177  vout0x2 -= vmagic_bias_less_zero_point;
    191  c0[2] = (uint8_t) vout0x2;
    215  vout0x0 = vout0x2;
|
D | 4x4-minmax-fp32-scalar-imagic.c | in xnn_qu8_gemm_minmax_fp32_ukernel_4x4__scalar_imagic():
    162  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    180  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    198  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    216  vout0x2 -= vmagic_bias_less_zero_point;
    234  c0[2] = (uint8_t) vout0x2;
    264  vout0x0 = vout0x2;
|
/external/XNNPACK/src/qs8-igemm/gen/ |
D | 1x4-minmax-fp32-scalar-imagic.c | in xnn_qs8_igemm_minmax_fp32_ukernel_1x4__scalar_imagic():
     97  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    103  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    109  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    115  vout0x2 -= vmagic_bias_less_zero_point;
    121  c0[2] = (int8_t) vout0x2;
    132  vout0x0 = vout0x2;
|
D | 2x4-minmax-fp32-scalar-imagic.c | in xnn_qs8_igemm_minmax_fp32_ukernel_2x4__scalar_imagic():
    127  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    137  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    147  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    157  vout0x2 -= vmagic_bias_less_zero_point;
    171  c0[2] = (int8_t) vout0x2;
    187  vout0x0 = vout0x2;
|
D | 3x4-minmax-fp32-scalar-imagic.c | in xnn_qs8_igemm_minmax_fp32_ukernel_3x4__scalar_imagic():
    157  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    171  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    185  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    199  vout0x2 -= vmagic_bias_less_zero_point;
    221  c0[2] = (int8_t) vout0x2;
    242  vout0x0 = vout0x2;
|
D | 4x4-minmax-fp32-scalar-imagic.c | in xnn_qs8_igemm_minmax_fp32_ukernel_4x4__scalar_imagic():
    187  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    205  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    223  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    241  vout0x2 -= vmagic_bias_less_zero_point;
    271  c0[2] = (int8_t) vout0x2;
    297  vout0x0 = vout0x2;
|
/external/XNNPACK/src/qu8-igemm/gen/ |
D | 1x4-minmax-fp32-scalar-imagic.c | in xnn_qu8_igemm_minmax_fp32_ukernel_1x4__scalar_imagic():
     98  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    104  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    110  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    116  vout0x2 -= vmagic_bias_less_zero_point;
    122  c0[2] = (uint8_t) vout0x2;
    133  vout0x0 = vout0x2;
|
D | 2x4-minmax-fp32-scalar-imagic.c | in xnn_qu8_igemm_minmax_fp32_ukernel_2x4__scalar_imagic():
    128  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    138  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    148  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    158  vout0x2 -= vmagic_bias_less_zero_point;
    172  c0[2] = (uint8_t) vout0x2;
    188  vout0x0 = vout0x2;
|
D | 3x4-minmax-fp32-scalar-imagic.c | in xnn_qu8_igemm_minmax_fp32_ukernel_3x4__scalar_imagic():
    158  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    172  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    186  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    200  vout0x2 -= vmagic_bias_less_zero_point;
    222  c0[2] = (uint8_t) vout0x2;
    243  vout0x0 = vout0x2;
|
D | 4x4-minmax-fp32-scalar-imagic.c | in xnn_qu8_igemm_minmax_fp32_ukernel_4x4__scalar_imagic():
    188  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    206  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    224  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    242  vout0x2 -= vmagic_bias_less_zero_point;
    272  c0[2] = (uint8_t) vout0x2;
    298  vout0x0 = vout0x2;
|
/external/XNNPACK/src/qc8-igemm/gen/ |
D | 1x4-minmax-fp32-scalar-imagic.c | in xnn_qc8_igemm_minmax_fp32_ukernel_1x4__scalar_imagic():
    101  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    107  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    113  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    119  vout0x2 -= vmagic_bias_less_zero_point;
    125  c0[2] = (int8_t) vout0x2;
    136  vout0x0 = vout0x2;
|
D | 2x4-minmax-fp32-scalar-imagic.c | in xnn_qc8_igemm_minmax_fp32_ukernel_2x4__scalar_imagic():
    131  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    141  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    151  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    161  vout0x2 -= vmagic_bias_less_zero_point;
    175  c0[2] = (int8_t) vout0x2;
    191  vout0x0 = vout0x2;
|
D | 3x4-minmax-fp32-scalar-imagic.c | in xnn_qc8_igemm_minmax_fp32_ukernel_3x4__scalar_imagic():
    161  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    175  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    189  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    203  vout0x2 -= vmagic_bias_less_zero_point;
    225  c0[2] = (int8_t) vout0x2;
    246  vout0x0 = vout0x2;
|
D | 4x4-minmax-fp32-scalar-imagic.c | in xnn_qc8_igemm_minmax_fp32_ukernel_4x4__scalar_imagic():
    191  int32_t vout0x2 = (int32_t) float_as_uint32(vfpacc0x2);  (local)
    209  vout0x2 = math_max_s32(vout0x2, vmagic_min);
    227  vout0x2 = math_min_s32(vout0x2, vmagic_max);
    245  vout0x2 -= vmagic_bias_less_zero_point;
    275  c0[2] = (int8_t) vout0x2;
    301  vout0x0 = vout0x2;
|
/external/XNNPACK/src/f32-spmm/gen/ |
D | 8x4-minmax-scalar.c | in xnn_f32_spmm_minmax_ukernel_8x4__scalar():
    139  float vout0x2 = math_min_f32(vacc0x2, vmax);  (local)
    171  vout0x2 = math_max_f32(vout0x2, vmin);
    213  output[0] = vout0x2;
    362  float vout0x2 = math_min_f32(vacc0x2, vmax);  (local)
    378  vout0x2 = math_max_f32(vout0x2, vmin);
    396  output[0] = vout0x2;
    489  float vout0x2 = math_min_f32(vacc0x2, vmax);  (local)
    497  vout0x2 = math_max_f32(vout0x2, vmin);
    507  output[0] = vout0x2;
    573  float vout0x2 = math_min_f32(vacc0x2, vmax);  (local)
    [all …]
|
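The f32-spmm hits above differ from the quantized kernels only in that the accumulator is already a float: each output is clamped to [vmin, vmax] (min against vmax first, then max against vmin, matching the listed line order) and stored directly. A minimal sketch, assuming math_min_f32 / math_max_f32 are ordinary scalar min/max and using made-up clamp bounds:

#include <stdio.h>

/* Assumed semantics of XNNPACK's math_min_f32 / math_max_f32: plain fp32 min/max. */
static float math_min_f32(float a, float b) { return a < b ? a : b; }
static float math_max_f32(float a, float b) { return a > b ? a : b; }

int main(void) {
  const float vmin = -1.0f, vmax = 1.0f;  /* illustrative output_min / output_max */
  const float vacc0x2 = 3.25f;            /* example fp32 accumulator             */

  /* Same order as the listed lines: clamp against vmax first, then against vmin. */
  float vout0x2 = math_min_f32(vacc0x2, vmax);
  vout0x2 = math_max_f32(vout0x2, vmin);

  printf("%f\n", vout0x2);                /* prints 1.000000 */
  return 0;
}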