
Searched refs:vout0 (Results 1 – 25 of 414) sorted by relevance


/external/XNNPACK/src/f32-spmm/gen/
4x1-minmax-scalar-pipelined.c
68 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_4x1__scalar_pipelined() local
72 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_4x1__scalar_pipelined()
76 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_4x1__scalar_pipelined()
114 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_4x1__scalar_pipelined() local
116 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_4x1__scalar_pipelined()
118 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_4x1__scalar_pipelined()
148 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_4x1__scalar_pipelined() local
149 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_4x1__scalar_pipelined()
150 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_4x1__scalar_pipelined()
8x1-minmax-scalar-pipelined.c
84 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined() local
92 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined()
100 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined()
150 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined() local
154 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined()
158 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined()
194 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined() local
196 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined()
198 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined()
228 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_8x1__scalar_pipelined() local
[all …]
2x1-minmax-scalar-pipelined.c
60 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_2x1__scalar_pipelined() local
62 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_2x1__scalar_pipelined()
64 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_2x1__scalar_pipelined()
96 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_2x1__scalar_pipelined() local
97 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_2x1__scalar_pipelined()
98 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_2x1__scalar_pipelined()
4x1-minmax-scalar.c
101 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_4x1__scalar() local
105 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_4x1__scalar()
109 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_4x1__scalar()
168 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_4x1__scalar() local
170 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_4x1__scalar()
172 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_4x1__scalar()
218 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_4x1__scalar() local
219 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_4x1__scalar()
220 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_4x1__scalar()
2x1-minmax-scalar.c
81 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_2x1__scalar() local
83 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_2x1__scalar()
85 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_2x1__scalar()
133 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_2x1__scalar() local
134 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_2x1__scalar()
135 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_2x1__scalar()
8x1-minmax-scalar.c
141 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_8x1__scalar() local
149 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_8x1__scalar()
157 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_8x1__scalar()
238 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_8x1__scalar() local
242 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_8x1__scalar()
246 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_8x1__scalar()
303 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_8x1__scalar() local
305 vout0 = math_max_f32(vout0, vmin); in xnn_f32_spmm_minmax_ukernel_8x1__scalar()
307 output[0] = vout0; in xnn_f32_spmm_minmax_ukernel_8x1__scalar()
353 float vout0 = math_min_f32(vacc0, vmax); in xnn_f32_spmm_minmax_ukernel_8x1__scalar() local
[all …]
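
All of the f32-spmm matches above hit the same output-clamping epilogue of XNNPACK's scalar SpMM minmax microkernels: each accumulator is bounded by vmax, then by vmin, and stored. A minimal standalone sketch of that pattern, with illustrative (non-XNNPACK) names and fminf/fmaxf standing in for the library's math_min_f32/math_max_f32 helpers:

#include <math.h>
#include <stddef.h>

/* Sketch only: clamp each accumulator to [vmin, vmax] and store it,
   mirroring the min-then-max-then-store sequence in the matches above. */
static void clamp_store_f32(float* output, const float* vacc, size_t n,
                            float vmin, float vmax) {
  for (size_t i = 0; i < n; i++) {
    float vout = fminf(vacc[i], vmax);  /* apply the upper bound first */
    vout = fmaxf(vout, vmin);           /* then the lower bound */
    output[i] = vout;                   /* store the clamped value */
  }
}
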
/external/XNNPACK/src/s16-window/gen/
scalar-x2.c
45 int32_t vout0 = (int32_t) vi0 * (int32_t) w0; in xnn_s16_window_ukernel__scalar_x2() local
48 vout0 = math_asr_s32(vout0, shift); in xnn_s16_window_ukernel__scalar_x2()
51 vout0 = math_max_s32(vout0, INT16_MIN); in xnn_s16_window_ukernel__scalar_x2()
54 vout0 = math_min_s32(vout0, INT16_MAX); in xnn_s16_window_ukernel__scalar_x2()
57 output[0] = (int16_t) vout0; in xnn_s16_window_ukernel__scalar_x2()
scalar-x3.c
47 int32_t vout0 = (int32_t) vi0 * (int32_t) w0; in xnn_s16_window_ukernel__scalar_x3() local
51 vout0 = math_asr_s32(vout0, shift); in xnn_s16_window_ukernel__scalar_x3()
55 vout0 = math_max_s32(vout0, INT16_MIN); in xnn_s16_window_ukernel__scalar_x3()
59 vout0 = math_min_s32(vout0, INT16_MAX); in xnn_s16_window_ukernel__scalar_x3()
63 output[0] = (int16_t) vout0; in xnn_s16_window_ukernel__scalar_x3()
scalar-x4.c
49 int32_t vout0 = (int32_t) vi0 * (int32_t) w0; in xnn_s16_window_ukernel__scalar_x4() local
54 vout0 = math_asr_s32(vout0, shift); in xnn_s16_window_ukernel__scalar_x4()
59 vout0 = math_max_s32(vout0, INT16_MIN); in xnn_s16_window_ukernel__scalar_x4()
64 vout0 = math_min_s32(vout0, INT16_MAX); in xnn_s16_window_ukernel__scalar_x4()
69 output[0] = (int16_t) vout0; in xnn_s16_window_ukernel__scalar_x4()
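
The s16-window matches show the per-sample windowing step: the int16 input is widened to 32 bits, multiplied by the int16 window weight, arithmetic-shifted right, and saturated back to the int16 range. A hedged standalone sketch with illustrative names; the signed >> stands in for math_asr_s32 and assumes the compiler implements it as an arithmetic shift:

#include <stdint.h>
#include <stddef.h>

/* Sketch only: apply a window to int16 samples, following the
   widen / multiply / shift / clamp sequence in the matches above. */
static void window_s16(int16_t* output, const int16_t* input,
                       const int16_t* weights, size_t n, uint32_t shift) {
  for (size_t i = 0; i < n; i++) {
    int32_t vout = (int32_t) input[i] * (int32_t) weights[i];
    vout >>= shift;                              /* drop the scaling bits */
    vout = vout < INT16_MIN ? INT16_MIN : vout;  /* saturate low */
    vout = vout > INT16_MAX ? INT16_MAX : vout;  /* saturate high */
    output[i] = (int16_t) vout;
  }
}
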
/external/XNNPACK/src/qs8-vaddc/gen/
minmax-scalar-x2.c
39 int32_t vout0 = math_asr_s32(vacc0, vshift); in xnn_qs8_vaddc_minmax_ukernel__scalar_x2() local
42 vout0 = math_max_s32(vout0, voutput_min_less_zero_point); in xnn_qs8_vaddc_minmax_ukernel__scalar_x2()
45 vout0 = math_min_s32(vout0, voutput_max_less_zero_point); in xnn_qs8_vaddc_minmax_ukernel__scalar_x2()
48 vout0 += voutput_zero_point; in xnn_qs8_vaddc_minmax_ukernel__scalar_x2()
51 output[0] = (int8_t) vout0; in xnn_qs8_vaddc_minmax_ukernel__scalar_x2()
minmax-scalar-x4.c
43 int32_t vout0 = math_asr_s32(vacc0, vshift); in xnn_qs8_vaddc_minmax_ukernel__scalar_x4() local
48 vout0 = math_max_s32(vout0, voutput_min_less_zero_point); in xnn_qs8_vaddc_minmax_ukernel__scalar_x4()
53 vout0 = math_min_s32(vout0, voutput_max_less_zero_point); in xnn_qs8_vaddc_minmax_ukernel__scalar_x4()
58 vout0 += voutput_zero_point; in xnn_qs8_vaddc_minmax_ukernel__scalar_x4()
63 output[0] = (int8_t) vout0; in xnn_qs8_vaddc_minmax_ukernel__scalar_x4()
/external/XNNPACK/src/qu8-vaddc/gen/
minmax-scalar-x2.c
39 int32_t vout0 = math_asr_s32(vacc0, vshift); in xnn_qu8_vaddc_minmax_ukernel__scalar_x2() local
42 vout0 = math_max_s32(vout0, voutput_min_less_zero_point); in xnn_qu8_vaddc_minmax_ukernel__scalar_x2()
45 vout0 = math_min_s32(vout0, voutput_max_less_zero_point); in xnn_qu8_vaddc_minmax_ukernel__scalar_x2()
48 vout0 += voutput_zero_point; in xnn_qu8_vaddc_minmax_ukernel__scalar_x2()
51 output[0] = (uint8_t) vout0; in xnn_qu8_vaddc_minmax_ukernel__scalar_x2()
minmax-scalar-x4.c
43 int32_t vout0 = math_asr_s32(vacc0, vshift); in xnn_qu8_vaddc_minmax_ukernel__scalar_x4() local
48 vout0 = math_max_s32(vout0, voutput_min_less_zero_point); in xnn_qu8_vaddc_minmax_ukernel__scalar_x4()
53 vout0 = math_min_s32(vout0, voutput_max_less_zero_point); in xnn_qu8_vaddc_minmax_ukernel__scalar_x4()
58 vout0 += voutput_zero_point; in xnn_qu8_vaddc_minmax_ukernel__scalar_x4()
63 output[0] = (uint8_t) vout0; in xnn_qu8_vaddc_minmax_ukernel__scalar_x4()
/external/XNNPACK/src/qs8-vadd/gen/
minmax-scalar-x2.c
45 int32_t vout0 = math_asr_s32(vacc0, vshift); in xnn_qs8_vadd_minmax_ukernel__scalar_x2() local
48 vout0 = math_max_s32(vout0, voutput_min_less_zero_point); in xnn_qs8_vadd_minmax_ukernel__scalar_x2()
51 vout0 = math_min_s32(vout0, voutput_max_less_zero_point); in xnn_qs8_vadd_minmax_ukernel__scalar_x2()
54 vout0 += voutput_zero_point; in xnn_qs8_vadd_minmax_ukernel__scalar_x2()
57 output[0] = (int8_t) vout0; in xnn_qs8_vadd_minmax_ukernel__scalar_x2()
minmax-scalar-x4.c
53 int32_t vout0 = math_asr_s32(vacc0, vshift); in xnn_qs8_vadd_minmax_ukernel__scalar_x4() local
58 vout0 = math_max_s32(vout0, voutput_min_less_zero_point); in xnn_qs8_vadd_minmax_ukernel__scalar_x4()
63 vout0 = math_min_s32(vout0, voutput_max_less_zero_point); in xnn_qs8_vadd_minmax_ukernel__scalar_x4()
68 vout0 += voutput_zero_point; in xnn_qs8_vadd_minmax_ukernel__scalar_x4()
73 output[0] = (int8_t) vout0; in xnn_qs8_vadd_minmax_ukernel__scalar_x4()
/external/XNNPACK/src/qu8-vadd/gen/
minmax-scalar-x2.c
45 int32_t vout0 = math_asr_s32(vacc0, vshift); in xnn_qu8_vadd_minmax_ukernel__scalar_x2() local
48 vout0 = math_max_s32(vout0, voutput_min_less_zero_point); in xnn_qu8_vadd_minmax_ukernel__scalar_x2()
51 vout0 = math_min_s32(vout0, voutput_max_less_zero_point); in xnn_qu8_vadd_minmax_ukernel__scalar_x2()
54 vout0 += voutput_zero_point; in xnn_qu8_vadd_minmax_ukernel__scalar_x2()
57 output[0] = (uint8_t) vout0; in xnn_qu8_vadd_minmax_ukernel__scalar_x2()
minmax-scalar-x4.c
53 int32_t vout0 = math_asr_s32(vacc0, vshift); in xnn_qu8_vadd_minmax_ukernel__scalar_x4() local
58 vout0 = math_max_s32(vout0, voutput_min_less_zero_point); in xnn_qu8_vadd_minmax_ukernel__scalar_x4()
63 vout0 = math_min_s32(vout0, voutput_max_less_zero_point); in xnn_qu8_vadd_minmax_ukernel__scalar_x4()
68 vout0 += voutput_zero_point; in xnn_qu8_vadd_minmax_ukernel__scalar_x4()
73 output[0] = (uint8_t) vout0; in xnn_qu8_vadd_minmax_ukernel__scalar_x4()
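
The qs8/qu8 vaddc and vadd matches above all end in the same requantization epilogue: the 32-bit accumulator is shifted right, clamped against output bounds expressed relative to the zero point, has the zero point added back, and is narrowed to 8 bits. A hedged sketch of that epilogue for the signed (qs8) case, with illustrative names; the signed >> stands in for math_asr_s32:

#include <stdint.h>
#include <stddef.h>

/* Sketch only: requantize 32-bit accumulators to int8, following the
   shift / clamp / add-zero-point / narrow sequence in the matches above. */
static void requantize_qs8(int8_t* output, const int32_t* vacc, size_t n,
                           uint32_t shift, int32_t zero_point,
                           int32_t out_min, int32_t out_max) {
  const int32_t min_less_zp = out_min - zero_point;  /* cf. voutput_min_less_zero_point */
  const int32_t max_less_zp = out_max - zero_point;  /* cf. voutput_max_less_zero_point */
  for (size_t i = 0; i < n; i++) {
    int32_t vout = vacc[i] >> shift;                 /* arithmetic shift right */
    vout = vout < min_less_zp ? min_less_zp : vout;
    vout = vout > max_less_zp ? max_less_zp : vout;
    vout += zero_point;                              /* back into the quantized range */
    output[i] = (int8_t) vout;
  }
}

The qu8 variants differ only in casting the result to uint8_t.
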
/external/XNNPACK/src/qu8-vcvt/gen/
vcvt-scalar-x2.c
32 int32_t vout0 = math_asr_s32(vacc0, 8); in xnn_qu8_vcvt_ukernel__scalar_x2() local
35 vout0 = math_max_s32(vout0, 0); in xnn_qu8_vcvt_ukernel__scalar_x2()
38 vout0 = math_min_s32(vout0, 255); in xnn_qu8_vcvt_ukernel__scalar_x2()
41 y[0] = (uint8_t) vout0; in xnn_qu8_vcvt_ukernel__scalar_x2()
vcvt-scalar-x4.c
36 int32_t vout0 = math_asr_s32(vacc0, 8); in xnn_qu8_vcvt_ukernel__scalar_x4() local
41 vout0 = math_max_s32(vout0, 0); in xnn_qu8_vcvt_ukernel__scalar_x4()
46 vout0 = math_min_s32(vout0, 255); in xnn_qu8_vcvt_ukernel__scalar_x4()
51 y[0] = (uint8_t) vout0; in xnn_qu8_vcvt_ukernel__scalar_x4()
/external/XNNPACK/src/qs8-vcvt/gen/
vcvt-scalar-x2.c
32 int32_t vout0 = math_asr_s32(vacc0, 8); in xnn_qs8_vcvt_ukernel__scalar_x2() local
35 vout0 = math_max_s32(vout0, -128); in xnn_qs8_vcvt_ukernel__scalar_x2()
38 vout0 = math_min_s32(vout0, 127); in xnn_qs8_vcvt_ukernel__scalar_x2()
41 y[0] = (int8_t) vout0; in xnn_qs8_vcvt_ukernel__scalar_x2()
vcvt-scalar-x4.c
36 int32_t vout0 = math_asr_s32(vacc0, 8); in xnn_qs8_vcvt_ukernel__scalar_x4() local
41 vout0 = math_max_s32(vout0, -128); in xnn_qs8_vcvt_ukernel__scalar_x4()
46 vout0 = math_min_s32(vout0, 127); in xnn_qs8_vcvt_ukernel__scalar_x4()
51 y[0] = (int8_t) vout0; in xnn_qs8_vcvt_ukernel__scalar_x4()
/external/XNNPACK/src/qs8-vlrelu/gen/
vlrelu-scalar-select-x2.c
40 int32_t vout0 = math_asr_s32(vacc0, 8); in xnn_qs8_vlrelu_ukernel__scalar_select_x2() local
43 vout0 = math_max_s32(vout0, -128); in xnn_qs8_vlrelu_ukernel__scalar_select_x2()
46 vout0 = math_min_s32(vout0, 127); in xnn_qs8_vlrelu_ukernel__scalar_select_x2()
49 y[0] = (int8_t) vout0; in xnn_qs8_vlrelu_ukernel__scalar_select_x2()
vlrelu-scalar-andxor-x2.c
46 int32_t vout0 = math_asr_s32(vacc0, 8); in xnn_qs8_vlrelu_ukernel__scalar_andxor_x2() local
49 vout0 = math_max_s32(vout0, -128); in xnn_qs8_vlrelu_ukernel__scalar_andxor_x2()
52 vout0 = math_min_s32(vout0, 127); in xnn_qs8_vlrelu_ukernel__scalar_andxor_x2()
55 y[0] = (int8_t) vout0; in xnn_qs8_vlrelu_ukernel__scalar_andxor_x2()
/external/XNNPACK/src/qu8-vlrelu/gen/
vlrelu-scalar-select-x2.c
40 int32_t vout0 = math_asr_s32(vacc0, 8); in xnn_qu8_vlrelu_ukernel__scalar_select_x2() local
43 vout0 = math_max_s32(vout0, 0); in xnn_qu8_vlrelu_ukernel__scalar_select_x2()
46 vout0 = math_min_s32(vout0, 255); in xnn_qu8_vlrelu_ukernel__scalar_select_x2()
49 y[0] = (uint8_t) vout0; in xnn_qu8_vlrelu_ukernel__scalar_select_x2()
vlrelu-scalar-andxor-x2.c
46 int32_t vout0 = math_asr_s32(vacc0, 8); in xnn_qu8_vlrelu_ukernel__scalar_andxor_x2() local
49 vout0 = math_max_s32(vout0, 0); in xnn_qu8_vlrelu_ukernel__scalar_andxor_x2()
52 vout0 = math_min_s32(vout0, 255); in xnn_qu8_vlrelu_ukernel__scalar_andxor_x2()
55 y[0] = (uint8_t) vout0; in xnn_qu8_vlrelu_ukernel__scalar_andxor_x2()
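
The qs8/qu8 vcvt and vlrelu matches above use a fixed right shift of 8 followed by saturation to the full int8 or uint8 range, consistent with an accumulator that carries 8 fractional bits. A hedged sketch of that tail for the unsigned (qu8) case, with illustrative names; the signed >> stands in for math_asr_s32:

#include <stdint.h>
#include <stddef.h>

/* Sketch only: drop 8 fractional bits and saturate to [0, 255], following
   the asr-by-8 / clamp / narrow sequence in the qu8 matches above. */
static void narrow_qu8(uint8_t* y, const int32_t* vacc, size_t n) {
  for (size_t i = 0; i < n; i++) {
    int32_t vout = vacc[i] >> 8;     /* shift out the fractional bits */
    vout = vout < 0 ? 0 : vout;      /* saturate low */
    vout = vout > 255 ? 255 : vout;  /* saturate high */
    y[i] = (uint8_t) vout;
  }
}

The qs8 variants clamp to [-128, 127] and cast to int8_t instead.
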
