Home
last modified time | relevance | path

Searched refs:__ssat (Results 1 – 25 of 31) sorted by relevance

Pages: 1 2

/external/XNNPACK/src/qs8-vcvt/gen/
Dvcvt-armsimd32-x8.c48 vacc0 = __ssat(math_asr_s32(vacc0, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
49 vacc1 = __ssat(math_asr_s32(vacc1, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
50 vacc2 = __ssat(math_asr_s32(vacc2, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
51 vacc3 = __ssat(math_asr_s32(vacc3, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
52 vacc4 = __ssat(math_asr_s32(vacc4, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
53 vacc5 = __ssat(math_asr_s32(vacc5, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
54 vacc6 = __ssat(math_asr_s32(vacc6, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
55 vacc7 = __ssat(math_asr_s32(vacc7, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
79 vacc0 = __ssat(math_asr_s32(vacc0, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
80 vacc1 = __ssat(math_asr_s32(vacc1, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x8()
[all …]
Dvcvt-armsimd32-x4.c41 vacc0 = __ssat(math_asr_s32(vacc0, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x4()
42 vacc1 = __ssat(math_asr_s32(vacc1, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x4()
43 vacc2 = __ssat(math_asr_s32(vacc2, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x4()
44 vacc3 = __ssat(math_asr_s32(vacc3, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x4()
62 vacc0 = __ssat(math_asr_s32(vacc0, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x4()
63 vacc1 = __ssat(math_asr_s32(vacc1, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x4()
68 vacc0 = __ssat(math_asr_s32(vacc2, 1), 8); in xnn_qs8_vcvt_ukernel__armsimd32_x4()
/external/XNNPACK/src/qs8-vlrelu/gen/
Dvlrelu-armsimd32-x8.c58 vacc0 = __ssat(math_asr_s32(vacc0, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
59 vacc1 = __ssat(math_asr_s32(vacc1, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
60 vacc2 = __ssat(math_asr_s32(vacc2, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
61 vacc3 = __ssat(math_asr_s32(vacc3, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
62 vacc4 = __ssat(math_asr_s32(vacc4, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
63 vacc5 = __ssat(math_asr_s32(vacc5, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
64 vacc6 = __ssat(math_asr_s32(vacc6, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
65 vacc7 = __ssat(math_asr_s32(vacc7, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
94 vacc0 = __ssat(math_asr_s32(vacc0, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
95 vacc1 = __ssat(math_asr_s32(vacc1, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x8()
[all …]
Dvlrelu-armsimd32-x4.c47 vacc0 = __ssat(math_asr_s32(vacc0, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x4()
48 vacc1 = __ssat(math_asr_s32(vacc1, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x4()
49 vacc2 = __ssat(math_asr_s32(vacc2, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x4()
50 vacc3 = __ssat(math_asr_s32(vacc3, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x4()
73 vacc0 = __ssat(math_asr_s32(vacc0, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x4()
74 vacc1 = __ssat(math_asr_s32(vacc1, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x4()
79 vacc0 = __ssat(math_asr_s32(vacc2, 8), 8); in xnn_qs8_vlrelu_ukernel__armsimd32_x4()
/external/arm-neon-tests/
Dref_integer.c219 sres = __ssat(svar1, 30); in exec_integer()
225 sres = __ssat(svar1, 19); in exec_integer()
231 sres = __ssat(svar1, 29); in exec_integer()
237 sres = __ssat(svar1, 12); in exec_integer()
243 sres = __ssat(svar1, 32); in exec_integer()
249 sres = __ssat(svar1, 1); in exec_integer()
/external/XNNPACK/src/qs8-gemm/gen/
D2x2c4-minmax-fp32-armsimd32.c114 vout0x0 = __ssat(vout0x0, 8); in xnn_qs8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
115 vout0x1 = __ssat(vout0x1, 8); in xnn_qs8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
116 vout1x0 = __ssat(vout1x0, 8); in xnn_qs8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
117 vout1x1 = __ssat(vout1x1, 8); in xnn_qs8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c89 vout0x0 = __ssat(vout0x0, 8); in xnn_qs8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32()
90 vout0x1 = __ssat(vout0x1, 8); in xnn_qs8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32()
D2x1c4-minmax-fp32-armsimd32.c93 vout0x0 = __ssat(vout0x0, 8); in xnn_qs8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
94 vout1x0 = __ssat(vout1x0, 8); in xnn_qs8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c76 vout0x0 = __ssat(vout0x0, 8); in xnn_qs8_gemm_minmax_fp32_ukernel_1x1c4__armsimd32()
/external/XNNPACK/src/qc8-gemm/gen/
D2x2c4-minmax-fp32-armsimd32.c116 vout0x0 = __ssat(vout0x0, 8); in xnn_qc8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
117 vout0x1 = __ssat(vout0x1, 8); in xnn_qc8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
118 vout1x0 = __ssat(vout1x0, 8); in xnn_qc8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
119 vout1x1 = __ssat(vout1x1, 8); in xnn_qc8_gemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c91 vout0x0 = __ssat(vout0x0, 8); in xnn_qc8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32()
92 vout0x1 = __ssat(vout0x1, 8); in xnn_qc8_gemm_minmax_fp32_ukernel_1x2c4__armsimd32()
D2x1c4-minmax-fp32-armsimd32.c94 vout0x0 = __ssat(vout0x0, 8); in xnn_qc8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
95 vout1x0 = __ssat(vout1x0, 8); in xnn_qc8_gemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c77 vout0x0 = __ssat(vout0x0, 8); in xnn_qc8_gemm_minmax_fp32_ukernel_1x1c4__armsimd32()
/external/XNNPACK/src/qs8-igemm/gen/
D2x2c4-minmax-fp32-armsimd32.c134 vout0x0 = __ssat(vout0x0, 8); in xnn_qs8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
135 vout0x1 = __ssat(vout0x1, 8); in xnn_qs8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
136 vout1x0 = __ssat(vout1x0, 8); in xnn_qs8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
137 vout1x1 = __ssat(vout1x1, 8); in xnn_qs8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c106 vout0x0 = __ssat(vout0x0, 8); in xnn_qs8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32()
107 vout0x1 = __ssat(vout0x1, 8); in xnn_qs8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32()
D2x1c4-minmax-fp32-armsimd32.c113 vout0x0 = __ssat(vout0x0, 8); in xnn_qs8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
114 vout1x0 = __ssat(vout1x0, 8); in xnn_qs8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c93 vout0x0 = __ssat(vout0x0, 8); in xnn_qs8_igemm_minmax_fp32_ukernel_1x1c4__armsimd32()
/external/XNNPACK/src/qc8-igemm/gen/
D2x2c4-minmax-fp32-armsimd32.c136 vout0x0 = __ssat(vout0x0, 8); in xnn_qc8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
137 vout0x1 = __ssat(vout0x1, 8); in xnn_qc8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
138 vout1x0 = __ssat(vout1x0, 8); in xnn_qc8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
139 vout1x1 = __ssat(vout1x1, 8); in xnn_qc8_igemm_minmax_fp32_ukernel_2x2c4__armsimd32()
D1x2c4-minmax-fp32-armsimd32.c108 vout0x0 = __ssat(vout0x0, 8); in xnn_qc8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32()
109 vout0x1 = __ssat(vout0x1, 8); in xnn_qc8_igemm_minmax_fp32_ukernel_1x2c4__armsimd32()
D2x1c4-minmax-fp32-armsimd32.c114 vout0x0 = __ssat(vout0x0, 8); in xnn_qc8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
115 vout1x0 = __ssat(vout1x0, 8); in xnn_qc8_igemm_minmax_fp32_ukernel_2x1c4__armsimd32()
D1x1c4-minmax-fp32-armsimd32.c94 vout0x0 = __ssat(vout0x0, 8); in xnn_qc8_igemm_minmax_fp32_ukernel_1x1c4__armsimd32()
/external/clang/test/Sema/
Darm_acle.c27 return __ssat(t, v); // expected-error-re {{argument to {{.*}} must be a constant integer}} in test_ssat_const_diag()
/external/clang/test/CodeGen/
Darm_acle.c275 return __ssat(t, 1); in test_ssat()
/external/XNNPACK/src/qs8-vcvt/
Darmsimd32.c.in24 $__XSAT = {"QS8": "__ssat", "QU8": "__usat"}[DATATYPE]
/external/clang/lib/Headers/
Darm_acle.h235 #define __ssat(x, y) __builtin_arm_ssat(x, y) macro

Pages: 1 2