/arch/x86/include/asm/atomic64_64.h
    20: static inline s64 arch_atomic64_read(const atomic64_t *v)
    32: static inline void arch_atomic64_set(atomic64_t *v, s64 i)
    44: static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
    58: static inline void arch_atomic64_sub(s64 i, atomic64_t *v)
    74: static inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
   145: static inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v)
   158: static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
   163: static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
   168: static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
   173: static inline s64 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
   [all …]

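These signatures are the 64-bit native case, where a 64-bit counter can be handled with plain aligned loads and stores plus LOCK-prefixed read-modify-write instructions. As a rough userspace model of the semantics only (hypothetical model_* names and C11 atomics, not the header's inline assembly):

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct { _Atomic int64_t counter; } model_atomic64_t;

    /* read and add impose no ordering; add_return is fully ordered and
     * hands back the new value. */
    static int64_t model_atomic64_read(model_atomic64_t *v)
    {
        return atomic_load_explicit(&v->counter, memory_order_relaxed);
    }

    static void model_atomic64_add(int64_t i, model_atomic64_t *v)
    {
        atomic_fetch_add_explicit(&v->counter, i, memory_order_relaxed);
    }

    static int64_t model_atomic64_add_return(int64_t i, model_atomic64_t *v)
    {
        return atomic_fetch_add(&v->counter, i) + i;  /* old value + i == new value */
    }

    int main(void)
    {
        model_atomic64_t v = { 40 };

        model_atomic64_add(1, &v);
        printf("%lld\n", (long long)model_atomic64_add_return(1, &v)); /* prints 42 */
        printf("%lld\n", (long long)model_atomic64_read(&v));          /* prints 42 */
        return 0;
    }
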
/arch/x86/include/asm/atomic64_32.h
    12: s64 __aligned(8) counter;
    74: static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
    87: static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
    89: s64 o;  (in arch_atomic64_xchg())
   105: static inline void arch_atomic64_set(atomic64_t *v, s64 i)
   120: static inline s64 arch_atomic64_read(const atomic64_t *v)
   122: s64 r;  (in arch_atomic64_read())
   134: static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
   145: static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
   153: static inline s64 arch_atomic64_inc_return(atomic64_t *v)
   [all …]

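On 32-bit x86 these 64-bit operations cannot be done with a single plain instruction on the integer registers, which is why the counter carries __aligned(8) and why xchg, cmpxchg and the return variants are real functions; the usual building block is the cmpxchg8b instruction. A portable sketch of a 64-bit exchange written as a compare-and-swap retry loop (model code with invented names, not the header's assembly):

    #include <stdatomic.h>
    #include <stdint.h>

    typedef struct { _Atomic int64_t counter; } model_atomic64_t;

    int64_t model_atomic64_xchg(model_atomic64_t *v, int64_t new_val)
    {
        int64_t old = atomic_load_explicit(&v->counter, memory_order_relaxed);

        /* Retry until the compare-and-swap installs new_val over exactly the
         * value we last observed; 'old' is refreshed on each failure. */
        while (!atomic_compare_exchange_weak(&v->counter, &old, new_val))
            ;
        return old;
    }
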
/arch/mips/fw/cfe/cfe_api_int.h
    43: typedef s64 cfe_xptr_t;
    58: s64 enum_idx;       /* 0-based enumeration index */
    60: s64 name_length;    /* size of name buffer */
    62: s64 val_length;     /* size of value string buffer */
    75: s64 ticks;          /* current time in ticks */
    79: s64 status;
    83: s64 mi_idx;         /* 0-based enumeration index */
    84: s64 mi_type;        /* type of memory block */
    90: s64 fwi_version;    /* major, minor, eco version */
    91: s64 fwi_totalmem;   /* total installed mem */
   [all …]

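These are fields of the structures the kernel exchanges with the CFE firmware, and they are fixed-width s64 so the layout seen by the 64-bit firmware does not depend on whether the kernel was built 32-bit or 64-bit. A cut-down, hypothetical descriptor (field names taken from the listing, everything else invented for illustration) makes the point:

    #include <stdint.h>

    /* Illustration only: every slot is pinned to exactly 64 bits so that
     * 32-bit and 64-bit builds agree on the layout. */
    struct model_fwinfo {
        int64_t fwi_version;    /* major, minor, eco version */
        int64_t fwi_totalmem;   /* total installed mem */
    };

    _Static_assert(sizeof(struct model_fwinfo) == 16,
                   "each field must occupy one full 64-bit slot");
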
/arch/s390/include/asm/atomic.h
    87: static inline s64 atomic64_read(const atomic64_t *v)
    89: s64 c;  (in atomic64_read())
    97: static inline void atomic64_set(atomic64_t *v, s64 i)
   104: static inline s64 atomic64_add_return(s64 i, atomic64_t *v)
   109: static inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
   114: static inline void atomic64_add(s64 i, atomic64_t *v)
   127: static inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
   133: static inline void atomic64_##op(s64 i, atomic64_t *v) \
   137: static inline long atomic64_fetch_##op(s64 i, atomic64_t *v) \
   148: #define atomic64_sub_return(_i, _v) atomic64_add_return(-(s64)(_i), _v)
   [all …]

/arch/arm/include/asm/atomic.h
   249: s64 counter;
   255: static inline s64 atomic64_read(const atomic64_t *v)
   257: s64 result;  (in atomic64_read())
   268: static inline void atomic64_set(atomic64_t *v, s64 i)
   277: static inline s64 atomic64_read(const atomic64_t *v)
   279: s64 result;  (in atomic64_read())
   290: static inline void atomic64_set(atomic64_t *v, s64 i)
   292: s64 tmp;  (in atomic64_set())
   307: static inline void atomic64_##op(s64 i, atomic64_t *v) \
   309: s64 result; \
   [all …]

/arch/arc/include/asm/atomic.h
   324: s64 __aligned(8) counter;
   329: static inline s64 atomic64_read(const atomic64_t *v)
   331: s64 val;
   341: static inline void atomic64_set(atomic64_t *v, s64 a)
   362: static inline void atomic64_##op(s64 a, atomic64_t *v) \
   364: s64 val; \
   379: static inline s64 atomic64_##op##_return(s64 a, atomic64_t *v) \
   381: s64 val; \
   402: static inline s64 atomic64_fetch_##op(s64 a, atomic64_t *v) \
   404: s64 val, orig; \
   [all …]

/arch/powerpc/include/asm/atomic.h
   300: static __inline__ s64 atomic64_read(const atomic64_t *v)
   302: s64 t;  (in atomic64_read())
   309: static __inline__ void atomic64_set(atomic64_t *v, s64 i)
   315: static __inline__ void atomic64_##op(s64 a, atomic64_t *v) \
   317: s64 t; \
   330: static inline s64 \
   331: atomic64_##op##_return_relaxed(s64 a, atomic64_t *v) \
   333: s64 t; \
   348: static inline s64 \
   349: atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v) \
   [all …]

/arch/powerpc/include/asm/asm-prototypes.h
   129: extern s64 __lshrdi3(s64, int);
   130: extern s64 __ashldi3(s64, int);
   131: extern s64 __ashrdi3(s64, int);
   132: extern int __cmpdi2(s64, s64);

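These are libgcc-style helpers that 32-bit powerpc builds fall back to when the compiler needs a 64-bit shift or comparison it cannot emit inline; asm-prototypes.h only collects C prototypes for them. As a rough illustration of what such a helper computes, here is a simplified, hypothetical model of an __ashldi3-style left shift assembled from two 32-bit halves (not the kernel's or libgcc's source):

    #include <stdint.h>

    /* 64-bit arithmetic left shift for a 32-bit target, valid for 0 <= shift < 64. */
    int64_t model_ashldi3(int64_t val, int shift)
    {
        uint32_t lo = (uint32_t)val;
        uint32_t hi = (uint32_t)((uint64_t)val >> 32);

        if (shift >= 32) {
            hi = lo << (shift - 32);    /* low half moves entirely into the high half */
            lo = 0;
        } else if (shift > 0) {
            hi = (hi << shift) | (lo >> (32 - shift));
            lo <<= shift;
        }
        return (int64_t)(((uint64_t)hi << 32) | lo);
    }
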
/arch/powerpc/include/asm/vio.h
    70: s64 in;
    71: s64 inlen;
    72: s64 out;
    73: s64 outlen;

/arch/powerpc/platforms/powernv/opal-call.c
    15: static void __trace_opal_entry(s64 a0, s64 a1, s64 a2, s64 a3,
    16: s64 a4, s64 a5, s64 a6, s64 a7,  (in __trace_opal_entry())
    68: static s64 __opal_call_trace(s64 a0, s64 a1, s64 a2, s64 a3,
    69: s64 a4, s64 a5, s64 a6, s64 a7,  (in __opal_call_trace())
    72: s64 ret;  (in __opal_call_trace())
    85: static s64 __opal_call_trace(s64 a0, s64 a1, s64 a2, s64 a3,
    86: s64 a4, s64 a5, s64 a6, s64 a7,  (in __opal_call_trace())

/arch/alpha/include/asm/atomic.h
    96: static __inline__ void atomic64_##op(s64 i, atomic64_t * v) \
    98: s64 temp; \
   112: static __inline__ s64 atomic64_##op##_return_relaxed(s64 i, atomic64_t * v) \
   114: s64 temp, result; \
   131: static __inline__ s64 atomic64_fetch_##op##_relaxed(s64 i, atomic64_t * v) \
   133: s64 temp, result; \
   249: static __inline__ s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
   251: s64 c, new, old;  (in atomic64_fetch_add_unless())
   279: static inline s64 atomic64_dec_if_positive(atomic64_t *v)
   281: s64 old, tmp;  (in atomic64_dec_if_positive())

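atomic64_fetch_add_unless at line 249 adds a to the counter only when its current value is not u, and in every case returns the value that was there before; Alpha implements this with a load-locked/store-conditional loop. The same contract written as a portable compare-and-swap model (invented model_* name, not the Alpha assembly):

    #include <stdatomic.h>
    #include <stdint.h>

    int64_t model_fetch_add_unless(_Atomic int64_t *v, int64_t a, int64_t u)
    {
        int64_t c = atomic_load_explicit(v, memory_order_relaxed);

        do {
            if (c == u)
                break;                          /* leave the counter untouched */
        } while (!atomic_compare_exchange_weak(v, &c, c + a));

        return c;                               /* value seen before the add (or u) */
    }
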
/arch/x86/platform/uv/bios_uv.c
    21: static s64 __uv_bios_call(enum uv_bios_cmd which, u64 a1, u64 a2, u64 a3,
    25: s64 ret;  (in __uv_bios_call())
    45: s64 uv_bios_call(enum uv_bios_cmd which, u64 a1, u64 a2, u64 a3, u64 a4, u64 a5)
    47: s64 ret;  (in uv_bios_call())
    59: s64 uv_bios_call_irqsave(enum uv_bios_cmd which, u64 a1, u64 a2, u64 a3,
    63: s64 ret;  (in uv_bios_call_irqsave())
    90: s64 uv_bios_get_sn_info(int fc, int *uvtype, long *partid, long *coher,
    93: s64 ret;  (in uv_bios_get_sn_info())
   122: s64 ret;  (in uv_bios_mq_watchlist_alloc())
   145: s64
   [all …]

/arch/ia64/include/asm/pal.h
   109: typedef s64 pal_status_t;
   780: s64 status;
   881: static inline s64
   898: static inline s64
   907: static inline s64
   925: static inline s64
   948: static inline s64
   961: static inline s64
   973: static inline s64
   983: static inline s64
   [all …]

/arch/ia64/include/asm/atomic.h
   127: static __inline__ s64 \
   128: ia64_atomic64_##op (s64 i, atomic64_t *v) \
   130: s64 old, new; \
   142: static __inline__ s64 \
   143: ia64_atomic64_fetch_##op (s64 i, atomic64_t *v) \
   145: s64 old, new; \
   165: s64 __ia64_aar_i = (i); \
   173: s64 __ia64_asr_i = (i); \
   181: s64 __ia64_aar_i = (i); \
   189: s64 __ia64_asr_i = (i); \

/arch/sparc/include/asm/atomic_64.h
    26: void atomic64_##op(s64, atomic64_t *);
    30: s64 atomic64_##op##_return(s64, atomic64_t *);
    34: s64 atomic64_fetch_##op(s64, atomic64_t *);
    64: s64 atomic64_dec_if_positive(atomic64_t *v);

/arch/x86/include/asm/uv/bios.h
   129: extern s64 uv_bios_call(enum uv_bios_cmd, u64, u64, u64, u64, u64);
   130: extern s64 uv_bios_call_irqsave(enum uv_bios_cmd, u64, u64, u64, u64, u64);
   132: extern s64 uv_bios_get_sn_info(int, int *, long *, long *, long *, long *);
   133: extern s64 uv_bios_freq_base(u64, u64 *);
   137: extern s64 uv_bios_change_memprotect(u64, u64, enum uv_memprotect);
   138: extern s64 uv_bios_reserved_page_pa(u64, u64 *, u64 *, u64 *);

/arch/mips/include/asm/atomic.h
   258: static __inline__ void atomic64_##op(s64 i, atomic64_t * v) \
   261: s64 temp; \
   284: static __inline__ s64 atomic64_##op##_return_relaxed(s64 i, atomic64_t * v) \
   286: s64 result; \
   289: s64 temp; \
   318: static __inline__ s64 atomic64_fetch_##op##_relaxed(s64 i, atomic64_t * v) \
   320: s64 result; \
   323: s64 temp; \
   390: static __inline__ s64 atomic64_sub_if_positive(s64 i, atomic64_t * v)
   392: s64 result;  (in atomic64_sub_if_positive())
   [all …]

/arch/powerpc/sysdev/xive/native.c
    49: s64 rc;  (in xive_native_populate_irq_data())
   103: s64 rc;  (in xive_native_configure_irq())
   118: s64 rc;  (in xive_native_get_irq_config())
   134: s64 rc = 0;  (in xive_native_configure_queue())
   197: s64 rc;  (in __xive_native_disable_queue())
   249: static s64 opal_xive_allocate_irq(u32 chip_id)
   251: s64 irq = opal_xive_allocate_irq_raw(chip_id);  (in opal_xive_allocate_irq())
   263: s64 irq;  (in xive_native_get_ipi())
   285: s64 rc;  (in xive_native_alloc_irq())
   302: s64 rc = opal_xive_free_irq(irq);  (in xive_native_free_irq())
   [all …]

/arch/parisc/include/asm/atomic.h
   146: static __inline__ void atomic64_##op(s64 i, atomic64_t *v) \
   156: static __inline__ s64 atomic64_##op##_return(s64 i, atomic64_t *v) \
   159: s64 ret; \
   169: static __inline__ s64 atomic64_fetch_##op(s64 i, atomic64_t *v) \
   172: s64 ret; \
   205: atomic64_set(atomic64_t *v, s64 i)
   217: static __inline__ s64

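parisc has no 64-bit atomic read-modify-write instructions, so the bodies behind these signatures do ordinary arithmetic while holding one of a small hashed set of spinlocks. A one-lock userspace model of the same idea (a pthread mutex standing in for the kernel's spinlocks, invented model_* names):

    #include <pthread.h>
    #include <stdint.h>

    typedef struct { int64_t counter; } model_atomic64_t;

    static pthread_mutex_t model_lock = PTHREAD_MUTEX_INITIALIZER;

    int64_t model_atomic64_add_return(int64_t i, model_atomic64_t *v)
    {
        int64_t ret;

        pthread_mutex_lock(&model_lock);
        ret = v->counter + i;       /* plain arithmetic, made atomic by the lock */
        v->counter = ret;
        pthread_mutex_unlock(&model_lock);
        return ret;
    }
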
/arch/riscv/include/asm/atomic.h
    41: static __always_inline s64 atomic64_read(const atomic64_t *v)
    45: static __always_inline void atomic64_set(atomic64_t *v, s64 i)
    73: ATOMIC_OP (op, asm_op, I, d, s64, 64)
   136: ATOMIC_FETCH_OP( op, asm_op, I, d, s64, 64) \
   137: ATOMIC_OP_RETURN(op, asm_op, c_op, I, d, s64, 64)
   173: ATOMIC_FETCH_OP(op, asm_op, I, d, s64, 64)
   222: static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
   224: s64 prev;  (in atomic64_fetch_add_unless())
   298: ATOMIC_OP(s64, 64, 8)
   336: static __always_inline s64 atomic64_sub_if_positive(atomic64_t *v, s64 offset)
   [all …]

/arch/powerpc/perf/8xx-pmu.c
    35: static s64 get_insn_ctr(void)
    45: return ((s64)ctr << 16) | (counta >> 16);  (in get_insn_ctr())
    83: s64 val = 0;  (in mpc8xx_pmu_add())
   126: s64 prev, val = 0, delta = 0;  (in mpc8xx_pmu_read())
   146: delta = (s64)((s32)val - (s32)prev);  (in mpc8xx_pmu_read())
   150: delta = (s64)((s32)val - (s32)prev);  (in mpc8xx_pmu_read())

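The delta at lines 146 and 150 is computed in the counter's own 32-bit width and only then widened to s64, so the result stays correct even if the hardware counter wrapped between the two samples. A standalone version of the idiom (written with an unsigned subtraction first, which sidesteps the signed-overflow corner case but yields the same value):

    #include <stdint.h>
    #include <stdio.h>

    /* Wrap-safe delta between two samples of a 32-bit counter. */
    int64_t counter_delta(uint32_t prev, uint32_t val)
    {
        return (int64_t)(int32_t)(val - prev);
    }

    int main(void)
    {
        /* The counter wrapped from near UINT32_MAX back to a small value: a
         * naive 64-bit subtraction would give a large negative number, the
         * narrow subtraction gives the true delta of 32. */
        printf("%lld\n", (long long)counter_delta(0xfffffff0u, 0x10u));
        return 0;
    }
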
/arch/mips/kernel/mips-r2-to-r6-emul.c
    93: (s64)regs->regs[MIPSInst_RS(ir)] +  (in mipsr6_emul())
    94: (s64)MIPSInst_SIMM(ir);  (in mipsr6_emul())
   152: (s64)(((u64)regs->regs[MIPSInst_RT(ir)]) <<  (in mipsr6_emul())
   161: (s64)(((u64)regs->regs[MIPSInst_RT(ir)]) >>  (in mipsr6_emul())
   179: (s64)((u64)regs->regs[MIPSInst_RS(ir)] -  (in mipsr6_emul())
   404: s64 res;  (in mult_func())
   409: res = (s64)rt * (s64)rs;  (in mult_func())
   412: regs->lo = (s64)rs;  (in mult_func())
   414: res = (s64)rt;  (in mult_func())
   438: regs->lo = (s64)(s32)rt;  (in multu_func())
   [all …]

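The (s64)(s32) cast pairs in this emulator enforce the MIPS64 convention that a 32-bit result lives in a 64-bit register in sign-extended form: truncate to 32 bits, then widen again as a signed value. A standalone illustration of what the cast pair produces:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t rt  = 0x80000000u;             /* 32-bit result with bit 31 set */
        int64_t  reg = (int64_t)(int32_t)rt;    /* canonical sign-extended form  */

        printf("0x%016llx\n", (unsigned long long)reg); /* 0xffffffff80000000 */
        return 0;
    }
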
/arch/arm64/include/asm/atomic_ll_sc.h
   123: __ll_sc_atomic64_##op(s64 i, atomic64_t *v) \  (in ATOMIC_OPS())
   125: s64 result; \  (in ATOMIC_OPS())
   140: __ll_sc_atomic64_##op##_return##name(s64 i, atomic64_t *v) \
   142: s64 result; \
   161: __ll_sc_atomic64_fetch_##op##name(s64 i, atomic64_t *v) \
   163: s64 result, val; \
   217: static inline s64
   220: s64 result;

/arch/arm64/include/asm/atomic_lse.h
   167: static inline void __lse_atomic64_##op(s64 i, atomic64_t *v) \
   184: static inline long __lse_atomic64_fetch_##op##name(s64 i, atomic64_t *v)\  (in ATOMIC64_OP())
   211: static inline long __lse_atomic64_add_return##name(s64 i, atomic64_t *v)\
   233: static inline void __lse_atomic64_and(s64 i, atomic64_t *v)
   244: static inline long __lse_atomic64_fetch_and##name(s64 i, atomic64_t *v) \
   264: static inline void __lse_atomic64_sub(s64 i, atomic64_t *v)
   275: static inline long __lse_atomic64_sub_return##name(s64 i, atomic64_t *v)\
   299: static inline long __lse_atomic64_fetch_sub##name(s64 i, atomic64_t *v) \
   319: static inline s64 __lse_atomic64_dec_if_positive(atomic64_t *v)

/arch/mips/math-emu/dp_cmp.c
    14: s64 vx;  (in ieee754dp_cmp())
    15: s64 vy;  (in ieee754dp_cmp())

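ieee754dp_cmp keeps the two doubles in s64 variables and compares them through their integer bit images. The general trick behind that kind of comparison, shown here in a generic form that is not necessarily the emulator's exact sequence: remap each sign-magnitude bit pattern to an unsigned key whose natural ordering matches numeric ordering, valid only when neither operand is a NaN.

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    /* Monotone key: flip all bits of negative patterns, set the top bit of
     * non-negative ones, so unsigned key order equals numeric double order. */
    uint64_t dbl_order_key(double d)
    {
        int64_t bits;

        memcpy(&bits, &d, sizeof(bits));        /* reinterpret without UB */
        return bits < 0 ? ~(uint64_t)bits
                        : (uint64_t)bits | (1ULL << 63);
    }

    int main(void)
    {
        printf("%d\n", dbl_order_key(-2.0) < dbl_order_key(-1.0));  /* 1 */
        printf("%d\n", dbl_order_key(-1.0) < dbl_order_key(1.5));   /* 1 */
        return 0;
    }
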