
Searched refs: s64 (results 1 – 25 of 90), sorted by relevance


/external/kernel-headers/original/linux/
percpu_counter.h
19 s64 count;
29 static inline void percpu_counter_init(struct percpu_counter *fbc, s64 amount) in percpu_counter_init()
42 s64 percpu_counter_sum(struct percpu_counter *fbc);
44 static inline s64 percpu_counter_read(struct percpu_counter *fbc) in percpu_counter_read()
54 static inline s64 percpu_counter_read_positive(struct percpu_counter *fbc) in percpu_counter_read_positive()
56 s64 ret = fbc->count; in percpu_counter_read_positive()
67 s64 count;
70 static inline void percpu_counter_init(struct percpu_counter *fbc, s64 amount) in percpu_counter_init()
87 static inline s64 percpu_counter_read(struct percpu_counter *fbc) in percpu_counter_read()
92 static inline s64 percpu_counter_read_positive(struct percpu_counter *fbc) in percpu_counter_read_positive()
[all …]
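
The percpu_counter.h hits above sketch an s64-backed per-CPU counter API. For orientation only, a minimal usage sketch against the signatures visible in this header revision; nr_widgets is a hypothetical counter, and the counter-update helpers, which this excerpt does not show, are omitted:

/* Minimal sketch, assuming only the functions listed above. */
#include <linux/percpu_counter.h>

static struct percpu_counter nr_widgets;

static void widgets_init(void)
{
	percpu_counter_init(&nr_widgets, 0);	/* seed the s64 count with 0 */
}

static s64 widgets_count(int exact)
{
	/* percpu_counter_read() is a cheap, possibly stale snapshot of ->count;
	 * percpu_counter_sum() also folds in the per-CPU deltas. */
	return exact ? percpu_counter_sum(&nr_widgets)
		     : percpu_counter_read(&nr_widgets);
}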
time.h
129 static inline s64 timespec_to_ns(const struct timespec *ts) in timespec_to_ns()
131 return ((s64) ts->tv_sec * NSEC_PER_SEC) + ts->tv_nsec; in timespec_to_ns()
141 static inline s64 timeval_to_ns(const struct timeval *tv) in timeval_to_ns()
143 return ((s64) tv->tv_sec * NSEC_PER_SEC) + in timeval_to_ns()
153 extern struct timespec ns_to_timespec(const s64 nsec);
161 extern struct timeval ns_to_timeval(const s64 nsec);
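
The time.h matches carry the core conversion formula: tv_sec is widened to s64 before the multiply by NSEC_PER_SEC, so the product cannot overflow a 32-bit seconds field. A standalone userspace analogue, using int64_t in place of the kernel's s64, for illustration only:

#include <stdint.h>
#include <time.h>

#define NSEC_PER_SEC 1000000000L

/* Mirrors the kernel's timespec_to_ns(): widen tv_sec first, then scale. */
static int64_t timespec_to_ns_example(const struct timespec *ts)
{
	return (int64_t)ts->tv_sec * NSEC_PER_SEC + ts->tv_nsec;
}

/* e.g. { .tv_sec = 5, .tv_nsec = 250000000 } gives 5250000000 ns */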
ktime.h
47 s64 tv64;
59 #define KTIME_MAX ((s64)~((u64)1 << 63))
81 return (ktime_t) { .tv64 = (s64)secs * NSEC_PER_SEC + (s64)nsecs }; in ktime_set()
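
For reference, the KTIME_MAX expression above evaluates to INT64_MAX: (u64)1 << 63 is 0x8000000000000000, its bitwise complement is 0x7fffffffffffffff, and casting that to s64 yields the largest signed 64-bit nanosecond value.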
poll.h
112 #define MAX_INT64_SECONDS (((s64)(~((u64)0)>>1)/HZ)-1)
114 extern int do_select(int n, fd_set_bits *fds, s64 *timeout);
116 s64 *timeout);
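
The MAX_INT64_SECONDS macro in poll.h follows the same pattern: ~((u64)0) >> 1 is 0x7fffffffffffffff (INT64_MAX), so dividing by HZ and subtracting one bounds the largest timeout, in whole seconds, whose jiffies equivalent still fits in the s64 timeout passed to do_select().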
/external/webrtc/src/modules/audio_coding/codecs/isac/fix/source/
lpc_masking_model_neon.S
37 vmov.s64 q11, #0 @ Initialize shift_internal.
38 vmov.s64 q13, #0 @ Initialize sum64.
39 vmov.s64 q10, #0
54 vmov.s64 q15, #0 @ Initialize the sum64_tmp.
73 vshl.s64 q0, q11
75 vshl.s64 q0, #1
78 vqadd.s64 q14, q0, q15 @ Sum and test overflow.
98 vshr.s64 q13, #1
99 vshr.s64 q15, #1
100 vadd.s64 q0, q13, q15
[all …]
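
The WebRTC routine above accumulates terms with vqadd.s64, NEON's saturating signed 64-bit add, and, per its own comments, tests for overflow and rescales by shifting right. As a scalar illustration of the saturating add itself, a hypothetical C helper (not the WebRTC code path):

#include <stdint.h>

/* Saturating signed 64-bit add, the per-lane behaviour of vqadd.s64. */
static int64_t sat_add_s64(int64_t a, int64_t b)
{
	if (a > 0 && b > INT64_MAX - a)
		return INT64_MAX;	/* clamp positive overflow */
	if (a < 0 && b < INT64_MIN - a)
		return INT64_MIN;	/* clamp negative overflow */
	return a + b;
}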
/external/llvm/test/MC/ARM/
neon-satshift-encoding.s
9 @ CHECK: vqshl.s64 d16, d16, d17 @ encoding: [0xb0,0x04,0x71,0xf2]
10 vqshl.s64 d16, d16, d17
25 @ CHECK: vqshl.s64 q8, q8, q9 @ encoding: [0xf0,0x04,0x72,0xf2]
26 vqshl.s64 q8, q8, q9
41 @ CHECK: vqshl.s64 d16, d16, #63 @ encoding: [0xb0,0x07,0xff,0xf2]
42 vqshl.s64 d16, d16, #63
57 @ CHECK: vqshlu.s64 d16, d16, #63 @ encoding: [0xb0,0x06,0xff,0xf3]
58 vqshlu.s64 d16, d16, #63
65 @ CHECK: vqshl.s64 q8, q8, #63 @ encoding: [0xf0,0x07,0xff,0xf2]
66 vqshl.s64 q8, q8, #63
[all …]
neont2-satshift-encoding.s
11 @ CHECK: vqshl.s64 d16, d16, d17 @ encoding: [0x71,0xef,0xb0,0x04]
12 vqshl.s64 d16, d16, d17
27 @ CHECK: vqshl.s64 q8, q8, q9 @ encoding: [0x72,0xef,0xf0,0x04]
28 vqshl.s64 q8, q8, q9
43 @ CHECK: vqshl.s64 d16, d16, #63 @ encoding: [0xff,0xef,0xb0,0x07]
44 vqshl.s64 d16, d16, #63
59 @ CHECK: vqshlu.s64 d16, d16, #63 @ encoding: [0xff,0xff,0xb0,0x06]
60 vqshlu.s64 d16, d16, #63
67 @ CHECK: vqshl.s64 q8, q8, #63 @ encoding: [0xff,0xef,0xf0,0x07]
68 vqshl.s64 q8, q8, #63
[all …]
neon-shift-encoding.s
50 vshr.s64 d16, d16, #63
54 vshr.s64 q8, q8, #63
67 @ CHECK: vshr.s64 d16, d16, #63 @ encoding: [0xb0,0x00,0xc1,0xf2]
71 @ CHECK: vshr.s64 q8, q8, #63 @ encoding: [0xf0,0x00,0xc1,0xf2]
85 vshr.s64 d16, #63
89 vshr.s64 q8, #63
102 @ CHECK: vshr.s64 d16, d16, #63 @ encoding: [0xb0,0x00,0xc1,0xf2]
106 @ CHECK: vshr.s64 q8, q8, #63 @ encoding: [0xf0,0x00,0xc1,0xf2]
112 vsra.s64 d12, d19, #63
116 vsra.s64 q4, q5, #63
[all …]
neont2-shiftaccum-encoding.s
8 vsra.s64 d11, d10, #64
12 vsra.s64 q8, q4, #64
26 vsra.s64 d10, #64
30 vsra.s64 q4, #64
43 @ CHECK: vsra.s64 d11, d10, #64 @ encoding: [0x80,0xef,0x9a,0xb1]
47 @ CHECK: vsra.s64 q8, q4, #64 @ encoding: [0xc0,0xef,0xd8,0x01]
60 @ CHECK: vsra.s64 d10, d10, #64 @ encoding: [0x80,0xef,0x9a,0xa1]
64 @ CHECK: vsra.s64 q4, q4, #64 @ encoding: [0x80,0xef,0xd8,0x81]
78 vrsra.s64 d14, d23, #64
86 vrsra.s64 q4, q5, #64
[all …]
neon-shiftaccum-encoding.s
6 vsra.s64 d11, d10, #64
10 vsra.s64 q8, q4, #64
24 vsra.s64 d10, #64
28 vsra.s64 q4, #64
41 @ CHECK: vsra.s64 d11, d10, #64 @ encoding: [0x9a,0xb1,0x80,0xf2]
45 @ CHECK: vsra.s64 q8, q4, #64 @ encoding: [0xd8,0x01,0xc0,0xf2]
58 @ CHECK: vsra.s64 d10, d10, #64 @ encoding: [0x9a,0xa1,0x80,0xf2]
62 @ CHECK: vsra.s64 q4, q4, #64 @ encoding: [0xd8,0x81,0x80,0xf2]
75 vrsra.s64 d14, d23, #64
83 vrsra.s64 q4, q5, #64
[all …]
neont2-shift-encoding.s
59 @ CHECK: vshr.s64 d16, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x00]
60 vshr.s64 d16, d16, #64
67 @ CHECK: vshr.s64 q8, q8, #64 @ encoding: [0xc0,0xef,0xf0,0x00]
68 vshr.s64 q8, q8, #64
99 @ CHECK: vrshl.s64 d16, d17, d16 @ encoding: [0x70,0xef,0xa1,0x05]
100 vrshl.s64 d16, d17, d16
115 @ CHECK: vrshl.s64 q8, q9, q8 @ encoding: [0x70,0xef,0xe2,0x05]
116 vrshl.s64 q8, q9, q8
131 @ CHECK: vrshr.s64 d16, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x02]
132 vrshr.s64 d16, d16, #64
[all …]
neon-mov-encoding.s
87 vqmovn.s64 d16, q8
93 vqmovun.s64 d16, q8
100 @ CHECK: vqmovn.s64 d16, q8 @ encoding: [0xa0,0x02,0xfa,0xf3]
106 @ CHECK: vqmovun.s64 d16, q8 @ encoding: [0x60,0x02,0xfa,0xf3]
144 vmvn.s64 d1, d2
neont2-mov-encoding.s
81 vqmovn.s64 d16, q8
87 vqmovun.s64 d16, q8
100 @ CHECK: vqmovn.s64 d16, q8 @ encoding: [0xfa,0xff,0xa0,0x02]
106 @ CHECK: vqmovun.s64 d16, q8 @ encoding: [0xfa,0xff,0x60,0x02]
neon-add-encoding.s
151 vqadd.s64 d16, d16, d17
160 @ CHECK: vqadd.s64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xf2]
169 vqadd.s64 q8, q8, q9
178 @ CHECK: vqadd.s64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xf2]
189 vqadd.s64 d16, d17
198 @ CHECK: vqadd.s64 d16, d16, d17 @ encoding: [0xb1,0x00,0x70,0xf2]
207 vqadd.s64 q8, q9
216 @ CHECK: vqadd.s64 q8, q8, q9 @ encoding: [0xf2,0x00,0x70,0xf2]
/external/grub/stage2/
jfs.h
104 typedef long long s64;
136 #define addressPXD(pxd) (((s64)((pxd)->addr1)) << 32 | ((pxd)->addr2))
174 s64 s_size; /* 8: aggregate size in hardware/LVM blocks;
231 s64 s_xsize; /* 8: extendfs s_size */
380 s64 next; /* 8: next sibling */
381 s64 prev; /* 8: previous sibling */
414 #define offsetXAD(xad) (((s64)((xad)->off1)) << 32 | ((xad)->off2))
415 #define addressXAD(xad) (((s64)((xad)->addr1)) << 32 | ((xad)->addr2))
427 s64 next; /* 8: */
428 s64 prev; /* 8: */
[all …]
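
The addressPXD(), offsetXAD() and addressXAD() macros above all rebuild a 64-bit on-disk quantity from two packed fields: the high part (addr1/off1) is widened to s64, shifted left by 32 bits, and ORed with the 32-bit low part (addr2/off2).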
fsys_jfs.c
61 isinxt (s64 key, s64 offset, s64 len) in isinxt()
108 s64 key; in di_read()
110 s64 offset; in di_read()
231 s64 endofprev, endofcur; in jfs_read()
232 s64 offset, xadlen; in jfs_read()
279 s64 di_size; in jfs_dir()
/external/llvm/test/CodeGen/NVPTX/
sext-in-reg.ll
7 ; CHECK: cvt.s64.s8
8 ; CHECK: cvt.s64.s8
26 ; CHECK: cvt.s64.s32
27 ; CHECK: cvt.s64.s32
44 ; CHECK: cvt.s64.s16
45 ; CHECK: cvt.s64.s16
add-128bit.ll
8 ; CHECK: add.s64
13 ; CHECK: add.s64
arithmetic-int.ll
12 ; CHECK: add.s64 %rl{{[0-9]+}}, %rl{{[0-9]+}}, %rl{{[0-9]+}}
19 ; CHECK: sub.s64 %rl{{[0-9]+}}, %rl{{[0-9]+}}, %rl{{[0-9]+}}
26 ; CHECK: mul.lo.s64 %rl{{[0-9]+}}, %rl{{[0-9]+}}, %rl{{[0-9]+}}
33 ; CHECK: div.s64 %rl{{[0-9]+}}, %rl{{[0-9]+}}, %rl{{[0-9]+}}
47 ; CHECK: rem.s64 %rl{{[0-9]+}}, %rl{{[0-9]+}}, %rl{{[0-9]+}}
91 ; CHECK: shr.s64 %rl{{[0-9]+}}, %rl{{[0-9]+}}, %r{{[0-9]+}}
/external/compiler-rt/lib/sanitizer_common/
sanitizer_libc.cc
19 s64 internal_atoll(const char *nptr) { in internal_atoll()
178 s64 internal_simple_strtoll(const char *nptr, char **endptr, int base) { in internal_simple_strtoll()
203 return (s64)(Min((u64)INT64_MAX, res)); in internal_simple_strtoll()
205 return (res > INT64_MAX) ? INT64_MIN : ((s64)res * -1); in internal_simple_strtoll()
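
The tail of internal_simple_strtoll() shown above spells out its saturation rule: digits are accumulated into an unsigned 64-bit value, positive results are clamped to INT64_MAX, and a negative result whose magnitude exceeds INT64_MAX collapses to INT64_MIN; otherwise the magnitude is negated and returned as s64.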
sanitizer_libc.h
27 s64 internal_atoll(const char *nptr);
47 s64 internal_simple_strtoll(const char *nptr, char **endptr, int base);
sanitizer_printf.cc
90 static int AppendSignedDecimal(char **buff, const char *buff_end, s64 num, in AppendSignedDecimal()
142 s64 dval; in VSNPrintf()
147 dval = have_ll ? va_arg(args, s64) in VSNPrintf()
/external/llvm/test/CodeGen/X86/
rd-mod-wr-eflags.ll
72 %s64 = getelementptr inbounds %struct.obj2* %o, i64 0, i32 0
74 %0 = load i64* %s64, align 8
77 store i64 %dec, i64* %s64, align 8
126 %s64 = getelementptr inbounds %struct.obj2* %o, i64 0, i32 0
128 %0 = load i64* %s64, align 8
131 store i64 %inc, i64* %s64, align 8
/external/linux-tools-perf/util/
types.h
11 typedef int64_t s64;
/external/qemu/target-i386/
helper.h
117 DEF_HELPER_1(fildll_ST0, void, s64)
122 DEF_HELPER_0(fistll_ST0, s64)
125 DEF_HELPER_0(fisttll_ST0, s64)
