Lines Matching refs:u64
78 FastState(u64 tid, u64 epoch) { in FastState()
85 explicit FastState(u64 x) in FastState()
89 u64 raw() const { in raw()
93 u64 tid() const { in tid()
94 u64 res = x_ >> kTidShift; in tid()
98 u64 epoch() const { in epoch()
99 u64 res = (x_ << (kTidBits + 1)) >> (64 - kClkBits); in epoch()
104 u64 old_epoch = epoch(); in IncrementEpoch()
118 static const u64 kIgnoreBit = 1ull;
119 static const u64 kFreedBit = 1ull << 63;
120 u64 x_;
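
These matches appear to come from ThreadSanitizer's runtime state classes. The FastState group above packs a thread id and a clock/epoch into a single u64, with bit 0 reserved for the ignore bit and bit 63 for the freed bit. A minimal standalone sketch of that layout, using assumed widths for kTidBits/kClkBits (the real constants are defined elsewhere in the TSan headers and may differ):

// Sketch only: field widths and kClkShift are assumptions for illustration.
#include <cassert>
#include <cstdint>
typedef uint64_t u64;

static const int kTidBits = 13;                  // assumed width of the tid field
static const int kClkBits = 42;                  // assumed width of the epoch field
static const int kTidShift = 64 - kTidBits - 1;  // bit 63 stays free for kFreedBit
static const int kClkShift = kTidShift - kClkBits;

struct FastStateSketch {
  u64 x_;
  FastStateSketch(u64 tid, u64 epoch)
      : x_((tid << kTidShift) | (epoch << kClkShift)) {}
  u64 tid() const { return x_ >> kTidShift; }
  u64 epoch() const {
    // Drop the freed bit and the tid, then bring the clock down to bit 0.
    return (x_ << (kTidBits + 1)) >> (64 - kClkBits);
  }
  void IncrementEpoch() { x_ += 1ull << kClkShift; }
};

int main() {
  FastStateSketch s(/*tid=*/5, /*epoch=*/1000);
  assert(s.tid() == 5 && s.epoch() == 1000);
  s.IncrementEpoch();
  assert(s.epoch() == 1001);
}
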
132 explicit Shadow(u64 x) : FastState(x) { } in Shadow()
136 void SetAddr0AndSizeLog(u64 addr0, unsigned kAccessSizeLog) { in SetAddr0AndSizeLog()
155 u64 shifted_xor = (s1.x_ ^ s2.x_) >> kTidShift; in TidsAreEqual()
161 u64 masked_xor = (s1.x_ ^ s2.x_) & 31; in Addr0AndSizeAreEqual()
168 u64 diff = s1.addr0() - s2.addr0(); in TwoRangesIntersect()
197 u64 addr0() const { return x_ & 7; } in addr0()
198 u64 size() const { return 1ull << size_log(); } in size()
221 u64 size_log() const { return (x_ >> 3) & 3; } in size_log()
235 const u64 kShadowFreed = 0xfffffffffffffff8ull;
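
Shadow derives from FastState and reuses the low bits of the same u64 to describe the memory access: bits 0-2 hold addr0 (the offset of the access within its 8-byte shadow cell) and bits 3-4 hold size_log, which is why Addr0AndSizeAreEqual can compare both fields with a single "& 31". A rough sketch of that encoding and of the range-intersection idea (simplified relative to the real TwoRangesIntersect; names and layout here are illustrative):

#include <cassert>
#include <cstdint>
typedef uint64_t u64;

struct ShadowSketch {
  u64 x_;
  void SetAddr0AndSizeLog(u64 addr0, unsigned size_log) {
    // Assumes the low 5 bits are currently clear.
    x_ |= (size_log << 3) | addr0;
  }
  u64 addr0() const { return x_ & 7; }
  u64 size_log() const { return (x_ >> 3) & 3; }
  u64 size() const { return 1ull << size_log(); }
};

// Both offsets are relative to the same 8-byte cell, so comparing
// addr0/size is enough to decide whether the two accesses overlap.
static bool TwoRangesIntersectSketch(ShadowSketch s1, ShadowSketch s2) {
  if (s1.addr0() == s2.addr0()) return true;
  if (s1.addr0() < s2.addr0()) return s1.addr0() + s1.size() > s2.addr0();
  return s2.addr0() + s2.size() > s1.addr0();
}

int main() {
  ShadowSketch a{0}, b{0};
  a.SetAddr0AndSizeLog(/*addr0=*/0, /*size_log=*/2);  // 4-byte access at offset 0
  b.SetAddr0AndSizeLog(/*addr0=*/2, /*size_log=*/0);  // 1-byte access at offset 2
  assert(TwoRangesIntersectSketch(a, b));
  assert(((a.x_ ^ b.x_) & 31) != 0);  // Addr0AndSizeAreEqual would say "different"
}
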
253 u64 fast_synch_epoch;
259 u64 *racy_shadow_addr;
260 u64 racy_state[2];
274 u64 stat[StatCnt];
298 explicit ThreadState(Context *ctx, int tid, int unique_id, u64 epoch,
337 u64 epoch0;
338 u64 epoch1;
388 u64 stat[StatCnt];
389 u64 int_alloc_cnt[MBlockTypeCount];
390 u64 int_alloc_siz[MBlockTypeCount];
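
ThreadState and Context each carry a u64 stat[StatCnt] array: counters are bumped on the hot path without any synchronization and folded into the global array later (the StatInc/StatAggregate/StatOutput declarations below). A generic sketch of that per-thread-counters pattern, with made-up StatType values:

#include <cstdint>
#include <cstdio>
typedef uint64_t u64;

enum StatType { StatMop, StatFuncEnter, StatCnt };  // assumed, not the real enum

struct ThreadStats { u64 stat[StatCnt] = {}; };  // lives in each ThreadState
struct GlobalStats { u64 stat[StatCnt] = {}; };  // lives in the shared Context

// Per-thread increment: a plain add, no atomics needed.
static void StatIncSketch(ThreadStats *thr, StatType typ, u64 n = 1) {
  thr->stat[typ] += n;
}

// Merge a finished thread's counters into the global totals
// (done under a lock in the real runtime).
static void StatAggregateSketch(u64 *dst, const u64 *src) {
  for (int i = 0; i < StatCnt; i++) dst[i] += src[i];
}

int main() {
  GlobalStats g;
  ThreadStats t;
  StatIncSketch(&t, StatMop);
  StatIncSketch(&t, StatMop);
  StatAggregateSketch(g.stat, t.stat);
  printf("mops: %llu\n", (unsigned long long)g.stat[StatMop]);
}
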
425 void RestoreStack(int tid, const u64 epoch, StackTrace *stk);
427 void StatAggregate(u64 *dst, u64 *src);
428 void StatOutput(u64 *stat);
429 void ALWAYS_INLINE INLINE StatInc(ThreadState *thr, StatType typ, u64 n = 1) {
465 u64 *shadow_mem, Shadow cur);
526 void ALWAYS_INLINE INLINE TraceAddEvent(ThreadState *thr, u64 epoch, in TraceAddEvent()
537 Event ev = (u64)addr | ((u64)typ << 61); in TraceAddEvent()
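
The last two matches show the trace-event encoding: TraceAddEvent stores the event type in the top 3 bits of a 64-bit word and the address/PC in the remaining 61 bits. A small round-trip illustration; the EventType names and values are assumptions, not the real TSan enum:

#include <cassert>
#include <cstdint>
typedef uint64_t u64;
typedef u64 Event;

enum EventType { EventTypeMop, EventTypeFuncEnter, EventTypeFuncExit };  // assumed values

static Event PackEvent(EventType typ, u64 addr) {
  // addr must fit in 61 bits; user-space pointers on x86-64 do.
  return (u64)addr | ((u64)typ << 61);
}

static EventType UnpackType(Event ev) { return (EventType)(ev >> 61); }
static u64 UnpackAddr(Event ev) { return ev & ((1ull << 61) - 1); }

int main() {
  Event ev = PackEvent(EventTypeFuncEnter, 0x7f00deadbeefull);
  assert(UnpackType(ev) == EventTypeFuncEnter);
  assert(UnpackAddr(ev) == 0x7f00deadbeefull);
}
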