/external/compiler-rt/lib/tsan/rtl/ |
D | tsan_clock.cc |
    125  u64 epoch = src->elem(tid).epoch;  in acquire() local
    126  if (clk_[tid].epoch < epoch) {  in acquire()
    127  clk_[tid].epoch = epoch;  in acquire()
    134  last_acquire_ = clk_[tid_].epoch;  in acquire()
    144  u64 epoch = src->elem(i).epoch;  in acquire() local
    145  if (clk_[i].epoch < epoch) {  in acquire()
    146  clk_[i].epoch = epoch;  in acquire()
    157  last_acquire_ = clk_[tid_].epoch;  in acquire()
    180  if (dst->elem(tid_).epoch > last_acquire_) {  in release()
    197  ce.epoch = max(ce.epoch, clk_[i].epoch);  in release()
    [all …]
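tsan_clock.cc implements ThreadSanitizer's vector clocks, where each thread slot carries a monotonically increasing epoch and acquire() merges a source clock by taking the element-wise maximum. Below is a minimal, self-contained sketch of that merge idea; the names (VectorClock, Acquire, Release, Tick) and the plain vector storage are illustrative assumptions, not TSan's actual ThreadClock/SyncClock layout with its kClkBits bit-fields and last_acquire_ caching.

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative vector clock: one epoch per thread id (hypothetical names).
struct VectorClock {
  std::vector<uint64_t> clk_;  // clk_[tid] = last observed epoch of thread tid

  explicit VectorClock(std::size_t nthreads) : clk_(nthreads, 0) {}

  // acquire(): merge another clock into this one, element-wise max,
  // mirroring the "if (clk_[tid].epoch < epoch) clk_[tid].epoch = epoch" lines above.
  void Acquire(const VectorClock& src) {
    for (std::size_t i = 0; i < clk_.size(); ++i)
      clk_[i] = std::max(clk_[i], src.clk_[i]);
  }

  // release(): publish this thread's knowledge into a sync-object clock.
  void Release(VectorClock* dst) const {
    for (std::size_t i = 0; i < clk_.size(); ++i)
      dst->clk_[i] = std::max(dst->clk_[i], clk_[i]);
  }

  // tick(): advance the calling thread's own epoch.
  void Tick(std::size_t tid) { ++clk_[tid]; }
};
```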
|
D | tsan_mutexset.cc |
    25  void MutexSet::Add(u64 id, bool write, u64 epoch) {  in Add() argument
    30  descs_[i].epoch = epoch;  in Add()
    39  if (descs_[i].epoch < minepoch) {  in Add()
    40  minepoch = descs_[i].epoch;  in Add()
    50  descs_[size_].epoch = epoch;  in Add()
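The matches show the shape of MutexSet::Add: if the mutex id is already tracked, refresh its epoch; otherwise append, and when the fixed-size array is full, scan for the entry with the smallest epoch (the stalest) and evict it. A compact sketch of that policy, with a hypothetical capacity and struct names:

```cpp
#include <cstdint>

// Hypothetical fixed-capacity mutex set keyed by id and aged by epoch.
struct MutexDesc { uint64_t id; bool write; uint64_t epoch; };

class MutexSetSketch {
  static const int kMaxSize = 16;   // assumed capacity, not TSan's constant
  MutexDesc descs_[kMaxSize];
  int size_ = 0;

 public:
  void Add(uint64_t id, bool write, uint64_t epoch) {
    for (int i = 0; i < size_; ++i) {
      if (descs_[i].id == id) {     // already tracked: just refresh the epoch
        descs_[i].epoch = epoch;
        return;
      }
    }
    if (size_ == kMaxSize) {        // full: evict the entry with the minimum epoch
      int victim = 0;
      for (int i = 1; i < size_; ++i)
        if (descs_[i].epoch < descs_[victim].epoch) victim = i;
      descs_[victim] = descs_[size_ - 1];
      --size_;
    }
    descs_[size_++] = MutexDesc{id, write, epoch};
  }
};
```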
|
D | tsan_clock.h |
    22  u64 epoch : kClkBits;  member
    54  return elem(tid).epoch;  in get()
    89  return clk_[tid].epoch;  in get()
    95  DCHECK_GE(v, clk_[tid_].epoch);  in set()
    96  clk_[tid_].epoch = v;  in set()
    100  clk_[tid_].epoch++;  in tick()
|
D | tsan_mutexset.h |
    28  u64 epoch;  member
    35  void Add(u64 id, bool write, u64 epoch);
    60  void MutexSet::Add(u64 id, bool write, u64 epoch) {}  in Add() argument
|
D | tsan_rtl_mutex.cc |
    120  RestoreStack(last.tid(), last.epoch(), &trace, 0);  in MutexDestroy()
    171  thr->mset.Add(s->GetId(), true, thr->fast_state.epoch());  in MutexLock()
    249  thr->mset.Add(s->GetId(), false, thr->fast_state.epoch());  in MutexReadLock()
    377  thr->clock.set(tctx->tid, tctx->thr->fast_state.epoch());  in UpdateClockCallback()
    420  thr->last_sleep_clock.set(tctx->tid, tctx->thr->fast_state.epoch());  in UpdateSleepClockCallback()
    439  thr->clock.set(thr->fast_state.epoch());  in AcquireImpl()
    447  thr->clock.set(thr->fast_state.epoch());  in ReleaseImpl()
    448  thr->fast_synch_epoch = thr->fast_state.epoch();  in ReleaseImpl()
    456  thr->clock.set(thr->fast_state.epoch());  in ReleaseStoreImpl()
    457  thr->fast_synch_epoch = thr->fast_state.epoch();  in ReleaseStoreImpl()
    [all …]
|
/external/skqp/src/compute/skc/ |
D | weakref.c |
    48  skc_ulong epoch : 64 - SKC_WEAKREF_INDEX_BITS;  member
    60  skc_weakref_epoch_init(skc_epoch_t * const epoch)  in skc_weakref_epoch_init() argument
    62  *epoch = SKC_WEAKREF_EPOCH_INIT;  in skc_weakref_epoch_init()
    66  skc_weakref_epoch_inc(skc_epoch_t * const epoch)  in skc_weakref_epoch_inc() argument
    68  *epoch += SKC_WEAKREF_EPOCH_ONE;  in skc_weakref_epoch_inc()
    73  skc_epoch_t * const epoch,  in skc_weakref_init() argument
    76  *weakref = *epoch | (index & SKC_WEAKREF_INDEX_MASK);  in skc_weakref_init()
    81  skc_epoch_t const * const epoch)  in skc_weakref_is_invalid() argument
    83  return ((*weakref ^ *epoch) & SKC_WEAKREF_EPOCH_MASK) != 0UL;  in skc_weakref_is_invalid()
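The skc weakref scheme packs a generation counter (the epoch) into the high bits of a 64-bit handle and a slot index into the low bits; bumping the epoch invalidates every outstanding weakref at once, and validity is checked by XOR-ing the stored epoch against the current one. A self-contained sketch of the same idea follows; the 16-bit index split and the zero initial epoch are assumptions for illustration, not skc's actual SKC_WEAKREF_* constants.

```cpp
#include <cstdint>

// Assumed split: low 16 bits index, high 48 bits epoch (skc uses its own widths).
constexpr uint64_t kIndexBits = 16;
constexpr uint64_t kIndexMask = (uint64_t(1) << kIndexBits) - 1;
constexpr uint64_t kEpochMask = ~kIndexMask;
constexpr uint64_t kEpochOne  = uint64_t(1) << kIndexBits;

using epoch_t   = uint64_t;
using weakref_t = uint64_t;

inline void epoch_init(epoch_t* epoch) { *epoch = 0; }

// Incrementing the epoch invalidates every weakref minted under the old value.
inline void epoch_inc(epoch_t* epoch) { *epoch += kEpochOne; }

inline void weakref_init(weakref_t* w, epoch_t const* epoch, uint32_t index) {
  *w = *epoch | (uint64_t(index) & kIndexMask);
}

// A weakref is stale if its epoch bits no longer match the current epoch.
inline bool weakref_is_invalid(weakref_t const* w, epoch_t const* epoch) {
  return ((*w ^ *epoch) & kEpochMask) != 0;
}
```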
|
D | weakref.h |
    29  skc_weakref_epoch_init(skc_epoch_t * const epoch);
    32  skc_weakref_epoch_inc(skc_epoch_t * const epoch);
    36  skc_epoch_t * const epoch,
    41  skc_epoch_t const * const epoch);
|
/external/skia/src/compute/skc/ |
D | weakref.c |
    48  skc_ulong epoch : 64 - SKC_WEAKREF_INDEX_BITS;  member
    60  skc_weakref_epoch_init(skc_epoch_t * const epoch)  in skc_weakref_epoch_init() argument
    62  *epoch = SKC_WEAKREF_EPOCH_INIT;  in skc_weakref_epoch_init()
    66  skc_weakref_epoch_inc(skc_epoch_t * const epoch)  in skc_weakref_epoch_inc() argument
    68  *epoch += SKC_WEAKREF_EPOCH_ONE;  in skc_weakref_epoch_inc()
    73  skc_epoch_t * const epoch,  in skc_weakref_init() argument
    76  *weakref = *epoch | (index & SKC_WEAKREF_INDEX_MASK);  in skc_weakref_init()
    81  skc_epoch_t const * const epoch)  in skc_weakref_is_invalid() argument
    83  return ((*weakref ^ *epoch) & SKC_WEAKREF_EPOCH_MASK) != 0UL;  in skc_weakref_is_invalid()
|
D | weakref.h |
    29  skc_weakref_epoch_init(skc_epoch_t * const epoch);
    32  skc_weakref_epoch_inc(skc_epoch_t * const epoch);
    36  skc_epoch_t * const epoch,
    41  skc_epoch_t const * const epoch);
|
/external/jemalloc/test/unit/ |
D | stats.c |
    39  uint64_t epoch;  in TEST_BEGIN() local
    48  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    79  uint64_t epoch;  in TEST_BEGIN() local
    103  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    147  uint64_t epoch, nmalloc, ndalloc, nrequests;  in TEST_BEGIN() local
    162  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    198  uint64_t epoch, nmalloc, ndalloc, nrequests;  in TEST_BEGIN() local
    208  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    244  uint64_t epoch, nmalloc, ndalloc;  in TEST_BEGIN() local
    254  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    [all …]
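These tests exercise jemalloc's "epoch" mallctl: writing any value to it makes jemalloc refresh the cached counters that subsequent stats.* reads return. A minimal usage sketch (not from the test files; assumes an unprefixed jemalloc build linked with -ljemalloc):

```cpp
#include <jemalloc/jemalloc.h>
#include <cstdint>
#include <cstdio>

int main() {
  uint64_t epoch = 1;

  // Writing to "epoch" tells jemalloc to refresh its cached statistics.
  mallctl("epoch", nullptr, nullptr, &epoch, sizeof(epoch));

  // Reads of stats.* now reflect the state as of that refresh.
  size_t allocated = 0;
  size_t sz = sizeof(allocated);
  if (mallctl("stats.allocated", &allocated, &sz, nullptr, 0) == 0)
    std::printf("allocated: %zu bytes\n", allocated);
  return 0;
}
```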
|
D | mallctl.c |
    5  uint64_t epoch;  in TEST_BEGIN() local
    15  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,  in TEST_BEGIN()
    16  sizeof(epoch)-1), EINVAL,  in TEST_BEGIN()
    18  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,  in TEST_BEGIN()
    19  sizeof(epoch)+1), EINVAL,  in TEST_BEGIN()
    22  sz = sizeof(epoch)-1;  in TEST_BEGIN()
    23  assert_d_eq(mallctl("epoch", (void *)&epoch, &sz, NULL, 0), EINVAL,  in TEST_BEGIN()
    25  sz = sizeof(epoch)+1;  in TEST_BEGIN()
    26  assert_d_eq(mallctl("epoch", (void *)&epoch, &sz, NULL, 0), EINVAL,  in TEST_BEGIN()
    44  uint64_t epoch;  in TEST_BEGIN() local
    [all …]
|
/external/jemalloc_new/test/unit/ |
D | stats.c |
    30  uint64_t epoch;  in TEST_BEGIN() local
    39  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    70  uint64_t epoch;  in TEST_BEGIN() local
    90  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    146  uint64_t epoch, nmalloc, ndalloc, nrequests;  in TEST_BEGIN() local
    157  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    191  uint64_t epoch, nmalloc, ndalloc;  in TEST_BEGIN() local
    197  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    231  uint64_t epoch, nmalloc, ndalloc, nrequests, nfills, nflushes;  in TEST_BEGIN() local
    254  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch, sizeof(epoch)),  in TEST_BEGIN()
    [all …]
|
D | retained.c |
    10  static atomic_u_t epoch;  variable
    38  uint64_t epoch = 1;  in do_refresh() local
    39  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,  in do_refresh()
    40  sizeof(epoch)), 0, "Unexpected mallctl() failure");  in do_refresh()
    75  while ((cur_epoch = atomic_load_u(&epoch, ATOMIC_ACQUIRE)) !=  in thd_start()
    107  atomic_store_u(&epoch, 0, ATOMIC_RELAXED);  in TEST_BEGIN()
    117  atomic_store_u(&epoch, e, ATOMIC_RELEASE);  in TEST_BEGIN()
|
D | mallctl.c |
    6  uint64_t epoch;  in TEST_BEGIN() local
    16  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,  in TEST_BEGIN()
    17  sizeof(epoch)-1), EINVAL,  in TEST_BEGIN()
    19  assert_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,  in TEST_BEGIN()
    20  sizeof(epoch)+1), EINVAL,  in TEST_BEGIN()
    23  sz = sizeof(epoch)-1;  in TEST_BEGIN()
    24  assert_d_eq(mallctl("epoch", (void *)&epoch, &sz, NULL, 0), EINVAL,  in TEST_BEGIN()
    26  sz = sizeof(epoch)+1;  in TEST_BEGIN()
    27  assert_d_eq(mallctl("epoch", (void *)&epoch, &sz, NULL, 0), EINVAL,  in TEST_BEGIN()
    43  uint64_t epoch;  in TEST_BEGIN() local
    [all …]
|
/external/eigen/unsupported/Eigen/CXX11/src/ThreadPool/ |
D | EventCount.h |
    68  w->epoch = state_.fetch_add(kWaiterInc, std::memory_order_relaxed);  in Prewait()
    76  uint64_t epoch =  in CommitWait() local
    77  (w->epoch & kEpochMask) +  in CommitWait()
    78  (((w->epoch & kWaiterMask) >> kWaiterShift) << kEpochShift);  in CommitWait()
    81  if (int64_t((state & kEpochMask) - epoch) < 0) {  in CommitWait()
    89  if (int64_t((state & kEpochMask) - epoch) > 0) return;  in CommitWait()
    107  uint64_t epoch =  in CancelWait() local
    108  (w->epoch & kEpochMask) +  in CancelWait()
    109  (((w->epoch & kWaiterMask) >> kWaiterShift) << kEpochShift);  in CancelWait()
    112  if (int64_t((state & kEpochMask) - epoch) < 0) {  in CancelWait()
    [all …]
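Eigen's EventCount packs a waiter count and an epoch into a single atomic state word so that CommitWait() can detect a notification that raced in between Prewait() and the actual block. The bit manipulation above is hard to read out of context, so here is a heavily simplified, mutex-based sketch of just the epoch idea (not Eigen's lock-free implementation, and it omits CancelWait): take an epoch ticket before re-checking your predicate, and only block while the epoch has not advanced.

```cpp
#include <atomic>
#include <condition_variable>
#include <cstdint>
#include <mutex>

// Simplified event count: the epoch lets CommitWait() skip blocking if a
// notification arrived after Prewait() but before the wait was committed.
class SimpleEventCount {
  std::atomic<uint64_t> epoch_{0};
  std::mutex mu_;
  std::condition_variable cv_;

 public:
  // Call before re-checking the external predicate (e.g. "is the queue empty?").
  uint64_t Prewait() { return epoch_.load(std::memory_order_acquire); }

  // Block only while no Notify() has bumped the epoch since Prewait().
  void CommitWait(uint64_t ticket) {
    std::unique_lock<std::mutex> lock(mu_);
    cv_.wait(lock, [&] { return epoch_.load(std::memory_order_relaxed) != ticket; });
  }

  // Producer side: publish work first, then notify.
  void Notify() {
    {
      std::lock_guard<std::mutex> lock(mu_);
      epoch_.fetch_add(1, std::memory_order_release);
    }
    cv_.notify_all();
  }
};
```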
|
/external/tensorflow/tensorflow/python/keras/ |
D | callbacks.py |
    262  def on_epoch_begin(self, epoch, logs=None):  argument
    274  callback.on_epoch_begin(epoch, logs)
    277  def on_epoch_end(self, epoch, logs=None):  argument
    290  callback.on_epoch_end(epoch, logs)
    461  def on_epoch_begin(self, epoch, logs=None):  argument
    473  def on_epoch_end(self, epoch, logs=None):  argument
    637  def on_epoch_begin(self, epoch, logs=None):  argument
    658  def on_epoch_end(self, epoch, logs=None):  argument
    714  def on_epoch_begin(self, epoch, logs=None):  argument
    723  print('Epoch %d/%d' % (epoch + 1, self.epochs))
    [all …]
|
/external/llvm/unittests/Support/ |
D | TimeValueTest.cpp |
    26  sys::TimeValue epoch;  in TEST() local
    30  epoch.fromWin32Time(ft1970);  in TEST()
    33  EXPECT_EQ(0u, epoch.toEpochTime());  in TEST()
    34  EXPECT_EQ(ns, static_cast<uint32_t>(epoch.nanoseconds()));  in TEST()
    37  EXPECT_EQ(ft1970, epoch.toWin32Time());  in TEST()
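The test converts the Win32 FILETIME value for 1970-01-01 and expects toEpochTime() to yield 0. FILETIME counts 100 ns ticks since 1601-01-01, so the offset between the two epochs is the well-known constant 116444736000000000 (0x019DB1DED53E8000). A small conversion sketch using that constant (the helper name is made up for illustration):

```cpp
#include <cstdint>

// 100 ns ticks between 1601-01-01 (FILETIME epoch) and 1970-01-01 (Unix epoch).
constexpr uint64_t kWin32ToUnixEpochTicks = 116444736000000000ULL;  // 0x019DB1DED53E8000
constexpr uint64_t kTicksPerSecond = 10000000ULL;                   // one FILETIME tick = 100 ns

// Convert a Win32 FILETIME tick count to Unix seconds plus leftover nanoseconds.
inline void Win32ToUnix(uint64_t fileTime, uint64_t& unixSeconds, uint32_t& nanoseconds) {
  const uint64_t sinceUnix = fileTime - kWin32ToUnixEpochTicks;
  unixSeconds = sinceUnix / kTicksPerSecond;
  nanoseconds = static_cast<uint32_t>((sinceUnix % kTicksPerSecond) * 100);
}
// Feeding the 1970 tick value itself (as the test above does) yields 0 seconds.
```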
|
/external/python/setuptools/setuptools/_vendor/packaging/ |
D | version.py |
    136  epoch = -1
    155  return epoch, parts
    206  epoch=int(match.group("epoch")) if match.group("epoch") else 0,
    225  self._version.epoch,
    240  if self._version.epoch != 0:
    241  parts.append("{0}!".format(self._version.epoch))
    275  if self._version.epoch != 0:
    276  parts.append("{0}!".format(self._version.epoch))
    343  def _cmpkey(epoch, release, pre, post, dev, local):  argument
    393  return epoch, release, pre, post, dev, local
|
/external/python/setuptools/pkg_resources/_vendor/packaging/ |
D | version.py |
    136  epoch = -1
    155  return epoch, parts
    206  epoch=int(match.group("epoch")) if match.group("epoch") else 0,
    225  self._version.epoch,
    240  if self._version.epoch != 0:
    241  parts.append("{0}!".format(self._version.epoch))
    275  if self._version.epoch != 0:
    276  parts.append("{0}!".format(self._version.epoch))
    343  def _cmpkey(epoch, release, pre, post, dev, local):  argument
    393  return epoch, release, pre, post, dev, local
|
/external/grpc-grpc-java/netty/src/main/java/io/grpc/netty/ |
D | KeepAliveEnforcer.java |
    34  private final long epoch;  field in KeepAliveEnforcer
    51  this.epoch = ticker.nanoTime();  in KeepAliveEnforcer()
    52  lastValidPingTime = epoch;  in KeepAliveEnforcer()
    79  lastValidPingTime = epoch;  in resetCounters()
|
/external/boringssl/src/ssl/ |
D | dtls_record.cc |
    225  uint16_t epoch = (((uint16_t)sequence[0]) << 8) | sequence[1];  in dtls_open_record() local
    226  if (epoch != ssl->d1->r_epoch ||  in dtls_open_record()
    305  uint16_t epoch = ssl->d1->w_epoch;  in dtls_seal_record() local
    310  epoch = ssl->d1->w_epoch - 1;  in dtls_seal_record()
    326  out[3] = epoch >> 8;  in dtls_seal_record()
    327  out[4] = epoch & 0xff;  in dtls_seal_record()
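In DTLS the 16-bit epoch identifies which cipher state a record belongs to: BoringSSL keeps epoch and sequence number together as one 8-byte value whose top two bytes are the epoch, and the record header repeats the epoch in bytes 3–4 (after the 1-byte type and 2-byte version), which is what the indexing above does. A small packing/unpacking sketch with hypothetical helper names:

```cpp
#include <cstdint>
#include <cstring>

// DTLS 1.0/1.2 record header layout:
//   type(1) | version(2) | epoch(2) | sequence(6) | length(2)  -> 13 bytes total.

// The top 16 bits of the combined 8-byte epoch+sequence value are the epoch.
inline uint16_t EpochFromSequence(const uint8_t sequence[8]) {
  return static_cast<uint16_t>((static_cast<uint16_t>(sequence[0]) << 8) | sequence[1]);
}

inline void WriteRecordHeader(uint8_t out[13], uint8_t type, uint16_t version,
                              uint16_t epoch, const uint8_t seq48[6], uint16_t length) {
  out[0] = type;
  out[1] = static_cast<uint8_t>(version >> 8);
  out[2] = static_cast<uint8_t>(version & 0xff);
  out[3] = static_cast<uint8_t>(epoch >> 8);    // epoch, big-endian
  out[4] = static_cast<uint8_t>(epoch & 0xff);
  std::memcpy(out + 5, seq48, 6);               // 48-bit per-epoch sequence number
  out[11] = static_cast<uint8_t>(length >> 8);
  out[12] = static_cast<uint8_t>(length & 0xff);
}
```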
|
/external/scapy/scapy/contrib/ |
D | ppi_geotag.uts |
    47  utc_time = UTCTimeField("Test", None, epoch=local_time)
    48  assert time.localtime(utc_time.epoch) == local_time
    57  lme_time = LETimeField("Test", None, epoch=local_time)
    58  assert time.localtime(lme_time.epoch) == local_time
|
/external/tensorflow/tensorflow/python/keras/engine/ |
D | training_arrays.py |
    233  for epoch in range(initial_epoch, epochs):
    241  callbacks.on_epoch_begin(epoch, epoch_logs)
    242  progbar.on_epoch_begin(epoch, epoch_logs)
    377  training_utils.should_run_validation(validation_freq, epoch) and
    403  if val_iterator and epoch < epochs - 1:
    408  callbacks.on_epoch_end(epoch, epoch_logs)
    409  progbar.on_epoch_end(epoch, epoch_logs)
    412  if reset_dataset_after_each_epoch and epoch < epochs - 1:
|
/external/libogg/ |
D | libogg.spec.in |
    13  # We're forced to use an epoch since both Red Hat and Ximian use it in their
    18  Provides: %{name} = %{epoch}:1.0rc3-%{release}
    19  Provides: %{name} = %{epoch}:1.0beta4-%{release}
    31  Provides: %{name}-devel = %{epoch}:1.0rc3-%{release}
    32  Provides: %{name}-devel = %{epoch}:1.0beta4-%{release}
|
/external/compiler-rt/lib/tsan/tests/unit/ |
D | tsan_shadow_test.cc |
    22  EXPECT_EQ(s.epoch(), (u64)22);  in TEST()
    31  EXPECT_EQ(s.epoch(), (u64)23);  in TEST()
    33  EXPECT_EQ(s.epoch(), (u64)24);  in TEST()
|