/external/tensorflow/tensorflow/core/data/service/

  worker_client.cc
    106  if (cancelled_) {    in GetElement()
    150  cancelled_ = true;    in TryCancel()
    165  bool cancelled_ TF_GUARDED_BY(mu_) = false;
    209  cancelled_ = true;    in TryCancel()
    215  if (cancelled_) {    in VerifyClientIsNotCancelled()
    238  bool cancelled_ TF_GUARDED_BY(mu_) = false;

  task_runner.cc
    281  while (!cancelled_ && current_round_ < req.round_index()) {    in PrepareRound()
    285  if (current_round_ < req.round_index() && cancelled_) {    in PrepareRound()
    335  cancelled_ = true;    in Cancel()
    348  cancelled_ = true;    in ~PrefetchThread()
    356  while (!cancelled_ && buffer_.size() >= round_size_) {    in Run()
    359  if (cancelled_) {    in Run()
    393  while (buffer_.size() < round_size_ && !cancelled_ && status_.ok()) {    in FillBuffer()
    401  if (cancelled_) {    in FillBuffer()

  worker_impl.cc
    155  cancelled_ = true;    in ~DataServiceWorkerImpl()
    197  cancelled_ = true;    in Stop()
    234  if (cancelled_) {    in GetElementResult()
    444  while (!cancelled_ && pending_completed_tasks_.empty()) {    in TaskCompletionThread()
    447  if (cancelled_) {    in TaskCompletionThread()
    456  if (!cancelled_) {    in TaskCompletionThread()
    493  while (!cancelled_ &&    in HeartbeatThread()
    500  if (cancelled_) {    in HeartbeatThread()

  task_runner.h
    200  bool cancelled_ TF_GUARDED_BY(mu_) = false;
    245  bool cancelled_ TF_GUARDED_BY(mu_) = false;

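Every tf.data service entry above has the same shape: a bool cancelled_ annotated TF_GUARDED_BY(mu_), blocking loops that re-check the flag after every wait, and a TryCancel()/Cancel() that sets it and wakes the waiters. A minimal standalone sketch of that shape, written with std::mutex and std::condition_variable instead of TensorFlow's mutex types; the class and method names are invented for illustration.

#include <condition_variable>
#include <cstdio>
#include <deque>
#include <mutex>
#include <optional>

// Illustrative only: a mutex-guarded cancellation flag in the style of the
// listings above, not code from the TensorFlow tree.
class CancellableQueue {
 public:
  // Blocks until an item is available or the queue is cancelled.
  // Returns std::nullopt if cancelled while waiting.
  std::optional<int> Pop() {
    std::unique_lock<std::mutex> lock(mu_);
    cv_.wait(lock, [this] { return cancelled_ || !items_.empty(); });
    if (cancelled_) return std::nullopt;  // mirrors "if (cancelled_) { ... }"
    int item = items_.front();
    items_.pop_front();
    return item;
  }

  void Push(int item) {
    {
      std::lock_guard<std::mutex> lock(mu_);
      items_.push_back(item);
    }
    cv_.notify_one();
  }

  // Mirrors TryCancel()/Cancel(): set the flag under the lock, then
  // broadcast so every blocked thread observes it and returns.
  void Cancel() {
    {
      std::lock_guard<std::mutex> lock(mu_);
      cancelled_ = true;
    }
    cv_.notify_all();
  }

 private:
  std::mutex mu_;
  std::condition_variable cv_;
  std::deque<int> items_;
  bool cancelled_ = false;  // only read or written while holding mu_
};

int main() {
  CancellableQueue q;
  q.Push(42);
  std::printf("%d\n", *q.Pop());             // prints 42
  q.Cancel();
  std::printf("%d\n", q.Pop().has_value());  // prints 0: cancelled, no item
}
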
/external/grpc-grpc/src/cpp/server/

  server_context.cc
    48   cancelled_(0) {}    in CompletionOp()
    72   return finalized_ ? (cancelled_ != 0) : false;    in CheckCancelledNoPluck()
    80   int cancelled_;    member in grpc::ServerContext::CompletionOp
    94   ops->data.recv_close_on_server.cancelled = &cancelled_;    in FillOps()
    108  if (!*status) cancelled_ = 1;    in FinalizeResult()

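Both gRPC copies of server_context.cc in this listing (this one and the vendored grpcio-sys copy below) keep cancelled_ as an int rather than a bool; the in-source comment on the second copy says why: the flag is handed to the C core by address through recv_close_on_server.cancelled. A rough stand-in for that interop pattern, with the op struct and the core call reduced to invented placeholders rather than the real gRPC core API.

#include <cstdio>

// Invented placeholder for the C-core op: the core writes 0 or 1 through a
// plain int*, which is why the C++ side cannot store a bool directly.
struct recv_close_op {
  int* cancelled;
};

// Invented placeholder for the core completing the op.
void core_complete(recv_close_op* op, bool client_went_away) {
  *op->cancelled = client_went_away ? 1 : 0;
}

// Simplified stand-in loosely mirroring the CompletionOp lines above.
class CompletionOp {
 public:
  void FillOps(recv_close_op* op) { op->cancelled = &cancelled_; }
  void FinalizeResult() { finalized_ = true; }
  // Mirrors CheckCancelledNoPluck(): only meaningful once finalized.
  bool IsCancelled() const { return finalized_ ? (cancelled_ != 0) : false; }

 private:
  bool finalized_ = false;
  int cancelled_ = 0;  // int, not bool: a C API writes through &cancelled_
};

int main() {
  CompletionOp op;
  recv_close_op core_op{};
  op.FillOps(&core_op);
  core_complete(&core_op, /*client_went_away=*/true);
  op.FinalizeResult();
  std::printf("cancelled: %d\n", op.IsCancelled());  // prints 1
}
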
/external/eigen/unsupported/Eigen/CXX11/src/ThreadPool/

  NonBlockingThreadPool.h
    36   cancelled_(false),    in env_()
    73   if (!cancelled_) {    in ~ThreadPoolTempl()
    139  cancelled_ = true;    in Cancel()
    245  std::atomic<bool> cancelled_;    variable
    284  while (!cancelled_) {    in WorkerLoop()
    287  if (!cancelled_.load(std::memory_order_relaxed)) {    in WorkerLoop()
    301  while (!cancelled_) {    in WorkerLoop()
    311  if (!cancelled_.load(std::memory_order_relaxed)) {    in WorkerLoop()
    393  if (cancelled_) {    in WaitForWork()

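The Eigen thread pool takes the lock-free route instead: cancelled_ is a std::atomic<bool> that Cancel() sets and that the worker loops poll with relaxed loads between tasks. A simplified sketch of that arrangement; it is not Eigen's code, and the pool here does no real work.

#include <atomic>
#include <chrono>
#include <cstdio>
#include <thread>
#include <vector>

// Illustrative only: an atomic cancellation flag polled by worker loops.
class CancellablePool {
 public:
  explicit CancellablePool(int num_threads) {
    for (int i = 0; i < num_threads; ++i) {
      threads_.emplace_back([this, i] { WorkerLoop(i); });
    }
  }

  ~CancellablePool() {
    Cancel();
    for (auto& t : threads_) t.join();
  }

  void Cancel() { cancelled_.store(true, std::memory_order_relaxed); }

 private:
  void WorkerLoop(int id) {
    // Relaxed loads are enough in this sketch: the loop only needs to
    // eventually observe the flag; nothing else is ordered against it.
    while (!cancelled_.load(std::memory_order_relaxed)) {
      // A real pool would pop and run a task; sleeping stands in for that.
      std::this_thread::sleep_for(std::chrono::milliseconds(1));
    }
    std::printf("worker %d exiting\n", id);
  }

  std::atomic<bool> cancelled_{false};
  std::vector<std::thread> threads_;
};

int main() {
  CancellablePool pool(2);
  std::this_thread::sleep_for(std::chrono::milliseconds(10));
  // The destructor cancels the workers and joins them.
}
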
/external/pdfium/xfa/fwl/

  cfwl_eventtextwillchange.h
    26   bool GetCancelled() const { return cancelled_; }    in GetCancelled()
    38   void SetCancelled(bool cancelled) { cancelled_ = cancelled; }    in SetCancelled()
    45   bool cancelled_ = false;    variable

/external/libchrome/base/files/

  file_path_watcher.h
    68   cancelled_ = true;    in set_cancelled()
    72   return cancelled_;    in is_cancelled()
    77   bool cancelled_;    variable

  file_path_watcher.cc
    31   FilePathWatcher::PlatformDelegate::PlatformDelegate(): cancelled_(false) {    in PlatformDelegate()

/external/tensorflow/tensorflow/tsl/platform/default/

  unbounded_work_queue.cc
    34   cancelled_ = true;    in ~UnboundedWorkQueue()
    86   while (!cancelled_ && work_queue_.empty()) {    in PooledThreadFunc()
    91   if (cancelled_) {    in PooledThreadFunc()

/external/rust/crates/grpcio-sys/grpc/src/cpp/server/

  server_context.cc
    59   cancelled_(0),    in CompletionOp()
    140  return finalized_ ? (cancelled_ != 0) : false;    in CheckCancelledNoPluck()
    151  int cancelled_;  // This is an int (not bool) because it is passed to core    member in grpc::ServerContextBase::CompletionOp
    167  ops.data.recv_close_on_server.cancelled = &cancelled_;    in FillOps()
    203  cancelled_ = 1;    in FinalizeResult()
    206  call_cancel = (cancelled_ != 0);    in FinalizeResult()

/external/cronet/base/files/

  file_path_watcher.h
    115  void set_cancelled() { cancelled_ = true; }    in set_cancelled()
    117  bool is_cancelled() const { return cancelled_; }    in is_cancelled()
    121  bool cancelled_ = false;    variable

/external/tensorflow/tensorflow/core/kernels/data/experimental/

  sleep_dataset_op.cc
    111  cancelled_ = true;    in ~Iterator()
    123  cancelled_ = true;    in Initialize()
    136  Condition(&cancelled_),    in GetNextInternal()
    165  bool cancelled_ TF_GUARDED_BY(mu_) = false;

  data_service_dataset_op.cc
    482   while (!ResultReady() && !Finished() && !cancelled_ && status_.ok()) {    in GetNextInternal()
    486   if (cancelled_) {    in GetNextInternal()
    614   if (!task_thread_manager_ && !cancelled_) {    in EnsureThreadsStarted()
    627   cancelled_ = true;    in CancelThreads()
    676   while (!cancelled_ && Env::Default()->NowMicros() < next_check) {    in TaskThreadManager()
    683   if (cancelled_) {    in TaskThreadManager()
    888   while (num_running_worker_threads_ < max_num_threads && !cancelled_ &&    in UpdateWorkerThreads()
    921   if (cancelled_ || !ShouldWaitForNext()) {    in RunWorkerThread()
    1107  return !cancelled_;    in MaybeRemoveTask()
    1135  if (cancelled_) {    in GetElement()
    [all …]

  map_and_batch_dataset_op.cc
    242  while (!cancelled_ && (batch_results_.empty() ||    in GetNextInternal()
    250  if (cancelled_) {    in GetNextInternal()
    490  cancelled_ = true;    in CancelThreads()
    556  while (!cancelled_ && busy()) {    in RunnerThread()
    571  if (cancelled_) {    in RunnerThread()
    686  bool cancelled_ TF_GUARDED_BY(*mu_) = false;

  parallel_interleave_dataset_op.cc
    311   while (!cancelled_) {    in GetNextInternal()
    675   cancelled_ = true;    in CancelThreads()
    773   while (!cancelled_ && !workers_[thread_index].is_producing) {    in WorkerThread()
    778   if (cancelled_) return;    in WorkerThread()
    836   while (!cancelled_ && workers_[thread_index].outputs.size() ==    in WorkerThread()
    842   if (cancelled_) return;    in WorkerThread()
    897   while (!cancelled_ && workers_[thread_index].outputs.size() ==    in WorkerThread()
    903   if (cancelled_) return;    in WorkerThread()
    1194  bool cancelled_ TF_GUARDED_BY(mu_) = false;

  parse_example_dataset_op.cc
    406  if (cancelled_) {    in GetNextInternal()
    548  cancelled_ = true;    in CancelThreads()
    769  while (!cancelled_ && busy()) {    in RunnerThread()
    774  if (cancelled_) {    in RunnerThread()
    796  if (cancelled_) {    in ShouldWait()
    830  if (step != 0 && !cancelled_) {    in StatsThread()
    834  if (cancelled_) {    in StatsThread()
    910  bool cancelled_ TF_GUARDED_BY(*mu_) = false;

/external/tensorflow/tensorflow/core/platform/cloud/

  gcs_dns_cache.h
    46   cancelled_ = true;    in ~GcsDnsCache()
    67   bool cancelled_ TF_GUARDED_BY(mu_) = false;

  gcs_dns_cache.cc
    160  if (cancelled_) return;    in WorkerThread()
    162  if (cancelled_) return;    in WorkerThread()

/external/tensorflow/tensorflow/core/kernels/data/

  parallel_map_dataset_op.cc
    285  if (cancelled_) {    in GetNextInternal()
    426  cancelled_ = true;    in CancelThreads()
    560  while (!cancelled_ && busy()) {    in RunnerThread()
    565  if (cancelled_) {    in RunnerThread()
    586  if (cancelled_) {    in ShouldWait()
    620  if (step != 0 && !cancelled_) {    in StatsThread()
    624  if (cancelled_) {    in StatsThread()
    696  bool cancelled_ TF_GUARDED_BY(*mu_) = false;

  parallel_interleave_dataset_op.cc
    410   while (!cancelled_ && !Consume(&result)) {    in GetNextInternal()
    421   if (cancelled_) {    in GetNextInternal()
    658   cancelled_ = true;    in CancelThreads()
    865   while (!cancelled_ &&    in WorkerManagerThread()
    871   if (cancelled_ || end_of_input_) {    in WorkerManagerThread()
    923   while (!cancelled_) {    in CurrentWorkerThread()
    941   if (cancelled_) {    in CurrentWorkerThread()
    989   while (!cancelled_ && (future_elements_.size() >=    in FutureWorkerThread()
    994   if (cancelled_) {    in FutureWorkerThread()
    1211  if (step != 0 && !cancelled_) {    in StatsThread()
    [all …]

  parallel_batch_dataset_op.cc
    238  if (cancelled_) {    in GetNextInternal()
    427  cancelled_ = true;    in CancelThreads()
    463  while (!cancelled_ && busy()) {    in RunnerThread()
    469  if (cancelled_) {    in RunnerThread()
    490  if (cancelled_) {    in ShouldWait()
    609  bool cancelled_ TF_GUARDED_BY(*mu_) = false;

  parallel_filter_dataset_op.cc
    182  if (cancelled_) {    in GetNextInternal()
    307  cancelled_ = true;    in CancelThreads()
    435  while (!cancelled_ && busy()) {    in RunnerThread()
    440  if (cancelled_) {    in RunnerThread()
    463  if (cancelled_) {    in ShouldWait()
    610  bool cancelled_ TF_GUARDED_BY(*mu_) = false;

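The tf.data kernels in this directory and in kernels/data/experimental/ above share one lifecycle: a lazily started runner or stats thread blocks on a condition variable while idle, and CancelThreads() sets the guarded flag and notifies everyone so those threads can observe it and exit. A compact sketch of that lifecycle, with std::thread standing in for TensorFlow's thread abstraction and the cancel-then-join placed in the destructor for the example; all names are invented.

#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

// Illustrative only: one background runner cooperating with a guarded flag.
class Iterator {
 public:
  Iterator() : runner_([this] { RunnerThread(); }) {}

  ~Iterator() {
    CancelThreads();
    runner_.join();
  }

  // Hands one unit of work to the runner.
  void Produce() {
    {
      std::lock_guard<std::mutex> lock(mu_);
      ++pending_;
    }
    cv_.notify_all();
  }

 private:
  void RunnerThread() {
    std::unique_lock<std::mutex> lock(mu_);
    while (true) {
      // Mirrors "while (!cancelled_ && busy())": sleep until there is work
      // to do or a cancellation to observe.
      cv_.wait(lock, [this] { return cancelled_ || pending_ > 0; });
      if (cancelled_) return;  // mirrors "if (cancelled_) { ... }"
      --pending_;
      std::printf("processed one batch\n");
    }
  }

  // Mirrors CancelThreads(): set the flag under the lock, then broadcast.
  void CancelThreads() {
    {
      std::lock_guard<std::mutex> lock(mu_);
      cancelled_ = true;
    }
    cv_.notify_all();
  }

  std::mutex mu_;
  std::condition_variable cv_;
  int pending_ = 0;
  bool cancelled_ = false;  // guarded by mu_
  std::thread runner_;      // declared last so the flag exists before it runs
};

int main() {
  Iterator it;
  it.Produce();
  std::this_thread::sleep_for(std::chrono::milliseconds(5));
  // The destructor cancels the runner and joins it.
}
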
/external/cronet/net/third_party/quiche/src/quiche/quic/core/qpack/

  qpack_progressive_decoder.cc
    48   cancelled_(false) {}    in QpackProgressiveDecoder()
    51   if (blocked_ && !cancelled_) {    in ~QpackProgressiveDecoder()
    156  void QpackProgressiveDecoder::Cancel() { cancelled_ = true; }    in Cancel()

/external/cronet/net/base/

  directory_lister.cc
    109  base::subtle::NoBarrier_Store(&cancelled_, 1);    in CancelOnOriginSequence()
    174  return !!base::subtle::NoBarrier_Load(&cancelled_);    in IsCancelled()
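
DirectoryLister signals cancellation with Chromium's legacy base::subtle atomics; NoBarrier_Store and NoBarrier_Load are no-memory-barrier operations, roughly what relaxed std::atomic accesses express in standard C++. A hypothetical equivalent, with the scan loop invented for illustration.

#include <atomic>
#include <cstdio>

// Illustrative only: a lock-free flag set on one sequence and polled on another.
class Lister {
 public:
  // Would be called on the origin sequence in the real code.
  void Cancel() { cancelled_.store(1, std::memory_order_relaxed); }

  // Would be called on the worker sequence between directory entries.
  bool IsCancelled() const {
    return cancelled_.load(std::memory_order_relaxed) != 0;
  }

  void Scan() {
    for (int entry = 0; entry < 1000000; ++entry) {
      if (IsCancelled()) {  // mirrors the IsCancelled() check in the listing
        std::puts("scan aborted");
        return;
      }
      // Per-entry filesystem work would happen here.
    }
    std::puts("scan complete");
  }

 private:
  std::atomic<int> cancelled_{0};  // int to mirror the Atomic32 original
};

int main() {
  Lister lister;
  lister.Cancel();
  lister.Scan();  // aborts immediately because the flag is already set
}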