Searched refs:steal (Results 1 – 4 of 4) sorted by relevance
272  u64 steal;                                              in steal_account_process_time() local
274  steal = paravirt_steal_clock(smp_processor_id());       in steal_account_process_time()
275  steal -= this_rq()->prev_steal_time;                    in steal_account_process_time()
276  steal = min(steal, maxtime);                            in steal_account_process_time()
277  account_steal_time(steal);                              in steal_account_process_time()
278  this_rq()->prev_steal_time += steal;                    in steal_account_process_time()
280  return steal;                                           in steal_account_process_time()
506  u64 cputime, steal;                                     in account_process_tick() local
522  steal = steal_account_process_time(ULONG_MAX);          in account_process_tick()
524  if (steal >= cputime)                                   in account_process_tick()
[all …]
720  s64 __maybe_unused steal = 0, irq_delta = 0;            in update_rq_clock_task() local
749  steal = paravirt_steal_clock(cpu_of(rq));               in update_rq_clock_task()
750  steal -= rq->prev_steal_time_rq;                        in update_rq_clock_task()
752  if (unlikely(steal > delta))                            in update_rq_clock_task()
753  steal = delta;                                          in update_rq_clock_task()
755  rq->prev_steal_time_rq += steal;                        in update_rq_clock_task()
756  delta -= steal;                                         in update_rq_clock_task()
763  if ((irq_delta + steal) && sched_feat(NONTASK_CAPACITY)) in update_rq_clock_task()
764  update_irq_load_avg(rq, irq_delta + steal);             in update_rq_clock_task()
442  int steal, first_steal;                                 in bpf_common_lru_pop_free() local
473  steal = first_steal;                                    in bpf_common_lru_pop_free()
475  steal_loc_l = per_cpu_ptr(clru->local_list, steal);     in bpf_common_lru_pop_free()
485  steal = get_next_cpu(steal);                            in bpf_common_lru_pop_free()
486  } while (!node && steal != first_steal);                in bpf_common_lru_pop_free()
488  loc_l->next_steal = steal;                              in bpf_common_lru_pop_free()
1037 bool steal = true;                                      in rwsem_down_read_slowpath() local
1052 trace_android_vh_rwsem_direct_rsteal(sem, &steal);      in rwsem_down_read_slowpath()
1053 if (steal && !(count & (RWSEM_WRITER_LOCKED | RWSEM_FLAG_HANDOFF))) { in rwsem_down_read_slowpath()