
Searched refs:steal (Results 1 – 25 of 36) sorted by relevance

/kernel/linux/linux-5.10/arch/arm64/kvm/
pvtime.c:16 u64 base = vcpu->arch.steal.base; in kvm_update_stolen_time()
17 u64 last_steal = vcpu->arch.steal.last_steal; in kvm_update_stolen_time()
19 u64 steal = 0; in kvm_update_stolen_time() local
26 if (!kvm_get_guest(kvm, base + offset, steal)) { in kvm_update_stolen_time()
27 steal = le64_to_cpu(steal); in kvm_update_stolen_time()
28 vcpu->arch.steal.last_steal = READ_ONCE(current->sched_info.run_delay); in kvm_update_stolen_time()
29 steal += vcpu->arch.steal.last_steal - last_steal; in kvm_update_stolen_time()
30 kvm_put_guest(kvm, base + offset, cpu_to_le64(steal)); in kvm_update_stolen_time()
43 if (vcpu->arch.steal.base != GPA_INVALID) in kvm_hypercall_pv_features()
55 u64 base = vcpu->arch.steal.base; in kvm_init_stolen_time()
[all …]
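
Taken together, the pvtime.c hits show the host-side update: sample the vCPU's accumulated run_delay, add the delta since the previous sample to the guest-visible counter, and write it back (the kernel does the write in little-endian via kvm_put_guest()). A minimal user-space sketch of that accumulation step, with illustrative names rather than the kernel's structures:

    #include <stdint.h>

    /* Illustrative stand-in for the guest-visible steal-time record. */
    struct pv_steal_record {
        uint64_t steal;       /* total stolen time published to the guest, ns */
        uint64_t last_steal;  /* host run_delay at the previous update, ns    */
    };

    /* Mirror of the flow in kvm_update_stolen_time(): fold the run_delay
     * growth since the last update into the published counter. */
    static void update_stolen_time(struct pv_steal_record *rec,
                                   uint64_t run_delay_now)
    {
        rec->steal += run_delay_now - rec->last_steal;
        rec->last_steal = run_delay_now;
    }
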
/kernel/linux/linux-5.10/drivers/media/pci/ivtv/
ivtv-queue.c:110 int ivtv_queue_move(struct ivtv_stream *s, struct ivtv_queue *from, struct ivtv_queue *steal, in ivtv_queue_move() argument
126 bytes_steal = (from_free && steal) ? steal->length : 0; in ivtv_queue_move()
132 while (steal && bytes_available < needed_bytes) { in ivtv_queue_move()
133 struct ivtv_buffer *buf = list_entry(steal->list.prev, struct ivtv_buffer, list); in ivtv_queue_move()
141 list_move_tail(steal->list.prev, &from->list); in ivtv_queue_move()
143 steal->buffers--; in ivtv_queue_move()
144 steal->length -= s->buf_size; in ivtv_queue_move()
145 steal->bytesused -= buf->bytesused - buf->readpos; in ivtv_queue_move()
150 if (list_empty(&steal->list)) in ivtv_queue_move()
152 buf = list_entry(steal->list.prev, struct ivtv_buffer, list); in ivtv_queue_move()
ivtv-queue.h:62 int ivtv_queue_move(struct ivtv_stream *s, struct ivtv_queue *from, struct ivtv_queue *steal,
/kernel/linux/linux-5.10/kernel/sched/
cputime.c:253 u64 steal; in steal_account_process_time() local
255 steal = paravirt_steal_clock(smp_processor_id()); in steal_account_process_time()
256 steal -= this_rq()->prev_steal_time; in steal_account_process_time()
257 steal = min(steal, maxtime); in steal_account_process_time()
258 account_steal_time(steal); in steal_account_process_time()
259 this_rq()->prev_steal_time += steal; in steal_account_process_time()
261 return steal; in steal_account_process_time()
488 u64 cputime, steal; in account_process_tick() local
499 steal = steal_account_process_time(ULONG_MAX); in account_process_tick()
501 if (steal >= cputime) in account_process_tick()
[all …]
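
The cputime.c hits are the generic accounting side: read the paravirt steal clock, subtract what was already charged, clamp to the time that actually elapsed, and remember the new total. A hedged sketch of that clamp-and-accumulate step (a plain function, not the kernel's per-runqueue state):

    #include <stdint.h>

    /* Sketch of steal_account_process_time(): clock_now is the paravirt
     * steal clock, *prev_steal the amount already accounted, maxtime the
     * elapsed CPU time this tick. Returns the steal time to charge. */
    static uint64_t account_steal(uint64_t clock_now, uint64_t *prev_steal,
                                  uint64_t maxtime)
    {
        uint64_t steal = clock_now - *prev_steal;

        if (steal > maxtime)     /* never charge more than the tick allows */
            steal = maxtime;

        *prev_steal += steal;    /* remember what has been accounted */
        return steal;            /* caller adds this to CPUTIME_STEAL */
    }
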
core.c:262 s64 __maybe_unused steal = 0, irq_delta = 0; in update_rq_clock_task() local
290 steal = paravirt_steal_clock(cpu_of(rq)); in update_rq_clock_task()
291 steal -= rq->prev_steal_time_rq; in update_rq_clock_task()
293 if (unlikely(steal > delta)) in update_rq_clock_task()
294 steal = delta; in update_rq_clock_task()
296 rq->prev_steal_time_rq += steal; in update_rq_clock_task()
297 delta -= steal; in update_rq_clock_task()
304 if ((irq_delta + steal) && sched_feat(NONTASK_CAPACITY)) in update_rq_clock_task()
305 update_irq_load_avg(rq, irq_delta + steal); in update_rq_clock_task()
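
The core.c hits are the runqueue-clock side of the same idea: stolen time is clamped to the elapsed delta and subtracted from it, so rq->clock_task only advances by time the (v)CPU really ran. A small sketch of that adjustment, assuming the same prev_steal_time_rq bookkeeping:

    #include <stdint.h>

    /* Sketch of the steal handling in update_rq_clock_task(): remove the
     * newly observed steal time from delta before it reaches clock_task. */
    static uint64_t remove_steal(uint64_t delta, uint64_t steal_clock_now,
                                 uint64_t *prev_steal_time_rq)
    {
        uint64_t steal = steal_clock_now - *prev_steal_time_rq;

        if (steal > delta)       /* the unlikely() case in the kernel */
            steal = delta;

        *prev_steal_time_rq += steal;
        return delta - steal;    /* remaining delta advances clock_task */
    }
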
/kernel/linux/linux-5.10/fs/proc/
stat.c:110 u64 user, nice, system, idle, iowait, irq, softirq, steal; in show_stat() local
118 irq = softirq = steal = 0; in show_stat()
135 steal += cpustat[CPUTIME_STEAL]; in show_stat()
157 seq_put_decimal_ull(p, " ", nsec_to_clock_t(steal)); in show_stat()
176 steal = cpustat[CPUTIME_STEAL]; in show_stat()
187 seq_put_decimal_ull(p, " ", nsec_to_clock_t(steal)); in show_stat()
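
show_stat() is where the accumulated CPUTIME_STEAL reaches user space: it is the eighth numeric field of each cpu line in /proc/stat, printed in clock ticks. A small self-contained reader:

    #include <stdio.h>

    /* Print the aggregate steal field (8th value of the "cpu" line in
     * /proc/stat, in USER_HZ ticks), matching the layout in show_stat(). */
    int main(void)
    {
        unsigned long long user, nice, sys, idle, iowait, irq, softirq, steal;
        FILE *f = fopen("/proc/stat", "r");

        if (!f)
            return 1;
        if (fscanf(f, "cpu %llu %llu %llu %llu %llu %llu %llu %llu",
                   &user, &nice, &sys, &idle, &iowait, &irq, &softirq,
                   &steal) == 8)
            printf("steal: %llu ticks\n", steal);
        fclose(f);
        return 0;
    }
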
/kernel/linux/linux-5.10/arch/x86/kernel/cpu/
vmware.c:219 struct vmware_steal_time *steal = &per_cpu(vmw_steal_time, cpu); in vmware_steal_clock() local
223 clock = READ_ONCE(steal->clock); in vmware_steal_clock()
228 initial_high = READ_ONCE(steal->clock_high); in vmware_steal_clock()
231 low = READ_ONCE(steal->clock_low); in vmware_steal_clock()
234 high = READ_ONCE(steal->clock_high); in vmware_steal_clock()
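
The vmware.c hits show the classic torn-read guard for a 64-bit counter that is updated as two 32-bit halves: read high, read low, re-read high, and retry until the high word is stable. A user-space sketch of the same pattern (not the VMware interface itself):

    #include <stdint.h>

    struct split_clock {
        volatile uint32_t low;   /* lower 32 bits of the counter */
        volatile uint32_t high;  /* upper 32 bits of the counter */
    };

    /* Retry until both halves belong to the same update. */
    static uint64_t read_split_clock(const struct split_clock *c)
    {
        uint32_t high, low;

        do {
            high = c->high;
            low  = c->low;
        } while (high != c->high);   /* high changed: an update raced us */

        return ((uint64_t)high << 32) | low;
    }
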
/kernel/linux/linux-5.10/arch/s390/kernel/
vtime.c:211 u64 steal, avg_steal; in vtime_flush() local
216 steal = S390_lowcore.steal_timer; in vtime_flush()
218 if ((s64) steal > 0) { in vtime_flush()
220 account_steal_time(cputime_to_nsecs(steal)); in vtime_flush()
221 avg_steal += steal; in vtime_flush()
/kernel/linux/linux-5.10/kernel/bpf/
bpf_lru_list.c:437 int steal, first_steal; in bpf_common_lru_pop_free() local
468 steal = first_steal; in bpf_common_lru_pop_free()
470 steal_loc_l = per_cpu_ptr(clru->local_list, steal); in bpf_common_lru_pop_free()
480 steal = get_next_cpu(steal); in bpf_common_lru_pop_free()
481 } while (!node && steal != first_steal); in bpf_common_lru_pop_free()
483 loc_l->next_steal = steal; in bpf_common_lru_pop_free()
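
The bpf_lru_list.c hits are a round-robin steal loop: starting from the remembered next_steal CPU, raid each other CPU's local free list in turn until a node is found or every CPU has been visited, then remember where to resume. A generic sketch of that loop shape:

    #include <stddef.h>

    /* Sketch of the victim-selection loop in bpf_common_lru_pop_free().
     * try_steal() stands in for raiding one CPU's local free list; the
     * kernel steps CPUs with get_next_cpu() rather than modulo. */
    static void *steal_round_robin(void *(*try_steal)(int cpu),
                                   int *next_steal, int nr_cpus)
    {
        int first_steal = *next_steal;
        int steal = first_steal;
        void *node = NULL;

        do {
            node = try_steal(steal);
            steal = (steal + 1) % nr_cpus;
        } while (!node && steal != first_steal);

        *next_steal = steal;   /* resume from here on the next miss */
        return node;
    }
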
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/
radeon_object.c:610 int steal; in radeon_bo_get_surface_reg() local
624 steal = -1; in radeon_bo_get_surface_reg()
633 steal = i; in radeon_bo_get_surface_reg()
638 if (steal == -1) in radeon_bo_get_surface_reg()
641 reg = &rdev->surface_regs[steal]; in radeon_bo_get_surface_reg()
644 DRM_DEBUG("stealing surface reg %d from %p\n", steal, old_object); in radeon_bo_get_surface_reg()
647 i = steal; in radeon_bo_get_surface_reg()
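
The radeon_object.c hits show a scan that prefers a free surface register but remembers an occupied one as a fallback and steals it only when nothing is free. A simplified sketch of that selection (the driver additionally checks whether the remembered victim can actually be evicted):

    /* Pick a surface register index: a free one if possible, otherwise the
     * remembered steal candidate; -1 only if nregs is zero. */
    static int pick_surface_reg(const int in_use[], int nregs)
    {
        int steal = -1;
        int i;

        for (i = 0; i < nregs; i++) {
            if (!in_use[i])
                return i;        /* free slot, nothing to steal */
            if (steal == -1)
                steal = i;       /* remember a possible victim  */
        }
        return steal;            /* all slots busy: steal this one */
    }
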
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/
steal_time.c:51 WRITE_ONCE(guest_stolen_time[cpu], st->steal); in guest_code()
58 WRITE_ONCE(guest_stolen_time[cpu], st->steal); in guest_code()
95 pr_info(" steal: %lld\n", st->steal); in steal_time_dump()
/kernel/linux/linux-5.10/arch/arm64/include/asm/
kvm_host.h:383 } steal; member
563 vcpu_arch->steal.base = GPA_INVALID; in kvm_arm_pvtime_vcpu_init()
568 return (vcpu_arch->steal.base != GPA_INVALID); in kvm_arm_is_pvtime_enabled()
/kernel/linux/linux-5.10/arch/x86/kernel/
kvm.c:428 u64 steal; in kvm_steal_clock() local
436 steal = src->steal; in kvm_steal_clock()
440 return steal; in kvm_steal_clock()
/kernel/linux/linux-5.10/arch/x86/include/uapi/asm/
kvm_para.h:59 __u64 steal; member
/kernel/linux/patches/linux-5.10/prebuilts/usr/include/asm-x86/asm/
kvm_para.h:53 __u64 steal; member
/kernel/linux/linux-5.10/Documentation/translations/zh_CN/admin-guide/
cpu-load.rst:12 avg-cpu: %user %nice %system %iowait %steal %idle
/kernel/linux/linux-5.10/fs/btrfs/
space-info.h:81 bool steal; member
space-info.c:878 if (ticket->steal && in maybe_fail_all_tickets()
1369 ticket.steal = (flush == BTRFS_RESERVE_FLUSH_ALL_STEAL); in __reserve_bytes()
/kernel/linux/linux-5.10/Documentation/virt/kvm/
cpuid.rst:58 KVM_FEATURE_STEAL_TIME 5 steal time can be enabled by
msr.rst:269 __u64 steal;
296 steal:
299 reported as steal time.
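
The msr.rst lines describe the guest-visible record whose first member is the __u64 steal field seen in kvm_para.h above: nanoseconds the vCPU was runnable but not running. The full structure also carries a version sequence counter that is odd while the host is updating the record; a hedged guest-side sketch of reading the field under that convention, with illustrative names and only these two fields:

    #include <stdint.h>

    struct steal_time_rec {
        volatile uint64_t steal;    /* ns not run, reported as steal time */
        volatile uint32_t version;  /* odd while the host updates the rec */
    };

    /* Retry until a stable, even version brackets the read. */
    static uint64_t read_steal_time(const struct steal_time_rec *st)
    {
        uint32_t v;
        uint64_t steal;

        do {
            v = st->version;
            steal = st->steal;
        } while ((v & 1) || v != st->version);

        return steal;
    }
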
/kernel/linux/linux-5.10/Documentation/admin-guide/
cpu-load.rst:12 avg-cpu: %user %nice %system %iowait %steal %idle
/kernel/linux/linux-5.10/lib/raid6/
altivec.uc:21 * you can just "steal" the vec unit with enable_kernel_altivec() (but
/kernel/linux/linux-5.10/Documentation/x86/
intel_txt.rst:161 attempt to crash the system to gain control on reboot and steal
/kernel/linux/linux-5.10/Documentation/target/
tcmu-design.rst:192 command(a.k.a steal the original command's entry).
/kernel/linux/linux-5.10/Documentation/vm/
frontswap.rst:155 Dickins has observed that frontswap could probably steal one of
