Searched refs:pc (Results 1 – 10 of 10) sorted by relevance
/kernel/
D | profile.c |
     36  u32 pc, hits;                                                         member
    243  if (hits[i].pc)                                                       in profile_flip_buffers()
    244  hits[i].pc = 0;                                                       in profile_flip_buffers()
    247  atomic_add(hits[i].hits, &prof_buffer[hits[i].pc]);                   in profile_flip_buffers()
    248  hits[i].hits = hits[i].pc = 0;                                        in profile_flip_buffers()
    271  unsigned long primary, secondary, flags, pc = (unsigned long)__pc;    in do_profile_hits() local
    275  pc = min((pc - (unsigned long)_stext) >> prof_shift, prof_len - 1);   in do_profile_hits()
    276  i = primary = (pc & (NR_PROFILE_GRP - 1)) << PROFILE_GRPSHIFT;        in do_profile_hits()
    277  secondary = (~(pc << 1) & (NR_PROFILE_GRP - 1)) << PROFILE_GRPSHIFT;  in do_profile_hits()
    292  if (hits[i + j].pc == pc) {                                           in do_profile_hits()
    [all …]
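The do_profile_hits() lines above scale a sampled program counter into the profile buffer and derive two probe groups for an open-addressed per-CPU hit cache. A minimal userspace sketch of that bucket math, with made-up values standing in for _stext, prof_shift, prof_len, and the group constants:

    #include <stdio.h>

    #define PROFILE_GRPSHIFT 3
    #define PROFILE_GRPSZ    (1 << PROFILE_GRPSHIFT)
    #define NR_PROFILE_HIT   64
    #define NR_PROFILE_GRP   (NR_PROFILE_HIT / PROFILE_GRPSZ)

    int main(void)
    {
        unsigned long stext = 0x1000;    /* stand-in for _stext */
        unsigned long prof_shift = 2;    /* bucket granularity: 4 bytes */
        unsigned long prof_len = 4096;   /* buckets in prof_buffer */
        unsigned long pc = 0x1a2c;       /* hypothetical sampled address */

        /* scale pc into the buffer, clamping to the last bucket (line 275) */
        pc = (pc - stext) >> prof_shift;
        if (pc > prof_len - 1)
            pc = prof_len - 1;

        /* primary and secondary probe groups (lines 276-277) */
        unsigned long primary   = (pc & (NR_PROFILE_GRP - 1)) << PROFILE_GRPSHIFT;
        unsigned long secondary = (~(pc << 1) & (NR_PROFILE_GRP - 1)) << PROFILE_GRPSHIFT;

        printf("bucket %lu, primary slot %lu, secondary slot %lu\n",
               pc, primary, secondary);
        return 0;
    }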
D | seccomp.c |
    277  int pc;                                          in seccomp_check_filter() local
    278  for (pc = 0; pc < flen; pc++) {                  in seccomp_check_filter()
    279  struct sock_filter *ftest = &filter[pc];         in seccomp_check_filter()
    724  unsigned int pc;                                 in seccomp_is_const_allow() local
    730  for (pc = 0; pc < fprog->len; pc++) {            in seccomp_is_const_allow()
    731  struct sock_filter *insn = &fprog->filter[pc];   in seccomp_is_const_allow()
    753  pc += insn->k;                                   in seccomp_is_const_allow()
    777  pc += op_res ? insn->jt : insn->jf;              in seccomp_is_const_allow()
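In seccomp, pc is an instruction index into a classic BPF program: the loop increment supplies the implicit +1, and taken jumps add insn->k (line 753) or jt/jf (line 777) on top. A toy standalone emulator showing that walk; the opcode set and encoding are simplified stand-ins, not the real cBPF instruction format:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Toy instruction: constant load, conditional/unconditional jump, return. */
    struct insn {
        enum { LD_IMM, JEQ, JA, RET } code;
        uint32_t k;
        uint8_t jt, jf;
    };

    static bool run_const(const struct insn *prog, unsigned int len, uint32_t *ret)
    {
        uint32_t acc = 0;

        for (unsigned int pc = 0; pc < len; pc++) {
            const struct insn *insn = &prog[pc];

            switch (insn->code) {
            case LD_IMM:
                acc = insn->k;                       /* constant load */
                break;
            case JA:
                pc += insn->k;                       /* jump: loop adds the +1 */
                break;
            case JEQ:
                pc += (acc == insn->k) ? insn->jt : insn->jf;
                break;
            case RET:
                *ret = insn->k;                      /* reached a constant return */
                return true;
            }
        }
        return false;                                /* fell off the program */
    }

    int main(void)
    {
        const struct insn prog[] = {
            { LD_IMM, 42, 0, 0 },                    /* acc = 42 */
            { JEQ,    42, 0, 1 },                    /* equal: fall through */
            { RET,     1, 0, 0 },                    /* "allow" */
            { RET,     0, 0, 0 },                    /* "deny" */
        };
        uint32_t ret;

        if (run_const(prog, 4, &ret))
            printf("constant result: %u\n", ret);
        return 0;
    }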
/kernel/bpf/
D | memalloc.c |
    361  struct bpf_mem_cache *c, __percpu *pc;                 in bpf_mem_alloc_init() local
    366  pc = __alloc_percpu_gfp(sizeof(*pc), 8, GFP_KERNEL);   in bpf_mem_alloc_init()
    367  if (!pc)                                               in bpf_mem_alloc_init()
    381  c = per_cpu_ptr(pc, cpu);                              in bpf_mem_alloc_init()
    387  ma->cache = pc;                                        in bpf_mem_alloc_init()
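Here pc is a __percpu pointer rather than a program counter: one bpf_mem_cache is allocated per CPU and each copy is reached through per_cpu_ptr(). A rough userspace analogue of that setup, with a plain array standing in for the percpu allocator:

    #include <stdio.h>
    #include <stdlib.h>

    struct mem_cache {
        int cpu;
        size_t unit_size;
    };

    int main(void)
    {
        int nr_cpus = 4;                      /* assumed CPU count */
        struct mem_cache *pc;                 /* "percpu" base, as in the hit */

        pc = calloc(nr_cpus, sizeof(*pc));    /* stands in for __alloc_percpu_gfp() */
        if (!pc)
            return 1;

        for (int cpu = 0; cpu < nr_cpus; cpu++) {
            struct mem_cache *c = &pc[cpu];   /* stands in for per_cpu_ptr(pc, cpu) */
            c->cpu = cpu;
            c->unit_size = 96;
        }

        printf("cache for cpu 2 sized %zu\n", pc[2].unit_size);
        free(pc);
        return 0;
    }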
/kernel/sched/
D | idle.c |
    337  bool cpu_in_idle(unsigned long pc)                      in cpu_in_idle() argument
    339  return pc >= (unsigned long)__cpuidle_text_start &&     in cpu_in_idle()
    340  pc < (unsigned long)__cpuidle_text_end;                 in cpu_in_idle()
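cpu_in_idle() is a plain half-open range check: the sampled pc counts as "in idle" exactly when it falls inside the __cpuidle text section. The same shape with stand-in section bounds:

    #include <stdbool.h>
    #include <stdio.h>

    static unsigned long idle_text_start = 0x4000;  /* stand-in for __cpuidle_text_start */
    static unsigned long idle_text_end   = 0x4800;  /* stand-in for __cpuidle_text_end */

    static bool pc_in_idle(unsigned long pc)
    {
        return pc >= idle_text_start && pc < idle_text_end;  /* [start, end) */
    }

    int main(void)
    {
        printf("%d %d\n", pc_in_idle(0x4100), pc_in_idle(0x5000));  /* 1 0 */
        return 0;
    }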
D | cputime.c |
     55  unsigned int pc;                                                     in irqtime_account_irq() local
     66  pc = irq_count() - offset;                                           in irqtime_account_irq()
     74  if (pc & HARDIRQ_MASK) {                                             in irqtime_account_irq()
     77  } else if ((pc & SOFTIRQ_OFFSET) && curr != this_cpu_ksoftirqd()) {  in irqtime_account_irq()
    454  unsigned int pc = irq_count() - offset;                              in vtime_account_irq() local
    456  if (pc & HARDIRQ_OFFSET) {                                           in vtime_account_irq()
    458  } else if (pc & SOFTIRQ_OFFSET) {                                    in vtime_account_irq()
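Both functions classify the current context from preempt-count bits: hardirq nesting is tested first, then the softirq offset. A sketch of that mask test; the bit layout below follows the usual preempt-count arrangement (8 preempt bits, 8 softirq bits, 4 hardirq bits) but is an assumption here, not read from preempt.h:

    #include <stdio.h>

    #define SOFTIRQ_SHIFT   8
    #define HARDIRQ_SHIFT  16
    #define SOFTIRQ_OFFSET (1UL << SOFTIRQ_SHIFT)
    #define HARDIRQ_OFFSET (1UL << HARDIRQ_SHIFT)
    #define HARDIRQ_MASK   (0xfUL << HARDIRQ_SHIFT)

    static const char *context(unsigned long pc)
    {
        if (pc & HARDIRQ_MASK)      /* any hardirq nesting level */
            return "hardirq";
        if (pc & SOFTIRQ_OFFSET)    /* serving (not merely disabling) softirq */
            return "softirq";
        return "task";
    }

    int main(void)
    {
        printf("%s\n", context(HARDIRQ_OFFSET));  /* hardirq */
        printf("%s\n", context(SOFTIRQ_OFFSET));  /* softirq */
        printf("%s\n", context(0));               /* task */
        return 0;
    }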
/kernel/trace/
D | trace_mmiotrace.c |
    184  rw->value, rw->pc, 0);                  in mmio_print_rw()
    191  rw->value, rw->pc, 0);                  in mmio_print_rw()
    200  (rw->value >> 0) & 0xff, rw->pc, 0);    in mmio_print_rw()
D | trace_irqsoff.c |
     45  preempt_trace(int pc)                                in preempt_trace() argument
     47  return ((trace_type & TRACER_PREEMPT_OFF) && pc);    in preempt_trace()
     50  # define preempt_trace(pc) (0)                       argument
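The third hit shows the companion pattern: when the preempt tracer is configured out, preempt_trace() becomes a constant-0 macro so its call sites compile away. A standalone illustration; the CONFIG name matches the kernel's, the rest is scaffolding:

    #include <stdio.h>

    #ifdef CONFIG_PREEMPT_TRACER
    #define TRACER_PREEMPT_OFF 0x2
    static int trace_type = TRACER_PREEMPT_OFF;

    static int preempt_trace(int pc)
    {
        return (trace_type & TRACER_PREEMPT_OFF) && pc;
    }
    #else
    /* feature off: constant 0, dead-code-eliminated at call sites */
    # define preempt_trace(pc) (0)
    #endif

    int main(void)
    {
        /* prints 0 unless built with -DCONFIG_PREEMPT_TRACER */
        printf("%d\n", preempt_trace(1));
        return 0;
    }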
D | trace_entries.h |
    263  __field_desc( unsigned long, rw, pc )
    270  (unsigned long)__entry->phys, __entry->value, __entry->pc,
D | trace_events_hist.c |
   5382  unsigned int pc;                                 in hist_trigger_print_val() local
   5386  pc = __get_percentage(val, stats[idx].total);    in hist_trigger_print_val()
   5387  if (pc == UINT_MAX)                              in hist_trigger_print_val()
   5391  pc / 100, pc % 100);                             in hist_trigger_print_val()
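In this hit pc holds a percentage in hundredths, so pc / 100 and pc % 100 print two decimal places with no floating point. __get_percentage() itself isn't shown in the results, so the stand-in below is an assumption about its contract (UINT_MAX signalling an undefined ratio), ignoring overflow for brevity:

    #include <limits.h>
    #include <stdio.h>

    /* Hypothetical reimplementation: percentage scaled by 100. */
    static unsigned int get_percentage(unsigned long long val,
                                       unsigned long long total)
    {
        if (!total)
            return UINT_MAX;                        /* no total: undefined */
        return (unsigned int)(val * 10000 / total); /* hundredths of a percent */
    }

    int main(void)
    {
        unsigned int pc = get_percentage(3, 8);     /* 3750 -> 37.50% */

        if (pc == UINT_MAX)
            puts("-");
        else
            printf("%u.%02u%%\n", pc / 100, pc % 100);
        return 0;
    }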
D | trace.c |
   2635  unsigned int pc;                                                      in tracing_gen_ctx_irq_test() local
   2637  pc = preempt_count();                                                 in tracing_gen_ctx_irq_test()
   2639  if (pc & NMI_MASK)                                                    in tracing_gen_ctx_irq_test()
   2641  if (pc & HARDIRQ_MASK)                                                in tracing_gen_ctx_irq_test()
   2652  return (trace_flags << 16) | (min_t(unsigned int, pc & 0xff, 0xf)) |  in tracing_gen_ctx_irq_test()
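tracing_gen_ctx_irq_test() folds the irq-context flags and a clamped preempt depth into a single context word: flags in the high half, depth limited to a nibble in the low bits (the trailing | on line 2652 shows the real return ORs in further fields). A sketch of that packing; the flag values and masks here are illustrative, not the kernel's:

    #include <stdio.h>

    #define NMI_MASK     0xf00000UL
    #define HARDIRQ_MASK 0x0f0000UL

    enum { TRACE_FLAG_NMI = 0x1, TRACE_FLAG_HARDIRQ = 0x2 };

    static unsigned int gen_ctx(unsigned long pc)
    {
        unsigned int trace_flags = 0;

        if (pc & NMI_MASK)
            trace_flags |= TRACE_FLAG_NMI;
        if (pc & HARDIRQ_MASK)
            trace_flags |= TRACE_FLAG_HARDIRQ;

        /* high half: flags; low nibble: preempt depth clamped to 15 */
        unsigned int depth = pc & 0xff;
        if (depth > 0xf)
            depth = 0xf;
        return (trace_flags << 16) | depth;
    }

    int main(void)
    {
        printf("0x%x\n", gen_ctx(0x10003));  /* hardirq context, depth 3 -> 0x20003 */
        return 0;
    }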