/kernel/ |
D | wait.c |
    209  int __sched out_of_line_wait_on_bit(void *word, int bit,  in out_of_line_wait_on_bit() argument
    212  wait_queue_head_t *wq = bit_waitqueue(word, bit);  in out_of_line_wait_on_bit()
    213  DEFINE_WAIT_BIT(wait, word, bit);  in out_of_line_wait_on_bit()
    240  int __sched out_of_line_wait_on_bit_lock(void *word, int bit,  in out_of_line_wait_on_bit_lock() argument
    243  wait_queue_head_t *wq = bit_waitqueue(word, bit);  in out_of_line_wait_on_bit_lock()
    244  DEFINE_WAIT_BIT(wait, word, bit);  in out_of_line_wait_on_bit_lock()
    250  void __wake_up_bit(wait_queue_head_t *wq, void *word, int bit)  in __wake_up_bit() argument
    252  struct wait_bit_key key = __WAIT_BIT_KEY_INITIALIZER(word, bit);  in __wake_up_bit()
    275  void wake_up_bit(void *word, int bit)  in wake_up_bit() argument
    277  __wake_up_bit(bit_waitqueue(word, bit), word, bit);  in wake_up_bit()
    [all …]
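
The wait.c hits above are the out-of-line half of the bit-waitqueue API: a caller sleeps until a flag bit in some word is clear, and the owner wakes it with wake_up_bit(). A minimal sketch of that pairing follows; MY_BUSY_BIT, my_flags and the my_* helpers are hypothetical, and the action-callback form of wait_on_bit_lock() matches the older prototypes shown here (newer kernels dropped the callback argument).

    #include <linux/wait.h>
    #include <linux/sched.h>
    #include <linux/bitops.h>

    #define MY_BUSY_BIT 0                     /* hypothetical bit index */

    static unsigned long my_flags;            /* hypothetical flag word */

    static int my_wait_action(void *word)
    {
            schedule();                       /* sleep until wake_up_bit() */
            return 0;
    }

    static void my_claim(void)
    {
            /* Sleep until MY_BUSY_BIT is clear, then atomically set it. */
            wait_on_bit_lock(&my_flags, MY_BUSY_BIT, my_wait_action,
                             TASK_UNINTERRUPTIBLE);
    }

    static void my_release(void)
    {
            clear_bit(MY_BUSY_BIT, &my_flags);
            smp_mb__after_clear_bit();        /* order the clear before the wake */
            wake_up_bit(&my_flags, MY_BUSY_BIT);
    }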
|
D | lockdep.c |
    476  static inline unsigned long lock_flag(enum lock_usage_bit bit)  in lock_flag() argument
    478  return 1UL << bit;  in lock_flag()
    481  static char get_usage_char(struct lock_class *class, enum lock_usage_bit bit)  in get_usage_char() argument
    485  if (class->usage_mask & lock_flag(bit + 2))  in get_usage_char()
    487  if (class->usage_mask & lock_flag(bit)) {  in get_usage_char()
    489  if (class->usage_mask & lock_flag(bit + 2))  in get_usage_char()
    1312  static inline int usage_match(struct lock_list *entry, void *bit)  in usage_match() argument
    1314  return entry->class->usage_mask & (1 << (enum lock_usage_bit)bit);  in usage_match()
    1330  find_usage_forwards(struct lock_list *root, enum lock_usage_bit bit,  in find_usage_forwards() argument
    1337  result = __bfs_forwards(root, (void *)bit, usage_match, target_entry);  in find_usage_forwards()
    [all …]
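
get_usage_char() condenses a class's usage_mask into one character per irq state: the USED_IN bit is tested directly, and the matching ENABLED bit sits two positions above it. A standalone sketch of that mask test follows (usage_char() and its comments are illustrative, roughly following the character meanings in the lockdep documentation, not lockdep's own enum):

    /*
     * Roughly: '.' neither, '+' acquired with that irq state enabled
     * (bit + 2 == the ENABLED bit), '-' acquired in that irq context
     * (bit == the USED_IN bit), '?' both, a potentially unsafe mix.
     */
    static char usage_char(unsigned long usage_mask, int bit)
    {
            char c = '.';

            if (usage_mask & (1UL << (bit + 2)))
                    c = '+';
            if (usage_mask & (1UL << bit)) {
                    c = '-';
                    if (usage_mask & (1UL << (bit + 2)))
                            c = '?';
            }
            return c;
    }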
|
D | panic.c |
    195  u8 bit;  member
    246  *s++ = test_bit(t->bit, &tainted_mask) ?  in print_tainted()
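
print_tainted() walks a table of { bit, letter } descriptors and emits one character per taint flag depending on whether its bit is set in the mask. A minimal standalone sketch of that table-driven pattern (struct flag_desc, descs[] and render_flags() are made up, not the kernel's taint table):

    #include <linux/kernel.h>
    #include <linux/bitops.h>

    struct flag_desc {
            u8 bit;          /* bit index in the mask */
            char c_true;     /* printed when the bit is set */
            char c_false;    /* printed when it is clear */
    };

    static const struct flag_desc descs[] = {
            { 0, 'P', 'G' }, /* illustrative entries only */
            { 1, 'F', ' ' },
    };

    static void render_flags(unsigned long mask, char *s)
    {
            int i;

            for (i = 0; i < ARRAY_SIZE(descs); i++)
                    *s++ = test_bit(descs[i].bit, &mask) ? descs[i].c_true
                                                         : descs[i].c_false;
            *s = '\0';
    }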
|
D | auditsc.c |
    756  int bit = AUDIT_BIT(ctx->major);  in audit_filter_syscall() local
    759  if ((e->rule.mask[word] & bit) == bit &&  in audit_filter_syscall()
    779  int word, bit;  in audit_filter_inode_name() local
    786  bit = AUDIT_BIT(ctx->major);  in audit_filter_inode_name()
    792  if ((e->rule.mask[word] & bit) == bit &&  in audit_filter_inode_name()
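
Both audit matchers test one syscall number against a per-rule bitmask that is split into 32-bit words: AUDIT_WORD() selects the word and AUDIT_BIT() the bit inside it. A sketch of that test (syscall_selected() is a made-up helper; the two macros are the real uapi definitions, word = nr / 32 and bit = 1 << (nr % 32)):

    #include <linux/audit.h>

    /*
     * Return non-zero if syscall number 'major' is selected in 'mask'
     * (an array of 32-bit words, as in the audit rule mask).
     */
    static int syscall_selected(const __u32 *mask, int major)
    {
            int word = AUDIT_WORD(major);
            __u32 bit = AUDIT_BIT(major);

            return (mask[word] & bit) == bit;
    }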
|
D | cgroup.c |
    1004  unsigned long bit = 1UL << i;  in rebind_subsystems() local
    1006  if (!(bit & added_mask))  in rebind_subsystems()
    1030  unsigned long bit = 1UL << i;  in rebind_subsystems() local
    1031  if (bit & added_mask) {  in rebind_subsystems()
    1044  } else if (bit & removed_mask) {  in rebind_subsystems()
    1057  } else if (bit & final_subsys_mask) {  in rebind_subsystems()
    1285  unsigned long bit = 1UL << i;  in parse_cgroupfs_options() local
    1287  if (!(bit & opts->subsys_mask))  in parse_cgroupfs_options()
    1302  unsigned long bit = 1UL << i;  in parse_cgroupfs_options() local
    1304  if (!(bit & opts->subsys_mask))  in parse_cgroupfs_options()
    [all …]
|
D | auditfilter.c |
    272  int bit = AUDIT_BITMASK_SIZE * 32 - i - 1;  in audit_to_entry_common() local
    273  __u32 *p = &entry->rule.mask[AUDIT_WORD(bit)];  in audit_to_entry_common()
    276  if (!(*p & AUDIT_BIT(bit)))  in audit_to_entry_common()
    278  *p &= ~AUDIT_BIT(bit);  in audit_to_entry_common()
|
D | timer.c |
    805  int bit;  in apply_slack() local
    821  bit = find_last_bit(&mask, BITS_PER_LONG);  in apply_slack()
    823  mask = (1 << bit) - 1;  in apply_slack()
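
apply_slack() finds the highest bit at which the requested expiry and expiry-plus-slack differ, then rounds within that range so nearby timers can be coalesced. A standalone sketch of just that rounding step (round_with_slack() is a made-up name; the real function also handles negative slack and deferrable timers):

    #include <linux/bitops.h>

    static unsigned long round_with_slack(unsigned long expires, unsigned long slack)
    {
            unsigned long limit = expires + slack;
            unsigned long mask = expires ^ limit;
            int bit;

            if (!mask)
                    return expires;

            bit = find_last_bit(&mask, BITS_PER_LONG); /* highest differing bit */
            mask = (1UL << bit) - 1;                   /* everything below it */

            limit &= ~mask;              /* round down to that bit boundary */
            return limit > expires ? limit : expires;  /* never fire early */
    }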
|
D | rcutree.c |
    2182  unsigned long bit;  in force_qs_rnp() local
    2201  bit = 1;  in force_qs_rnp()
    2202  for (; cpu <= rnp->grphi; cpu++, bit <<= 1) {  in force_qs_rnp()
    2203  if ((rnp->qsmask & bit) != 0 &&  in force_qs_rnp()
    2205  mask |= bit;  in force_qs_rnp()
|
D | cpuset.c |
    1221  static int update_flag(cpuset_flagbits_t bit, struct cpuset *cs,  in update_flag() argument
    1235  set_bit(bit, &trialcs->flags);  in update_flag()
    1237  clear_bit(bit, &trialcs->flags);  in update_flag()
|
/kernel/trace/ |
D | trace.h |
    296  u32 bit; /* Mask assigned in val field in tracer_flags */  member
    309  #define TRACER_OPT(s, b) .name = #s, .bit = b
    359  int (*set_flag)(u32 old_flags, u32 bit, int set);
    436  #define trace_recursion_set(bit) do { (current)->trace_recursion |= (1<<(bit)); } while (0)  argument
    437  #define trace_recursion_clear(bit) do { (current)->trace_recursion &= ~(1<<(bit)); } while (0)  argument
    438  #define trace_recursion_test(bit) ((current)->trace_recursion & (1<<(bit)))  argument
    455  int bit;  in trace_get_context_bit() local
    459  bit = 0;  in trace_get_context_bit()
    462  bit = 1;  in trace_get_context_bit()
    464  bit = 2;  in trace_get_context_bit()
    [all …]
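
The trace_recursion_*() macros above implement a per-task recursion guard: set a bit on entry to a tracing hook, bail out if it was already set, clear it on exit. A minimal sketch of the same guard on a plain flags word (MY_RECURSION_BIT, my_recursion and my_traced_hook() are illustrative; the real code keeps the word in current->trace_recursion and picks the bit per context):

    #define MY_RECURSION_BIT 0                /* illustrative bit index */

    static unsigned long my_recursion;        /* per-task in the real code */

    static int my_traced_hook(void)
    {
            /* Already inside this hook?  Bail out instead of recursing. */
            if (my_recursion & (1UL << MY_RECURSION_BIT))
                    return -1;
            my_recursion |= 1UL << MY_RECURSION_BIT;

            /* ... do the actual tracing work ... */

            my_recursion &= ~(1UL << MY_RECURSION_BIT);
            return 0;
    }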
|
D | trace_nop.c |
    65  static int nop_set_flag(u32 old_flags, u32 bit, int set)  in nop_set_flag() argument
    71  if (bit == TRACE_NOP_OPT_ACCEPT) {  in nop_set_flag()
    78  if (bit == TRACE_NOP_OPT_REFUSE) {  in nop_set_flag()
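
nop_set_flag() is the simplest example of a tracer's set_flag() callback: it is handed the bit of the option being toggled and either accepts or refuses the change. A sketch of such a handler with made-up option bits (TRACE_MY_OPT_FOO/BAR and my_set_flag() are hypothetical):

    #include <linux/types.h>
    #include <linux/errno.h>

    #define TRACE_MY_OPT_FOO 0x1   /* illustrative option bits */
    #define TRACE_MY_OPT_BAR 0x2

    static int my_set_flag(u32 old_flags, u32 bit, int set)
    {
            if (bit == TRACE_MY_OPT_FOO)
                    return 0;          /* 0: accept the new option value */
            if (bit == TRACE_MY_OPT_BAR)
                    return -EINVAL;    /* non-zero: refuse to toggle it */
            return 0;
    }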
|
D | trace_functions.c |
    64  int bit;  in function_trace_call() local
    74  bit = trace_test_and_set_recursion(TRACE_FTRACE_START, TRACE_FTRACE_MAX);  in function_trace_call()
    75  if (bit < 0)  in function_trace_call()
    84  trace_clear_recursion(bit);  in function_trace_call()
    178  static int func_set_flag(u32 old_flags, u32 bit, int set)  in func_set_flag() argument
    180  switch (bit) {  in func_set_flag()
|
D | trace_irqsoff.c |
    163  static int irqsoff_set_flag(u32 old_flags, u32 bit, int set)  in irqsoff_set_flag() argument
    167  if (!(bit & TRACE_DISPLAY_GRAPH))  in irqsoff_set_flag()
    269  static int irqsoff_set_flag(u32 old_flags, u32 bit, int set)  in irqsoff_set_flag() argument
|
D | trace_sched_wakeup.c |
    209  static int wakeup_set_flag(u32 old_flags, u32 bit, int set)  in wakeup_set_flag() argument
    212  if (!(bit & TRACE_DISPLAY_GRAPH))  in wakeup_set_flag()
    311  static int wakeup_set_flag(u32 old_flags, u32 bit, int set)  in wakeup_set_flag() argument
|
D | ftrace.c |
    176  int bit;  in ftrace_global_list_func() local
    178  bit = trace_test_and_set_recursion(TRACE_GLOBAL_START, TRACE_GLOBAL_MAX);  in ftrace_global_list_func()
    179  if (bit < 0)  in ftrace_global_list_func()
    186  trace_clear_recursion(bit);  in ftrace_global_list_func()
    4232  int bit;  in __ftrace_ops_list_func() local
    4237  bit = trace_test_and_set_recursion(TRACE_LIST_START, TRACE_LIST_MAX);  in __ftrace_ops_list_func()
    4238  if (bit < 0)  in __ftrace_ops_list_func()
    4251  trace_clear_recursion(bit);  in __ftrace_ops_list_func()
|
D | ring_buffer.c |
    2633  int bit;  in trace_recursive_lock() local
    2637  bit = 0;  in trace_recursive_lock()
    2639  bit = 1;  in trace_recursive_lock()
    2641  bit = 2;  in trace_recursive_lock()
    2643  bit = 3;  in trace_recursive_lock()
    2645  if (unlikely(val & (1 << bit)))  in trace_recursive_lock()
    2648  val |= (1 << bit);  in trace_recursive_lock()
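
trace_recursive_lock() picks one bit per execution context (NMI, hardirq, softirq, normal) and refuses a nested write from the same context. A sketch of that context-to-bit mapping using the standard context tests (my_context_bit() and the exact bit ordering are illustrative):

    #include <linux/hardirq.h>

    static int my_context_bit(void)
    {
            if (in_nmi())
                    return 0;
            if (in_irq())
                    return 1;       /* hard interrupt context */
            if (in_softirq())
                    return 2;
            return 3;               /* normal process context */
    }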
|
D | trace_output.c |
    783  int bit = state ? __ffs(state) + 1 : 0;  local
    785  return bit < sizeof(state_to_char) - 1 ? state_to_char[bit] : '?';
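
The trace output code maps a task-state bitmask to a single letter: state 0 (running) maps to index 0, otherwise the index of the lowest set state bit plus one indexes a character table. A sketch with an illustrative table (state_chars and state_char() are not the kernel's actual table):

    #include <linux/bitops.h>

    static const char state_chars[] = "RSDTtZX";   /* illustrative table */

    static char state_char(unsigned long state)
    {
            int bit = state ? __ffs(state) + 1 : 0; /* 0 == running -> 'R' */

            return bit < sizeof(state_chars) - 1 ? state_chars[bit] : '?';
    }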
|
D | Kconfig |
    298  Either of the above profilers adds a bit of overhead to the system.
    304  No branch profiling. Branch profiling adds a bit of overhead.
    511  with the event enabled. This adds a bit more time for kernel boot
|
D | trace_functions_graph.c |
    1434  static int func_graph_set_flag(u32 old_flags, u32 bit, int set)  in func_graph_set_flag() argument
    1436  if (bit == TRACE_GRAPH_PRINT_IRQS)  in func_graph_set_flag()
|
D | trace.c |
    76  static int dummy_set_flag(u32 old_flags, u32 bit, int set)  in dummy_set_flag() argument
    3197  if (tracer_flags & trace_opts[i].bit)  in tracing_trace_options_show()
    3213  ret = trace->set_flag(tracer_flags->val, opts->bit, !neg);  in __set_tracer_option()
    3218  tracer_flags->val &= ~opts->bit;  in __set_tracer_option()
    3220  tracer_flags->val |= opts->bit;  in __set_tracer_option()
    5474  if (topt->flags->val & topt->opt->bit)  in trace_options_read()
    5497  if (!!(topt->flags->val & topt->opt->bit) != val) {  in trace_options_write()
|
D | blktrace.c |
    1412  static int blk_tracer_set_flag(u32 old_flags, u32 bit, int set)  in blk_tracer_set_flag() argument
    1415  if (bit == TRACE_BLK_OPT_CLASSIC) {  in blk_tracer_set_flag()
|
/kernel/power/ |
D | snapshot.c |
    272  int bit;  member
    289  bm->cur.bit = 0;  in memory_bm_position_reset()
    504  bm->cur.bit = pfn + 1;  in memory_bm_find_bit()
    513  unsigned int bit;  in memory_bm_set_bit() local
    516  error = memory_bm_find_bit(bm, pfn, &addr, &bit);  in memory_bm_set_bit()
    518  set_bit(bit, addr);  in memory_bm_set_bit()
    524  unsigned int bit;  in mem_bm_set_bit_check() local
    527  error = memory_bm_find_bit(bm, pfn, &addr, &bit);  in mem_bm_set_bit_check()
    529  set_bit(bit, addr);  in mem_bm_set_bit_check()
    536  unsigned int bit;  in memory_bm_clear_bit() local
    [all …]
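
memory_bm_set_bit() and its siblings first translate a pfn into a (word address, bit index) pair with memory_bm_find_bit() and then use the generic bitops on it. The real swsusp bitmap is chunked per memory zone; the flat-array sketch below only illustrates the translate-then-set_bit pattern (MY_MAX_PFN, my_bitmap and the my_* helpers are made up):

    #include <linux/kernel.h>
    #include <linux/bitops.h>
    #include <linux/errno.h>

    #define MY_MAX_PFN 4096   /* hypothetical bitmap size, in pages */

    static unsigned long my_bitmap[BITS_TO_LONGS(MY_MAX_PFN)];

    static int my_find_bit(unsigned long pfn, unsigned long **addr, unsigned int *bit)
    {
            if (pfn >= MY_MAX_PFN)
                    return -EINVAL;
            *addr = &my_bitmap[pfn / BITS_PER_LONG];
            *bit  = pfn % BITS_PER_LONG;
            return 0;
    }

    static void my_mark_page(unsigned long pfn)
    {
            unsigned long *addr;
            unsigned int bit;

            if (!my_find_bit(pfn, &addr, &bit))
                    set_bit(bit, addr);
    }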
|
/kernel/time/ |
D | Kconfig |
    23  # ktime_t scalar 64bit nsec representation
|
/kernel/events/ |
D | core.c |
    4052  int bit;  in perf_output_sample_regs() local
    4054  for_each_set_bit(bit, (const unsigned long *) &mask,  in perf_output_sample_regs()
    4058  val = perf_reg_value(regs, bit);  in perf_output_sample_regs()
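
perf_output_sample_regs() walks only the set bits of the sampled-register mask with for_each_set_bit() and emits one value per bit. A small sketch of that iteration (dump_reg_mask() is a made-up helper and the printout stands in for writing out perf_reg_value()):

    #include <linux/kernel.h>
    #include <linux/bitops.h>

    static void dump_reg_mask(u64 mask)
    {
            int bit;

            /* Visits only the set bits of the mask, lowest index first. */
            for_each_set_bit(bit, (const unsigned long *)&mask,
                             sizeof(mask) * BITS_PER_BYTE) {
                    pr_info("register %d is included in the sample\n", bit);
            }
    }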
|