| /lib/ |
| D | string_helpers_kunit.c |
        15   const char *name, unsigned int flags,    in test_string_check_buf() argument
        28   unsigned int flags;    member
        35   .flags = UNESCAPE_SPACE,
        40   .flags = UNESCAPE_OCTAL,
        45   .flags = UNESCAPE_HEX,
        50   .flags = UNESCAPE_SPECIAL,
        55   const char *name, unsigned int flags,    in test_string_unescape() argument
        77   if (flags & strings[i].flags) {    in test_string_unescape()
        89   if (flags == UNESCAPE_ANY)    in test_string_unescape()
        92   q_real = string_unescape_inplace(out_real, flags);    in test_string_unescape()
        [all …]
|
| D | atomic64.c |
        47   unsigned long flags;    in generic_atomic64_read() local
        51   local_irq_save(flags);    in generic_atomic64_read()
        55   local_irq_restore(flags);    in generic_atomic64_read()
        62   unsigned long flags;    in generic_atomic64_set() local
        65   local_irq_save(flags);    in generic_atomic64_set()
        69   local_irq_restore(flags);    in generic_atomic64_set()
        76   unsigned long flags; \
        79   local_irq_save(flags); \
        83   local_irq_restore(flags); \
        90   unsigned long flags; \
        [all …]
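The generic atomic64 fallbacks above bracket every access with local_irq_save()/local_irq_restore(), caching the previous interrupt state in a `flags` local. A minimal sketch of that pattern, with a hypothetical `demo_value` standing in for the real atomic64_t plus hashed-spinlock machinery:

```c
#include <linux/irqflags.h>
#include <linux/types.h>

static u64 demo_value;	/* hypothetical shared 64-bit value */

/* Disable local interrupts, remember the previous state in 'flags',
 * touch the value, then restore exactly what was saved.  The real
 * generic_atomic64_*() helpers additionally take a hashed spinlock
 * so the access is also SMP-safe. */
static u64 demo_read(void)
{
	unsigned long flags;
	u64 val;

	local_irq_save(flags);
	val = demo_value;
	local_irq_restore(flags);

	return val;
}
```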
|
| D | percpu-refcount.c |
        64   unsigned int flags, gfp_t gfp)    in percpu_ref_init() argument
        83   data->force_atomic = flags & PERCPU_REF_INIT_ATOMIC;    in percpu_ref_init()
        84   data->allow_reinit = flags & PERCPU_REF_ALLOW_REINIT;    in percpu_ref_init()
        86   if (flags & (PERCPU_REF_INIT_ATOMIC | PERCPU_REF_INIT_DEAD)) {    in percpu_ref_init()
        93   if (flags & PERCPU_REF_INIT_DEAD)    in percpu_ref_init()
        133  unsigned long flags;    in percpu_ref_exit() local
        140  spin_lock_irqsave(&percpu_ref_switch_lock, flags);    in percpu_ref_exit()
        144  spin_unlock_irqrestore(&percpu_ref_switch_lock, flags);    in percpu_ref_exit()
        309  unsigned long flags;    in percpu_ref_switch_to_atomic() local
        311  spin_lock_irqsave(&percpu_ref_switch_lock, flags);    in percpu_ref_switch_to_atomic()
        [all …]
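Here `flags` plays two roles: the PERCPU_REF_INIT_* bits passed to percpu_ref_init(), and the saved IRQ state around percpu_ref_switch_lock. A hedged caller-side sketch of the init flags (the demo_* names are hypothetical):

```c
#include <linux/percpu-refcount.h>
#include <linux/gfp.h>
#include <linux/printk.h>

static struct percpu_ref demo_ref;	/* hypothetical reference count */

static void demo_release(struct percpu_ref *ref)
{
	pr_info("demo_ref: last reference dropped\n");
}

static int demo_ref_setup(void)
{
	/* Start in atomic mode and permit a later percpu_ref_reinit();
	 * pass 0 as flags for an ordinary percpu-mode refcount. */
	return percpu_ref_init(&demo_ref, demo_release,
			       PERCPU_REF_INIT_ATOMIC | PERCPU_REF_ALLOW_REINIT,
			       GFP_KERNEL);
}
```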
|
| D | closure.c |
        16   static inline void closure_put_after_sub_checks(int flags)    in closure_put_after_sub_checks() argument
        18   int r = flags & CLOSURE_REMAINING_MASK;    in closure_put_after_sub_checks()
        20   if (WARN(flags & CLOSURE_GUARD_MASK,    in closure_put_after_sub_checks()
        22   flags & CLOSURE_GUARD_MASK, (unsigned) __fls(r)))    in closure_put_after_sub_checks()
        25   WARN(!r && (flags & ~CLOSURE_DESTRUCTOR),    in closure_put_after_sub_checks()
        27   flags & ~CLOSURE_DESTRUCTOR, (unsigned) __fls(flags));    in closure_put_after_sub_checks()
        30   static inline void closure_put_after_sub(struct closure *cl, int flags)    in closure_put_after_sub() argument
        32   closure_put_after_sub_checks(flags);    in closure_put_after_sub()
        34   if (!(flags & CLOSURE_REMAINING_MASK)) {    in closure_put_after_sub()
        39   if (cl->fn && !(flags & CLOSURE_DESTRUCTOR)) {    in closure_put_after_sub()
        [all …]
|
| D | percpu_counter.c |
        63   unsigned long flags;    in percpu_counter_set() local
        65   raw_spin_lock_irqsave(&fbc->lock, flags);    in percpu_counter_set()
        71   raw_spin_unlock_irqrestore(&fbc->lock, flags);    in percpu_counter_set()
        96   unsigned long flags;    in percpu_counter_add_batch() local
        101  raw_spin_lock_irqsave(&fbc->lock, flags);    in percpu_counter_add_batch()
        109  raw_spin_unlock_irqrestore(&fbc->lock, flags);    in percpu_counter_add_batch()
        123  unsigned long flags;    in percpu_counter_add_batch() local
        125  local_irq_save(flags);    in percpu_counter_add_batch()
        135  local_irq_restore(flags);    in percpu_counter_add_batch()
        148  unsigned long flags;    in percpu_counter_sync() local
        [all …]
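The irqsave `flags` above belong to the slow path: additions stay in a per-CPU slot until the local delta reaches the batch, and only then is fbc->lock taken. A caller-side sketch, assuming a hypothetical demo_events counter:

```c
#include <linux/percpu_counter.h>
#include <linux/gfp.h>
#include <linux/printk.h>

static struct percpu_counter demo_events;	/* hypothetical event counter */

static int demo_counter_demo(void)
{
	int err = percpu_counter_init(&demo_events, 0, GFP_KERNEL);

	if (err)
		return err;

	/* Accumulates in this CPU's slot; fbc->lock is only taken (with
	 * irqsave, as in the listing above) once the delta hits the batch. */
	percpu_counter_add_batch(&demo_events, 1, 64);

	pr_info("approx=%lld exact=%lld\n",
		percpu_counter_read(&demo_events),
		percpu_counter_sum(&demo_events));

	percpu_counter_destroy(&demo_events);
	return 0;
}
```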
|
| D | debugobjects.c |
        132  unsigned long flags;    in fill_pool() local
        145  raw_spin_lock_irqsave(&pool_lock, flags);    in fill_pool()
        157  raw_spin_unlock_irqrestore(&pool_lock, flags);    in fill_pool()
        175  raw_spin_lock_irqsave(&pool_lock, flags);    in fill_pool()
        181  raw_spin_unlock_irqrestore(&pool_lock, flags);    in fill_pool()
        289  unsigned long flags;    in free_obj_work() local
        293  if (!raw_spin_trylock_irqsave(&pool_lock, flags))    in free_obj_work()
        313  raw_spin_unlock_irqrestore(&pool_lock, flags);    in free_obj_work()
        327  raw_spin_unlock_irqrestore(&pool_lock, flags);    in free_obj_work()
        340  unsigned long flags;    in __free_object() local
        [all …]
|
| D | ref_tracker.c |
        136  unsigned long flags;    in ref_tracker_dir_print() local
        138  spin_lock_irqsave(&dir->lock, flags);    in ref_tracker_dir_print()
        140  spin_unlock_irqrestore(&dir->lock, flags);    in ref_tracker_dir_print()
        147  unsigned long flags;    in ref_tracker_dir_snprint() local
        149  spin_lock_irqsave(&dir->lock, flags);    in ref_tracker_dir_snprint()
        151  spin_unlock_irqrestore(&dir->lock, flags);    in ref_tracker_dir_snprint()
        160  unsigned long flags;    in ref_tracker_dir_exit() local
        164  spin_lock_irqsave(&dir->lock, flags);    in ref_tracker_dir_exit()
        178  spin_unlock_irqrestore(&dir->lock, flags);    in ref_tracker_dir_exit()
        193  unsigned long flags;    in ref_tracker_alloc() local
        [all …]
|
| D | dec_and_lock.c |
        38   unsigned long *flags)    in _atomic_dec_and_lock_irqsave() argument
        45   spin_lock_irqsave(lock, *flags);    in _atomic_dec_and_lock_irqsave()
        48   spin_unlock_irqrestore(lock, *flags);    in _atomic_dec_and_lock_irqsave()
        69   unsigned long *flags)    in _atomic_dec_and_raw_lock_irqsave() argument
        76   raw_spin_lock_irqsave(lock, *flags);    in _atomic_dec_and_raw_lock_irqsave()
        79   raw_spin_unlock_irqrestore(lock, *flags);    in _atomic_dec_and_raw_lock_irqsave()
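These helpers back the atomic_dec_and_lock_irqsave() macro: the lock is taken, with interrupts disabled and the old state written through the `flags` pointer, only when the count actually drops to zero. A sketch with a hypothetical refcounted object:

```c
#include <linux/atomic.h>
#include <linux/spinlock.h>

struct demo_obj {			/* hypothetical refcounted object */
	atomic_t refcnt;
	spinlock_t list_lock;
};

static void demo_obj_put(struct demo_obj *obj)
{
	unsigned long flags;

	/* Drop one reference; only the final put takes list_lock, with
	 * interrupts disabled and the previous state saved in 'flags'. */
	if (atomic_dec_and_lock_irqsave(&obj->refcnt, &obj->list_lock, flags)) {
		/* ... unlink obj from its list and free it ... */
		spin_unlock_irqrestore(&obj->list_lock, flags);
	}
}
```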
|
| D | asn1_decoder.c |
        181  unsigned char flags = 0;    in asn1_ber_decoder() local
        216  if ((op & ASN1_OP_MATCH__COND && flags & FLAG_MATCHED) ||    in asn1_ber_decoder()
        218  flags &= ~FLAG_LAST_MATCHED;    in asn1_ber_decoder()
        223  flags = 0;    in asn1_ber_decoder()
        242  flags |= optag & FLAG_CONS;    in asn1_ber_decoder()
        258  flags |= FLAG_MATCHED;    in asn1_ber_decoder()
        266  flags |= FLAG_INDEFINITE_LENGTH;    in asn1_ber_decoder()
        288  if (flags & FLAG_CONS) {    in asn1_ber_decoder()
        296  if (!(flags & FLAG_INDEFINITE_LENGTH)) {    in asn1_ber_decoder()
        306  tag, len, flags & FLAG_CONS ? " CONS" : "");    in asn1_ber_decoder()
        [all …]
|
| D | ts_kmp.c |
        47   const int icase = conf->flags & TS_IGNORECASE;    in kmp_find()
        75   unsigned int *prefix_tbl, int flags)    in compute_prefix_tbl() argument
        78   const u8 icase = flags & TS_IGNORECASE;    in compute_prefix_tbl()
        92   gfp_t gfp_mask, int flags)    in kmp_init() argument
        104  conf->flags = flags;    in kmp_init()
        107  compute_prefix_tbl(pattern, len, kmp->prefix_tbl, flags);    in kmp_init()
        109  if (flags & TS_IGNORECASE)    in kmp_init()
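TS_IGNORECASE reaches kmp_init() through the textsearch front end. A hedged caller-side sketch using the "kmp" backend (demo_find_needle and its pattern are made up for illustration):

```c
#include <linux/textsearch.h>
#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/limits.h>

/* Case-insensitive search for "needle"; TS_IGNORECASE is the
 * conf->flags bit that kmp_find() tests above. */
static int demo_find_needle(const void *haystack, unsigned int len)
{
	struct ts_config *conf;
	struct ts_state state;
	unsigned int pos;

	conf = textsearch_prepare("kmp", "needle", 6, GFP_KERNEL,
				  TS_AUTOLOAD | TS_IGNORECASE);
	if (IS_ERR(conf))
		return PTR_ERR(conf);

	pos = textsearch_find_continuous(conf, &state, haystack, len);
	textsearch_destroy(conf);

	return pos == UINT_MAX ? -ENOENT : (int)pos;
}
```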
|
| D | string_helpers.c |
        320  int string_unescape(char *src, char *dst, size_t size, unsigned int flags)    in string_unescape() argument
        332  if (flags & UNESCAPE_SPACE &&    in string_unescape()
        336  if (flags & UNESCAPE_OCTAL &&    in string_unescape()
        340  if (flags & UNESCAPE_HEX &&    in string_unescape()
        344  if (flags & UNESCAPE_SPECIAL &&    in string_unescape()
        575  unsigned int flags, const char *only)    in string_escape_mem() argument
        580  bool is_append = flags & ESCAPE_APPEND;    in string_escape_mem()
        610  flags & ESCAPE_NAP && escape_passthrough(c, &p, end))    in string_escape_mem()
        614  flags & ESCAPE_NP && escape_passthrough(c, &p, end))    in string_escape_mem()
        618  flags & ESCAPE_NA && escape_passthrough(c, &p, end))    in string_escape_mem()
        [all …]
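The UNESCAPE_* bits select which escape classes string_unescape() expands; UNESCAPE_ANY turns them all on. A small sketch using the in-place wrapper (the buffer contents and demo_unescape() are hypothetical):

```c
#include <linux/string_helpers.h>
#include <linux/printk.h>

/* Expand whitespace escapes such as "\t" and hex escapes such as "\x41"
 * in place; other sequences are left untouched. */
static void demo_unescape(char *buf)
{
	int len = string_unescape_inplace(buf, UNESCAPE_SPACE | UNESCAPE_HEX);

	pr_info("unescaped (%d chars incl. NUL): %s\n", len, buf);
}
```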
|
| D | once.c |
        42   bool __do_once_start(bool *done, unsigned long *flags)    in __do_once_start() argument
        45   spin_lock_irqsave(&once_lock, *flags);    in __do_once_start()
        47   spin_unlock_irqrestore(&once_lock, *flags);    in __do_once_start()
        61   unsigned long *flags, struct module *mod)    in __do_once_done() argument
        65   spin_unlock_irqrestore(&once_lock, *flags);    in __do_once_done()
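__do_once_start()/__do_once_done() are the slow-path halves of the DO_ONCE() macro from <linux/once.h>; `flags` carries the irqsave state of once_lock between them. A sketch of a typical user (demo_secret/demo_seed are hypothetical):

```c
#include <linux/once.h>
#include <linux/random.h>
#include <linux/types.h>

static u32 demo_secret;	/* hypothetical value seeded exactly once */

static void demo_seed(u32 *key)
{
	get_random_bytes(key, sizeof(*key));
}

static u32 demo_get_secret(void)
{
	/* The first caller runs demo_seed() under once_lock (irqsave);
	 * every later caller skips it via a static key. */
	DO_ONCE(demo_seed, &demo_secret);
	return demo_secret;
}
```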
|
| D | vsprintf.c |
        447  unsigned int flags:8; /* flags to number() */    member
        464  int need_pfx = ((spec.flags & SPECIAL) && spec.base != 10);    in number()
        472  locase = (spec.flags & SMALL);    in number()
        473  if (spec.flags & LEFT)    in number()
        474  spec.flags &= ~ZEROPAD;    in number()
        476  if (spec.flags & SIGN) {    in number()
        481  } else if (spec.flags & PLUS) {    in number()
        484  } else if (spec.flags & SPACE) {    in number()
        519  if (!(spec.flags & (ZEROPAD | LEFT))) {    in number()
        546  if (!(spec.flags & LEFT)) {    in number()
        [all …]
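The spec.flags bits consumed by number() correspond to the standard printf flag characters parsed earlier in format_decode(): '#' sets SPECIAL, '0' sets ZEROPAD, '-' sets LEFT, '+' sets PLUS, ' ' sets SPACE, and lowercase %x sets SMALL. An illustration from the caller's side (demo_number_flags() is hypothetical):

```c
#include <linux/kernel.h>
#include <linux/printk.h>

static void demo_number_flags(void)
{
	char buf[32];

	scnprintf(buf, sizeof(buf), "%#010x", 42);	/* "0x0000002a": SPECIAL | ZEROPAD */
	pr_info("%s\n", buf);

	scnprintf(buf, sizeof(buf), "%+d", 42);		/* "+42": SIGN | PLUS */
	pr_info("%s\n", buf);
}
```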
|
| D | ratelimit.c |
        35   unsigned long flags;    in ___ratelimit() local
        47   if (!raw_spin_trylock_irqsave(&rs->lock, flags))    in ___ratelimit()
        55   if (!(rs->flags & RATELIMIT_MSG_ON_RELEASE)) {    in ___ratelimit()
        72   raw_spin_unlock_irqrestore(&rs->lock, flags);    in ___ratelimit()
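___ratelimit() is the core behind __ratelimit() and printk_ratelimited(); rs->flags (e.g. RATELIMIT_MSG_ON_RELEASE) controls how suppressed callbacks are reported, while the local `flags` is just the trylock irqsave state. A typical call site, sketched with hypothetical names:

```c
#include <linux/ratelimit.h>
#include <linux/printk.h>

/* Allow at most 10 messages every 5 seconds from this call site. */
static DEFINE_RATELIMIT_STATE(demo_rs, 5 * HZ, 10);

static void demo_report(int err)
{
	if (__ratelimit(&demo_rs))	/* wraps ___ratelimit() above */
		pr_warn("demo: error %d\n", err);
}
```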
|
| D | ts_bm.c |
        82   const u8 icase = conf->flags & TS_IGNORECASE;    in bm_find()
        133  static void compute_prefix_tbl(struct ts_bm *bm, int flags)    in compute_prefix_tbl() argument
        141  if (flags & TS_IGNORECASE)    in compute_prefix_tbl()
        161  gfp_t gfp_mask, int flags)    in bm_init() argument
        173  conf->flags = flags;    in bm_init()
        177  if (flags & TS_IGNORECASE)    in bm_init()
        182  compute_prefix_tbl(bm, flags);    in bm_init()
|
| D | irq_poll.c |
        29   unsigned long flags;    in irq_poll_sched() local
        36   local_irq_save(flags);    in irq_poll_sched()
        39   local_irq_restore(flags);    in irq_poll_sched()
        70   unsigned long flags;    in irq_poll_complete() local
        72   local_irq_save(flags);    in irq_poll_complete()
        74   local_irq_restore(flags);    in irq_poll_complete()
|
| D | logic_pio.c |
        43   (new_range->flags == LOGIC_PIO_INDIRECT && !new_range->ops))    in logic_pio_register_range()
        56   if (range->flags == LOGIC_PIO_CPU_MMIO &&    in logic_pio_register_range()
        57   new_range->flags == LOGIC_PIO_CPU_MMIO) {    in logic_pio_register_range()
        66   } else if (range->flags == LOGIC_PIO_INDIRECT &&    in logic_pio_register_range()
        67   new_range->flags == LOGIC_PIO_INDIRECT) {    in logic_pio_register_range()
        73   if (new_range->flags == LOGIC_PIO_CPU_MMIO) {    in logic_pio_register_range()
        84   } else if (new_range->flags == LOGIC_PIO_INDIRECT) {    in logic_pio_register_range()
        195  if (!range || range->flags == LOGIC_PIO_CPU_MMIO) {    in logic_pio_trans_hwaddr()
        213  if (range->flags != LOGIC_PIO_CPU_MMIO)    in logic_pio_trans_cpuaddr()
|
| D | test_printf.c |
        608  int kasan_tag, unsigned long flags, const char *name,    in page_flags_test() argument
        617  flags |= (values[i] & pft[i].mask) << pft[i].shift;    in page_flags_test()
        619  size = scnprintf(cmp_buf, BUF_SIZE, "%#lx(", flags);    in page_flags_test()
        620  if (flags & PAGEFLAGS_MASK) {    in page_flags_test()
        641  test(cmp_buf, "%pGp", &flags);    in page_flags_test()
        645  flags(void)    in flags() function
        647  unsigned long flags;    in flags() local
        655  flags = 0;    in flags()
        656  page_flags_test(0, 0, 0, 0, 0, flags, "", cmp_buffer);    in flags()
        658  flags = 1UL << NR_PAGEFLAGS;    in flags()
        [all …]
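page_flags_test() builds a synthetic page-flags word and compares vsprintf's %pGp output against it. In normal kernel code the same format specifier is handed a pointer to a page's flags word; a hedged sketch of that consumer side (demo_dump_page() is hypothetical):

```c
#include <linux/mm_types.h>
#include <linux/printk.h>

/* %pGp prints the flags symbolically, e.g. locked|uptodate|lru,
 * together with the encoded node/zone fields. */
static void demo_dump_page(struct page *page)
{
	pr_info("page flags: %pGp\n", &page->flags);
}
```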
|
| D | lru_cache.c |
        31   BUG_ON(test_and_set_bit(__LC_PARANOIA, &lc->flags)); \
        35   clear_bit_unlock(__LC_PARANOIA, &lc->flags); \
        59   val = cmpxchg(&lc->flags, 0, LC_LOCKED);    in lc_try_lock()
        198  lc->flags = 0;    in lc_reset()
        351  static struct lc_element *__lc_get(struct lru_cache *lc, unsigned int enr, unsigned int flags)    in __lc_get() argument
        356  if (test_bit(__LC_STARVING, &lc->flags)) {    in __lc_get()
        372  if (!(flags & LC_GET_MAY_USE_UNCOMMITTED))    in __lc_get()
        390  if (!(flags & LC_GET_MAY_CHANGE))    in __lc_get()
        395  test_and_set_bit(__LC_DIRTY, &lc->flags);    in __lc_get()
        400  if (test_bit(__LC_LOCKED, &lc->flags)) {    in __lc_get()
        [all …]
|
| D | idr.c |
        385  unsigned long flags;    in ida_alloc_range() local
        395  xas_lock_irqsave(&xas, flags);    in ida_alloc_range()
        453  xas_unlock_irqrestore(&xas, flags);    in ida_alloc_range()
        465  xas_unlock_irqrestore(&xas, flags);    in ida_alloc_range()
        473  xas_unlock_irqrestore(&xas, flags);    in ida_alloc_range()
        492  unsigned long flags;    in ida_free() local
        497  xas_lock_irqsave(&xas, flags);    in ida_free()
        521  xas_unlock_irqrestore(&xas, flags);    in ida_free()
        524  xas_unlock_irqrestore(&xas, flags);    in ida_free()
        545  unsigned long flags;    in ida_destroy() local
        [all …]
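Callers of the IDA never see these `flags`; they are the irqsave state of the xarray lock that ida_alloc_range() and ida_free() take internally. A minimal caller sketch with a hypothetical ID space:

```c
#include <linux/idr.h>
#include <linux/gfp.h>

static DEFINE_IDA(demo_ida);	/* hypothetical ID space */

static int demo_get_id(void)
{
	/* Returns the lowest free ID in [1, 255], or a negative errno;
	 * the xarray lock is taken with xas_lock_irqsave() internally. */
	return ida_alloc_range(&demo_ida, 1, 255, GFP_KERNEL);
}

static void demo_put_id(int id)
{
	ida_free(&demo_ida, id);
}
```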
|
| D | bug.c |
        176  warning = (bug->flags & BUGFLAG_WARNING) != 0;    in __report_bug()
        177  once = (bug->flags & BUGFLAG_ONCE) != 0;    in __report_bug()
        178  done = (bug->flags & BUGFLAG_DONE) != 0;    in __report_bug()
        187  bug->flags |= BUGFLAG_DONE;    in __report_bug()
        196  if ((bug->flags & BUGFLAG_NO_CUT_HERE) == 0)    in __report_bug()
        234  bug->flags &= ~BUGFLAG_DONE;    in clear_once_table()
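On architectures with generic BUG support, the BUGFLAG_WARNING/BUGFLAG_ONCE/BUGFLAG_DONE bits in the bug-table entry are what make a WARN_ONCE()-style site fire only on its first hit; clear_once_table() rearms them. A typical call site, for illustration only (demo_check() is hypothetical):

```c
#include <linux/bug.h>
#include <linux/printk.h>
#include <linux/types.h>

/* Warns with a backtrace only the first time the condition is true. */
static void demo_check(size_t len, size_t max)
{
	WARN_ONCE(len > max, "demo: length %zu exceeds %zu\n", len, max);
}
```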
|
| D | stackdepot.c |
        381  depot_alloc_stack(unsigned long *entries, unsigned int nr_entries, u32 hash, depot_flags_t flags, v…    in depot_alloc_stack() argument
        396  if (flags & STACK_DEPOT_FLAG_GET) {    in depot_alloc_stack()
        419  if (flags & STACK_DEPOT_FLAG_GET) {    in depot_alloc_stack()
        470  unsigned long flags;    in depot_free_stack() local
        474  raw_spin_lock_irqsave(&pool_lock, flags);    in depot_free_stack()
        508  raw_spin_unlock_irqrestore(&pool_lock, flags);    in depot_free_stack()
        537  u32 hash, depot_flags_t flags)    in find_stack() argument
        572  if ((flags & STACK_DEPOT_FLAG_GET) && !refcount_inc_not_zero(&stack->count))    in find_stack()
        595  unsigned long flags;    in stack_depot_save_flags() local
        637  if (!raw_spin_trylock_irqsave(&pool_lock, flags))    in stack_depot_save_flags()
        [all …]
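Two kinds of flags appear here: depot_flags_t bits such as STACK_DEPOT_FLAG_GET used by the extended stack_depot_save_flags() path, and plain irqsave state for pool_lock. A hedged sketch of the simpler caller-facing helper, which only takes gfp flags (demo_record_stack() is hypothetical):

```c
#include <linux/stackdepot.h>
#include <linux/stacktrace.h>
#include <linux/gfp.h>
#include <linux/kernel.h>

/* Capture the current call chain and deduplicate it in the depot. */
static depot_stack_handle_t demo_record_stack(void)
{
	unsigned long entries[16];
	unsigned int nr = stack_trace_save(entries, ARRAY_SIZE(entries), 1);

	return stack_depot_save(entries, nr, GFP_KERNEL);
}
```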
|
| /lib/crypto/mpi/ |
| D | mpiutil.c |
        51   a->flags = 0;    in mpi_alloc()
        114  if (a->flags & 4)    in mpi_free()
        119  if (a->flags & ~7)    in mpi_free()
        140  b->flags = a->flags;    in mpi_copy()
        141  b->flags &= ~(16|32); /* Reset the immutable and constant flags. */    in mpi_copy()
|
| /lib/kunit/ |
| D | resource.c |
        26   unsigned long flags;    in __kunit_add_resource() local
        39   spin_lock_irqsave(&test->lock, flags);    in __kunit_add_resource()
        42   spin_unlock_irqrestore(&test->lock, flags);    in __kunit_add_resource()
        50   unsigned long flags;    in kunit_remove_resource() local
        53   spin_lock_irqsave(&test->lock, flags);    in kunit_remove_resource()
        56   spin_unlock_irqrestore(&test->lock, flags);    in kunit_remove_resource()
|
| /lib/vdso/ |
| D | getrandom.c |
        69   unsigned int flags, void *opaque_state, size_t opaque_len)    in __cvdso_getrandom_data() argument
        79   if (unlikely(opaque_len == ~0UL && !buffer && !len && !flags)) {    in __cvdso_getrandom_data()
        94   if (unlikely(flags & ~(GRND_NONBLOCK | GRND_RANDOM | GRND_INSECURE)))    in __cvdso_getrandom_data()
        253  return getrandom_syscall(orig_buffer, orig_len, flags);    in __cvdso_getrandom_data()
        257  __cvdso_getrandom(void *buffer, size_t len, unsigned int flags, void *opaque_state, size_t opaque_l…    in __cvdso_getrandom() argument
        259  …return __cvdso_getrandom_data(__arch_get_vdso_rng_data(), buffer, len, flags, opaque_state, opaque…    in __cvdso_getrandom()
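The vDSO implementation accepts the same GRND_* bits as the getrandom() syscall and falls back to getrandom_syscall() whenever it cannot serve the request itself, so from userspace the flags behave identically either way. A small userspace sketch (recent glibc can route this call through the vDSO fast path):

```c
#include <sys/random.h>
#include <stdio.h>

int main(void)
{
	unsigned char buf[16];
	ssize_t n = getrandom(buf, sizeof(buf), GRND_NONBLOCK);

	if (n < 0) {
		perror("getrandom");
		return 1;
	}
	printf("got %zd random bytes\n", n);
	return 0;
}
```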
|