/tools/include/asm-generic/bitops/ |
D | non-atomic.h |
    18  unsigned long mask = BIT_MASK(nr);  in __set_bit() local
    21  *p |= mask;  in __set_bit()
    26  unsigned long mask = BIT_MASK(nr);  in __clear_bit() local
    29  *p &= ~mask;  in __clear_bit()
    43  unsigned long mask = BIT_MASK(nr);  in __change_bit() local
    46  *p ^= mask;  in __change_bit()
    60  unsigned long mask = BIT_MASK(nr);  in __test_and_set_bit() local
    64  *p = old | mask;  in __test_and_set_bit()
    65  return (old & mask) != 0;  in __test_and_set_bit()
    79  unsigned long mask = BIT_MASK(nr);  in __test_and_clear_bit() local
    [all …]
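For context, a minimal standalone sketch of the BIT_MASK()/BIT_WORD() pattern these non-atomic helpers follow (the my_* names and the main() driver are illustrative, not the kernel code itself):

    #include <stdio.h>
    #include <limits.h>

    #define BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)
    #define BIT_MASK(nr)  (1UL << ((nr) % BITS_PER_LONG))
    #define BIT_WORD(nr)  ((nr) / BITS_PER_LONG)

    /* Set bit 'nr' in the bitmap at 'addr'; not safe against concurrent updates. */
    static void my_set_bit(unsigned int nr, unsigned long *addr)
    {
        unsigned long mask = BIT_MASK(nr);
        unsigned long *p = addr + BIT_WORD(nr);

        *p |= mask;
    }

    /* Clear bit 'nr'. */
    static void my_clear_bit(unsigned int nr, unsigned long *addr)
    {
        unsigned long mask = BIT_MASK(nr);
        unsigned long *p = addr + BIT_WORD(nr);

        *p &= ~mask;
    }

    /* Set bit 'nr' and return its previous value. */
    static int my_test_and_set_bit(unsigned int nr, unsigned long *addr)
    {
        unsigned long mask = BIT_MASK(nr);
        unsigned long *p = addr + BIT_WORD(nr);
        unsigned long old = *p;

        *p = old | mask;
        return (old & mask) != 0;
    }

    int main(void)
    {
        unsigned long bitmap[2] = { 0 };

        my_set_bit(3, bitmap);
        my_set_bit(70, bitmap);                 /* lands in the second word */
        printf("bit 3 was already set: %d\n", my_test_and_set_bit(3, bitmap));
        my_clear_bit(70, bitmap);
        return 0;
    }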
|
/tools/perf/trace/beauty/ |
D | futex_op.c |
    38  P_FUTEX_OP(WAIT); arg->mask |= SCF_VAL3|SCF_UADDR2; break;  in syscall_arg__scnprintf_futex_op()
    39  P_FUTEX_OP(WAKE); arg->mask |= SCF_VAL3|SCF_UADDR2|SCF_TIMEOUT; break;  in syscall_arg__scnprintf_futex_op()
    40  P_FUTEX_OP(FD); arg->mask |= SCF_VAL3|SCF_UADDR2|SCF_TIMEOUT; break;  in syscall_arg__scnprintf_futex_op()
    41  P_FUTEX_OP(REQUEUE); arg->mask |= SCF_VAL3|SCF_TIMEOUT; break;  in syscall_arg__scnprintf_futex_op()
    42  P_FUTEX_OP(CMP_REQUEUE); arg->mask |= SCF_TIMEOUT; break;  in syscall_arg__scnprintf_futex_op()
    43  P_FUTEX_OP(CMP_REQUEUE_PI); arg->mask |= SCF_TIMEOUT; break;  in syscall_arg__scnprintf_futex_op()
    45  P_FUTEX_OP(LOCK_PI); arg->mask |= SCF_VAL3|SCF_UADDR2|SCF_TIMEOUT; break;  in syscall_arg__scnprintf_futex_op()
    46  P_FUTEX_OP(UNLOCK_PI); arg->mask |= SCF_VAL3|SCF_UADDR2|SCF_TIMEOUT; break;  in syscall_arg__scnprintf_futex_op()
    47  P_FUTEX_OP(TRYLOCK_PI); arg->mask |= SCF_VAL3|SCF_UADDR2; break;  in syscall_arg__scnprintf_futex_op()
    48  P_FUTEX_OP(WAIT_BITSET); arg->mask |= SCF_UADDR2; break;  in syscall_arg__scnprintf_futex_op()
    [all …]
|
/tools/testing/selftests/bpf/prog_tests/ |
D | cpu_mask.c |
     8  static void validate_mask(int case_nr, const char *exp, bool *mask, int n)  in validate_mask() argument
    18  CHECK(!mask[i], "cpu_not_set",  in validate_mask()
    22  CHECK(i < n && mask[i], "cpu_set",  in validate_mask()
    61  bool *mask;  in test_cpu_mask() local
    64  mask = NULL;  in test_cpu_mask()
    65  err = parse_cpu_mask_str(test_cases[i].cpu_mask, &mask, &n);  in test_cpu_mask()
    74  validate_mask(i + 1, test_cases[i].expect, mask, n);  in test_cpu_mask()
    76  free(mask);  in test_cpu_mask()
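parse_cpu_mask_str() is a libbpf helper that turns a cpu-list string into a bool array. As a rough illustration of what such a parser does, here is a simplified, hypothetical parse_cpu_list() that accepts strings like "0-3,7" (not libbpf's actual implementation):

    #include <stdbool.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Parse a cpu list such as "0-3,7" into mask[0..n-1]; returns 0 on success. */
    static int parse_cpu_list(const char *s, bool *mask, int n)
    {
        while (*s) {
            char *end;
            long lo = strtol(s, &end, 10);
            long hi = lo;

            if (end == s || lo < 0)
                return -1;                       /* not a number */
            s = end;
            if (*s == '-') {                     /* a range like "0-3" */
                hi = strtol(s + 1, &end, 10);
                if (end == s + 1 || hi < lo)
                    return -1;
                s = end;
            }
            for (long cpu = lo; cpu <= hi && cpu < n; cpu++)
                mask[cpu] = true;
            if (*s == ',')
                s++;
            else if (*s)
                return -1;                       /* unexpected trailing character */
        }
        return 0;
    }

    int main(void)
    {
        bool mask[16] = { false };

        if (parse_cpu_list("0-3,7", mask, 16) == 0)
            for (int i = 0; i < 16; i++)
                if (mask[i])
                    printf("cpu %d set\n", i);
        return 0;
    }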
|
/tools/perf/util/ |
D | parse-regs-options.c |
    19  uint64_t mask;  in __parse_regs() local
    31  mask = arch__intr_reg_mask();  in __parse_regs()
    33  mask = arch__user_reg_mask();  in __parse_regs()
    51  if (r->mask & mask)  in __parse_regs()
    61  if ((r->mask & mask) && !strcasecmp(s, r->name))  in __parse_regs()
    71  *mode |= r->mask;  in __parse_regs()
    83  *mode = mask;  in __parse_regs()
|
D | print_binary.c |
    10  size_t i, j, mask;  in binary__fprintf() local
    17  mask = bytes_per_line - 1;  in binary__fprintf()
    21  if ((i & mask) == 0) {  in binary__fprintf()
    28  if (((i & mask) == mask) || i == len - 1) {  in binary__fprintf()
    29  for (j = 0; j < mask-(i & mask); j++)  in binary__fprintf()
    33  for (j = i & ~mask; j <= i; j++)  in binary__fprintf()
    35  for (j = 0; j < mask-(i & mask); j++)  in binary__fprintf()
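binary__fprintf() relies on bytes_per_line being a power of two, so (i & mask) is the column within a row and (i & ~mask) is the start of the row. A self-contained sketch of that hex-dump pattern (hexdump() here is illustrative, not perf's function):

    #include <stdio.h>
    #include <ctype.h>
    #include <string.h>

    /* Dump 'len' bytes, 'bytes_per_line' (a power of two) per row. */
    static void hexdump(const unsigned char *data, size_t len, size_t bytes_per_line)
    {
        size_t mask = bytes_per_line - 1;        /* works because bytes_per_line is 2^k */

        for (size_t i = 0; i < len; i++) {
            if ((i & mask) == 0)                 /* start of a row */
                printf("%08zx  ", i);
            printf("%02x ", data[i]);
            if ((i & mask) == mask || i == len - 1) {   /* end of row or of data */
                for (size_t j = 0; j < mask - (i & mask); j++)
                    printf("   ");               /* pad a short final row */
                printf(" |");
                for (size_t j = i & ~mask; j <= i; j++)
                    putchar(isprint(data[j]) ? data[j] : '.');
                printf("|\n");
            }
        }
    }

    int main(void)
    {
        const char *msg = "binary__fprintf-style hex dump example";

        hexdump((const unsigned char *)msg, strlen(msg), 16);
        return 0;
    }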
|
D | perf_regs.c |
    26  u64 mask = regs->mask;  in perf_reg_value() local
    31  if (!(mask & (1ULL << id)))  in perf_reg_value()
    35  if (mask & (1ULL << i))  in perf_reg_value()
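In perf_reg_value(), sampled register values are stored densely in mask-bit order, so locating register id amounts to counting the set mask bits below it. A simplified sketch of that lookup, using a popcount where perf loops over the bits (the struct and names are hypothetical):

    #include <stdint.h>
    #include <stdio.h>

    struct sample_regs {
        uint64_t mask;      /* bit i set => register i was sampled             */
        uint64_t regs[64];  /* sampled values, packed in ascending bit order   */
    };

    /* Store the value of register 'id' in *valp; return -1 if it wasn't sampled. */
    static int reg_value(uint64_t *valp, const struct sample_regs *regs, int id)
    {
        if (!(regs->mask & (1ULL << id)))
            return -1;
        /* index = number of sampled registers with a lower bit number
         * (__builtin_popcountll is a GCC/Clang builtin) */
        int idx = __builtin_popcountll(regs->mask & ((1ULL << id) - 1));
        *valp = regs->regs[idx];
        return 0;
    }

    int main(void)
    {
        struct sample_regs r = { .mask = (1ULL << 1) | (1ULL << 5) };
        uint64_t v;

        r.regs[0] = 0x1111;     /* value for register 1 */
        r.regs[1] = 0x5555;     /* value for register 5 */
        if (!reg_value(&v, &r, 5))
            printf("reg 5 = 0x%llx\n", (unsigned long long)v);
        return 0;
    }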
|
D | mmap.c |
     29  void mmap_cpu_mask__scnprintf(struct mmap_cpu_mask *mask, const char *tag)  in mmap_cpu_mask__scnprintf() argument
     34  len = bitmap_scnprintf(mask->bits, mask->nbits, buf, MASK_SIZE);  in mmap_cpu_mask__scnprintf()
     36  pr_debug("%p: %s mask[%zd]: %s\n", mask, tag, mask->nbits, buf);  in mmap_cpu_mask__scnprintf()
    241  static void build_node_mask(int node, struct mmap_cpu_mask *mask)  in build_node_mask() argument
    254  set_bit(cpu, mask->bits);  in build_node_mask()
    328  if ((md->core.start & md->core.mask) + size != (md->core.end & md->core.mask)) {  in perf_mmap__push()
    329  buf = &data[md->core.start & md->core.mask];  in perf_mmap__push()
    330  size = md->core.mask + 1 - (md->core.start & md->core.mask);  in perf_mmap__push()
    339  buf = &data[md->core.start & md->core.mask];  in perf_mmap__push()
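The md->core.start & md->core.mask arithmetic in perf_mmap__push() indexes a power-of-two ring buffer and splits any copy that wraps past its end into two pieces. A minimal sketch of the same idea (ring_copy() and the test data are illustrative):

    #include <stdio.h>
    #include <string.h>

    /*
     * Copy 'size' bytes starting at logical offset 'start' out of a ring buffer
     * whose length is mask + 1 (a power of two).  If the region wraps past the
     * end of the buffer, copy it in two pieces.
     */
    static void ring_copy(void *dst, const unsigned char *ring, size_t mask,
                          size_t start, size_t size)
    {
        size_t off = start & mask;

        if (off + size > mask + 1) {              /* region wraps around */
            size_t first = mask + 1 - off;

            memcpy(dst, ring + off, first);
            memcpy((unsigned char *)dst + first, ring, size - first);
        } else {
            memcpy(dst, ring + off, size);
        }
    }

    int main(void)
    {
        unsigned char ring[8] = "EFGHABCD";        /* producer has wrapped once */
        char out[9] = { 0 };

        ring_copy(out, ring, sizeof(ring) - 1, 4, 8);  /* read 8 bytes from offset 4 */
        printf("%s\n", out);                           /* prints "ABCDEFGH" */
        return 0;
    }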
|
D | perf_regs.h |
    12  uint64_t mask;  member
    14  #define SMPL_REG(n, b) { .name = #n, .mask = 1ULL << (b) }
    15  #define SMPL_REG2(n, b) { .name = #n, .mask = 3ULL << (b) }
|
/tools/gpio/ |
D | lsgpio.c |
    28  unsigned long long mask;  member
    34  .mask = GPIO_V2_LINE_FLAG_USED,
    38  .mask = GPIO_V2_LINE_FLAG_INPUT,
    42  .mask = GPIO_V2_LINE_FLAG_OUTPUT,
    46  .mask = GPIO_V2_LINE_FLAG_ACTIVE_LOW,
    50  .mask = GPIO_V2_LINE_FLAG_OPEN_DRAIN,
    54  .mask = GPIO_V2_LINE_FLAG_OPEN_SOURCE,
    58  .mask = GPIO_V2_LINE_FLAG_BIAS_PULL_UP,
    62  .mask = GPIO_V2_LINE_FLAG_BIAS_PULL_DOWN,
    66  .mask = GPIO_V2_LINE_FLAG_BIAS_DISABLED,
    [all …]
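lsgpio keeps a table of { mask, name } pairs and prints the name of every flag bit that is set in a line's attributes. A generic sketch of that pattern with made-up flag names (the GPIO_V2_LINE_FLAG_* values themselves come from the uapi header):

    #include <stdio.h>

    /* Table mapping single-bit flags to printable names (illustrative flags). */
    struct flag_name {
        unsigned long long mask;
        const char *name;
    };

    static const struct flag_name flagnames[] = {
        { 1ULL << 0, "used"       },
        { 1ULL << 1, "input"      },
        { 1ULL << 2, "output"     },
        { 1ULL << 3, "active-low" },
        { 1ULL << 4, "open-drain" },
    };

    static void print_flags(unsigned long long flags)
    {
        for (size_t i = 0; i < sizeof(flagnames) / sizeof(flagnames[0]); i++)
            if (flags & flagnames[i].mask)
                printf(" [%s]", flagnames[i].name);
        putchar('\n');
    }

    int main(void)
    {
        print_flags((1ULL << 0) | (1ULL << 2));   /* prints " [used] [output]" */
        return 0;
    }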
|
/tools/testing/radix-tree/ |
D | multiorder.c |
     60  unsigned long mask = (1UL << order[i]) - 1;  in multiorder_iteration() local
     62  assert((xas.xa_index | mask) == (index[i] | mask));  in multiorder_iteration()
     65  assert((item->index | mask) == (index[i] | mask));  in multiorder_iteration()
    109  unsigned long mask;  in multiorder_tagged_iteration() local
    112  mask = (1UL << order[k]) - 1;  in multiorder_tagged_iteration()
    114  assert((xas.xa_index | mask) == (tag_index[i] | mask));  in multiorder_tagged_iteration()
    116  assert((item->index | mask) == (tag_index[i] | mask));  in multiorder_tagged_iteration()
    126  int mask, k;  in multiorder_tagged_iteration() local
    139  mask = (1 << order[k]) - 1;  in multiorder_tagged_iteration()
    141  assert((xas.xa_index | mask) == (tag_index[i] | mask));  in multiorder_tagged_iteration()
    [all …]
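The (index | mask) comparisons, with mask = (1UL << order) - 1, test whether two indices fall inside the same naturally aligned 2^order block (a multi-order radix-tree entry covers such a block). A small sketch of that check (assuming order < BITS_PER_LONG):

    #include <stdio.h>
    #include <assert.h>

    /* Two indices land in the same naturally aligned 2^order block iff
     * forcing their low 'order' bits to ones makes them equal. */
    static int same_block(unsigned long a, unsigned long b, unsigned int order)
    {
        unsigned long mask = (1UL << order) - 1;

        return (a | mask) == (b | mask);
    }

    int main(void)
    {
        assert(same_block(0x103, 0x10f, 4));    /* both inside [0x100, 0x10f] */
        assert(!same_block(0x10f, 0x110, 4));   /* adjacent blocks */
        printf("order-mask checks passed\n");
        return 0;
    }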
|
/tools/testing/selftests/powerpc/dscr/ |
D | dscr_sysfs_thread_test.c |
    38  cpu_set_t mask;  in check_cpu_dscr_thread() local
    42  CPU_ZERO(&mask);  in check_cpu_dscr_thread()
    43  CPU_SET(cpu, &mask);  in check_cpu_dscr_thread()
    44  if (sched_setaffinity(0, sizeof(mask), &mask))  in check_cpu_dscr_thread()
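The CPU_ZERO()/CPU_SET()/sched_setaffinity() sequence here is the standard way to pin the calling thread to one CPU; the later bind_cpu()/bind_to_cpu() entries use the same pattern. A minimal standalone sketch (Linux-specific, needs _GNU_SOURCE):

    #define _GNU_SOURCE
    #include <sched.h>
    #include <stdio.h>

    /* Pin the calling thread to 'cpu'; returns 0 on success, -1 on failure. */
    static int bind_to_cpu(int cpu)
    {
        cpu_set_t mask;

        CPU_ZERO(&mask);
        CPU_SET(cpu, &mask);
        return sched_setaffinity(0, sizeof(mask), &mask);
    }

    int main(void)
    {
        if (bind_to_cpu(0))
            perror("sched_setaffinity");
        else
            printf("pinned to cpu 0\n");
        return 0;
    }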
|
/tools/lib/ |
D | find_bit.c |
    35  unsigned long tmp, mask;  in _find_next_bit() local
    47  mask = BITMAP_FIRST_WORD_MASK(start);  in _find_next_bit()
    55  mask = swab(mask);  in _find_next_bit()
    58  tmp &= mask;  in _find_next_bit()
|
/tools/lib/perf/ |
D | mmap.c |
     31  return map->mask + 1 + page_size;  in perf_mmap__mmap_len()
     38  map->mask = mp->mask;  in perf_mmap__mmap()
    105  static int overwrite_rb_find_range(void *buf, int mask, u64 *start, u64 *end)  in overwrite_rb_find_range() argument
    109  int size = mask + 1;  in overwrite_rb_find_range()
    112  pheader = (struct perf_event_header *)(buf + (*start & mask));  in overwrite_rb_find_range()
    122  pheader = (struct perf_event_header *)(buf + (evt_head & mask));  in overwrite_rb_find_range()
    154  if (size > (unsigned long)(md->mask) + 1) {  in __perf_mmap__read_init()
    167  if (overwrite_rb_find_range(data, md->mask, &md->start, &md->end))  in __perf_mmap__read_init()
    213  event = (union perf_event *)&data[*startp & map->mask];  in perf_mmap__read()
    223  if ((*startp & map->mask) + size != ((*startp + size) & map->mask)) {  in perf_mmap__read()
    [all …]
|
/tools/testing/selftests/kvm/lib/ |
D | sparsebit.c |
    174  mask_t mask;  member
    199  return nodep->num_after + __builtin_popcount(nodep->mask);  in node_num_set()
    288  root->mask = subtree->mask;  in node_copy_subtree()
    387  assert(!(nodep->mask & (1 << n1)));  in node_add()
    388  nodep->mask |= (1 << n1);  in node_add()
    543  nodep2->mask = ~(mask_t) 0;  in node_split()
    546  nodep2->mask = (1 << nodep2->num_after) - 1;  in node_split()
    610  if (nodep->mask == 0 && nodep->num_after == 0) {  in node_reduce()
    648  if (nodep->mask == 0) {  in node_reduce()
    655  nodep->mask = ~0;  in node_reduce()
    [all …]
|
/tools/io_uring/ |
D | queue.c |
    15  const unsigned mask = *cq->kring_mask;  in __io_uring_get_cqe() local
    31  *cqe_ptr = &cq->cqes[head & mask];  in __io_uring_get_cqe()
    71  const unsigned mask = *sq->kring_mask;  in io_uring_submit() local
    99  sq->array[ktail & mask] = sq->sqe_head & mask;  in io_uring_submit()
|
/tools/testing/selftests/arm64/bti/ |
D | signal.c |
    34  int sigprocmask(int how, const sigset_t *mask, sigset_t *old)  in sigprocmask() argument
    36  return syscall(__NR_rt_sigprocmask, how, mask, old, sizeof(*mask));  in sigprocmask()
|
/tools/testing/selftests/powerpc/ |
D | utils.c |
     92  cpu_set_t *mask;  in pick_online_cpu() local
     97  mask = CPU_ALLOC(ncpus);  in pick_online_cpu()
     98  if (!mask) {  in pick_online_cpu()
    103  CPU_ZERO_S(size, mask);  in pick_online_cpu()
    105  if (sched_getaffinity(0, size, mask)) {  in pick_online_cpu()
    112  if (CPU_ISSET_S(cpu, size, mask))  in pick_online_cpu()
    117  if (CPU_ISSET_S(cpu, size, mask))  in pick_online_cpu()
    123  CPU_FREE(mask);  in pick_online_cpu()
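pick_online_cpu() uses the dynamically sized cpu_set_t API (CPU_ALLOC()/CPU_ALLOC_SIZE()/CPU_*_S()) so it still works on systems with more CPUs than fit in a fixed cpu_set_t. A sketch of that pattern (first_allowed_cpu() is an illustrative helper, not the selftest code):

    #define _GNU_SOURCE
    #include <sched.h>
    #include <stdio.h>
    #include <unistd.h>

    /* Return the first CPU in this thread's affinity mask, or -1 on error. */
    static int first_allowed_cpu(void)
    {
        int ncpus = (int)sysconf(_SC_NPROCESSORS_CONF);
        cpu_set_t *mask = CPU_ALLOC(ncpus);
        size_t size = CPU_ALLOC_SIZE(ncpus);
        int cpu = -1;

        if (!mask)
            return -1;
        CPU_ZERO_S(size, mask);
        if (sched_getaffinity(0, size, mask) == 0) {
            for (int i = 0; i < ncpus; i++) {
                if (CPU_ISSET_S(i, size, mask)) {
                    cpu = i;
                    break;
                }
            }
        }
        CPU_FREE(mask);
        return cpu;
    }

    int main(void)
    {
        printf("first allowed cpu: %d\n", first_allowed_cpu());
        return 0;
    }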
|
/tools/include/linux/ |
D | bitmap.h |
     86  unsigned long mask = BIT_MASK(nr);  in test_and_set_bit() local
     91  *p = old | mask;  in test_and_set_bit()
     93  return (old & mask) != 0;  in test_and_set_bit()
    103  unsigned long mask = BIT_MASK(nr);  in test_and_clear_bit() local
    108  *p = old & ~mask;  in test_and_clear_bit()
    110  return (old & mask) != 0;  in test_and_clear_bit()
|
/tools/include/uapi/linux/ |
D | const.h |
    32  #define __ALIGN_KERNEL_MASK(x, mask) (((x) + (mask)) & ~(mask))  argument
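__ALIGN_KERNEL_MASK(x, mask) rounds x up to the next multiple of mask + 1, where mask is a power of two minus one. A tiny worked example of the same (x + mask) & ~mask trick (ALIGN_UP() is a local illustration, not the uapi macro):

    #include <stdio.h>

    /* Round x up to the next multiple of a (a must be a power of two). */
    #define ALIGN_UP(x, a)  (((x) + ((a) - 1)) & ~((a) - 1))

    int main(void)
    {
        printf("%lu\n", ALIGN_UP(5UL, 8UL));       /* 8    */
        printf("%lu\n", ALIGN_UP(16UL, 8UL));      /* 16   */
        printf("%lu\n", ALIGN_UP(4097UL, 4096UL)); /* 8192 */
        return 0;
    }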
|
/tools/virtio/virtio-trace/ |
D | trace-agent-rw.c |
     94  cpu_set_t mask;  in bind_cpu() local
     96  CPU_ZERO(&mask);  in bind_cpu()
     97  CPU_SET(cpu_num, &mask);  in bind_cpu()
    100  if (sched_setaffinity(0, sizeof(mask), &mask) == -1)  in bind_cpu()
|
/tools/testing/selftests/powerpc/pmu/ebb/ |
D | cycles_with_freeze_test.c |
    26  uint64_t mask, val;  in ebb_callee() local
    28  mask = MMCR0_PMAO | MMCR0_FC;  in ebb_callee()
    45  mask &= ~MMCR0_FC;  in ebb_callee()
    50  reset_ebb_with_clear_mask(mask);  in ebb_callee()
|
/tools/perf/tests/ |
D | cpumap.c |
    20  struct perf_record_record_cpu_map *mask;  in process_event_mask() local
    29  mask = (struct perf_record_record_cpu_map *)data->data;  in process_event_mask()
    31  TEST_ASSERT_VAL("wrong nr", mask->nr == 1);  in process_event_mask()
    34  TEST_ASSERT_VAL("wrong cpu", test_bit(i, mask->mask));  in process_event_mask()
|
/tools/testing/selftests/powerpc/pmu/ |
D | lib.c |
    20  cpu_set_t mask;  in bind_to_cpu() local
    24  CPU_ZERO(&mask);  in bind_to_cpu()
    25  CPU_SET(cpu, &mask);  in bind_to_cpu()
    27  return sched_setaffinity(0, sizeof(mask), &mask);  in bind_to_cpu()
|
/tools/testing/selftests/syscall_user_dispatch/ |
D | sud_test.c |
    167  sigset_t mask;  in TEST() local
    175  sigemptyset(&mask);  in TEST()
    179  act.sa_mask = mask;  in TEST()
    222  sigset_t mask;  in TEST_SIGNAL() local
    231  sigemptyset(&mask);  in TEST_SIGNAL()
    235  act.sa_mask = mask;  in TEST_SIGNAL()
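Here sa_mask is the set of signals blocked while the handler runs; initializing it with sigemptyset() means no extra signals are blocked during handling. A minimal sigaction() sketch of that pattern (the handler and choice of SIGUSR1 are illustrative):

    #include <signal.h>
    #include <stdio.h>
    #include <string.h>

    static volatile sig_atomic_t got_signal;

    static void handler(int sig)
    {
        (void)sig;
        got_signal = 1;
    }

    int main(void)
    {
        struct sigaction act;
        sigset_t mask;

        memset(&act, 0, sizeof(act));
        sigemptyset(&mask);           /* block no additional signals in the handler */
        act.sa_mask = mask;
        act.sa_handler = handler;
        if (sigaction(SIGUSR1, &act, NULL)) {
            perror("sigaction");
            return 1;
        }

        raise(SIGUSR1);               /* delivered before raise() returns */
        printf("handler ran: %d\n", got_signal);
        return 0;
    }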
|
/tools/testing/selftests/arm64/pauth/ |
D | pac.c |
    100  cpu_set_t mask;  in exec_sign_all() local
    118  sched_getaffinity(0, sizeof(mask), &mask);  in exec_sign_all()
    121  if (CPU_ISSET(i, &mask))  in exec_sign_all()
    124  CPU_ZERO(&mask);  in exec_sign_all()
    125  CPU_SET(i, &mask);  in exec_sign_all()
    126  sched_setaffinity(0, sizeof(mask), &mask);  in exec_sign_all()
|