
Searched refs:mask (Results 1 – 25 of 94) sorted by relevance

/tools/include/asm-generic/bitops/
non-atomic.h:18 unsigned long mask = BIT_MASK(nr); in __set_bit() local
21 *p |= mask; in __set_bit()
26 unsigned long mask = BIT_MASK(nr); in __clear_bit() local
29 *p &= ~mask; in __clear_bit()
43 unsigned long mask = BIT_MASK(nr); in __change_bit() local
46 *p ^= mask; in __change_bit()
60 unsigned long mask = BIT_MASK(nr); in __test_and_set_bit() local
64 *p = old | mask; in __test_and_set_bit()
65 return (old & mask) != 0; in __test_and_set_bit()
79 unsigned long mask = BIT_MASK(nr); in __test_and_clear_bit() local
[all …]
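
The non-atomic bitops above all follow one idiom: BIT_MASK(nr) picks the bit inside a word and BIT_WORD(nr) picks the word. A minimal standalone sketch of that idiom, with the two macros reconstructed from their usual definitions rather than copied from this header:

#include <stdio.h>
#include <limits.h>

#define BITS_PER_LONG	(sizeof(unsigned long) * CHAR_BIT)
#define BIT_MASK(nr)	(1UL << ((nr) % BITS_PER_LONG))
#define BIT_WORD(nr)	((nr) / BITS_PER_LONG)

/* Non-atomic set, mirroring __set_bit() in the hits above. */
static void set_bit_sketch(unsigned int nr, unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = addr + BIT_WORD(nr);

	*p |= mask;
}

/* Non-atomic test-and-clear, mirroring __test_and_clear_bit(). */
static int test_and_clear_bit_sketch(unsigned int nr, unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = addr + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old & ~mask;
	return (old & mask) != 0;
}

int main(void)
{
	unsigned long bitmap[3] = { 0 };	/* room for bit 70 with 32- or 64-bit longs */

	set_bit_sketch(70, bitmap);
	printf("%d\n", test_and_clear_bit_sketch(70, bitmap));	/* 1: the bit was set */
	printf("%d\n", test_and_clear_bit_sketch(70, bitmap));	/* 0: already cleared */
	return 0;
}
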
/tools/perf/trace/beauty/
futex_op.c:38 P_FUTEX_OP(WAIT); arg->mask |= SCF_VAL3|SCF_UADDR2; break; in syscall_arg__scnprintf_futex_op()
39 P_FUTEX_OP(WAKE); arg->mask |= SCF_VAL3|SCF_UADDR2|SCF_TIMEOUT; break; in syscall_arg__scnprintf_futex_op()
40 P_FUTEX_OP(FD); arg->mask |= SCF_VAL3|SCF_UADDR2|SCF_TIMEOUT; break; in syscall_arg__scnprintf_futex_op()
41 P_FUTEX_OP(REQUEUE); arg->mask |= SCF_VAL3|SCF_TIMEOUT; break; in syscall_arg__scnprintf_futex_op()
42 P_FUTEX_OP(CMP_REQUEUE); arg->mask |= SCF_TIMEOUT; break; in syscall_arg__scnprintf_futex_op()
43 P_FUTEX_OP(CMP_REQUEUE_PI); arg->mask |= SCF_TIMEOUT; break; in syscall_arg__scnprintf_futex_op()
45 P_FUTEX_OP(LOCK_PI); arg->mask |= SCF_VAL3|SCF_UADDR2|SCF_TIMEOUT; break; in syscall_arg__scnprintf_futex_op()
46 P_FUTEX_OP(UNLOCK_PI); arg->mask |= SCF_VAL3|SCF_UADDR2|SCF_TIMEOUT; break; in syscall_arg__scnprintf_futex_op()
47 P_FUTEX_OP(TRYLOCK_PI); arg->mask |= SCF_VAL3|SCF_UADDR2; break; in syscall_arg__scnprintf_futex_op()
48 P_FUTEX_OP(WAIT_BITSET); arg->mask |= SCF_UADDR2; break; in syscall_arg__scnprintf_futex_op()
[all …]
clone.c:67 arg->mask |= SCC_PARENT_TIDPTR; in syscall_arg__scnprintf_clone_flags()
70 arg->mask |= SCC_CHILD_TIDPTR; in syscall_arg__scnprintf_clone_flags()
73 arg->mask |= SCC_TLS; in syscall_arg__scnprintf_clone_flags()
/tools/perf/util/
parse-regs-options.c:19 uint64_t mask; in __parse_regs() local
31 mask = arch__intr_reg_mask(); in __parse_regs()
33 mask = arch__user_reg_mask(); in __parse_regs()
50 if (r->mask & mask) in __parse_regs()
58 if ((r->mask & mask) && !strcasecmp(s, r->name)) in __parse_regs()
67 *mode |= r->mask; in __parse_regs()
79 *mode = mask; in __parse_regs()
print_binary.c:10 size_t i, j, mask; in binary__fprintf() local
17 mask = bytes_per_line - 1; in binary__fprintf()
21 if ((i & mask) == 0) { in binary__fprintf()
28 if (((i & mask) == mask) || i == len - 1) { in binary__fprintf()
29 for (j = 0; j < mask-(i & mask); j++) in binary__fprintf()
33 for (j = i & ~mask; j <= i; j++) in binary__fprintf()
35 for (j = 0; j < mask-(i & mask); j++) in binary__fprintf()
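
binary__fprintf() relies on bytes_per_line being a power of two, so a single mask marks line starts ((i & mask) == 0) and line ends ((i & mask) == mask). A hypothetical dump loop using the same trick, not the actual perf implementation:

#include <stdio.h>

/* Hypothetical dump loop; bytes_per_line must be a power of two so that
 * (i & mask) == 0 marks a line start and (i & mask) == mask a line end. */
static void hexdump_sketch(const unsigned char *buf, size_t len,
			   size_t bytes_per_line)
{
	size_t i, mask = bytes_per_line - 1;

	for (i = 0; i < len; i++) {
		if ((i & mask) == 0)
			printf("%08zx:", i);		/* start of a new line */
		printf(" %02x", buf[i]);
		if ((i & mask) == mask || i == len - 1)
			printf("\n");			/* end of line or of data */
	}
}

int main(void)
{
	unsigned char data[40];
	size_t i;

	for (i = 0; i < sizeof(data); i++)
		data[i] = (unsigned char)i;
	hexdump_sketch(data, sizeof(data), 16);
	return 0;
}
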
perf_regs.c:30 u64 mask = regs->mask; in perf_reg_value() local
35 if (!(mask & (1ULL << id))) in perf_reg_value()
39 if (mask & (1ULL << i)) in perf_reg_value()
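
perf_reg_value() treats regs->mask as a bitmap of the registers present in the sample: a register's value lives at the position given by how many mask bits are set below its id. A standalone sketch of that lookup, with names invented for illustration:

#include <stdint.h>
#include <stdio.h>

/* Illustrative only: values[] holds one entry per set bit of mask, in bit order. */
static int reg_value_sketch(uint64_t *out, const uint64_t *values,
			    uint64_t mask, int id)
{
	int i, idx = 0;

	if (!(mask & (1ULL << id)))
		return -1;			/* register was not sampled */

	for (i = 0; i < id; i++)
		if (mask & (1ULL << i))
			idx++;			/* sampled registers packed before id */

	*out = values[idx];
	return 0;
}

int main(void)
{
	uint64_t vals[] = { 0x1111, 0x2222, 0x3333 };	/* for bits 0, 3, 7 */
	uint64_t mask = (1ULL << 0) | (1ULL << 3) | (1ULL << 7);
	uint64_t v;

	if (!reg_value_sketch(&v, vals, mask, 7))
		printf("reg 7 = 0x%llx\n", (unsigned long long)v);	/* 0x3333 */
	return 0;
}
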
mmap.c:28 return map->core.mask + 1 + page_size; in perf_mmap__mmap_len()
42 event = (union perf_event *)&data[*startp & map->core.mask]; in perf_mmap__read()
52 if ((*startp & map->core.mask) + size != ((*startp + size) & map->core.mask)) { in perf_mmap__read()
58 cpy = min(map->core.mask + 1 - (offset & map->core.mask), len); in perf_mmap__read()
59 memcpy(dst, &data[offset & map->core.mask], cpy); in perf_mmap__read()
330 static void build_node_mask(int node, cpu_set_t *mask) in build_node_mask() argument
343 CPU_SET(cpu, mask); in build_node_mask()
373 map->core.mask = mp->mask; in perf_mmap__mmap()
409 static int overwrite_rb_find_range(void *buf, int mask, u64 *start, u64 *end) in overwrite_rb_find_range() argument
413 int size = mask + 1; in overwrite_rb_find_range()
[all …]
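
The mmap.c hits show the usual power-of-two ring buffer convention: the size is mask + 1, "offset & mask" wraps an offset into the buffer, and a record crosses the wrap point exactly when (start & mask) + size differs from (start + size) & mask. A self-contained copy routine built on the same convention, with a made-up buffer layout rather than perf's:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Copy len bytes starting at logical offset out of a power-of-two ring buffer. */
static void ring_copy_sketch(void *dst, const unsigned char *ring,
			     uint64_t mask, uint64_t offset, size_t len)
{
	while (len) {
		/* bytes left before the physical end of the buffer */
		size_t cpy = (size_t)(mask + 1 - (offset & mask));

		if (cpy > len)
			cpy = len;
		memcpy(dst, &ring[offset & mask], cpy);
		dst = (unsigned char *)dst + cpy;
		offset += cpy;
		len -= cpy;
	}
}

int main(void)
{
	unsigned char ring[8];
	char out[6];
	uint64_t mask = sizeof(ring) - 1;	/* size must be a power of two */

	memcpy(ring, "CDEFGHAB", 8);		/* logical "ABCDEF" starts at offset 6 */
	ring_copy_sketch(out, ring, mask, 6, sizeof(out));
	printf("%.6s\n", out);			/* ABCDEF */
	return 0;
}
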
perf_regs.h:12 uint64_t mask; member
14 #define SMPL_REG(n, b) { .name = #n, .mask = 1ULL << (b) }
15 #define SMPL_REG2(n, b) { .name = #n, .mask = 3ULL << (b) }
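
SMPL_REG stringifies a register name and pairs it with a one-bit mask (SMPL_REG2 with a two-bit one), and __parse_regs() in parse-regs-options.c matches user input against that table, accepting only registers whose bit is in the architecture mask. A reduced sketch of that table-driven selection; the register names and bit positions are invented:

#include <stdint.h>
#include <stdio.h>
#include <strings.h>

struct sample_reg {
	const char *name;
	uint64_t mask;
};

#define SMPL_REG(n, b)	{ .name = #n, .mask = 1ULL << (b) }

/* Invented register table; the real ones are per-architecture. */
static const struct sample_reg sample_regs[] = {
	SMPL_REG(ax, 0),
	SMPL_REG(bx, 1),
	SMPL_REG(ip, 8),
	{ NULL, 0 },
};

/* OR the named register's bit into *mode if the arch mask supports it. */
static int select_reg_sketch(const char *s, uint64_t arch_mask, uint64_t *mode)
{
	const struct sample_reg *r;

	for (r = sample_regs; r->name; r++) {
		if ((r->mask & arch_mask) && !strcasecmp(s, r->name)) {
			*mode |= r->mask;
			return 0;
		}
	}
	return -1;
}

int main(void)
{
	uint64_t mode = 0;

	select_reg_sketch("IP", ~0ULL, &mode);
	printf("mode = 0x%llx\n", (unsigned long long)mode);	/* 0x100 */
	return 0;
}
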
/tools/testing/radix-tree/
multiorder.c:60 unsigned long mask = (1UL << order[i]) - 1; in multiorder_iteration() local
62 assert((xas.xa_index | mask) == (index[i] | mask)); in multiorder_iteration()
65 assert((item->index | mask) == (index[i] | mask)); in multiorder_iteration()
109 unsigned long mask; in multiorder_tagged_iteration() local
112 mask = (1UL << order[k]) - 1; in multiorder_tagged_iteration()
114 assert((xas.xa_index | mask) == (tag_index[i] | mask)); in multiorder_tagged_iteration()
116 assert((item->index | mask) == (tag_index[i] | mask)); in multiorder_tagged_iteration()
126 int mask, k; in multiorder_tagged_iteration() local
139 mask = (1 << order[k]) - 1; in multiorder_tagged_iteration()
141 assert((xas.xa_index | mask) == (tag_index[i] | mask)); in multiorder_tagged_iteration()
[all …]
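
In the multiorder tests, mask = (1UL << order) - 1 spans every index inside one 2^order-aligned slot, so comparing (a | mask) with (b | mask) asserts that two indices land in the same multi-order entry. A tiny standalone illustration of that check, without the XArray:

#include <assert.h>
#include <stdio.h>

/* Two indices hit the same 2^order-aligned block iff they agree once the
 * low 'order' bits are forced to all ones. */
static int same_block(unsigned long a, unsigned long b, unsigned int order)
{
	unsigned long mask = (1UL << order) - 1;

	return (a | mask) == (b | mask);
}

int main(void)
{
	assert(same_block(0x105, 0x10f, 4));	/* both inside [0x100, 0x10f] */
	assert(!same_block(0x10f, 0x110, 4));	/* adjacent blocks */
	printf("ok\n");
	return 0;
}
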
/tools/testing/selftests/powerpc/dscr/
dscr_sysfs_thread_test.c:38 cpu_set_t mask; in check_cpu_dscr_thread() local
42 CPU_ZERO(&mask); in check_cpu_dscr_thread()
43 CPU_SET(cpu, &mask); in check_cpu_dscr_thread()
44 if (sched_setaffinity(0, sizeof(mask), &mask)) in check_cpu_dscr_thread()
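
This block, and the trace-agent and powerpc/pmu hits further down, pin the calling thread with the same cpu_set_t recipe. A minimal standalone version (Linux-specific, error handling trimmed):

#define _GNU_SOURCE
#include <sched.h>
#include <stdio.h>

/* Pin the calling thread to a single CPU. */
static int bind_to_cpu_sketch(int cpu)
{
	cpu_set_t mask;

	CPU_ZERO(&mask);
	CPU_SET(cpu, &mask);
	return sched_setaffinity(0, sizeof(mask), &mask);
}

int main(void)
{
	if (bind_to_cpu_sketch(0))
		perror("sched_setaffinity");
	return 0;
}
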
/tools/gpio/
lsgpio.c:28 unsigned long mask; member
34 .mask = GPIOLINE_FLAG_KERNEL,
38 .mask = GPIOLINE_FLAG_IS_OUT,
42 .mask = GPIOLINE_FLAG_ACTIVE_LOW,
46 .mask = GPIOLINE_FLAG_OPEN_DRAIN,
50 .mask = GPIOLINE_FLAG_OPEN_SOURCE,
60 if (flags & flagnames[i].mask) { in print_flags()
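
lsgpio keeps a table of { mask, name } pairs so print_flags() can emit a name for every bit set in the kernel-reported flags word. A cut-down sketch with invented flag bits; the real values come from the GPIO uapi header:

#include <stdio.h>

/* Invented flag bits, for illustration only. */
#define FLAG_KERNEL	(1UL << 0)
#define FLAG_IS_OUT	(1UL << 1)
#define FLAG_ACTIVE_LOW	(1UL << 2)

static const struct {
	unsigned long mask;
	const char *name;
} flagnames[] = {
	{ FLAG_KERNEL,		"used" },
	{ FLAG_IS_OUT,		"output" },
	{ FLAG_ACTIVE_LOW,	"active-low" },
};

/* Print the name of every flag whose mask bit is set. */
static void print_flags_sketch(unsigned long flags)
{
	size_t i;

	for (i = 0; i < sizeof(flagnames) / sizeof(flagnames[0]); i++)
		if (flags & flagnames[i].mask)
			printf(" %s", flagnames[i].name);
	printf("\n");
}

int main(void)
{
	print_flags_sketch(FLAG_IS_OUT | FLAG_ACTIVE_LOW);	/* " output active-low" */
	return 0;
}
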
/tools/include/linux/
bitmap.h:90 unsigned long mask = BIT_MASK(nr); in test_and_set_bit() local
95 *p = old | mask; in test_and_set_bit()
97 return (old & mask) != 0; in test_and_set_bit()
107 unsigned long mask = BIT_MASK(nr); in test_and_clear_bit() local
112 *p = old & ~mask; in test_and_clear_bit()
114 return (old & mask) != 0; in test_and_clear_bit()
/tools/testing/selftests/kvm/lib/
sparsebit.c:174 mask_t mask; member
199 return nodep->num_after + __builtin_popcount(nodep->mask); in node_num_set()
288 root->mask = subtree->mask; in node_copy_subtree()
387 assert(!(nodep->mask & (1 << n1))); in node_add()
388 nodep->mask |= (1 << n1); in node_add()
543 nodep2->mask = ~(mask_t) 0; in node_split()
546 nodep2->mask = (1 << nodep2->num_after) - 1; in node_split()
610 if (nodep->mask == 0 && nodep->num_after == 0) { in node_reduce()
648 if (nodep->mask == 0) { in node_reduce()
655 nodep->mask = ~0; in node_reduce()
[all …]
/tools/io_uring/
queue.c:15 const unsigned mask = *cq->kring_mask; in __io_uring_get_cqe() local
31 *cqe_ptr = &cq->cqes[head & mask]; in __io_uring_get_cqe()
71 const unsigned mask = *sq->kring_mask; in io_uring_submit() local
99 sq->array[ktail & mask] = sq->sqe_head & mask; in io_uring_submit()
/tools/testing/selftests/powerpc/
utils.c:93 cpu_set_t *mask; in pick_online_cpu() local
98 mask = CPU_ALLOC(ncpus); in pick_online_cpu()
99 if (!mask) { in pick_online_cpu()
104 CPU_ZERO_S(size, mask); in pick_online_cpu()
106 if (sched_getaffinity(0, size, mask)) { in pick_online_cpu()
113 if (CPU_ISSET_S(cpu, size, mask)) in pick_online_cpu()
118 if (CPU_ISSET_S(cpu, size, mask)) in pick_online_cpu()
124 CPU_FREE(mask); in pick_online_cpu()
/tools/include/uapi/linux/
const.h:32 #define __ALIGN_KERNEL_MASK(x, mask) (((x) + (mask)) & ~(mask)) argument
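
__ALIGN_KERNEL_MASK(x, mask) rounds x up to the next multiple of mask + 1, assuming mask is one less than a power of two. A quick demonstration of what it evaluates to:

#include <stdio.h>

#define __ALIGN_KERNEL_MASK(x, mask)	(((x) + (mask)) & ~(mask))

int main(void)
{
	/* Round 100 up to an 8-byte boundary: mask = 8 - 1 = 7. */
	printf("%lu\n", __ALIGN_KERNEL_MASK(100UL, 7UL));	/* 104 */
	/* Already-aligned values come back unchanged. */
	printf("%lu\n", __ALIGN_KERNEL_MASK(104UL, 7UL));	/* 104 */
	return 0;
}
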
/tools/virtio/virtio-trace/
trace-agent-rw.c:94 cpu_set_t mask; in bind_cpu() local
96 CPU_ZERO(&mask); in bind_cpu()
97 CPU_SET(cpu_num, &mask); in bind_cpu()
100 if (sched_setaffinity(0, sizeof(mask), &mask) == -1) in bind_cpu()
/tools/perf/tests/
cpumap.c:20 struct perf_record_record_cpu_map *mask; in process_event_mask() local
29 mask = (struct perf_record_record_cpu_map *)data->data; in process_event_mask()
31 TEST_ASSERT_VAL("wrong nr", mask->nr == 1); in process_event_mask()
34 TEST_ASSERT_VAL("wrong cpu", test_bit(i, mask->mask)); in process_event_mask()
/tools/testing/selftests/powerpc/pmu/ebb/
cycles_with_freeze_test.c:26 uint64_t mask, val; in ebb_callee() local
28 mask = MMCR0_PMAO | MMCR0_FC; in ebb_callee()
45 mask &= ~MMCR0_FC; in ebb_callee()
50 reset_ebb_with_clear_mask(mask); in ebb_callee()
/tools/testing/selftests/powerpc/pmu/
lib.c:20 cpu_set_t mask; in bind_to_cpu() local
24 CPU_ZERO(&mask); in bind_to_cpu()
25 CPU_SET(cpu, &mask); in bind_to_cpu()
27 return sched_setaffinity(0, sizeof(mask), &mask); in bind_to_cpu()
/tools/testing/selftests/kvm/lib/aarch64/
processor.c:27 uint64_t mask = (1UL << (vm->va_bits - shift)) - 1; in pgd_index() local
29 return (gva >> shift) & mask; in pgd_index()
35 uint64_t mask = (1UL << (vm->page_shift - 3)) - 1; in pud_index() local
40 return (gva >> shift) & mask; in pud_index()
46 uint64_t mask = (1UL << (vm->page_shift - 3)) - 1; in pmd_index() local
51 return (gva >> shift) & mask; in pmd_index()
56 uint64_t mask = (1UL << (vm->page_shift - 3)) - 1; in pte_index() local
57 return (gva >> vm->page_shift) & mask; in pte_index()
62 uint64_t mask = ((1UL << (vm->va_bits - vm->page_shift)) - 1) << vm->page_shift; in pte_addr() local
63 return entry & mask; in pte_addr()
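
The aarch64 page-table helpers all extract a table index the same way: shift the virtual address down to the level's field and mask off everything above it. A generic sketch of that extraction; the shifts and field widths are illustrative, not the exact aarch64 layout:

#include <stdint.h>
#include <stdio.h>

/* Extract a 'bits'-wide index field starting at 'shift' from a virtual address. */
static uint64_t field_index(uint64_t va, unsigned int shift, unsigned int bits)
{
	uint64_t mask = (1ULL << bits) - 1;

	return (va >> shift) & mask;
}

int main(void)
{
	uint64_t va = 0x0000004512345678ULL;

	/* Assuming 4K pages (shift 12) and 9-bit indices per level: */
	printf("pte index: %llu\n", (unsigned long long)field_index(va, 12, 9));
	printf("pmd index: %llu\n", (unsigned long long)field_index(va, 21, 9));
	return 0;
}
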
/tools/testing/selftests/netfilter/
nft_trans_stress.sh:55 mask=$(printf 0x%x $((1<<$i)))
56 ip netns exec "$testns" taskset $mask ping -4 127.0.0.1 -fq > /dev/null &
57 ip netns exec "$testns" taskset $mask ping -6 ::1 -fq > /dev/null &
/tools/lib/bpf/
xsk.h:30 __u32 mask; \
53 return &addrs[idx & fill->mask]; in xsk_ring_prod__fill_addr()
61 return &addrs[idx & comp->mask]; in xsk_ring_cons__comp_addr()
69 return &descs[idx & tx->mask]; in xsk_ring_prod__tx_desc()
77 return &descs[idx & rx->mask]; in xsk_ring_cons__rx_desc()
/tools/testing/selftests/bpf/
bpf_rand.h:9 static inline uint64_t bpf_rand_mask(uint64_t mask) in bpf_rand_mask() argument
12 ((uint64_t)(uint32_t)rand() << 32)) & mask; in bpf_rand_mask()
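
bpf_rand_mask() assembles a 64-bit pseudo-random value from two 32-bit rand() results and then restricts it with the caller's mask. A standalone version of the same construction, with seeding added so it runs on its own:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

/* 64-bit pseudo-random value in which only the bits of 'mask' may be set. */
static uint64_t rand_mask_sketch(uint64_t mask)
{
	return (((uint64_t)(uint32_t)rand()) |
		((uint64_t)(uint32_t)rand() << 32)) & mask;
}

int main(void)
{
	srand((unsigned int)time(NULL));
	/* Only the low 20 bits can ever be set in the result. */
	printf("0x%llx\n", (unsigned long long)rand_mask_sketch((1ULL << 20) - 1));
	return 0;
}
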
/tools/testing/scatterlist/linux/
mm.h:34 #define __ALIGN_KERNEL_MASK(x, mask) (((x) + (mask)) & ~(mask)) argument
