Lines matching refs: u32
203 const u32 *insn_to_jit_off) in bpf_prog_fill_jited_linfo()
205 u32 linfo_idx, insn_start, insn_end, nr_linfo, i; in bpf_prog_fill_jited_linfo()
236 u32 pages; in bpf_prog_realloc()
276 const u32 bits_offset = SHA1_BLOCK_SIZE - sizeof(__be64); in bpf_prog_calc_tag()
277 u32 raw_size = bpf_prog_tag_scratch_size(fp); in bpf_prog_calc_tag()
278 u32 digest[SHA1_DIGEST_WORDS]; in bpf_prog_calc_tag()
279 u32 ws[SHA1_WORKSPACE_WORDS]; in bpf_prog_calc_tag()
280 u32 i, bsize, psize, blocks; in bpf_prog_calc_tag()
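
The tag-calculation lines above size a scratch buffer, a digest and a workspace around SHA1_BLOCK_SIZE, leaving room for a big-endian length word at the end of the last block. A minimal userspace sketch of that padding layout follows; it is illustrative only, fold_block() is a stand-in for the kernel's sha1_transform(), and every name in it is made up.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE	64	/* SHA-1 block size in bytes */

/* Stand-in compression step, NOT SHA-1: just folds a block into the digest. */
static void fold_block(uint32_t digest[5], const uint8_t block[BLOCK_SIZE])
{
	for (int i = 0; i < BLOCK_SIZE; i++)
		digest[i % 5] = (digest[i % 5] << 3 | digest[i % 5] >> 29) ^ block[i];
}

/* Pad the raw image to whole blocks: 0x80 terminator, zero fill, then the
 * message length in bits stored big-endian in the last 8 bytes. */
static int calc_tag(const uint8_t *data, uint32_t len, uint32_t digest[5])
{
	uint32_t padded = ((len + 1 + 8 + BLOCK_SIZE - 1) / BLOCK_SIZE) * BLOCK_SIZE;
	uint8_t buf[1024] = { 0 };	/* sketch assumes small inputs */
	uint64_t bits = (uint64_t)len * 8;

	if (padded > sizeof(buf))
		return -1;

	memcpy(buf, data, len);
	buf[len] = 0x80;
	for (int i = 0; i < 8; i++)
		buf[padded - 1 - i] = (uint8_t)(bits >> (8 * i));

	for (uint32_t off = 0; off < padded; off += BLOCK_SIZE)
		fold_block(digest, buf + off);
	return 0;
}

int main(void)
{
	uint32_t digest[5] = { 0 };
	const uint8_t prog[] = "not real BPF instructions";

	calc_tag(prog, sizeof(prog) - 1, digest);
	for (int i = 0; i < 5; i++)
		printf("%08x", digest[i]);
	printf("\n");
	return 0;
}
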
347 static int bpf_adj_delta_to_imm(struct bpf_insn *insn, u32 pos, s32 end_old, in bpf_adj_delta_to_imm()
365 static int bpf_adj_delta_to_off(struct bpf_insn *insn, u32 pos, s32 end_old, in bpf_adj_delta_to_off()
383 static int bpf_adj_branches(struct bpf_prog *prog, u32 pos, s32 end_old, in bpf_adj_branches()
386 u32 i, insn_cnt = prog->len + (probe_pass ? end_new - end_old : 0); in bpf_adj_branches()
430 static void bpf_adj_linfo(struct bpf_prog *prog, u32 off, u32 delta) in bpf_adj_linfo()
433 u32 i, nr_linfo; in bpf_adj_linfo()
450 struct bpf_prog *bpf_patch_insn_single(struct bpf_prog *prog, u32 off, in bpf_patch_insn_single()
451 const struct bpf_insn *patch, u32 len) in bpf_patch_insn_single()
453 u32 insn_adj_cnt, insn_rest, insn_delta = len - 1; in bpf_patch_insn_single()
454 const u32 cnt_max = S16_MAX; in bpf_patch_insn_single()
511 int bpf_remove_insns(struct bpf_prog *prog, u32 off, u32 cnt) in bpf_remove_insns()
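
The patching lines above adjust every relative branch when instructions are inserted or removed, and cap the patch size at S16_MAX because an adjusted offset must still fit the instruction's s16 off field. A simplified userspace sketch of that fix-up follows; the conditions are reduced relative to the kernel's bpf_adj_delta_to_off()/bpf_adj_branches(), and the struct and helper names are invented.

#include <stdint.h>
#include <stdio.h>

struct jump_insn {
	int16_t off;	/* relative target: next insn executed is curr + off + 1 */
};

/* Inserting `delta` instructions at index `pos` shifts everything at or
 * after `pos`, so a jump keeps its target only if its offset is widened
 * or narrowed accordingly, and the result must still be encodable. */
static int adjust_jump(struct jump_insn *in, uint32_t curr,
		       uint32_t pos, int32_t delta)
{
	int32_t off = in->off;
	int64_t target = (int64_t)curr + off + 1;

	if (curr < pos && target >= (int64_t)pos)
		off += delta;		/* source stays put, target moves */
	else if (curr >= pos && target < (int64_t)pos)
		off -= delta;		/* source moves, target stays put */

	if (off < INT16_MIN || off > INT16_MAX)
		return -1;		/* offset no longer fits the s16 field */

	in->off = (int16_t)off;
	return 0;
}

int main(void)
{
	struct jump_insn jmp = { .off = 10 };	/* insn 2 jumping to insn 13 */

	adjust_jump(&jmp, 2, 5, 3);		/* 3 insns inserted at index 5 */
	printf("new off = %d\n", jmp.off);	/* 13: the target followed the move */
	return 0;
}
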
784 static const u32 poke_tab_max = 1024; in bpf_jit_add_poke_descriptor()
785 u32 slot = prog->aux->size_poke_tab; in bpf_jit_add_poke_descriptor()
786 u32 size = slot + 1; in bpf_jit_add_poke_descriptor()
877 void *bpf_prog_pack_alloc(u32 size, bpf_jit_fill_hole_t bpf_fill_ill_insns) in bpf_prog_pack_alloc()
982 int bpf_jit_charge_modmem(u32 size) in bpf_jit_charge_modmem()
994 void bpf_jit_uncharge_modmem(u32 size) in bpf_jit_uncharge_modmem()
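
The charge/uncharge lines above account JIT image bytes against a global limit before memory is allocated. A userspace sketch of that charge, roll-back and uncharge pattern, with a made-up limit and C11 atomics standing in for the kernel's atomic_long API:

#include <errno.h>
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

static _Atomic long jit_current;
static const long jit_limit = 1 << 20;	/* hypothetical 1 MiB cap */

/* Add the requested size up front; if that pushes the total over the
 * limit, undo the charge and refuse the allocation. */
static int charge_jit_mem(uint32_t size)
{
	long now = atomic_fetch_add(&jit_current, size) + size;

	if (now > jit_limit) {
		atomic_fetch_sub(&jit_current, size);	/* roll back */
		return -EPERM;
	}
	return 0;
}

static void uncharge_jit_mem(uint32_t size)
{
	atomic_fetch_sub(&jit_current, size);
}

int main(void)
{
	printf("charge 4K: %d\n", charge_jit_mem(4096));
	printf("charge 2M: %d\n", charge_jit_mem(2 << 20));
	uncharge_jit_mem(4096);
	return 0;
}
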
1015 u32 size, hole, start; in bpf_jit_binary_alloc()
1050 u32 size = hdr->size; in bpf_jit_binary_free()
1072 u32 size, hole, start; in bpf_jit_binary_pack_alloc()
1141 u32 size = ro_header->size; in bpf_jit_binary_pack_free()
1224 u32 imm_rnd = get_random_u32(); in bpf_jit_blind_insn()
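
The random imm_rnd above belongs to constant blinding: rather than emitting an attacker-supplied 32-bit constant verbatim into the JITed image, the constant is stored XORed with a random cookie and recovered with a second XOR at run time. A small userspace illustration of that idea (not bpf_jit_blind_insn() itself; all names here are made up):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

struct blinded_imm {
	uint32_t stored;	/* imm ^ imm_rnd, what would be emitted */
	uint32_t imm_rnd;	/* per-instruction random cookie */
};

static struct blinded_imm blind(uint32_t imm, uint32_t imm_rnd)
{
	return (struct blinded_imm){ .stored = imm ^ imm_rnd, .imm_rnd = imm_rnd };
}

static uint32_t unblind(const struct blinded_imm *b)
{
	return b->stored ^ b->imm_rnd;	/* XOR is its own inverse */
}

int main(void)
{
	srand((unsigned)time(NULL));
	uint32_t imm_rnd = (uint32_t)rand();	/* stand-in for get_random_u32() */
	struct blinded_imm b = blind(0xdeadbeef, imm_rnd);

	printf("stored=0x%08x recovered=0x%08x\n", b.stored, unblind(&b));
	return 0;
}
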
1633 u64 __weak bpf_probe_read_kernel(void *dst, u32 size, const void *unsafe_ptr) in bpf_probe_read_kernel()
1667 u32 tail_call_cnt = 0; in ___bpf_prog_run()
1692 DST = (u32) DST OP ((u32) SRC & 31); \ in ___bpf_prog_run()
1698 DST = (u32) DST OP (u32) IMM; \ in ___bpf_prog_run()
1706 DST = (u32) DST OP (u32) SRC; \ in ___bpf_prog_run()
1712 DST = (u32) DST OP (u32) IMM; \ in ___bpf_prog_run()
1725 DST = (u32) -DST; in ___bpf_prog_run()
1731 DST = (u32) SRC; in ___bpf_prog_run()
1734 DST = (u32) IMM; in ___bpf_prog_run()
1743 DST = (u64) (u32) insn[0].imm | ((u64) (u32) insn[1].imm) << 32; in ___bpf_prog_run()
1747 DST = (u64) (u32) (((s32) DST) >> (SRC & 31)); in ___bpf_prog_run()
1750 DST = (u64) (u32) (((s32) DST) >> IMM); in ___bpf_prog_run()
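
The interpreter lines above spell out BPF's 32-bit ALU semantics: shift counts are masked to 31, results are truncated to 32 bits and zero-extended into the 64-bit register, ARSH operates on the signed low word, and a 64-bit immediate is assembled from the imm fields of two consecutive instructions. A compilable userspace sketch with plain helpers rather than the interpreter's DST/SRC macros:

#include <stdint.h>
#include <stdio.h>

/* Shift counts are masked to 31 so the C shift is never undefined; the
 * 32-bit result is zero-extended when written back. */
static uint64_t alu32_lsh(uint64_t dst, uint64_t src)
{
	return (uint32_t)((uint32_t)dst << ((uint32_t)src & 31));
}

/* ARSH shifts the signed low 32 bits (arithmetic shift on the compilers
 * the kernel supports), then zero-extends the result. */
static uint64_t alu32_arsh(uint64_t dst, uint64_t src)
{
	return (uint32_t)((int32_t)dst >> ((uint32_t)src & 31));
}

/* LD_IMM64: two consecutive instructions each carry one 32-bit half. */
static uint64_t ld_imm64(int32_t imm_lo, int32_t imm_hi)
{
	return (uint64_t)(uint32_t)imm_lo | ((uint64_t)(uint32_t)imm_hi) << 32;
}

int main(void)
{
	printf("lsh   = %#llx\n", (unsigned long long)alu32_lsh(0xffffffff, 33));
	printf("arsh  = %#llx\n", (unsigned long long)alu32_arsh(0x80000000, 4));
	printf("imm64 = %#llx\n", (unsigned long long)ld_imm64(-1, 0x12345678));
	return 0;
}
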
1763 AX = (u32) DST; in ___bpf_prog_run()
1764 DST = do_div(AX, (u32) SRC); in ___bpf_prog_run()
1771 AX = (u32) DST; in ___bpf_prog_run()
1772 DST = do_div(AX, (u32) IMM); in ___bpf_prog_run()
1778 AX = (u32) DST; in ___bpf_prog_run()
1779 do_div(AX, (u32) SRC); in ___bpf_prog_run()
1780 DST = (u32) AX; in ___bpf_prog_run()
1786 AX = (u32) DST; in ___bpf_prog_run()
1787 do_div(AX, (u32) IMM); in ___bpf_prog_run()
1788 DST = (u32) AX; in ___bpf_prog_run()
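
The DIV/MOD lines above lean on do_div()'s contract: it divides its 64-bit argument in place and returns the remainder, so AX ends up holding the quotient while the return value supplies the modulus. A userspace stand-in showing that contract (divisor assumed non-zero here):

#include <stdint.h>
#include <stdio.h>

/* Userspace stand-in for do_div(): divide *n in place, return the remainder. */
static uint32_t do_div_sketch(uint64_t *n, uint32_t base)
{
	uint32_t rem = (uint32_t)(*n % base);

	*n /= base;
	return rem;
}

/* 32-bit MOD: low 32 bits of dst modulo low 32 bits of src, zero-extended. */
static uint64_t mod32(uint64_t dst, uint64_t src)
{
	uint64_t ax = (uint32_t)dst;

	return do_div_sketch(&ax, (uint32_t)src);	/* remainder */
}

/* 32-bit DIV: same truncation, but the quotient left in ax is kept. */
static uint64_t div32(uint64_t dst, uint64_t src)
{
	uint64_t ax = (uint32_t)dst;

	do_div_sketch(&ax, (uint32_t)src);
	return (uint32_t)ax;
}

int main(void)
{
	printf("mod32 = %llu\n", (unsigned long long)mod32(0x100000007ULL, 5));	/* 7 % 5 = 2 */
	printf("div32 = %llu\n", (unsigned long long)div32(0x100000007ULL, 5));	/* 7 / 5 = 1 */
	return 0;
}
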
1796 DST = (__force u32) cpu_to_be32(DST); in ___bpf_prog_run()
1809 DST = (__force u32) cpu_to_le32(DST); in ___bpf_prog_run()
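
The BPF_END lines above convert only the low 32 bits and rely on the u32 assignment to clear the upper half. A userspace illustration using htobe32()/htole32() from <endian.h> (a Linux libc assumption) in place of the kernel's cpu_to_be32()/cpu_to_le32():

#include <endian.h>	/* htobe32()/htole32(), available on Linux libcs */
#include <stdint.h>
#include <stdio.h>

/* Convert the low word to big-endian and zero-extend the result. */
static uint64_t end_to_be32(uint64_t dst)
{
	return (uint32_t)htobe32((uint32_t)dst);
}

/* Same, targeting little-endian. */
static uint64_t end_to_le32(uint64_t dst)
{
	return (uint32_t)htole32((uint32_t)dst);
}

int main(void)
{
	printf("to_be32 = %#llx\n", (unsigned long long)end_to_be32(0xaabbccdd11223344ULL));
	printf("to_le32 = %#llx\n", (unsigned long long)end_to_le32(0xaabbccdd11223344ULL));
	return 0;
}
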
1838 u32 index = BPF_R3; in ___bpf_prog_run()
1936 LDST(W, u32) in ___bpf_prog_run()
1943 atomic_##KOP((u32) SRC, (atomic_t *)(unsigned long) \ in ___bpf_prog_run()
1951 SRC = (u32) atomic_fetch_##KOP( \ in ___bpf_prog_run()
1952 (u32) SRC, \ in ___bpf_prog_run()
1971 SRC = (u32) atomic_xchg( in ___bpf_prog_run()
1973 (u32) SRC); in ___bpf_prog_run()
1981 BPF_R0 = (u32) atomic_cmpxchg( in ___bpf_prog_run()
1983 (u32) BPF_R0, (u32) SRC); in ___bpf_prog_run()
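
The BPF_ATOMIC lines above cover word-sized add, fetch-add, exchange and compare-exchange, with the fetch variants writing the old value back into the source register (or BPF_R0 for CMPXCHG). A userspace analogue with C11 atomics instead of the kernel's atomic_t API:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	_Atomic uint32_t word = 10;
	uint32_t src = 5;

	atomic_fetch_add(&word, src);			/* BPF_ADD, no fetch */

	uint32_t old = atomic_fetch_add(&word, src);	/* BPF_ADD | BPF_FETCH: old value returned */
	printf("fetch_add returned %u\n", old);

	uint32_t prev = atomic_exchange(&word, 42);	/* BPF_XCHG */
	printf("xchg returned %u\n", prev);

	uint32_t expected = 42;				/* BPF_CMPXCHG compares against R0 */
	if (atomic_compare_exchange_strong(&word, &expected, 7))
		printf("cmpxchg succeeded, word is now 7\n");
	else
		printf("cmpxchg failed, word was %u\n", expected);

	return 0;
}
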
2070 void bpf_patch_call_args(struct bpf_insn *insn, u32 stack_depth) in bpf_patch_call_args()
2072 stack_depth = max_t(u32, stack_depth, 1); in bpf_patch_call_args()
2145 u32 stack_depth = max_t(u32, fp->aux->stack_depth, 1); in bpf_prog_select_func()
2236 struct bpf_prog_array *bpf_prog_array_alloc(u32 prog_cnt, gfp_t flags) in bpf_prog_array_alloc()
2272 u32 cnt = 0; in bpf_prog_array_length()
2291 u32 *prog_ids, in bpf_prog_array_copy_core()
2292 u32 request_cnt) in bpf_prog_array_copy_core()
2311 __u32 __user *prog_ids, u32 cnt) in bpf_prog_array_copy_to_user()
2315 u32 *ids; in bpf_prog_array_copy_to_user()
2323 ids = kcalloc(cnt, sizeof(u32), GFP_USER | __GFP_NOWARN); in bpf_prog_array_copy_to_user()
2327 err = copy_to_user(prog_ids, ids, cnt * sizeof(u32)); in bpf_prog_array_copy_to_user()
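
The copy-to-user lines above gather program IDs into a kcalloc'd bounce buffer, push it out in a single copy_to_user() call, and report -ENOSPC when more programs are attached than the caller asked for. A userspace sketch of that pattern, with calloc/memcpy standing in for the kernel calls and invented names:

#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static int copy_ids_to_user(uint32_t *user_buf, uint32_t cnt,
			    const uint32_t *attached, uint32_t attached_cnt)
{
	uint32_t copy = cnt < attached_cnt ? cnt : attached_cnt;
	uint32_t *ids;

	if (!cnt)
		return 0;

	ids = calloc(cnt, sizeof(uint32_t));	/* bounce buffer, like kcalloc() */
	if (!ids)
		return -ENOMEM;

	memcpy(ids, attached, copy * sizeof(uint32_t));
	memcpy(user_buf, ids, cnt * sizeof(uint32_t));	/* copy_to_user() stand-in */
	free(ids);

	return attached_cnt > cnt ? -ENOSPC : 0;	/* signal truncation */
}

int main(void)
{
	uint32_t attached[] = { 11, 22, 33 };
	uint32_t buf[2] = { 0 };
	int err = copy_ids_to_user(buf, 2, attached, 3);

	printf("err=%d ids=%u,%u\n", err, buf[0], buf[1]);
	return 0;
}
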
2475 u32 *prog_ids, u32 request_cnt, in bpf_prog_array_copy_info()
2476 u32 *prog_cnt) in bpf_prog_array_copy_info()
2478 u32 cnt = 0; in bpf_prog_array_copy_info()
2495 struct bpf_map **used_maps, u32 len) in __bpf_free_used_maps()
2498 u32 i; in __bpf_free_used_maps()
2515 struct btf_mod_pair *used_btfs, u32 len) in __bpf_free_used_btfs()
2519 u32 i; in __bpf_free_used_btfs()
2603 u32 res; in BPF_CALL_0()