Lines Matching refs:stack
618 static bool is_spilled_reg(const struct bpf_stack_state *stack) in is_spilled_reg() argument
620 return stack->slot_type[BPF_REG_SIZE - 1] == STACK_SPILL; in is_spilled_reg()
722 if (state->stack[i].slot_type[j] != STACK_INVALID) in print_verifier_state()
725 state->stack[i].slot_type[j]]; in print_verifier_state()
731 print_liveness(env, state->stack[i].spilled_ptr.live); in print_verifier_state()
732 if (is_spilled_reg(&state->stack[i])) { in print_verifier_state()
733 reg = &state->stack[i].spilled_ptr; in print_verifier_state()
827 dst->stack = copy_array(dst->stack, src->stack, n, sizeof(struct bpf_stack_state), in copy_stack_state()
829 if (!dst->stack) in copy_stack_state()
854 state->stack = realloc_array(state->stack, old_n, n, sizeof(struct bpf_stack_state)); in grow_stack_state()
855 if (!state->stack) in grow_stack_state()
907 kfree(state->stack); in free_func_state()
2400 if (!is_spilled_reg(&func->stack[j])) in mark_all_scalars_precise()
2402 reg = &func->stack[j].spilled_ptr; in mark_all_scalars_precise()
2426 if (!is_spilled_reg(&func->stack[j])) in mark_all_scalars_imprecise()
2428 reg = &func->stack[j].spilled_ptr; in mark_all_scalars_imprecise()
2555 if (!is_spilled_reg(&func->stack[spi])) { in __mark_chain_precision()
2559 reg = &func->stack[spi].spilled_ptr; in __mark_chain_precision()
2680 if (!is_spilled_reg(&func->stack[i])) { in __mark_chain_precision()
2684 reg = &func->stack[i].spilled_ptr; in __mark_chain_precision()
2804 copy_register_state(&state->stack[spi].spilled_ptr, reg); in save_register_state()
2806 state->stack[spi].spilled_ptr.live |= REG_LIVE_WRITTEN; in save_register_state()
2809 state->stack[spi].slot_type[i - 1] = STACK_SPILL; in save_register_state()
2813 scrub_spilled_slot(&state->stack[spi].slot_type[i - 1]); in save_register_state()
2843 is_spilled_reg(&state->stack[spi]) && in check_stack_write_fixed_off()
2856 u8 type = state->stack[spi].slot_type[i]; in check_stack_write_fixed_off()
2884 state->stack[spi].spilled_ptr.id = 0; in check_stack_write_fixed_off()
2908 state->stack[spi].spilled_ptr.type = NOT_INIT; in check_stack_write_fixed_off()
2910 if (is_spilled_reg(&state->stack[spi])) in check_stack_write_fixed_off()
2912 scrub_spilled_slot(&state->stack[spi].slot_type[i]); in check_stack_write_fixed_off()
2923 state->stack[spi].spilled_ptr.live |= REG_LIVE_WRITTEN; in check_stack_write_fixed_off()
2937 state->stack[spi].slot_type[(slot - i) % BPF_REG_SIZE] = in check_stack_write_fixed_off()
2999 stype = &state->stack[spi].slot_type[slot % BPF_REG_SIZE]; in check_stack_write_var_off()
3019 state->stack[spi].spilled_ptr.type = NOT_INIT; in check_stack_write_var_off()
3073 stype = ptr_state->stack[spi].slot_type; in mark_reg_stack_read()
3121 stype = reg_state->stack[spi].slot_type; in check_stack_read_fixed_off()
3122 reg = &reg_state->stack[spi].spilled_ptr; in check_stack_read_fixed_off()
3124 if (is_spilled_reg(&reg_state->stack[spi])) { in check_stack_read_fixed_off()
3837 u16 stack = env->subprog_info[func->subprogno].stack_depth; in update_stack_depth() local
3839 if (stack >= -off) in update_stack_depth()
4763 stype = &state->stack[spi].slot_type[slot % BPF_REG_SIZE]; in check_stack_range_initialized()
4774 if (is_spilled_reg(&state->stack[spi]) && in check_stack_range_initialized()
4775 state->stack[spi].spilled_ptr.type == PTR_TO_BTF_ID) in check_stack_range_initialized()
4778 if (is_spilled_reg(&state->stack[spi]) && in check_stack_range_initialized()
4779 (state->stack[spi].spilled_ptr.type == SCALAR_VALUE || in check_stack_range_initialized()
4782 __mark_reg_unknown(env, &state->stack[spi].spilled_ptr); in check_stack_range_initialized()
4784 scrub_spilled_slot(&state->stack[spi].slot_type[j]); in check_stack_range_initialized()
4805 mark_reg_read(env, &state->stack[spi].spilled_ptr, in check_stack_range_initialized()
4806 state->stack[spi].spilled_ptr.parent, in check_stack_range_initialized()
10354 live = st->stack[i].spilled_ptr.live; in clean_func_state()
10356 st->stack[i].spilled_ptr.live |= REG_LIVE_DONE; in clean_func_state()
10358 __mark_reg_not_init(env, &st->stack[i].spilled_ptr); in clean_func_state()
10360 st->stack[i].slot_type[j] = STACK_INVALID; in clean_func_state()
10563 if (!(old->stack[spi].spilled_ptr.live & REG_LIVE_READ)) { in stacksafe()
10569 if (old->stack[spi].slot_type[i % BPF_REG_SIZE] == STACK_INVALID) in stacksafe()
10582 if (old->stack[spi].slot_type[i % BPF_REG_SIZE] == STACK_MISC && in stacksafe()
10583 cur->stack[spi].slot_type[i % BPF_REG_SIZE] == STACK_ZERO) in stacksafe()
10585 if (old->stack[spi].slot_type[i % BPF_REG_SIZE] != in stacksafe()
10586 cur->stack[spi].slot_type[i % BPF_REG_SIZE]) in stacksafe()
10595 if (!is_spilled_reg(&old->stack[spi])) in stacksafe()
10597 if (!regsafe(env, &old->stack[spi].spilled_ptr, in stacksafe()
10598 &cur->stack[spi].spilled_ptr, idmap)) in stacksafe()
10767 parent_reg = &parent->stack[i].spilled_ptr; in propagate_liveness()
10768 state_reg = &state->stack[i].spilled_ptr; in propagate_liveness()
10804 if (!is_spilled_reg(&state->stack[i])) in propagate_precision()
10806 state_reg = &state->stack[i].spilled_ptr; in propagate_precision()
11059 frame->stack[i].spilled_ptr.live = REG_LIVE_NONE; in is_state_visited()
11060 frame->stack[i].spilled_ptr.parent = in is_state_visited()
11061 &newframe->stack[i].spilled_ptr; in is_state_visited()