
Searched refs:stack_size (Results 1 – 5 of 5) sorted by relevance

/kernel/bpf/
core.c
1748 #define PROG_NAME(stack_size) __bpf_prog_run##stack_size argument
1749 #define DEFINE_BPF_PROG_RUN(stack_size) \ argument
1750 static unsigned int PROG_NAME(stack_size)(const void *ctx, const struct bpf_insn *insn) \
1752 u64 stack[stack_size / sizeof(u64)]; \
1760 #define PROG_NAME_ARGS(stack_size) __bpf_prog_run_args##stack_size argument
1761 #define DEFINE_BPF_PROG_RUN_ARGS(stack_size) \ argument
1762 static u64 PROG_NAME_ARGS(stack_size)(u64 r1, u64 r2, u64 r3, u64 r4, u64 r5, \
1765 u64 stack[stack_size / sizeof(u64)]; \
1792 #define PROG_NAME_LIST(stack_size) PROG_NAME(stack_size), argument
1801 #define PROG_NAME_LIST(stack_size) PROG_NAME_ARGS(stack_size), argument
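
The core.c hits above are the interpreter's stack-size specialization: PROG_NAME() token-pastes a stack depth into a function name, DEFINE_BPF_PROG_RUN() / DEFINE_BPF_PROG_RUN_ARGS() emit one interpreter entry per depth with an on-stack scratch array of exactly stack_size bytes, and PROG_NAME_LIST() collects those entries into dispatch tables. A minimal userspace sketch of the same preprocessor pattern (the function names and the 128/256/512 depths here are illustrative, not the kernel's):

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins for PROG_NAME()/DEFINE_BPF_PROG_RUN(). */
#define RUN_NAME(stack_size) run_prog_##stack_size

#define DEFINE_RUN(stack_size)                                      \
static unsigned int RUN_NAME(stack_size)(const void *ctx)           \
{                                                                    \
    /* fixed-size scratch area, like the interpreter's BPF stack */  \
    uint64_t stack[stack_size / sizeof(uint64_t)];                   \
    (void)ctx;                                                       \
    return (unsigned int)sizeof(stack);                              \
}

/* One specialized function per supported depth. */
DEFINE_RUN(128)
DEFINE_RUN(256)
DEFINE_RUN(512)

int main(void)
{
    /* a dispatch table keyed by stack depth, similar in spirit
     * to the PROG_NAME_LIST() tables in the hits above */
    unsigned int (*runners[])(const void *) = {
        RUN_NAME(128), RUN_NAME(256), RUN_NAME(512),
    };

    printf("%u\n", runners[2](NULL)); /* prints 512 */
    return 0;
}
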
verifier.c
1033 env->stack_size--; in pop_stack()
1054 env->stack_size++; in push_stack()
1059 if (env->stack_size > BPF_COMPLEXITY_LIMIT_JMP_SEQ) { in push_stack()
1061 env->stack_size); in push_stack()
1555 env->stack_size++; in push_async_cb()
1556 if (env->stack_size > BPF_COMPLEXITY_LIMIT_JMP_SEQ) { in push_async_cb()
1559 env->stack_size); in push_async_cb()
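
In verifier.c, env->stack_size is not a byte count but the number of branch states queued for exploration: push_stack() and push_async_cb() bump it and give up once it crosses BPF_COMPLEXITY_LIMIT_JMP_SEQ, while pop_stack() shrinks it. A rough userspace model of that bounded explicit stack (struct names and the limit value are illustrative):

#include <stdio.h>
#include <stdlib.h>

#define COMPLEXITY_LIMIT_JMP_SEQ 8192  /* illustrative cap on queued states */

struct state_elem {
    int insn_idx;              /* where verification resumes */
    struct state_elem *next;
};

struct env_model {
    struct state_elem *head;
    int stack_size;            /* number of queued branch states */
};

/* Mirrors the push_stack() pattern: grow, then enforce the limit. */
static int push_state(struct env_model *env, int insn_idx)
{
    struct state_elem *e = calloc(1, sizeof(*e));

    if (!e)
        return -1;
    e->insn_idx = insn_idx;
    e->next = env->head;
    env->head = e;
    env->stack_size++;
    if (env->stack_size > COMPLEXITY_LIMIT_JMP_SEQ) {
        fprintf(stderr, "too complex: %d queued states\n", env->stack_size);
        return -1;             /* the verifier rejects the program here */
    }
    return 0;
}

/* Mirrors pop_stack(): take the newest state and shrink the count. */
static int pop_state(struct env_model *env)
{
    struct state_elem *e = env->head;
    int idx;

    if (!e)
        return -1;
    idx = e->insn_idx;
    env->head = e->next;
    env->stack_size--;
    free(e);
    return idx;
}

int main(void)
{
    struct env_model env = { 0 };

    push_state(&env, 10);
    push_state(&env, 42);
    printf("resume at insn %d\n", pop_state(&env));
    printf("%d state(s) still queued\n", env.stack_size);
    return 0;
}
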
/kernel/
fork.c
2271 retval = copy_thread(clone_flags, args->stack, args->stack_size, p, args->tls); in copy_process()
2604 .stack_size = (unsigned long)arg, in create_io_thread()
2718 .stack_size = (unsigned long)arg, in kernel_thread()
2765 int, stack_size, in SYSCALL_DEFINE5()
2845 .stack_size = args.stack_size, in copy_clone_args_from_user()
2872 if (kargs->stack_size > 0) in clone3_stack_valid()
2875 if (kargs->stack_size == 0) in clone3_stack_valid()
2878 if (!access_ok((void __user *)kargs->stack, kargs->stack_size)) in clone3_stack_valid()
2882 kargs->stack += kargs->stack_size; in clone3_stack_valid()
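
The fork.c hits show the clone3() side of stack_size: copy_clone_args_from_user() copies the field out of struct clone_args, clone3_stack_valid() requires stack and stack_size to be either both zero or both set and the range to pass access_ok(), and (where the stack grows down) the kernel advances stack by stack_size so copy_thread() receives the top of the region. A simplified userspace model of that validation rule (struct and function names invented for illustration):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative subset of struct clone_args. */
struct clone_args_model {
    uint64_t stack;        /* lowest address of the child's stack */
    uint64_t stack_size;   /* size of that region in bytes */
};

/* Mirrors the shape of clone3_stack_valid(): neither field or both. */
static bool stack_args_valid(const struct clone_args_model *args)
{
    if (!args->stack)
        return args->stack_size == 0;
    if (args->stack_size == 0)
        return false;
    /* the kernel additionally checks access_ok(stack, stack_size)
     * and, on stack-grows-down architectures, hands copy_thread()
     * stack + stack_size as the child's initial stack pointer */
    return true;
}

int main(void)
{
    struct clone_args_model ok  = { .stack = 0x7f0000000000ull, .stack_size = 64 * 1024 };
    struct clone_args_model bad = { .stack = 0x7f0000000000ull, .stack_size = 0 };

    printf("%d %d\n", stack_args_valid(&ok), stack_args_valid(&bad)); /* 1 0 */
    return 0;
}
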
/kernel/trace/
trace_osnoise.c
423 int stack_size; member
447 fstack->stack_size = nr_entries * sizeof(unsigned long); in timerlat_save_stack()
470 size = fstack->stack_size; in timerlat_dump_stack()
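
In trace_osnoise.c, stack_size is a struct member holding the saved trace length in bytes rather than entries: timerlat_save_stack() multiplies the entry count by sizeof(unsigned long), so the dump path can copy exactly fstack->stack_size bytes. A tiny standalone model of that bookkeeping (the unwinder is faked here; the real code uses stack_trace_save()):

#include <stdio.h>

#define MAX_CALLS 256

/* Illustrative stand-in for the tracer's stack snapshot. */
struct fstack_model {
    unsigned long calls[MAX_CALLS];
    int stack_size;            /* bytes used, not entry count */
};

/* Fake unwinder standing in for stack_trace_save(): returns entries. */
static int fake_unwind(unsigned long *store, int max)
{
    int i, n = max < 4 ? max : 4;

    for (i = 0; i < n; i++)
        store[i] = 0xc0de0000ul + i;
    return n;
}

static void save_stack_model(struct fstack_model *fs)
{
    int nr_entries = fake_unwind(fs->calls, MAX_CALLS);

    /* store bytes so the dump path can copy stack_size directly */
    fs->stack_size = nr_entries * (int)sizeof(unsigned long);
}

int main(void)
{
    struct fstack_model fs;

    save_stack_model(&fs);
    printf("%d bytes saved\n", fs.stack_size);
    return 0;
}
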
/kernel/events/
core.c
6807 perf_sample_ustack_size(u16 stack_size, u16 header_size, in perf_sample_ustack_size() argument
6827 stack_size = min(stack_size, (u16) task_size); in perf_sample_ustack_size()
6833 if ((u16) (header_size + stack_size) < header_size) { in perf_sample_ustack_size()
6838 stack_size = USHRT_MAX - header_size - sizeof(u64); in perf_sample_ustack_size()
6839 stack_size = round_up(stack_size, sizeof(u64)); in perf_sample_ustack_size()
6842 return stack_size; in perf_sample_ustack_size()
7590 u16 stack_size = event->attr.sample_stack_user; in perf_prepare_sample() local
7593 stack_size = perf_sample_ustack_size(stack_size, header->size, in perf_prepare_sample()
7601 if (stack_size) in perf_prepare_sample()
7602 size += sizeof(u64) + stack_size; in perf_prepare_sample()
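
The events/core.c hits size the user-stack dump for a sample: perf_prepare_sample() starts from attr.sample_stack_user, and perf_sample_ustack_size() clamps that request so the dump stays within the task's remaining user stack and still fits, together with the header, in the 16-bit sample size at u64 alignment. A standalone model of that clamping arithmetic (parameter names are illustrative):

#include <limits.h>
#include <stdint.h>
#include <stdio.h>

/* Rough model of perf_sample_ustack_size(): clamp the requested
 * user-stack dump size against the remaining sample space. */
static uint16_t clamp_ustack_size(uint16_t stack_size, uint16_t header_size,
                                  uint64_t user_stack_left)
{
    /* never dump past what lies between SP and the task limit */
    if (user_stack_left < stack_size)
        stack_size = (uint16_t)user_stack_left;

    /* account for the static-size and dynamic-size u64 fields */
    header_size += 2 * sizeof(uint64_t);

    /* if header + dump would overflow the u16 sample size,
     * shrink the dump to what still fits, keeping u64 alignment */
    if ((uint16_t)(header_size + stack_size) < header_size) {
        stack_size = (uint16_t)(USHRT_MAX - header_size - sizeof(uint64_t));
        stack_size = (uint16_t)((stack_size + sizeof(uint64_t) - 1) &
                                ~(sizeof(uint64_t) - 1));
    }
    return stack_size;
}

int main(void)
{
    /* a 60000-byte request with an 8000-byte header gets trimmed */
    printf("%u\n", clamp_ustack_size(60000, 8000, 1u << 20));
    return 0;
}
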
[all …]