Lines Matching refs:ctx

All hits below come from the arm64 eBPF JIT (arch/arm64/net/bpf_jit_comp.c in the Linux kernel). The leading number on each hit is the source line in that file, the trailing "in fn()" note names the enclosing function, and only lines containing "ctx" are shown, so each function appears with gaps.

66 static inline void emit(const u32 insn, struct jit_ctx *ctx)  in emit()  argument
68 if (ctx->image != NULL) in emit()
69 ctx->image[ctx->idx] = cpu_to_le32(insn); in emit()
71 ctx->idx++; in emit()
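
The matches above are almost the entire body of emit(): it is the single choke point through which every generated A64 instruction passes. A minimal sketch of the function and of the context it updates, reconstructed from the matched lines (fields beyond image and idx are inferred from later matches, and their order is an assumption):

    /* Inferred shape of the JIT context. */
    struct jit_ctx {
            const struct bpf_prog *prog;
            int idx;                /* next A64 instruction slot */
            int epilogue_offset;    /* A64 index where the epilogue starts */
            int *offset;            /* per-BPF-insn A64 start indices */
            int exentry_idx;        /* next free exception-table entry */
            __le32 *image;          /* NULL during the sizing pass */
            u32 stack_size;         /* bytes reserved for the BPF stack */
    };

    static inline void emit(const u32 insn, struct jit_ctx *ctx)
    {
            if (ctx->image != NULL)
                    ctx->image[ctx->idx] = cpu_to_le32(insn);

            /* Always advance, so a pass with image == NULL measures size. */
            ctx->idx++;
    }
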
75 const s32 val, struct jit_ctx *ctx) in emit_a64_mov_i() argument
82 emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx); in emit_a64_mov_i()
84 emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx); in emit_a64_mov_i()
86 emit(A64_MOVK(is64, reg, lo, 0), ctx); in emit_a64_mov_i()
89 emit(A64_MOVZ(is64, reg, lo, 0), ctx); in emit_a64_mov_i()
91 emit(A64_MOVK(is64, reg, hi, 16), ctx); in emit_a64_mov_i()
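
Read together, lines 82-91 are the classic two-instruction immediate loader: the 32-bit value is split into 16-bit halves, the first half is set with MOVN (when the high half looks negative) or MOVZ, and the other half is patched in with MOVK only when it carries information. A sketch of the control flow these matches imply (the guard conditions are inferred, not present in the hits):

    static void emit_a64_mov_i(const int is64, const int reg,
                               const s32 val, struct jit_ctx *ctx)
    {
            u16 hi = val >> 16;
            u16 lo = val & 0xffff;

            if (hi & 0x8000) {              /* high half looks negative */
                    if (hi == 0xffff) {
                            /* one MOVN is enough (line 82) */
                            emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx);
                    } else {
                            emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx);
                            if (lo != 0xffff)
                                    emit(A64_MOVK(is64, reg, lo, 0), ctx);
                    }
            } else {
                    emit(A64_MOVZ(is64, reg, lo, 0), ctx);
                    if (hi)
                            emit(A64_MOVK(is64, reg, hi, 16), ctx);
            }
    }
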
104 struct jit_ctx *ctx) in emit_a64_mov_i64() argument
111 return emit_a64_mov_i(0, reg, (u32)val, ctx); in emit_a64_mov_i64()
117 emit(A64_MOVN(1, reg, (rev_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
119 emit(A64_MOVZ(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
123 emit(A64_MOVK(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
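
emit_a64_mov_i64() extends the same chunking to 64 bits: line 111 delegates values that fit in 32 bits, and lines 117-123 show the general path, where the first chunk is seeded with MOVN or MOVZ (nrm_tmp is the value, rev_tmp its complement; whichever has fewer significant chunks wins) and each remaining non-trivial 16-bit chunk is merged with MOVK. The kernel's first-chunk selection is more involved than shown here; this is a deliberately simplified, hypothetical sketch that always starts from MOVZ:

    /* Hypothetical simplification: the real function also considers a
     * MOVN start (line 117) to shorten all-ones-heavy values. */
    static void emit_a64_mov_i64_sketch(const int reg, const u64 val,
                                        struct jit_ctx *ctx)
    {
            int shift;

            if (!(val >> 32))                       /* line 111 */
                    return emit_a64_mov_i(0, reg, (u32)val, ctx);

            emit(A64_MOVZ(1, reg, val & 0xffff, 0), ctx);
            for (shift = 16; shift < 64; shift += 16) {
                    u16 chunk = (val >> shift) & 0xffff;

                    if (chunk)                      /* skip zero chunks */
                            emit(A64_MOVK(1, reg, chunk, shift), ctx);
            }
    }
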
134 struct jit_ctx *ctx) in emit_addr_mov_i64() argument
139 emit(A64_MOVN(1, reg, ~tmp & 0xffff, shift), ctx); in emit_addr_mov_i64()
143 emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx); in emit_addr_mov_i64()
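
emit_addr_mov_i64() is the fixed-length variant used where the value may be patched on a later pass: kernel addresses have all-ones upper bits, so a MOVN plus two MOVKs always suffices and the instruction count never depends on the address. A sketch consistent with lines 139 and 143 (the loop bound reflects the 48-bit kernel-address assumption):

    static inline void emit_addr_mov_i64(const int reg, const u64 val,
                                         struct jit_ctx *ctx)
    {
            u64 tmp = val;
            int shift = 0;

            /* MOVN seeds the all-ones top bits (line 139) ... */
            emit(A64_MOVN(1, reg, ~tmp & 0xffff, shift), ctx);
            /* ... and two MOVKs fill bits 16..47 (line 143): three
             * instructions, always, so the image size is stable
             * across passes. */
            while (shift < 32) {
                    tmp >>= 16;
                    shift += 16;
                    emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx);
            }
    }
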
148 const struct jit_ctx *ctx) in bpf2a64_offset() argument
157 return ctx->offset[bpf_insn + off] - (ctx->offset[bpf_insn] - 1); in bpf2a64_offset()
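
The off-by-one gymnastics on line 157 reconcile two conventions: a BPF jump offset is relative to the instruction after the branch, while an A64 branch offset is relative to the branch itself. ctx->offset[n] holds the A64 index at which BPF instruction n starts (see build_body below), and the branch is the last A64 instruction emitted for its BPF instruction, so it sits one slot before the start of the next one. With the non-matching bpf_insn++ line restored, the function reads:

    static inline int bpf2a64_offset(int bpf_insn, int off,
                                     const struct jit_ctx *ctx)
    {
            /* BPF branches are relative to the next BPF instruction */
            bpf_insn++;
            /* A64 branches are relative to the branch itself, which is
             * one slot before the start of the next BPF instruction. */
            return ctx->offset[bpf_insn + off] - (ctx->offset[bpf_insn] - 1);
    }
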
168 static inline int epilogue_offset(const struct jit_ctx *ctx) in epilogue_offset() argument
170 int to = ctx->epilogue_offset; in epilogue_offset()
171 int from = ctx->idx; in epilogue_offset()
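
epilogue_offset() is the same computation for jumps to the shared epilogue, whose A64 index was recorded after the body was built (line 1064 below); this works because both passes emit identical instruction counts. The two matched lines plus the obvious subtraction are the whole function:

    static inline int epilogue_offset(const struct jit_ctx *ctx)
    {
            int to = ctx->epilogue_offset;
            int from = ctx->idx;

            return to - from;
    }
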
189 static int build_prologue(struct jit_ctx *ctx, bool ebpf_from_cbpf) in build_prologue() argument
191 const struct bpf_prog *prog = ctx->prog; in build_prologue()
198 const int idx0 = ctx->idx; in build_prologue()
226 emit(A64_BTI_C, ctx); in build_prologue()
229 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx); in build_prologue()
230 emit(A64_MOV(1, A64_FP, A64_SP), ctx); in build_prologue()
233 emit(A64_PUSH(r6, r7, A64_SP), ctx); in build_prologue()
234 emit(A64_PUSH(r8, r9, A64_SP), ctx); in build_prologue()
235 emit(A64_PUSH(fp, tcc, A64_SP), ctx); in build_prologue()
238 emit(A64_MOV(1, fp, A64_SP), ctx); in build_prologue()
242 emit(A64_MOVZ(1, tcc, 0, 0), ctx); in build_prologue()
244 cur_offset = ctx->idx - idx0; in build_prologue()
253 emit(A64_BTI_J, ctx); in build_prologue()
257 ctx->stack_size = round_up(prog->aux->stack_depth, 16); in build_prologue()
260 emit(A64_SUB_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in build_prologue()
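
The prologue matches sketch the whole frame: a BTI landing pad for indirect entry (line 226), the standard FP/LR pair (229-230), the callee-saved registers backing BPF r6-r9 plus the BPF frame pointer and the tail-call counter tcc (233-235); the BPF frame pointer is then aimed at the current SP (238) and tcc is zeroed (242). The length check on line 244 (cur_offset against PROLOGUE_OFFSET) keeps the prologue a fixed size, so a tail call can enter the next program just past it, landing on the BTI_J pad from line 253 before the stack reservation (257-260). Assuming the file's usual register map (r6-r9 in x19-x22, BPF fp in x25, tcc in x26), the frame looks like:

    /*
     * Frame built by build_prologue(), high to low addresses:
     *
     *   entry SP -> +----------------------+
     *               | x29 (FP) | x30 (LR)  |  line 229
     *               +----------------------+
     *               | x19 (r6) | x20 (r7)  |  line 233
     *               +----------------------+
     *               | x21 (r8) | x22 (r9)  |  line 234
     *               +----------------------+
     *               | x25 (fp) | x26 (tcc) |  line 235
     *   BPF fp ---> +----------------------+  x25 = SP here (line 238)
     *               |  BPF program stack   |  ctx->stack_size bytes,
     *   final SP -> +----------------------+  reserved on line 260
     */
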
265 static int emit_bpf_tail_call(struct jit_ctx *ctx) in emit_bpf_tail_call() argument
274 const int idx0 = ctx->idx; in emit_bpf_tail_call()
275 #define cur_offset (ctx->idx - idx0) in emit_bpf_tail_call()
283 emit_a64_mov_i64(tmp, off, ctx); in emit_bpf_tail_call()
284 emit(A64_LDR32(tmp, r2, tmp), ctx); in emit_bpf_tail_call()
285 emit(A64_MOV(0, r3, r3), ctx); in emit_bpf_tail_call()
286 emit(A64_CMP(0, r3, tmp), ctx); in emit_bpf_tail_call()
287 emit(A64_B_(A64_COND_CS, jmp_offset), ctx); in emit_bpf_tail_call()
293 emit_a64_mov_i64(tmp, MAX_TAIL_CALL_CNT, ctx); in emit_bpf_tail_call()
294 emit(A64_CMP(1, tcc, tmp), ctx); in emit_bpf_tail_call()
295 emit(A64_B_(A64_COND_HI, jmp_offset), ctx); in emit_bpf_tail_call()
296 emit(A64_ADD_I(1, tcc, tcc, 1), ctx); in emit_bpf_tail_call()
303 emit_a64_mov_i64(tmp, off, ctx); in emit_bpf_tail_call()
304 emit(A64_ADD(1, tmp, r2, tmp), ctx); in emit_bpf_tail_call()
305 emit(A64_LSL(1, prg, r3, 3), ctx); in emit_bpf_tail_call()
306 emit(A64_LDR64(prg, tmp, prg), ctx); in emit_bpf_tail_call()
307 emit(A64_CBZ(1, prg, jmp_offset), ctx); in emit_bpf_tail_call()
311 emit_a64_mov_i64(tmp, off, ctx); in emit_bpf_tail_call()
312 emit(A64_LDR64(tmp, prg, tmp), ctx); in emit_bpf_tail_call()
313 emit(A64_ADD_I(1, tmp, tmp, sizeof(u32) * PROLOGUE_OFFSET), ctx); in emit_bpf_tail_call()
314 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in emit_bpf_tail_call()
315 emit(A64_BR(tmp), ctx); in emit_bpf_tail_call()
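
A tail call compiles to three guarded checks and a computed jump. The off values loaded at lines 283, 303 and 311 are not visible in the hits, but by construction they address the tail-call map's max_entries field, its ptrs[] array, and the target's bpf_func pointer; r2 holds the map and r3 the index. In C-like pseudocode the emitted sequence computes:

    /* Paraphrase of the emitted instructions, not kernel source. */
    if ((u32)index >= array->map.max_entries)   /* 283-287; the 32-bit
                                                   self-mov on 285 zero-
                                                   extends the index */
            goto out;
    if (tail_call_cnt > MAX_TAIL_CALL_CNT)      /* 293-295 */
            goto out;
    tail_call_cnt++;                            /* 296 */
    prog = array->ptrs[index];                  /* 303-306 */
    if (prog == NULL)                           /* 307 */
            goto out;
    /* Drop our stack frame (314), then enter the target just past its
     * fixed-size prologue (313), reusing the caller's register save
     * area and keeping the tail-call counter alive. */
    goto prog->bpf_func + sizeof(u32) * PROLOGUE_OFFSET;
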
330 static void build_epilogue(struct jit_ctx *ctx) in build_epilogue() argument
340 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in build_epilogue()
343 emit(A64_POP(fp, A64_R(26), A64_SP), ctx); in build_epilogue()
346 emit(A64_POP(r8, r9, A64_SP), ctx); in build_epilogue()
347 emit(A64_POP(r6, r7, A64_SP), ctx); in build_epilogue()
350 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx); in build_epilogue()
353 emit(A64_MOV(1, A64_R(0), r0), ctx); in build_epilogue()
355 emit(A64_RET(A64_LR), ctx); in build_epilogue()
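
The epilogue is the exact mirror of the prologue, and here every emit() happens to contain ctx, so the matches are the complete sequence; only the bpf2a64 register lookups are filled in below:

    static void build_epilogue(struct jit_ctx *ctx)
    {
            const u8 r0 = bpf2a64[BPF_REG_0];   /* x7 */
            const u8 r6 = bpf2a64[BPF_REG_6];
            const u8 r7 = bpf2a64[BPF_REG_7];
            const u8 r8 = bpf2a64[BPF_REG_8];
            const u8 r9 = bpf2a64[BPF_REG_9];
            const u8 fp = bpf2a64[BPF_REG_FP];

            /* Free the BPF stack, then unwind the pushes in reverse. */
            emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
            emit(A64_POP(fp, A64_R(26), A64_SP), ctx);
            emit(A64_POP(r8, r9, A64_SP), ctx);
            emit(A64_POP(r6, r7, A64_SP), ctx);
            emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx);

            /* BPF keeps its return value in r0 (x7); the AAPCS wants x0. */
            emit(A64_MOV(1, A64_R(0), r0), ctx);
            emit(A64_RET(A64_LR), ctx);
    }
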
374 struct jit_ctx *ctx, in add_exception_handler() argument
381 if (!ctx->image) in add_exception_handler()
388 if (!ctx->prog->aux->extable || in add_exception_handler()
389 WARN_ON_ONCE(ctx->exentry_idx >= ctx->prog->aux->num_exentries)) in add_exception_handler()
392 ex = &ctx->prog->aux->extable[ctx->exentry_idx]; in add_exception_handler()
393 pc = (unsigned long)&ctx->image[ctx->idx - 1]; in add_exception_handler()
415 ctx->exentry_idx++; in add_exception_handler()
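
add_exception_handler() runs only on the image pass (line 381: with no image there are no addresses to record). It verifies the verifier actually allocated extable space (388-389), then takes the PC of the instruction just emitted (393; idx - 1 because emit() has already advanced idx) and fills the next extable entry so that a fault on that load is fixed up by skipping it and zeroing the destination register. The field encoding of the entry is version-specific and elided in this sketch of the guard structure:

    if (!ctx->image)        /* sizing pass: nothing to record */
            return 0;

    if (!ctx->prog->aux->extable ||
        WARN_ON_ONCE(ctx->exentry_idx >= ctx->prog->aux->num_exentries))
            return -EINVAL;

    ex = &ctx->prog->aux->extable[ctx->exentry_idx];
    pc = (unsigned long)&ctx->image[ctx->idx - 1];
    /* ... encode pc, the fixup target and dst_reg into *ex ... */
    ctx->exentry_idx++;
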
425 static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx, in build_insn() argument
436 const int i = insn - ctx->prog->insnsi; in build_insn()
460 emit(A64_MOV(is64, dst, src), ctx); in build_insn()
465 emit(A64_ADD(is64, dst, dst, src), ctx); in build_insn()
469 emit(A64_SUB(is64, dst, dst, src), ctx); in build_insn()
473 emit(A64_AND(is64, dst, dst, src), ctx); in build_insn()
477 emit(A64_ORR(is64, dst, dst, src), ctx); in build_insn()
481 emit(A64_EOR(is64, dst, dst, src), ctx); in build_insn()
485 emit(A64_MUL(is64, dst, dst, src), ctx); in build_insn()
489 emit(A64_UDIV(is64, dst, dst, src), ctx); in build_insn()
493 emit(A64_UDIV(is64, tmp, dst, src), ctx); in build_insn()
494 emit(A64_MSUB(is64, dst, dst, tmp, src), ctx); in build_insn()
498 emit(A64_LSLV(is64, dst, dst, src), ctx); in build_insn()
502 emit(A64_LSRV(is64, dst, dst, src), ctx); in build_insn()
506 emit(A64_ASRV(is64, dst, dst, src), ctx); in build_insn()
511 emit(A64_NEG(is64, dst, dst), ctx); in build_insn()
525 emit(A64_REV16(is64, dst, dst), ctx); in build_insn()
527 emit(A64_UXTH(is64, dst, dst), ctx); in build_insn()
530 emit(A64_REV32(is64, dst, dst), ctx); in build_insn()
534 emit(A64_REV64(dst, dst), ctx); in build_insn()
542 emit(A64_UXTH(is64, dst, dst), ctx); in build_insn()
546 emit(A64_UXTW(is64, dst, dst), ctx); in build_insn()
556 emit_a64_mov_i(is64, dst, imm, ctx); in build_insn()
562 emit(A64_ADD_I(is64, dst, dst, imm), ctx); in build_insn()
564 emit(A64_SUB_I(is64, dst, dst, -imm), ctx); in build_insn()
566 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
567 emit(A64_ADD(is64, dst, dst, tmp), ctx); in build_insn()
573 emit(A64_SUB_I(is64, dst, dst, imm), ctx); in build_insn()
575 emit(A64_ADD_I(is64, dst, dst, -imm), ctx); in build_insn()
577 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
578 emit(A64_SUB(is64, dst, dst, tmp), ctx); in build_insn()
585 emit(a64_insn, ctx); in build_insn()
587 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
588 emit(A64_AND(is64, dst, dst, tmp), ctx); in build_insn()
595 emit(a64_insn, ctx); in build_insn()
597 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
598 emit(A64_ORR(is64, dst, dst, tmp), ctx); in build_insn()
605 emit(a64_insn, ctx); in build_insn()
607 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
608 emit(A64_EOR(is64, dst, dst, tmp), ctx); in build_insn()
613 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
614 emit(A64_MUL(is64, dst, dst, tmp), ctx); in build_insn()
618 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
619 emit(A64_UDIV(is64, dst, dst, tmp), ctx); in build_insn()
623 emit_a64_mov_i(is64, tmp2, imm, ctx); in build_insn()
624 emit(A64_UDIV(is64, tmp, dst, tmp2), ctx); in build_insn()
625 emit(A64_MSUB(is64, dst, dst, tmp, tmp2), ctx); in build_insn()
629 emit(A64_LSL(is64, dst, dst, imm), ctx); in build_insn()
633 emit(A64_LSR(is64, dst, dst, imm), ctx); in build_insn()
637 emit(A64_ASR(is64, dst, dst, imm), ctx); in build_insn()
642 jmp_offset = bpf2a64_offset(i, off, ctx); in build_insn()
644 emit(A64_B(jmp_offset), ctx); in build_insn()
667 emit(A64_CMP(is64, dst, src), ctx); in build_insn()
669 jmp_offset = bpf2a64_offset(i, off, ctx); in build_insn()
706 emit(A64_B_(jmp_cond, jmp_offset), ctx); in build_insn()
710 emit(A64_TST(is64, dst, src), ctx); in build_insn()
734 emit(A64_CMP_I(is64, dst, imm), ctx); in build_insn()
736 emit(A64_CMN_I(is64, dst, -imm), ctx); in build_insn()
738 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
739 emit(A64_CMP(is64, dst, tmp), ctx); in build_insn()
746 emit(a64_insn, ctx); in build_insn()
748 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
749 emit(A64_TST(is64, dst, tmp), ctx); in build_insn()
759 ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass, in build_insn()
763 emit_addr_mov_i64(tmp, func_addr, ctx); in build_insn()
764 emit(A64_BLR(tmp), ctx); in build_insn()
765 emit(A64_MOV(1, r0, A64_R(0)), ctx); in build_insn()
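
A BPF helper or BPF-to-BPF call (lines 759-765) resolves the target with bpf_jit_get_func_addr(), materializes the address with the fixed-length emit_addr_mov_i64() so the image size cannot change when the final address is patched in on the extra pass, branches through the temp register, and copies the AAPCS return register into BPF r0 (x7):

    /* Condensed from the matches; error handling abbreviated. */
    ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass,
                                &func_addr, &func_addr_fixed);
    if (ret < 0)
            return ret;
    emit_addr_mov_i64(tmp, func_addr, ctx);  /* fixed 3-insn load */
    emit(A64_BLR(tmp), ctx);                 /* indirect call */
    emit(A64_MOV(1, r0, A64_R(0)), ctx);     /* x0 -> BPF r0 */
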
770 if (emit_bpf_tail_call(ctx)) in build_insn()
777 if (i == ctx->prog->len - 1) in build_insn()
779 jmp_offset = epilogue_offset(ctx); in build_insn()
781 emit(A64_B(jmp_offset), ctx); in build_insn()
792 emit_addr_mov_i64(dst, imm64, ctx); in build_insn()
794 emit_a64_mov_i64(dst, imm64, ctx); in build_insn()
808 emit_a64_mov_i(1, tmp, off, ctx); in build_insn()
811 emit(A64_LDR32(dst, src, tmp), ctx); in build_insn()
814 emit(A64_LDRH(dst, src, tmp), ctx); in build_insn()
817 emit(A64_LDRB(dst, src, tmp), ctx); in build_insn()
820 emit(A64_LDR64(dst, src, tmp), ctx); in build_insn()
824 ret = add_exception_handler(insn, ctx, dst); in build_insn()
848 emit_a64_mov_i(1, tmp2, off, ctx); in build_insn()
849 emit_a64_mov_i(1, tmp, imm, ctx); in build_insn()
852 emit(A64_STR32(tmp, dst, tmp2), ctx); in build_insn()
855 emit(A64_STRH(tmp, dst, tmp2), ctx); in build_insn()
858 emit(A64_STRB(tmp, dst, tmp2), ctx); in build_insn()
861 emit(A64_STR64(tmp, dst, tmp2), ctx); in build_insn()
871 emit_a64_mov_i(1, tmp, off, ctx); in build_insn()
874 emit(A64_STR32(src, dst, tmp), ctx); in build_insn()
877 emit(A64_STRH(src, dst, tmp), ctx); in build_insn()
880 emit(A64_STRB(src, dst, tmp), ctx); in build_insn()
883 emit(A64_STR64(src, dst, tmp), ctx); in build_insn()
903 emit_a64_mov_i(1, tmp, off, ctx); in build_insn()
904 emit(A64_ADD(1, tmp, tmp, dst), ctx); in build_insn()
908 emit(A64_STADD(isdw, reg, src), ctx); in build_insn()
910 emit(A64_LDXR(isdw, tmp2, reg), ctx); in build_insn()
911 emit(A64_ADD(isdw, tmp2, tmp2, src), ctx); in build_insn()
912 emit(A64_STXR(isdw, tmp2, reg, tmp3), ctx); in build_insn()
915 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx); in build_insn()
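
The atomic-add matches (903-915) show both strategies for BPF_STX | BPF_ATOMIC with BPF_ADD: the address is formed first (903-904, only when off != 0), then either a single LSE STADD on CPUs with far atomics (908), or the classic load-exclusive/store-exclusive retry loop (910-915), in which tmp3 receives the STXR status and the CBNZ at 915 branches back over the three-instruction loop (jmp_offset of -3). That the LSE path is selected via a CPU-capability check is an assumption here, consistent with the two alternatives appearing back to back:

    /* Paraphrase of the two emitted variants. */
    if (lse_atomics_available) {            /* assumed capability check */
            /* stadd  src, [reg]                       line 908 */
    } else {
            /* again: ldxr  tmp2, [reg]                line 910
             *        add   tmp2, tmp2, src            line 911
             *        stxr  w(tmp3), tmp2, [reg]       line 912
             *        cbnz  w(tmp3), again             line 915 */
    }
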
927 static int build_body(struct jit_ctx *ctx, bool extra_pass) in build_body() argument
929 const struct bpf_prog *prog = ctx->prog; in build_body()
945 if (ctx->image == NULL) in build_body()
946 ctx->offset[i] = ctx->idx; in build_body()
947 ret = build_insn(insn, ctx, extra_pass); in build_body()
950 if (ctx->image == NULL) in build_body()
951 ctx->offset[i] = ctx->idx; in build_body()
962 if (ctx->image == NULL) in build_body()
963 ctx->offset[i] = ctx->idx; in build_body()
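
build_body() is where the two-pass design pays off: on the sizing pass (ctx->image == NULL) it records, for every BPF instruction, the A64 index at which that instruction starts (945-946). A positive return from build_insn() means a 16-byte BPF_LD | BPF_IMM | BPF_DW instruction consumed two BPF slots, so the second slot gets an offset too (950-951), and after the loop the end-of-program index is stored as well (962-963), which is why ctx.offset is allocated with prog->len + 1 entries (line 1042 below). Reconstructed:

    static int build_body(struct jit_ctx *ctx, bool extra_pass)
    {
            const struct bpf_prog *prog = ctx->prog;
            int i;

            for (i = 0; i < prog->len; i++) {
                    const struct bpf_insn *insn = &prog->insnsi[i];
                    int ret;

                    if (ctx->image == NULL)
                            ctx->offset[i] = ctx->idx;
                    ret = build_insn(insn, ctx, extra_pass);
                    if (ret > 0) {          /* double-word immediate */
                            i++;
                            if (ctx->image == NULL)
                                    ctx->offset[i] = ctx->idx;
                            continue;
                    }
                    if (ret)
                            return ret;
            }

            /* offset[prog->len] = end of program */
            if (ctx->image == NULL)
                    ctx->offset[i] = ctx->idx;

            return 0;
    }
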
968 static int validate_code(struct jit_ctx *ctx) in validate_code() argument
972 for (i = 0; i < ctx->idx; i++) { in validate_code()
973 u32 a64_insn = le32_to_cpu(ctx->image[i]); in validate_code()
979 if (WARN_ON_ONCE(ctx->exentry_idx != ctx->prog->aux->num_exentries)) in validate_code()
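
validate_code() exploits a convention of the A64_* encoders: asked for an impossible encoding, they return a break/fault opcode rather than garbage, so one linear scan of the finished image catches every such failure (the comparison constant on the non-matching line between 973 and 979 is AARCH64_BREAK_FAULT in the kernel's encoders). The check on 979 confirms both passes agreed on the number of exception-table entries:

    static int validate_code(struct jit_ctx *ctx)
    {
            int i;

            for (i = 0; i < ctx->idx; i++) {
                    u32 a64_insn = le32_to_cpu(ctx->image[i]);

                    if (a64_insn == AARCH64_BREAK_FAULT)
                            return -1;
            }

            if (WARN_ON_ONCE(ctx->exentry_idx != ctx->prog->aux->num_exentries))
                    return -1;

            return 0;
    }
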
993 struct jit_ctx ctx; member
1005 struct jit_ctx ctx; in bpf_int_jit_compile() local
1031 if (jit_data->ctx.offset) { in bpf_int_jit_compile()
1032 ctx = jit_data->ctx; in bpf_int_jit_compile()
1036 prog_size = sizeof(u32) * ctx.idx; in bpf_int_jit_compile()
1039 memset(&ctx, 0, sizeof(ctx)); in bpf_int_jit_compile()
1040 ctx.prog = prog; in bpf_int_jit_compile()
1042 ctx.offset = kcalloc(prog->len + 1, sizeof(int), GFP_KERNEL); in bpf_int_jit_compile()
1043 if (ctx.offset == NULL) { in bpf_int_jit_compile()
1054 if (build_prologue(&ctx, was_classic)) { in bpf_int_jit_compile()
1059 if (build_body(&ctx, extra_pass)) { in bpf_int_jit_compile()
1064 ctx.epilogue_offset = ctx.idx; in bpf_int_jit_compile()
1065 build_epilogue(&ctx); in bpf_int_jit_compile()
1071 prog_size = sizeof(u32) * ctx.idx; in bpf_int_jit_compile()
1082 ctx.image = (__le32 *)image_ptr; in bpf_int_jit_compile()
1086 ctx.idx = 0; in bpf_int_jit_compile()
1087 ctx.exentry_idx = 0; in bpf_int_jit_compile()
1089 build_prologue(&ctx, was_classic); in bpf_int_jit_compile()
1091 if (build_body(&ctx, extra_pass)) { in bpf_int_jit_compile()
1097 build_epilogue(&ctx); in bpf_int_jit_compile()
1100 if (validate_code(&ctx)) { in bpf_int_jit_compile()
1108 bpf_jit_dump(prog->len, prog_size, 2, ctx.image); in bpf_int_jit_compile()
1110 bpf_flush_icache(header, ctx.image + ctx.idx); in bpf_int_jit_compile()
1113 if (extra_pass && ctx.idx != jit_data->ctx.idx) { in bpf_int_jit_compile()
1115 ctx.idx, jit_data->ctx.idx); in bpf_int_jit_compile()
1124 jit_data->ctx = ctx; in bpf_int_jit_compile()
1128 prog->bpf_func = (void *)ctx.image; in bpf_int_jit_compile()
1137 ctx.offset[i] *= AARCH64_INSN_SIZE; in bpf_int_jit_compile()
1138 bpf_prog_fill_jited_linfo(prog, ctx.offset + 1); in bpf_int_jit_compile()
1140 kfree(ctx.offset); in bpf_int_jit_compile()
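
Taken together, the bpf_int_jit_compile() hits trace the whole driver: pass 1 runs with ctx.image == NULL purely to count instructions and fill ctx.offset (1039-1065); the image is then allocated and pass 2 repeats the identical walk with idx and exentry_idx reset, this time writing instructions (1082-1097). validate_code() vets the result, the icache is flushed (1110), and the extra pass asserts that both runs produced the same instruction count (1113-1116). Finally, the per-instruction offsets are scaled from instruction indices to byte offsets (1137) and handed to bpf_prog_fill_jited_linfo() starting at offset + 1, since offset[i + 1] is the end of instruction i (1138). Condensed from the matches (error paths and the hypothetical out_free label abbreviated):

    memset(&ctx, 0, sizeof(ctx));
    ctx.prog   = prog;
    ctx.offset = kcalloc(prog->len + 1, sizeof(int), GFP_KERNEL);

    build_prologue(&ctx, was_classic);  /* pass 1: image == NULL, so  */
    build_body(&ctx, extra_pass);       /* emit() only bumps idx and  */
    ctx.epilogue_offset = ctx.idx;      /* offsets get recorded       */
    build_epilogue(&ctx);

    prog_size = sizeof(u32) * ctx.idx;  /* ... allocate the image ... */
    ctx.image = (__le32 *)image_ptr;
    ctx.idx = 0;
    ctx.exentry_idx = 0;

    build_prologue(&ctx, was_classic);  /* pass 2: same walk, now */
    build_body(&ctx, extra_pass);       /* writing instructions   */
    build_epilogue(&ctx);

    if (validate_code(&ctx))
            goto out_free;
    bpf_flush_icache(header, ctx.image + ctx.idx);
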