Lines matching refs:code (each match below is prefixed with its line number in the searched file)
71 /* As per nm, we expose JITed images as text (code) section for
86 .code = BPF_ALU64 | BPF_OP(OP) | BPF_X, \
94 .code = BPF_ALU | BPF_OP(OP) | BPF_X, \
104 .code = BPF_ALU64 | BPF_OP(OP) | BPF_K, \
112 .code = BPF_ALU | BPF_OP(OP) | BPF_K, \
122 .code = BPF_ALU | BPF_END | BPF_SRC(TYPE), \
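The matches at lines 86-122 appear to be the instruction-construction macros from the kernel's include/linux/filter.h (BPF_ALU64_REG, BPF_ALU32_REG, BPF_ALU64_IMM, BPF_ALU32_IMM and BPF_ENDIAN); only the .code initializer of each struct bpf_insn shows up in the listing. A minimal sketch, assuming those macro names, of how the opcode byte is composed:

    /* r0 += r1 as a 64-bit register-to-register ALU op. The opcode ORs
     * together the class (BPF_ALU64), the operation (BPF_ADD) and the
     * source flag (BPF_X = register source, BPF_K = immediate).
     */
    struct bpf_insn add = BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1);
    /* add.code == (BPF_ALU64 | BPF_ADD | BPF_X) */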
132 .code = BPF_ALU64 | BPF_MOV | BPF_X, \
140 .code = BPF_ALU | BPF_MOV | BPF_X, \
150 .code = BPF_ALU64 | BPF_MOV | BPF_K, \
158 .code = BPF_ALU | BPF_MOV | BPF_K, \
167 .code = BPF_ALU | BPF_MOV | BPF_X, \
175 return insn->code == (BPF_ALU | BPF_MOV | BPF_X) && insn->imm == 1;
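Lines 167-175 match what looks like the zero-extension marker and its test (BPF_ZEXT_REG / insn_is_zext in filter.h): a 32-bit register MOV is reused as an explicit 32-to-64-bit zero-extend by setting imm to 1, which an ordinary BPF_MOV32_REG leaves at 0. A sketch of the test, under a hypothetical name:

    /* True for the special mov32 that marks a 32->64 zero-extension;
     * the imm == 1 tag is what separates it from a normal 32-bit move.
     */
    static bool is_zext_marker(const struct bpf_insn *insn)
    {
            return insn->code == (BPF_ALU | BPF_MOV | BPF_X) &&
                   insn->imm == 1;
    }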
184 .code = BPF_LD | BPF_DW | BPF_IMM, \
190 .code = 0, /* zero is reserved opcode */ \
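Lines 184-190 are the two halves of the 64-bit immediate load (BPF_LD_IMM64_RAW): the encoding needs 16 bytes, so it spans two consecutive struct bpf_insn slots. The first slot carries BPF_LD | BPF_DW | BPF_IMM and the low 32 bits of the constant; the second uses the reserved opcode 0 and carries the high 32 bits. A sketch, assuming the BPF_LD_IMM64 wrapper:

    /* r2 = 0x1122334455667788; the macro expands to two insns, so the
     * initializer fills both array slots.
     */
    struct bpf_insn ld64[2] = {
            BPF_LD_IMM64(BPF_REG_2, 0x1122334455667788ULL),
    };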
204 .code = BPF_ALU64 | BPF_MOV | BPF_SRC(TYPE), \
212 .code = BPF_ALU | BPF_MOV | BPF_SRC(TYPE), \
222 .code = BPF_LD | BPF_SIZE(SIZE) | BPF_ABS, \
232 .code = BPF_LD | BPF_SIZE(SIZE) | BPF_IND, \
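Lines 222-232 match the legacy packet-access forms BPF_LD_ABS and BPF_LD_IND, carried over from classic BPF: they load from the packet at an absolute or register-indexed offset into r0. A sketch:

    /* Load the 16-bit EtherType field (byte offset 12 in the Ethernet
     * header) from the packet into r0.
     */
    struct bpf_insn ld_proto = BPF_LD_ABS(BPF_H, 12);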
242 .code = BPF_LDX | BPF_SIZE(SIZE) | BPF_MEM, \
252 .code = BPF_STX | BPF_SIZE(SIZE) | BPF_MEM, \
262 .code = BPF_STX | BPF_SIZE(SIZE) | BPF_XADD, \
272 .code = BPF_ST | BPF_SIZE(SIZE) | BPF_MEM, \
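Lines 242-272 are the memory forms: BPF_LDX_MEM (register load), BPF_STX_MEM (store from register), BPF_STX_XADD (atomic add to memory, the pre-BPF_ATOMIC spelling) and BPF_ST_MEM (store immediate). A sketch of each, assuming those macro names:

    struct bpf_insn mem_ops[] = {
            /* r0 = *(u32 *)(r1 + 0) */
            BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_1, 0),
            /* *(u64 *)(r10 - 8) = r0   (r10 is the frame pointer) */
            BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_0, -8),
            /* *(u32 *)(r10 - 16) = 42 */
            BPF_ST_MEM(BPF_W, BPF_REG_10, -16, 42),
            /* atomic: *(u64 *)(r10 - 8) += r0 */
            BPF_STX_XADD(BPF_DW, BPF_REG_10, BPF_REG_0, -8),
    };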
282 .code = BPF_JMP | BPF_OP(OP) | BPF_X, \
292 .code = BPF_JMP | BPF_OP(OP) | BPF_K, \
302 .code = BPF_JMP32 | BPF_OP(OP) | BPF_X, \
312 .code = BPF_JMP32 | BPF_OP(OP) | BPF_K, \
322 .code = BPF_JMP | BPF_JA, \
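Lines 282-322 are the branch builders: BPF_JMP_REG and BPF_JMP_IMM compare full 64-bit registers, the BPF_JMP32 variants compare only the low 32 bits, and BPF_JMP_A is the unconditional jump. Offsets count instructions, not bytes, relative to the following instruction. A sketch:

    /* if (r0 == 0) skip the next two instructions */
    struct bpf_insn jeq = BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 2);
    /* unconditionally skip one instruction */
    struct bpf_insn ja  = BPF_JMP_A(1);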
332 .code = BPF_JMP | BPF_CALL, \
345 .code = BPF_JMP | BPF_CALL, \
351 /* Raw code statement block */
355 .code = CODE, \
365 .code = BPF_JMP | BPF_EXIT, \
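Lines 332-365 cover helper calls (both matches carry BPF_JMP | BPF_CALL), the escape hatch for arbitrary opcodes (the "raw code statement block"), and BPF_EXIT_INSN. Combined with the MOV macros above, that is enough for the canonical smallest program:

    /* "return 0": set the return-value register, then exit. */
    struct bpf_insn ret0[] = {
            BPF_MOV64_IMM(BPF_REG_0, 0),    /* r0 = 0 */
            BPF_EXIT_INSN(),                /* return r0 */
    };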
427 const int __size = bpf_size_to_bytes(BPF_SIZE((insn)->code)); \
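Line 427 shows the opcode byte being decoded: BPF_SIZE() masks out the width bits, and bpf_size_to_bytes() maps BPF_B/BPF_H/BPF_W/BPF_DW to 1/2/4/8, returning a negative errno for anything else. The masks come from uapi/linux/bpf_common.h. The same decode as a small helper:

    /* How many bytes a load/store insn touches: the class is
     * code & 0x07, the size is code & 0x18, the mode is code & 0xe0.
     */
    static int insn_access_bytes(const struct bpf_insn *insn)
    {
            return bpf_size_to_bytes(BPF_SIZE(insn->code));
    }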
564 * code. Use bpf_call_func to perform additional validation of the call
589 * We are about to call dynamically generated code. Check that the
862 bool bpf_opcode_in_insntable(u8 code);
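Line 862 declares bpf_opcode_in_insntable(), which reports whether an opcode has a handler in the interpreter's instruction table; the verifier uses it to reject unknown opcodes early. A sketch of that style of check:

    /* Refuse any instruction the interpreter could not execute. */
    static int check_opcode(const struct bpf_insn *insn)
    {
            if (!bpf_opcode_in_insntable(insn->code))
                    return -EINVAL;
            return 0;
    }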
1023 print_hex_dump(KERN_ERR, "JIT code: ", DUMP_PREFIX_OFFSET,
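Line 1023 sits inside bpf_jit_dump(), which hex-dumps a JITed image to the kernel log. Architecture JITs typically guard the call on the sysctl knob; a sketch, where pass, image_size and image would come from the JIT's own context:

    if (bpf_jit_enable > 1)
            bpf_jit_dump(prog->len, image_size, pass, image);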
1168 switch (first->code) {
1187 BUG_ON(ftest->code & BPF_ANC);
1189 switch (ftest->code) {
1215 return ftest->code;
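Lines 1168-1215 come from the classic-BPF side: one function switches on the first instruction's opcode, and what looks like bpf_anc_helper() remaps absolute word loads at the magic SKF_AD_OFF offsets onto ancillary operations, returning every other opcode unchanged. A trimmed sketch of that shape (the real helper has one case per SKF_AD_* constant):

    /* Map classic loads at SKF_AD_* offsets to ancillary ops; anything
     * else keeps its opcode.
     */
    static u16 anc_code(const struct sock_filter *ftest)
    {
            BUG_ON(ftest->code & BPF_ANC);

            switch (ftest->code) {
            case BPF_LD | BPF_W | BPF_ABS:
            case BPF_LDX | BPF_W | BPF_ABS:
                    switch (ftest->k) {
                    case SKF_AD_OFF + SKF_AD_PROTOCOL:
                            return BPF_ANC | SKF_AD_PROTOCOL;
                    case SKF_AD_OFF + SKF_AD_IFINDEX:
                            return BPF_ANC | SKF_AD_IFINDEX;
                    /* ... */
                    }
                    fallthrough;
            default:
                    return ftest->code;
            }
    }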