Searched refs:op (Results 1 – 25 of 288) sorted by relevance


/arch/powerpc/lib/
sstep.c
84 struct instruction_op *op) in branch_taken() argument
91 op->type |= DECCTR; in branch_taken()
458 static int do_fp_load(struct instruction_op *op, unsigned long ea, in do_fp_load() argument
471 nb = GETSIZE(op->type); in do_fp_load()
474 rn = op->reg; in do_fp_load()
485 if (op->type & FPCONV) in do_fp_load()
487 else if (op->type & SIGNEXT) in do_fp_load()
509 static int do_fp_store(struct instruction_op *op, unsigned long ea, in do_fp_store() argument
521 nb = GETSIZE(op->type); in do_fp_store()
524 rn = op->reg; in do_fp_store()
[all …]
/arch/sh/kernel/
kgdb.c
20 #define OPCODE_BT(op) (((op) & 0xff00) == 0x8900) argument
21 #define OPCODE_BF(op) (((op) & 0xff00) == 0x8b00) argument
22 #define OPCODE_BTF_DISP(op) (((op) & 0x80) ? (((op) | 0xffffff80) << 1) : \ argument
23 (((op) & 0x7f ) << 1))
24 #define OPCODE_BFS(op) (((op) & 0xff00) == 0x8f00) argument
25 #define OPCODE_BTS(op) (((op) & 0xff00) == 0x8d00) argument
26 #define OPCODE_BRA(op) (((op) & 0xf000) == 0xa000) argument
27 #define OPCODE_BRA_DISP(op) (((op) & 0x800) ? (((op) | 0xfffff800) << 1) : \ argument
28 (((op) & 0x7ff) << 1))
29 #define OPCODE_BRAF(op) (((op) & 0xf0ff) == 0x0023) argument
[all …]
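The macros above let kgdb predict the next program counter when single-stepping: each mask/compare pair classifies a 16-bit SH opcode, and the _DISP macros sign-extend the branch displacement field and scale it to bytes. A minimal user-space sketch of the same decoding; the PC + 4 branch base and the 0xaffe ("bra .") encoding are assumptions taken from the SH ISA, not from this file:

#include <stdint.h>
#include <stdio.h>

static int is_bra(uint16_t op)
{
        return (op & 0xf000) == 0xa000;      /* same test as OPCODE_BRA() */
}

static int32_t bra_disp(uint16_t op)
{
        /* 12-bit signed displacement, scaled by 2 as in OPCODE_BRA_DISP() */
        uint32_t d = (op & 0x800) ? (op | 0xfffff800u) : (op & 0x7ffu);
        return (int32_t)(d << 1);
}

int main(void)
{
        uint32_t pc = 0x8c001000;
        uint16_t op = 0xaffe;                /* bra . (displacement -2) */

        if (is_bra(op))
                printf("branch target: 0x%08x\n", pc + 4 + bra_disp(op));
        return 0;
}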
/arch/xtensa/include/asm/
atomic.h
60 #define ATOMIC_OP(op) \ argument
61 static inline void atomic_##op(int i, atomic_t *v) \
68 " " #op " %0, %1, %2\n" \
78 #define ATOMIC_OP_RETURN(op) \ argument
79 static inline int atomic_##op##_return(int i, atomic_t *v) \
86 " " #op " %0, %1, %2\n" \
90 " " #op " %0, %1, %2\n" \
99 #define ATOMIC_FETCH_OP(op) \ argument
100 static inline int atomic_fetch_##op(int i, atomic_t *v) \
107 " " #op " %0, %1, %2\n" \
[all …]
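This stamping pattern recurs across the architecture headers in the rest of these results: one macro body, parameterized by the operation name, is expanded once per operation to generate the whole atomic_add/atomic_sub/... family. A compiler-builtin stand-in for the inline assembly (a sketch, not the kernel's implementation; the real bodies are the per-architecture LL/SC loops shown in these files):

typedef struct { int counter; } atomic_t;

#define ATOMIC_OP(op)                                                  \
static inline void atomic_##op(int i, atomic_t *v)                     \
{                                                                      \
        __atomic_fetch_##op(&v->counter, i, __ATOMIC_RELAXED);         \
}

#define ATOMIC_FETCH_OP(op)                                            \
static inline int atomic_fetch_##op(int i, atomic_t *v)                \
{                                                                      \
        return __atomic_fetch_##op(&v->counter, i, __ATOMIC_RELAXED);  \
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)   /* defines atomic_add() and atomic_fetch_add() */
ATOMIC_OPS(sub)
ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

Token pasting (##) is what turns the single template into five function pairs; each architecture only swaps the body.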
/arch/sh/include/asm/
atomic-llsc.h
19 #define ATOMIC_OP(op) \ argument
20 static inline void atomic_##op(int i, atomic_t *v) \
25 "1: movli.l @%2, %0 ! atomic_" #op "\n" \
26 " " #op " %1, %0 \n" \
34 #define ATOMIC_OP_RETURN(op) \ argument
35 static inline int atomic_##op##_return(int i, atomic_t *v) \
40 "1: movli.l @%2, %0 ! atomic_" #op "_return \n" \
41 " " #op " %1, %0 \n" \
52 #define ATOMIC_FETCH_OP(op) \ argument
53 static inline int atomic_fetch_##op(int i, atomic_t *v) \
[all …]
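movli.l/movco.l are SH's load-locked/store-conditional pair: movco.l only succeeds if nothing disturbed the reservation since the movli.l, and the branch after it retries the whole sequence otherwise. The same loop shape, written with a weak compare-exchange as a portable stand-in (a sketch of the control flow, not of the hardware primitive):

typedef struct { int counter; } atomic_t;

static inline int atomic_fetch_add_sketch(int i, atomic_t *v)
{
        int old = __atomic_load_n(&v->counter, __ATOMIC_RELAXED);

        /* Retry until the store succeeds -- the movli.l/movco.l/bf loop.
         * A weak CAS may fail spuriously, just like a lost reservation;
         * on failure, old is reloaded and old + i is recomputed. */
        while (!__atomic_compare_exchange_n(&v->counter, &old, old + i,
                                            1 /* weak */, __ATOMIC_RELAXED,
                                            __ATOMIC_RELAXED))
                ;
        return old;
}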
atomic-grb.h
5 #define ATOMIC_OP(op) \ argument
6 static inline void atomic_##op(int i, atomic_t *v) \
16 " " #op " %2, %0 \n\t" /* $op */ \
25 #define ATOMIC_OP_RETURN(op) \ argument
26 static inline int atomic_##op##_return(int i, atomic_t *v) \
36 " " #op " %2, %0 \n\t" /* $op */ \
47 #define ATOMIC_FETCH_OP(op) \ argument
48 static inline int atomic_fetch_##op(int i, atomic_t *v) \
59 " " #op " %3, %0 \n\t" /* $op */ \
69 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op) argument
[all …]
atomic-irq.h
13 #define ATOMIC_OP(op, c_op) \ argument
14 static inline void atomic_##op(int i, atomic_t *v) \
23 #define ATOMIC_OP_RETURN(op, c_op) \ argument
24 static inline int atomic_##op##_return(int i, atomic_t *v) \
37 #define ATOMIC_FETCH_OP(op, c_op) \ argument
38 static inline int atomic_fetch_##op(int i, atomic_t *v) \
50 #define ATOMIC_OPS(op, c_op) \ argument
51 ATOMIC_OP(op, c_op) \
52 ATOMIC_OP_RETURN(op, c_op) \
53 ATOMIC_FETCH_OP(op, c_op)
[all …]
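Unlike the LL/SC variants, atomic-irq.h is the uniprocessor fallback: the macro takes both the operation name and the matching C operator (c_op), and makes the plain C expression atomic simply by disabling interrupts around it. A user-space sketch of the two-parameter stamp, with a mutex standing in for local_irq_save/restore:

#include <pthread.h>

typedef struct { int counter; } atomic_t;

static pthread_mutex_t atomic_lock = PTHREAD_MUTEX_INITIALIZER;

#define ATOMIC_OP(op, c_op)                                              \
static inline void atomic_##op(int i, atomic_t *v)                       \
{                                                                        \
        pthread_mutex_lock(&atomic_lock);   /* ~ local_irq_save() */     \
        v->counter c_op i;                                               \
        pthread_mutex_unlock(&atomic_lock); /* ~ local_irq_restore() */  \
}

ATOMIC_OP(add, +=)
ATOMIC_OP(sub, -=)

#undef ATOMIC_OP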
/arch/arm/probes/kprobes/
opt-arm.c
119 int arch_check_optimized_kprobe(struct optimized_kprobe *op) in arch_check_optimized_kprobe() argument
140 __arch_remove_optimized_kprobe(struct optimized_kprobe *op, int dirty) in __arch_remove_optimized_kprobe() argument
142 if (op->optinsn.insn) { in __arch_remove_optimized_kprobe()
143 free_optinsn_slot(op->optinsn.insn, dirty); in __arch_remove_optimized_kprobe()
144 op->optinsn.insn = NULL; in __arch_remove_optimized_kprobe()
151 optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs) in optimized_callback() argument
154 struct kprobe *p = &op->kp; in optimized_callback()
158 regs->ARM_pc = (unsigned long)op->kp.addr; in optimized_callback()
165 kprobes_inc_nmissed_count(&op->kp); in optimized_callback()
167 __this_cpu_write(current_kprobe, &op->kp); in optimized_callback()
[all …]
/arch/x86/kernel/kprobes/
opt.c
36 struct optimized_kprobe *op; in __recover_optprobed_insn() local
45 op = container_of(kp, struct optimized_kprobe, kp); in __recover_optprobed_insn()
47 if (list_empty(&op->list)) in __recover_optprobed_insn()
65 memcpy(buf + 1, op->optinsn.copied_insn, RELATIVE_ADDR_SIZE); in __recover_optprobed_insn()
68 memcpy(buf, op->optinsn.copied_insn + offs, RELATIVE_ADDR_SIZE - offs); in __recover_optprobed_insn()
148 optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs) in optimized_callback() argument
151 if (kprobe_disabled(&op->kp)) in optimized_callback()
156 kprobes_inc_nmissed_count(&op->kp); in optimized_callback()
165 regs->ip = (unsigned long)op->kp.addr + INT3_SIZE; in optimized_callback()
168 __this_cpu_write(current_kprobe, &op->kp); in optimized_callback()
[all …]
/arch/powerpc/kvm/
emulate_loadstore.c
79 struct instruction_op op; in kvmppc_emulate_loadstore() local
103 if (analyse_instr(&op, &vcpu->arch.regs, inst) == 0) { in kvmppc_emulate_loadstore()
104 int type = op.type & INSTR_TYPE_MASK; in kvmppc_emulate_loadstore()
105 int size = GETSIZE(op.type); in kvmppc_emulate_loadstore()
109 int instr_byte_swap = op.type & BYTEREV; in kvmppc_emulate_loadstore()
111 if (op.type & SIGNEXT) in kvmppc_emulate_loadstore()
113 op.reg, size, !instr_byte_swap); in kvmppc_emulate_loadstore()
116 op.reg, size, !instr_byte_swap); in kvmppc_emulate_loadstore()
118 if ((op.type & UPDATE) && (emulated != EMULATE_FAIL)) in kvmppc_emulate_loadstore()
119 kvmppc_set_gpr(vcpu, op.update_reg, op.ea); in kvmppc_emulate_loadstore()
[all …]
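analyse_instr() packs everything the emulator needs -- access size, sign extension, byte reversal, update-form addressing -- into op.type, so the load/store emulation above is pure flag dispatch. A self-contained sketch of the sign-extension step alone (the names here are illustrative; the real flag layout lives in arch/powerpc/include/asm/sstep.h):

#include <stdint.h>

/* Illustrative stand-in for the GETSIZE()/SIGNEXT handling. */
static int64_t extend_loaded_value(unsigned int size_bytes,
                                   int sign_extend, uint64_t raw)
{
        unsigned int bits = 8 * size_bytes;

        if (bits < 64) {
                raw &= (1ULL << bits) - 1;            /* mask to access width */
                if (sign_extend && (raw >> (bits - 1)))
                        raw |= ~0ULL << bits;         /* replicate sign bit */
        }
        return (int64_t)raw;
}

/* e.g. lha (load halfword algebraic): extend_loaded_value(2, 1, 0xfff0)
 * yields -16, while the zero-extending lhz would yield 0xfff0. */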
/arch/sparc/lib/
atomic_64.S
21 #define ATOMIC_OP(op) \ argument
22 ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
25 op %g1, %o0, %g7; \
33 ENDPROC(atomic_##op); \
34 EXPORT_SYMBOL(atomic_##op);
36 #define ATOMIC_OP_RETURN(op) \ argument
37 ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
40 op %g1, %o0, %g7; \
44 op %g1, %o0, %g1; \
48 ENDPROC(atomic_##op##_return); \
[all …]
/arch/powerpc/kernel/
optprobes.c
66 struct instruction_op op; in can_optimize() local
103 analyse_instr(&op, &regs, *p->ainsn.insn) == 1) { in can_optimize()
104 emulate_update_regs(&regs, &op); in can_optimize()
111 static void optimized_callback(struct optimized_kprobe *op, in optimized_callback() argument
115 if (kprobe_disabled(&op->kp)) in optimized_callback()
121 kprobes_inc_nmissed_count(&op->kp); in optimized_callback()
123 __this_cpu_write(current_kprobe, &op->kp); in optimized_callback()
124 regs->nip = (unsigned long)op->kp.addr; in optimized_callback()
126 opt_pre_handler(&op->kp, regs); in optimized_callback()
134 void arch_remove_optimized_kprobe(struct optimized_kprobe *op) in arch_remove_optimized_kprobe() argument
[all …]
/arch/sparc/include/asm/
atomic_64.h
24 #define ATOMIC_OP(op) \ argument
25 void atomic_##op(int, atomic_t *); \
26 void atomic64_##op(s64, atomic64_t *);
28 #define ATOMIC_OP_RETURN(op) \ argument
29 int atomic_##op##_return(int, atomic_t *); \
30 s64 atomic64_##op##_return(s64, atomic64_t *);
32 #define ATOMIC_FETCH_OP(op) \ argument
33 int atomic_fetch_##op(int, atomic_t *); \
34 s64 atomic64_fetch_##op(s64, atomic64_t *);
36 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op) argument
[all …]
/arch/hexagon/include/asm/
atomic.h
85 #define ATOMIC_OP(op) \ argument
86 static inline void atomic_##op(int i, atomic_t *v) \
92 " %0 = "#op "(%0,%2);\n" \
101 #define ATOMIC_OP_RETURN(op) \ argument
102 static inline int atomic_##op##_return(int i, atomic_t *v) \
108 " %0 = "#op "(%0,%2);\n" \
118 #define ATOMIC_FETCH_OP(op) \ argument
119 static inline int atomic_fetch_##op(int i, atomic_t *v) \
125 " %1 = "#op "(%0,%3);\n" \
135 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op) argument
[all …]
/arch/alpha/include/asm/
atomic.h
42 #define ATOMIC_OP(op, asm_op) \ argument
43 static __inline__ void atomic_##op(int i, atomic_t * v) \
58 #define ATOMIC_OP_RETURN(op, asm_op) \ argument
59 static inline int atomic_##op##_return_relaxed(int i, atomic_t *v) \
77 #define ATOMIC_FETCH_OP(op, asm_op) \ argument
78 static inline int atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
95 #define ATOMIC64_OP(op, asm_op) \ argument
96 static __inline__ void atomic64_##op(s64 i, atomic64_t * v) \
111 #define ATOMIC64_OP_RETURN(op, asm_op) \ argument
112 static __inline__ s64 atomic64_##op##_return_relaxed(s64 i, atomic64_t * v) \
[all …]
/arch/x86/include/asm/
paravirt_types.h
341 #define paravirt_type(op) \ argument
342 [paravirt_typenum] "i" (PARAVIRT_PATCH(op)), \
343 [paravirt_opptr] "i" (&(pv_ops.op))
496 #define PVOP_TEST_NULL(op) BUG_ON(pv_ops.op == NULL) argument
498 #define PVOP_TEST_NULL(op) ((void)pv_ops.op) argument
513 #define ____PVOP_CALL(rettype, op, clbr, call_clbr, extra_clbr, \ argument
518 PVOP_TEST_NULL(op); \
526 : paravirt_type(op), \
536 : paravirt_type(op), \
545 #define __PVOP_CALL(rettype, op, pre, post, ...) \ argument
[all …]
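pv_ops is a table of function pointers that the paravirt machinery patches at boot; paravirt_type() feeds the slot's index and address into the asm templates, and PVOP_TEST_NULL() is the debug-build guard that the slot was populated. The table-plus-guard idea in miniature (hypothetical ops and names; the real call path also involves runtime patching of the call site, which this sketch omits):

#include <assert.h>
#include <stddef.h>

/* Hypothetical ops table standing in for pv_ops. */
struct hv_ops {
        unsigned long (*get_flags)(void);
        void (*set_flags)(unsigned long);
};

static struct hv_ops hv_ops;

/* Mirror of the PVOP_TEST_NULL() idea: catch unpopulated slots early. */
#define OPS_TEST_NULL(field) assert(hv_ops.field != NULL)

static unsigned long call_get_flags(void)
{
        OPS_TEST_NULL(get_flags);
        return hv_ops.get_flags();   /* indirect call; patched in the kernel */
}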
/arch/powerpc/kernel/trace/
ftrace.c
46 unsigned int op; in ftrace_call_replace() local
51 op = create_branch((unsigned int *)ip, addr, link ? 1 : 0); in ftrace_call_replace()
53 return op; in ftrace_call_replace()
98 static int is_bl_op(unsigned int op) in is_bl_op() argument
100 return (op & 0xfc000003) == 0x48000001; in is_bl_op()
103 static int is_b_op(unsigned int op) in is_b_op() argument
105 return (op & 0xfc000003) == 0x48000000; in is_b_op()
108 static unsigned long find_bl_target(unsigned long ip, unsigned int op) in find_bl_target() argument
112 offset = (op & 0x03fffffc); in find_bl_target()
128 unsigned int op, pop; in __ftrace_make_nop() local
[all …]
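is_bl_op()/is_b_op() test for PowerPC I-form branches: primary opcode 18 (0x48000000) in the top six bits, with the LK bit (bit 0) distinguishing bl from b. find_bl_target() recovers the 26-bit signed, word-aligned displacement and adds it to the call site. A standalone sketch of that decoding, assuming a relative branch (AA bit clear):

#include <stdint.h>

static int is_bl_op(uint32_t op)
{
        return (op & 0xfc000003) == 0x48000001;   /* opcode 18, AA=0, LK=1 */
}

static unsigned long find_bl_target(unsigned long ip, uint32_t op)
{
        int32_t offset = op & 0x03fffffc;   /* LI field, already scaled by 4 */

        if (offset & 0x02000000)            /* sign-extend the 26-bit value */
                offset -= 0x04000000;
        return ip + offset;
}

/* A "bl .-4" encodes as 0x4bfffffd: offset 0x03fffffc sign-extends to -4,
 * so the computed target is the instruction before the call site. */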
/arch/arc/include/asm/
atomic.h
27 #define ATOMIC_OP(op, c_op, asm_op) \ argument
28 static inline void atomic_##op(int i, atomic_t *v) \
43 #define ATOMIC_OP_RETURN(op, c_op, asm_op) \ argument
44 static inline int atomic_##op##_return(int i, atomic_t *v) \
69 #define ATOMIC_FETCH_OP(op, c_op, asm_op) \ argument
70 static inline int atomic_fetch_##op(int i, atomic_t *v) \
132 #define ATOMIC_OP(op, c_op, asm_op) \ argument
133 static inline void atomic_##op(int i, atomic_t *v) \
142 #define ATOMIC_OP_RETURN(op, c_op, asm_op) \ argument
143 static inline int atomic_##op##_return(int i, atomic_t *v) \
[all …]
bitops.h
28 #define BIT_OP(op, c_op, asm_op) \ argument
29 static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
59 #define TEST_N_BIT_OP(op, c_op, asm_op) \ argument
60 static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
106 #define BIT_OP(op, c_op, asm_op) \ argument
107 static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
123 #define TEST_N_BIT_OP(op, c_op, asm_op) \ argument
124 static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
141 #define BIT_OP(op, c_op, asm_op) \ argument
142 static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
[all …]
/arch/sparc/kernel/
of_device_32.c
246 static void __init build_device_resources(struct platform_device *op, in build_device_resources() argument
260 bus->count_cells(op->dev.of_node, &na, &ns); in build_device_resources()
262 preg = of_get_property(op->dev.of_node, bus->addr_prop_name, &num_reg); in build_device_resources()
272 op->resource = op->archdata.resource; in build_device_resources()
273 op->num_resources = num_reg; in build_device_resources()
275 struct resource *r = &op->resource[index]; in build_device_resources()
278 struct device_node *dp = op->dev.of_node; in build_device_resources()
328 op->dev.of_node, index, in build_device_resources()
336 r->name = op->dev.of_node->full_name; in build_device_resources()
343 struct platform_device *op = kzalloc(sizeof(*op), GFP_KERNEL); in scan_one_device() local
[all …]
of_device_common.c
18 struct platform_device *op = of_find_device_by_node(node); in irq_of_parse_and_map() local
20 if (!op || index >= op->archdata.num_irqs) in irq_of_parse_and_map()
23 return op->archdata.irqs[index]; in irq_of_parse_and_map()
30 struct platform_device *op = of_find_device_by_node(node); in of_address_to_resource() local
32 if (!op || index >= op->num_resources) in of_address_to_resource()
35 memcpy(r, &op->archdata.resource[index], sizeof(*r)); in of_address_to_resource()
42 struct platform_device *op = of_find_device_by_node(node); in of_iomap() local
45 if (!op || index >= op->num_resources) in of_iomap()
48 r = &op->archdata.resource[index]; in of_iomap()
64 struct platform_device *op = of_find_device_by_node(dp); in of_propagate_archdata() local
[all …]
/arch/csky/include/asm/
atomic.h
38 #define ATOMIC_OP(op, c_op) \ argument
39 static inline void atomic_##op(int i, atomic_t *v) \
45 " " #op " %0, %1 \n" \
53 #define ATOMIC_OP_RETURN(op, c_op) \ argument
54 static inline int atomic_##op##_return(int i, atomic_t *v) \
61 " " #op " %0, %2 \n" \
73 #define ATOMIC_FETCH_OP(op, c_op) \ argument
74 static inline int atomic_fetch_##op(int i, atomic_t *v) \
82 " " #op " %0, %2 \n" \
121 #define ATOMIC_OP(op, c_op) \ argument
[all …]
/arch/parisc/include/asm/
atomic.h
80 #define ATOMIC_OP(op, c_op) \ argument
81 static __inline__ void atomic_##op(int i, atomic_t *v) \
90 #define ATOMIC_OP_RETURN(op, c_op) \ argument
91 static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
103 #define ATOMIC_FETCH_OP(op, c_op) \ argument
104 static __inline__ int atomic_fetch_##op(int i, atomic_t *v) \
117 #define ATOMIC_OPS(op, c_op) \ argument
118 ATOMIC_OP(op, c_op) \
119 ATOMIC_OP_RETURN(op, c_op) \
120 ATOMIC_FETCH_OP(op, c_op)
[all …]
/arch/mips/mm/
uasm.c
152 #define I_u1u2u3(op) \ argument
153 Ip_u1u2u3(op) \
155 build_insn(buf, insn##op, a, b, c); \
157 UASM_EXPORT_SYMBOL(uasm_i##op);
159 #define I_s3s1s2(op) \ argument
160 Ip_s3s1s2(op) \
162 build_insn(buf, insn##op, b, c, a); \
164 UASM_EXPORT_SYMBOL(uasm_i##op);
166 #define I_u2u1u3(op) \ argument
167 Ip_u2u1u3(op) \
[all …]
/arch/mips/include/asm/
atomic.h
55 #define ATOMIC_OP(op, c_op, asm_op) \ argument
56 static __inline__ void atomic_##op(int i, atomic_t * v) \
65 "1: ll %0, %1 # atomic_" #op " \n" \
81 #define ATOMIC_OP_RETURN(op, c_op, asm_op) \ argument
82 static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
93 "1: ll %1, %2 # atomic_" #op "_return \n" \
115 #define ATOMIC_FETCH_OP(op, c_op, asm_op) \ argument
116 static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \
127 "1: ll %1, %2 # atomic_fetch_" #op " \n" \
148 #define ATOMIC_OPS(op, c_op, asm_op) \ argument
[all …]
/arch/arm/include/asm/
atomic.h
38 #define ATOMIC_OP(op, c_op, asm_op) \ argument
39 static inline void atomic_##op(int i, atomic_t *v) \
45 __asm__ __volatile__("@ atomic_" #op "\n" \
56 #define ATOMIC_OP_RETURN(op, c_op, asm_op) \ argument
57 static inline int atomic_##op##_return_relaxed(int i, atomic_t *v) \
64 __asm__ __volatile__("@ atomic_" #op "_return\n" \
77 #define ATOMIC_FETCH_OP(op, c_op, asm_op) \ argument
78 static inline int atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
85 __asm__ __volatile__("@ atomic_fetch_" #op "\n" \
164 #define ATOMIC_OP(op, c_op, asm_op) \ argument
[all …]
