/arch/nios2/include/asm/

    asm-macros.h
        30  .macro ANDI32 reg1, reg2, mask
        33  movhi \reg1, %hi(\mask)
        34  movui \reg1, %lo(\mask)
        35  and \reg1, \reg1, \reg2
        37  andi \reg1, \reg2, %lo(\mask)
        40  andhi \reg1, \reg2, %hi(\mask)
        50  .macro ORI32 reg1, reg2, mask
        53  orhi \reg1, \reg2, %hi(\mask)
        54  ori \reg1, \reg2, %lo(\mask)
        56  ori \reg1, \reg2, %lo(\mask)
        [all …]
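The ANDI32/ORI32 macros exist because Nios II logical instructions only take 16-bit immediates, so a full 32-bit mask has to be applied via its %hi/%lo halves. A minimal C sketch of that split, assuming nothing beyond the listing above (the helper name is illustrative, not from the kernel):

    #include <stdint.h>
    #include <stdio.h>

    /* Split a 32-bit mask the way %hi()/%lo() do, then apply it in
     * 16-bit-immediate sized steps, as ANDI32 does in assembly. */
    static uint32_t and32_via_halves(uint32_t value, uint32_t mask)
    {
        uint32_t hi = mask >> 16;       /* roughly %hi(mask): upper 16 bits */
        uint32_t lo = mask & 0xffffu;   /* roughly %lo(mask): lower 16 bits */
        uint32_t tmp;

        if (hi == 0)
            return value & lo;          /* a single andi would do */
        if (lo == 0)
            return value & (hi << 16);  /* a single andhi would do */

        tmp = (hi << 16) | lo;          /* build the full mask in a scratch register */
        return value & tmp;
    }

    int main(void)
    {
        printf("%08x\n", and32_via_halves(0x12345678u, 0x00ffff00u)); /* 00345600 */
        return 0;
    }
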
/arch/arm/probes/kprobes/

    test-core.h
        241  #define TEST_RR(code1, reg1, val1, code2, reg2, val2, code3) \   argument
        242  TESTCASE_START(code1 #reg1 code2 #reg2 code3) \
        243  TEST_ARG_REG(reg1, val1) \
        246  TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3) \
        249  #define TEST_RRR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4)\   argument
        250  TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4) \
        251  TEST_ARG_REG(reg1, val1) \
        255  TEST_INSTRUCTION(code1 #reg1 code2 #reg2 code3 #reg3 code4) \
        258  #define TEST_RRRR(code1, reg1, val1, code2, reg2, val2, code3, reg3, val3, code4, reg4, val4) \   argument
        259  TESTCASE_START(code1 #reg1 code2 #reg2 code3 #reg3 code4 #reg4) \
        [all …]
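The TEST_RR family builds both the test-case title and the instruction under test by stringifying the register arguments (#reg1, #reg2) and letting adjacent string literals concatenate. A small standalone C illustration of that mechanism (the macro name and the fragments fed to it are made up; only the #-stringification trick is the point):

    #include <stdio.h>

    /* Paste code fragments around stringified register names, the way the
     * TEST_RR macros assemble "mov r3, r4"-style strings at compile time. */
    #define BUILD_INSN(code1, reg1, code2, reg2, code3) \
        (code1 #reg1 code2 #reg2 code3)

    int main(void)
    {
        const char *insn = BUILD_INSN("mov ", r3, ", ", r4, ", lsl #2");
        puts(insn);    /* prints: mov r3, r4, lsl #2 */
        return 0;
    }
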
/arch/arm/lib/

    csumpartialcopy.S
        28  .macro load1b, reg1   argument
        29  ldrb \reg1, [r0], #1
        32  .macro load2b, reg1, reg2
        33  ldrb \reg1, [r0], #1
        37  .macro load1l, reg1   argument
        38  ldr \reg1, [r0], #4
        41  .macro load2l, reg1, reg2
        42  ldr \reg1, [r0], #4
        46  .macro load4l, reg1, reg2, reg3, reg4
        47  ldmia r0!, {\reg1, \reg2, \reg3, \reg4}

    memcpy.S
        24  .macro ldr4w ptr reg1 reg2 reg3 reg4 abort
        25  ldmia \ptr!, {\reg1, \reg2, \reg3, \reg4}
        28  .macro ldr8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        29  ldmia \ptr!, {\reg1, \reg2, \reg3, \reg4, \reg5, \reg6, \reg7, \reg8}
        40  .macro str8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        41  stmia \ptr!, {\reg1, \reg2, \reg3, \reg4, \reg5, \reg6, \reg7, \reg8}
        48  .macro enter reg1 reg2
        49  stmdb sp!, {r0, \reg1, \reg2}
        52  .macro usave reg1 reg2
        53  UNWIND( .save {r0, \reg1, \reg2} )
        [all …]
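The ldr4w/ldr8w/str8w macros above wrap ldmia/stmia so the shared copy template can move four or eight words per iteration. A rough C analogue of that unrolled inner loop, ignoring the alignment handling and fault recovery the real memcpy.S provides:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Copy eight 32-bit words per iteration, then mop up the tail,
     * mirroring the ldr8w/str8w fast path in spirit only. */
    static void copy_words_unrolled(uint32_t *dst, const uint32_t *src, size_t nwords)
    {
        while (nwords >= 8) {
            dst[0] = src[0]; dst[1] = src[1]; dst[2] = src[2]; dst[3] = src[3];
            dst[4] = src[4]; dst[5] = src[5]; dst[6] = src[6]; dst[7] = src[7];
            dst += 8; src += 8; nwords -= 8;
        }
        while (nwords--)
            *dst++ = *src++;
    }

    int main(void)
    {
        uint32_t src[19], dst[19];
        for (size_t i = 0; i < 19; i++)
            src[i] = (uint32_t)(i * 0x01010101u);
        copy_words_unrolled(dst, src, 19);
        puts(memcmp(dst, src, sizeof(src)) == 0 ? "ok" : "mismatch");
        return 0;
    }
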
    copy_to_user.S
        48  .macro ldr4w ptr reg1 reg2 reg3 reg4 abort
        49  ldmia \ptr!, {\reg1, \reg2, \reg3, \reg4}
        52  .macro ldr8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        53  ldmia \ptr!, {\reg1, \reg2, \reg3, \reg4, \reg5, \reg6, \reg7, \reg8}
        64  .macro str8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        65  str1w \ptr, \reg1, \abort
        79  .macro enter reg1 reg2
        81  stmdb sp!, {r0, r2, r3, \reg1, \reg2}
        84  .macro usave reg1 reg2
        85  UNWIND( .save {r0, r2, r3, \reg1, \reg2} )
        [all …]

    copy_from_user.S
        48  .macro ldr4w ptr reg1 reg2 reg3 reg4 abort
        49  ldr1w \ptr, \reg1, \abort
        55  .macro ldr8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        56  ldr4w \ptr, \reg1, \reg2, \reg3, \reg4, \abort
        68  .macro str8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        69  stmia \ptr!, {\reg1, \reg2, \reg3, \reg4, \reg5, \reg6, \reg7, \reg8}
        76  .macro enter reg1 reg2
        78  stmdb sp!, {r0, r2, r3, \reg1, \reg2}
        81  .macro usave reg1 reg2
        82  UNWIND( .save {r0, r2, r3, \reg1, \reg2} )
        [all …]

    csumpartialcopyuser.S
        42  .macro load1b, reg1   argument
        43  ldrusr \reg1, r0, 1
        46  .macro load2b, reg1, reg2
        47  ldrusr \reg1, r0, 1
        51  .macro load1l, reg1   argument
        52  ldrusr \reg1, r0, 4
        55  .macro load2l, reg1, reg2
        56  ldrusr \reg1, r0, 4
        60  .macro load4l, reg1, reg2, reg3, reg4
        61  ldrusr \reg1, r0, 4
/arch/m32r/include/asm/

    dcache_clear.h
        13  #define DCACHE_CLEAR(reg0, reg1, addr) \   argument
        14  "seth "reg1", #high(dcache_dummy); \n\t" \
        15  "or3 "reg1", "reg1", #low(dcache_dummy); \n\t" \
        16  "lock "reg0", @"reg1"; \n\t" \
        21  "unlock "reg0", @"reg1"; \n\t"
        26  #define DCACHE_CLEAR(reg0, reg1, addr)   argument
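What makes DCACHE_CLEAR work is plain C string pasting: the register operands arrive as string literals (for example "r4") and are spliced into the inline-asm template by adjacent-string-literal concatenation. A standalone sketch of that mechanism, printing the expanded template instead of feeding it to the assembler (the macro name here is hypothetical; the instruction text is carried over from the listing):

    #include <stdio.h>

    /* Same concatenation trick as DCACHE_CLEAR(), shown in isolation. */
    #define DCACHE_CLEAR_TEMPLATE(reg0, reg1)              \
        "seth "reg1", #high(dcache_dummy); \n\t"           \
        "or3 "reg1", "reg1", #low(dcache_dummy); \n\t"     \
        "lock "reg0", @"reg1"; \n\t"                       \
        "unlock "reg0", @"reg1"; \n\t"

    int main(void)
    {
        /* Show what the compiler would hand to the assembler for r4/r5. */
        fputs(DCACHE_CLEAR_TEMPLATE("r4", "r5"), stdout);
        return 0;
    }
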
/arch/x86/events/intel/

    uncore_nhmex.c
        353  struct hw_perf_event_extra *reg1 = &hwc->extra_reg;   in nhmex_bbox_hw_config() local
        368  reg1->reg = NHMEX_B0_MSR_MATCH;   in nhmex_bbox_hw_config()
        370  reg1->reg = NHMEX_B1_MSR_MATCH;   in nhmex_bbox_hw_config()
        371  reg1->idx = 0;   in nhmex_bbox_hw_config()
        372  reg1->config = event->attr.config1;   in nhmex_bbox_hw_config()
        380  struct hw_perf_event_extra *reg1 = &hwc->extra_reg;   in nhmex_bbox_msr_enable_event() local
        383  if (reg1->idx != EXTRA_REG_NONE) {   in nhmex_bbox_msr_enable_event()
        384  wrmsrl(reg1->reg, reg1->config);   in nhmex_bbox_msr_enable_event()
        385  wrmsrl(reg1->reg + 1, reg2->config);   in nhmex_bbox_msr_enable_event()
        444  struct hw_perf_event_extra *reg1 = &hwc->extra_reg;   in nhmex_sbox_hw_config() local
        [all …]
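The pattern in uncore_nhmex.c is two-phase: the *_hw_config() hook records which extra match/mask MSR an event needs in hwc->extra_reg, and the *_enable_event() hook later writes those recorded values with wrmsrl(). A schematic userspace sketch of that flow; the struct, the write_msr() stub and the MSR numbers are stand-ins, not the kernel API:

    #include <stdint.h>
    #include <stdio.h>

    #define EXTRA_REG_NONE (-1)

    /* Stand-in for struct hw_perf_event_extra: which MSR to program, and with what. */
    struct extra_reg_state {
        long     idx;      /* EXTRA_REG_NONE means "no extra register needed" */
        uint32_t reg;      /* address of the match MSR */
        uint64_t config;   /* value taken from the event's config1 attribute */
    };

    static void write_msr(uint32_t msr, uint64_t val)   /* mock of wrmsrl() */
    {
        printf("wrmsr 0x%x <- 0x%llx\n", msr, (unsigned long long)val);
    }

    static void hw_config(struct extra_reg_state *reg1, uint32_t match_msr, uint64_t config1)
    {
        reg1->reg = match_msr;    /* record the MSR chosen for this event */
        reg1->idx = 0;
        reg1->config = config1;
    }

    static void enable_event(const struct extra_reg_state *reg1, uint64_t mask_value)
    {
        if (reg1->idx != EXTRA_REG_NONE) {
            write_msr(reg1->reg, reg1->config);     /* match register */
            write_msr(reg1->reg + 1, mask_value);   /* mask register sits next to it */
        }
    }

    int main(void)
    {
        struct extra_reg_state reg1 = { .idx = EXTRA_REG_NONE };
        hw_config(&reg1, 0x100, 0xdead);
        enable_event(&reg1, 0xbeef);
        return 0;
    }
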
    uncore_snbep.c
        489  struct hw_perf_event_extra *reg1 = &hwc->extra_reg;   in snbep_uncore_msr_enable_event() local
        491  if (reg1->idx != EXTRA_REG_NONE)   in snbep_uncore_msr_enable_event()
        492  wrmsrl(reg1->reg, uncore_shared_reg_config(box, 0));   in snbep_uncore_msr_enable_event()
        780  struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;   in snbep_cbox_put_constraint() local
        788  if (reg1->alloc & (0x1 << i))   in snbep_cbox_put_constraint()
        791  reg1->alloc = 0;   in snbep_cbox_put_constraint()
        798  struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;   in __snbep_cbox_get_constraint() local
        804  if (reg1->idx == EXTRA_REG_NONE)   in __snbep_cbox_get_constraint()
        809  if (!(reg1->idx & (0x1 << i)))   in __snbep_cbox_get_constraint()
        811  if (!uncore_box_is_fake(box) && (reg1->alloc & (0x1 << i)))   in __snbep_cbox_get_constraint()
        [all …]
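The cbox get/put constraint code uses reg1->alloc as per-event bookkeeping: every shared filter field the event manages to claim gets a bit set, and the put side releases exactly those bits and then clears the mask. A much-simplified sketch of that idea (no conflict detection, no locking; the types are invented for illustration):

    #include <stdio.h>

    struct shared_reg { int refcount[4]; };      /* one count per filter field */

    /* Claim the fields listed in want_idx and record each claim in *alloc. */
    static void get_fields(struct shared_reg *sr, unsigned int want_idx, unsigned int *alloc)
    {
        for (int i = 0; i < 4; i++) {
            if (!(want_idx & (1u << i)))
                continue;                        /* event does not use this field */
            sr->refcount[i]++;
            *alloc |= 1u << i;                   /* remember that we took it */
        }
    }

    /* Release only what was recorded, then clear the mask (reg1->alloc = 0 above). */
    static void put_fields(struct shared_reg *sr, unsigned int *alloc)
    {
        for (int i = 0; i < 4; i++)
            if (*alloc & (1u << i))
                sr->refcount[i]--;
        *alloc = 0;
    }

    int main(void)
    {
        struct shared_reg sr = { { 0 } };
        unsigned int alloc = 0;
        get_fields(&sr, 0x5, &alloc);            /* claim fields 0 and 2 */
        put_fields(&sr, &alloc);
        printf("%d %d %d %d\n", sr.refcount[0], sr.refcount[1],
               sr.refcount[2], sr.refcount[3]);
        return 0;
    }
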
/arch/arm/kernel/

    hyp-stub.S
        42  .macro store_primary_cpu_mode reg1, reg2, reg3
        43  mrs \reg1, cpsr
        44  and \reg1, \reg1, #MODE_MASK
        47  str \reg1, [\reg2, \reg3]
        56  .macro compare_cpu_mode_with_primary mode, reg1, reg2, reg3
        59  ldr \reg1, [\reg2, \reg3]
        60  cmp \mode, \reg1 @ matches primary CPU boot mode?
        61  orrne \reg1, \reg1, #BOOT_CPU_MODE_MISMATCH
        62  strne \reg1, [\reg2, \reg3] @ record what happened and give up
        67  .macro store_primary_cpu_mode reg1:req, reg2:req, reg3:req
        [all …]
/arch/unicore32/lib/

    copy_to_user.S
        40  .macro ldr4w ptr reg1 reg2 reg3 reg4 abort
        41  ldm.w (\reg1, \reg2, \reg3, \reg4), [\ptr]+
        44  .macro ldr8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        45  ldm.w (\reg1, \reg2, \reg3, \reg4, \reg5, \reg6, \reg7, \reg8), [\ptr]+
        57  .macro str8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        58  100: stm.w (\reg1, \reg2, \reg3, \reg4, \reg5, \reg6, \reg7, \reg8), [\ptr]+

    copy_from_user.S
        40  .macro ldr4w ptr reg1 reg2 reg3 reg4 abort
        41  100: ldm.w (\reg1, \reg2, \reg3, \reg4), [\ptr]+
        48  .macro ldr8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        49  100: ldm.w (\reg1, \reg2, \reg3, \reg4, \reg5, \reg6, \reg7, \reg8), [\ptr]+
        64  .macro str8w ptr reg1 reg2 reg3 reg4 reg5 reg6 reg7 reg8 abort
        65  stm.w (\reg1, \reg2, \reg3, \reg4, \reg5, \reg6, \reg7, \reg8), [\ptr]+
/arch/arm64/crypto/

    aes-cipher-core.S
        23  .macro __pair1, sz, op, reg0, reg1, in0, in1e, in1d, shift
        26  ubfiz \reg1, \in1e, #2, #8
        29  ubfx \reg1, \in1e, #\shift, #8
        41  ldr \reg1, [tt, \reg1, uxtw #2]
        45  lsl \reg1, \reg1, #2
        48  ldrb \reg1, [tt, \reg1, uxtw]
        52  .macro __pair0, sz, op, reg0, reg1, in0, in1e, in1d, shift
        54  ubfx \reg1, \in1d, #\shift, #8
        56  ldr\op \reg1, [tt, \reg1, uxtw #\sz]

    crct10dif-ce-core.S
        149  .macro fold64, reg1, reg2
        152  pmull2 v8.1q, \reg1\().2d, v10.2d
        153  pmull \reg1\().1q, \reg1\().1d, v10.1d
        164  eor \reg1\().16b, \reg1\().16b, v8.16b
        166  eor \reg1\().16b, \reg1\().16b, v11.16b
/arch/arm64/include/asm/

    alternative.h
        228  .macro uao_ldp l, reg1, reg2, addr, post_inc
        230  8888: ldp \reg1, \reg2, [\addr], \post_inc;
        234  ldtr \reg1, [\addr];
        243  .macro uao_stp l, reg1, reg2, addr, post_inc
        245  8888: stp \reg1, \reg2, [\addr], \post_inc;
        249  sttr \reg1, [\addr];
        270  .macro uao_ldp l, reg1, reg2, addr, post_inc
        271  USER(\l, ldp \reg1, \reg2, [\addr], \post_inc)
        273  .macro uao_stp l, reg1, reg2, addr, post_inc
        274  USER(\l, stp \reg1, \reg2, [\addr], \post_inc)
/arch/s390/kvm/

    priv.c
        246  int reg1, reg2;   in handle_iske() local
        256  kvm_s390_get_regs_rre(vcpu, &reg1, &reg2);   in handle_iske()
        270  vcpu->run->s.regs.gprs[reg1] &= ~0xff;   in handle_iske()
        271  vcpu->run->s.regs.gprs[reg1] |= key;   in handle_iske()
        278  int reg1, reg2;   in handle_rrbe() local
        288  kvm_s390_get_regs_rre(vcpu, &reg1, &reg2);   in handle_rrbe()
        316  int reg1, reg2;   in handle_sske() local
        333  kvm_s390_get_regs_rre(vcpu, &reg1, &reg2);   in handle_sske()
        335  key = vcpu->run->s.regs.gprs[reg1] & 0xfe;   in handle_sske()
        368  vcpu->run->s.regs.gprs[reg1] &= ~0xff00UL;   in handle_sske()
        [all …]
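handle_iske()/handle_sske() only rewrite the key-sized field at the bottom of the guest register: clear it, then OR the key in. The same read-modify-write in isolation (a plain local array here, not the KVM vcpu structure):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t gprs[16] = { [5] = 0x123456789abcdef0ULL };  /* pretend guest GPRs */
        unsigned int reg1 = 5;                                /* register number decoded from the insn */
        uint8_t key = 0x6e;                                   /* storage key to hand back */

        gprs[reg1] &= ~0xffULL;   /* clear the old key field */
        gprs[reg1] |= key;        /* insert the freshly read key */

        printf("gpr%u = 0x%016llx\n", reg1, (unsigned long long)gprs[reg1]);
        return 0;
    }
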
    trace.h
        287  TP_PROTO(VCPU_PROTO_COMMON, int g, int reg1, int reg3, u64 addr),
        288  TP_ARGS(VCPU_ARGS_COMMON, g, reg1, reg3, addr),
        293  __field(int, reg1)
        301  __entry->reg1 = reg1;
        308  __entry->reg1, __entry->reg3, __entry->addr)
        312  TP_PROTO(VCPU_PROTO_COMMON, int g, int reg1, int reg3, u64 addr),
        313  TP_ARGS(VCPU_ARGS_COMMON, g, reg1, reg3, addr),
        318  __field(int, reg1)
        326  __entry->reg1 = reg1;
        333  __entry->reg1, __entry->reg3, __entry->addr)
/arch/s390/include/asm/

    sigp.h
        44  register unsigned long reg1 asm ("1") = parm;   in ____pcpu_sigp()
        51  : "=d" (cc), "+d" (reg1) : "d" (addr), "a" (order) : "cc");   in ____pcpu_sigp()
        52  *status = reg1;   in ____pcpu_sigp()

    timex.h
        116  register unsigned long reg1 asm("1") = (unsigned long) (ptff_block);\
        123  : "=d" (rc), "+m" (*(struct addrtype *) reg1) \
        124  : "d" (reg0), "d" (reg1) : "cc"); \

    processor.h
        310  unsigned int reg1, reg2;   in __extract_psw() local
        312  asm volatile("epsw %0,%1" : "=d" (reg1), "=a" (reg2));   in __extract_psw()
        313  return (((unsigned long) reg1) << 32) | ((unsigned long) reg2);   in __extract_psw()
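__extract_psw() stitches the two 32-bit halves delivered by EPSW into one 64-bit value; the cast before the shift is what keeps the high half from being lost to 32-bit arithmetic. The same combination in isolation:

    #include <stdint.h>
    #include <stdio.h>

    static uint64_t combine_halves(uint32_t hi, uint32_t lo)
    {
        return ((uint64_t)hi << 32) | lo;   /* widen first, then shift */
    }

    int main(void)
    {
        printf("0x%016llx\n",
               (unsigned long long)combine_halves(0x00080000u, 0x80000000u));
        return 0;
    }
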
/arch/m32r/kernel/

    align.c
        278  int reg1, reg2;   in emu_mul() local
        280  reg1 = get_reg(regs, dest);   in emu_mul()
        285  : "+r" (reg1) : "r" (reg2)   in emu_mul()
        288  set_reg(regs, dest, reg1);   in emu_mul()
        295  int reg1, reg2;   in emu_mullo_a0() local
        297  reg1 = get_reg(regs, REG1(insn));   in emu_mullo_a0()
        304  : "+r" (reg1), "+r" (reg2)   in emu_mullo_a0()
        307  regs->acc0h = reg1;   in emu_mullo_a0()
        315  int reg1, reg2;   in emu_mullo_a1() local
        317  reg1 = get_reg(regs, REG1(insn));   in emu_mullo_a1()
        [all …]
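The emu_mul()/emu_mullo_*() helpers follow the usual software-emulation shape: read the operand registers out of the saved context, perform the operation the trapped instruction would have, and write the result back before resuming. A stripped-down sketch with stand-in get_reg()/set_reg() helpers and a fake register file (not the m32r pt_regs API):

    #include <stdint.h>
    #include <stdio.h>

    struct fake_regs { uint32_t gpr[16]; };   /* pretend saved register file */

    static uint32_t get_reg(const struct fake_regs *regs, int n) { return regs->gpr[n]; }
    static void     set_reg(struct fake_regs *regs, int n, uint32_t v) { regs->gpr[n] = v; }

    /* Emulate "mul dest, src": fetch operands, multiply, write the result back. */
    static void emu_mul(struct fake_regs *regs, int dest, int src)
    {
        uint32_t reg1 = get_reg(regs, dest);
        uint32_t reg2 = get_reg(regs, src);

        reg1 *= reg2;                 /* the real code does this step via inline asm */
        set_reg(regs, dest, reg1);    /* result lands back in the saved context */
    }

    int main(void)
    {
        struct fake_regs regs = { .gpr = { [3] = 6, [4] = 7 } };
        emu_mul(&regs, 3, 4);
        printf("r3 = %u\n", regs.gpr[3]);   /* 42 */
        return 0;
    }
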
    ptrace.c
        227  unsigned long reg1, reg2;   in check_condition_src() local
        233  reg1 = get_stack_long(child, reg_offset[regno1]);   in check_condition_src()
        234  return reg1 == reg2;   in check_condition_src()
        236  reg1 = get_stack_long(child, reg_offset[regno1]);   in check_condition_src()
        237  return reg1 != reg2;   in check_condition_src()
/arch/arm/crypto/

    crct10dif-ce-core.S
        169  .macro fold64, reg1, reg2
        172  vmull.p64 q8, \reg1\()h, d21
        173  vmull.p64 \reg1, \reg1\()l, d20
        182  veor.8 \reg1, \reg1, q8
        184  veor.8 \reg1, \reg1, q11
/arch/ia64/include/asm/native/

    inst.h
        86  #define THASH(pred, reg0, reg1, clob) \   argument
        87  (pred) thash reg0 = reg1