/arch/mips/ralink/ |
D | timer.c |
    40   static inline void rt_timer_w32(struct rt_timer *rt, u8 reg, u32 val)   in rt_timer_w32() argument
    42   __raw_writel(val, rt->membase + reg);   in rt_timer_w32()
    45   static inline u32 rt_timer_r32(struct rt_timer *rt, u8 reg)   in rt_timer_r32() argument
    47   return __raw_readl(rt->membase + reg);   in rt_timer_r32()
    52   struct rt_timer *rt = (struct rt_timer *) _rt;   in rt_timer_irq() local
    54   rt_timer_w32(rt, TIMER_REG_TMR0LOAD, rt->timer_freq / rt->timer_div);   in rt_timer_irq()
    55   rt_timer_w32(rt, TIMER_REG_TMRSTAT, TMRSTAT_TMR0INT);   in rt_timer_irq()
    61   static int rt_timer_request(struct rt_timer *rt)   in rt_timer_request() argument
    63   int err = request_irq(rt->irq, rt_timer_irq, 0,   in rt_timer_request()
    64   dev_name(rt->dev), rt);   in rt_timer_request()
    [all …]
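These hits are the Ralink system-timer driver's register accessors, where rt is the driver state holding an ioremapped register base. A minimal sketch of the same MMIO accessor pattern follows; the struct layout and includes are assumptions, only the accessor bodies mirror the lines quoted above.

#include <linux/io.h>
#include <linux/types.h>

/* Sketch only: the real struct rt_timer has more fields (irq, dev,
 * timer_freq, timer_div); membase is all the accessors need. */
struct rt_timer {
	void __iomem *membase;
};

static inline void rt_timer_w32(struct rt_timer *rt, u8 reg, u32 val)
{
	/* Raw (no byte-swapping, no ordering) 32-bit MMIO write at offset 'reg'. */
	__raw_writel(val, rt->membase + reg);
}

static inline u32 rt_timer_r32(struct rt_timer *rt, u8 reg)
{
	/* Matching raw 32-bit MMIO read. */
	return __raw_readl(rt->membase + reg);
}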
|
/arch/mips/cavium-octeon/crypto/ |
D | octeon-crypto.h |
    36   : [rt] "d" (cpu_to_be64(value))); \
    48   : [rt] "=d" (__value) \
    62   : [rt] "d" (cpu_to_be64(value))); \
    73   : [rt] "d" (cpu_to_be64(value))); \
    84   : [rt] "d" (value)); \
    95   : [rt] "d" (value)); \
    110  : [rt] "d" (value)); \
    122  : [rt] "=d" (__value) \
    136  : [rt] "d" (value)); \
    147  : [rt] "d" (value)); \
    [all …]
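Each of these matches is a named inline-asm operand ([rt]) bound to a MIPS general-purpose register through the "d" constraint. A tiny, hypothetical illustration of that operand syntax is below; it assumes a MIPS target and is not the Octeon crypto-unit accessor itself.

/* Hypothetical example of named asm operands with the MIPS "d"
 * (general-purpose register) constraint; MIPS target only. */
static inline unsigned long asm_copy(unsigned long value)
{
	unsigned long result;

	asm volatile("move %[rt], %[rs]"	/* rt <- rs */
		     : [rt] "=d" (result)
		     : [rs] "d" (value));
	return result;
}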
|
/arch/mips/include/asm/ |
D | uasm.h |
    208  # define UASM_i_ADDIU(buf, rs, rt, val) uasm_i_daddiu(buf, rs, rt, val)   argument
    209  # define UASM_i_ADDU(buf, rs, rt, rd) uasm_i_daddu(buf, rs, rt, rd)   argument
    210  # define UASM_i_LL(buf, rs, rt, off) uasm_i_lld(buf, rs, rt, off)   argument
    211  # define UASM_i_LW(buf, rs, rt, off) uasm_i_ld(buf, rs, rt, off)   argument
    212  # define UASM_i_LWX(buf, rs, rt, rd) uasm_i_ldx(buf, rs, rt, rd)   argument
    213  # define UASM_i_MFC0(buf, rt, rd...) uasm_i_dmfc0(buf, rt, rd)   argument
    214  # define UASM_i_MTC0(buf, rt, rd...) uasm_i_dmtc0(buf, rt, rd)   argument
    215  # define UASM_i_ROTR(buf, rs, rt, sh) uasm_i_drotr(buf, rs, rt, sh)   argument
    216  # define UASM_i_SC(buf, rs, rt, off) uasm_i_scd(buf, rs, rt, off)   argument
    217  # define UASM_i_SLL(buf, rs, rt, sh) uasm_i_dsll(buf, rs, rt, sh)   argument
    [all …]
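These are the width-generic uasm emitters: on 64-bit kernels they alias the doubleword instruction emitters (uasm_i_d*). A sketch of the idiom with the 32-bit counterparts filled in is below; it assumes the usual CONFIG_64BIT gate and shows only two of the macros.

/* Sketch: pick the native-word uasm emitter at compile time. */
#ifdef CONFIG_64BIT
# define UASM_i_ADDIU(buf, rs, rt, val)	uasm_i_daddiu(buf, rs, rt, val)
# define UASM_i_LW(buf, rs, rt, off)	uasm_i_ld(buf, rs, rt, off)
#else
# define UASM_i_ADDIU(buf, rs, rt, val)	uasm_i_addiu(buf, rs, rt, val)
# define UASM_i_LW(buf, rs, rt, off)	uasm_i_lw(buf, rs, rt, off)
#endif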
|
D | asm.h |
    180  #define MOVN(rd, rs, rt) \   argument
    183  beqz rt, 9f; \
    187  #define MOVZ(rd, rs, rt) \   argument
    190  bnez rt, 9f; \
    196  #define MOVN(rd, rs, rt) \   argument
    199  bnezl rt, 9f; \
    203  #define MOVZ(rd, rs, rt) \   argument
    206  beqzl rt, 9f; \
    213  #define MOVN(rd, rs, rt) \   argument
    214  movn rd, rs, rt
    [all …]
|
D | mipsmtregs.h |
    284  #define mftc0(rt,sel) \   argument
    292  " # mftc0 $1, $" #rt ", " #sel " \n" \
    293  " .word 0x41000800 | (" #rt " << 16) | " #sel " \n" \
    301  #define mftgpr(rt) \   argument
    309  " # mftgpr $1," #rt " \n" \
    310  " .word 0x41000820 | (" #rt " << 16) \n" \
    318  #define mftr(rt, u, sel) \   argument
    323  " mftr %0, " #rt ", " #u ", " #sel " \n" \
|
/arch/arm/mm/ |
D | cache-v7m.S |
    25   .macro v7m_cache_read, rt, reg
    26   movw \rt, #:lower16:BASEADDR_V7M_SCB + \reg
    27   movt \rt, #:upper16:BASEADDR_V7M_SCB + \reg
    28   ldr \rt, [\rt]
    31   .macro v7m_cacheop, rt, tmp, op, c = al
    34   str\c \rt, [\tmp]
    38   .macro read_ccsidr, rt   argument
    39   v7m_cache_read \rt, V7M_SCB_CCSIDR
    42   .macro read_clidr, rt   argument
    43   v7m_cache_read \rt, V7M_SCB_CLIDR
    [all …]
|
/arch/powerpc/kvm/ |
D | emulate_loadstore.c |
    54   int ra, rs, rt;   in kvmppc_emulate_loadstore() local
    67   rt = get_rt(inst);   in kvmppc_emulate_loadstore()
    73   emulated = kvmppc_handle_load(run, vcpu, rt, 4, 1);   in kvmppc_emulate_loadstore()
    77   emulated = kvmppc_handle_load(run, vcpu, rt, 1, 1);   in kvmppc_emulate_loadstore()
    81   emulated = kvmppc_handle_load(run, vcpu, rt, 1, 1);   in kvmppc_emulate_loadstore()
    105  emulated = kvmppc_handle_loads(run, vcpu, rt, 2, 1);   in kvmppc_emulate_loadstore()
    109  emulated = kvmppc_handle_load(run, vcpu, rt, 2, 1);   in kvmppc_emulate_loadstore()
    113  emulated = kvmppc_handle_load(run, vcpu, rt, 2, 1);   in kvmppc_emulate_loadstore()
    141  emulated = kvmppc_handle_load(run, vcpu, rt, 4, 0);   in kvmppc_emulate_loadstore()
    151  emulated = kvmppc_handle_load(run, vcpu, rt, 2, 0);   in kvmppc_emulate_loadstore()
    [all …]
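Here rt is the destination-register field pulled out of the trapped PowerPC load instruction and handed to the MMIO load helpers together with the access size and a sign/zero-extension flag. A standalone sketch of the field extraction that get_rt()/get_ra() perform, using the Power ISA D-form bit positions:

#include <stdint.h>

/* PowerPC instruction words keep RT/RS in bits 6..10 and RA in bits 11..15
 * (IBM numbering, MSB = bit 0); counted from the LSB that is a shift of
 * 21 and 16 respectively. */
static inline int get_rt(uint32_t inst)
{
	return (inst >> 21) & 0x1f;
}

static inline int get_ra(uint32_t inst)
{
	return (inst >> 16) & 0x1f;
}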
|
D | emulate.c |
    146  static int kvmppc_emulate_mfspr(struct kvm_vcpu *vcpu, int sprn, int rt)   in kvmppc_emulate_mfspr() argument
    204  kvmppc_set_gpr(vcpu, rt, spr_val);   in kvmppc_emulate_mfspr()
    215  int rs, rt, sprn;   in kvmppc_emulate_instruction() local
    229  rt = get_rt(inst);   in kvmppc_emulate_instruction()
    261  emulated = kvmppc_emulate_mfspr(vcpu, sprn, rt);   in kvmppc_emulate_instruction()
|
/arch/mips/kernel/ |
D | mips-r2-to-r6-emul.c |
    403  s32 rt, rs;   in mult_func() local
    405  rt = regs->regs[MIPSInst_RT(ir)];   in mult_func()
    407  res = (s64)rt * (s64)rs;   in mult_func()
    411  rt = res >> 32;   in mult_func()
    412  res = (s64)rt;   in mult_func()
    430  u32 rt, rs;   in multu_func() local
    432  rt = regs->regs[MIPSInst_RT(ir)];   in multu_func()
    434  res = (u64)rt * (u64)rs;   in multu_func()
    435  rt = res;   in multu_func()
    436  regs->lo = (s64)(s32)rt;   in multu_func()
    [all …]
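In mult_func()/multu_func() the operand registers are multiplied into a 64-bit product, and the two 32-bit halves are written back, sign-extended, to the emulated HI/LO registers. A user-space sketch of the signed case:

#include <stdint.h>

/* Sketch of MIPS MULT semantics as emulated above: HI:LO = rs * rt, with
 * each 32-bit half stored sign-extended into the 64-bit register file. */
static void emulate_mult(int32_t rs, int32_t rt, int64_t *hi, int64_t *lo)
{
	int64_t res = (int64_t)rs * (int64_t)rt;

	*lo = (int64_t)(int32_t)res;		/* low 32 bits, sign-extended  */
	*hi = (int64_t)(int32_t)(res >> 32);	/* high 32 bits, sign-extended */
}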
|
D | branch.c |
    78   if (insn.mm_i_format.rt != 0) /* Not mm_jr */   in __mm_isBranchInstr()
    79   regs->regs[insn.mm_i_format.rt] =   in __mm_isBranchInstr()
    89   switch (insn.mm_i_format.rt) {   in __mm_isBranchInstr()
    172  switch (insn.mm_i_format.rt) {   in __mm_isBranchInstr()
    207  regs->regs[insn.mm_i_format.rt])   in __mm_isBranchInstr()
    218  regs->regs[insn.mm_i_format.rt])   in __mm_isBranchInstr()
    446  switch (insn.i_format.rt) {   in __compute_return_epc_for_insn()
    453  if (insn.i_format.rt == bltzl_op)   in __compute_return_epc_for_insn()
    466  if (insn.i_format.rt == bgezl_op)   in __compute_return_epc_for_insn()
    476  insn.i_format.rt == bltzall_op))   in __compute_return_epc_for_insn()
    [all …]
|
D | rtlx.c |
    284  struct rtlx_channel *rt;   in rtlx_write() local
    292  rt = &rtlx->channel[index];   in rtlx_write()
    296  rt_read = rt->rt_read;   in rtlx_write()
    299  count = min_t(size_t, count, write_spacefree(rt_read, rt->rt_write,   in rtlx_write()
    300  rt->buffer_size));   in rtlx_write()
    303  fl = min(count, (size_t) rt->buffer_size - rt->rt_write);   in rtlx_write()
    305  failed = copy_from_user(rt->rt_buffer + rt->rt_write, buffer, fl);   in rtlx_write()
    311  failed = copy_from_user(rt->rt_buffer, buffer + fl, count - fl);   in rtlx_write()
    317  rt->rt_write = (rt->rt_write + count) % rt->buffer_size;   in rtlx_write()
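rtlx_write() copies user data into a per-channel ring buffer, splitting the copy into at most two pieces when it wraps past the end of the buffer. A plain user-space sketch of that wrap-around copy; the kernel code uses copy_from_user() and first clamps count to the free space:

#include <stddef.h>
#include <string.h>

/* Write 'count' bytes (already clamped to the free space) into a circular
 * buffer of 'size' bytes at write index 'wr'; returns the new write index. */
static size_t ring_write(char *buf, size_t size, size_t wr,
			 const char *src, size_t count)
{
	size_t first = (count < size - wr) ? count : size - wr;

	memcpy(buf + wr, src, first);		 /* run up to the buffer end  */
	memcpy(buf, src + first, count - first); /* wrapped remainder, if any */

	return (wr + count) % size;
}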
|
D | unaligned.c |
    963   regs->regs[insn.spec3_format.rt] = value;   in emulate_load_store_insn()
    976   regs->regs[insn.spec3_format.rt] = value;   in emulate_load_store_insn()
    989   regs->regs[insn.spec3_format.rt] = value;   in emulate_load_store_insn()
    997   value = regs->regs[insn.spec3_format.rt];   in emulate_load_store_insn()
    1010  value = regs->regs[insn.spec3_format.rt];   in emulate_load_store_insn()
    1040  regs->regs[insn.i_format.rt] = value;   in emulate_load_store_insn()
    1059  regs->regs[insn.i_format.rt] = value;   in emulate_load_store_insn()
    1078  regs->regs[insn.i_format.rt] = value;   in emulate_load_store_insn()
    1097  regs->regs[insn.i_format.rt] = value;   in emulate_load_store_insn()
    1120  regs->regs[insn.i_format.rt] = value;   in emulate_load_store_insn()
    [all …]
|
/arch/arm/kvm/ |
D | mmio.c |
    117  vcpu_set_reg(vcpu, vcpu->arch.mmio_decode.rt, data);   in kvm_handle_mmio_return()
    125  unsigned long rt;   in decode_hsr() local
    141  rt = kvm_vcpu_dabt_get_rd(vcpu);   in decode_hsr()
    145  vcpu->arch.mmio_decode.rt = rt;   in decode_hsr()
    159  unsigned long rt;   in io_mem_abort() local
    179  rt = vcpu->arch.mmio_decode.rt;   in io_mem_abort()
    182  data = vcpu_data_guest_to_host(vcpu, vcpu_get_reg(vcpu, rt),   in io_mem_abort()
|
/arch/arm/net/ |
D | bpf_jit_32.h |
    158  #define ARM_LDR_I(rt, rn, off) (ARM_INST_LDR_I | (rt) << 12 | (rn) << 16 \   argument
    160  #define ARM_LDRB_I(rt, rn, off) (ARM_INST_LDRB_I | (rt) << 12 | (rn) << 16 \   argument
    162  #define ARM_LDRB_R(rt, rn, rm) (ARM_INST_LDRB_R | (rt) << 12 | (rn) << 16 \   argument
    164  #define ARM_LDRH_I(rt, rn, off) (ARM_INST_LDRH_I | (rt) << 12 | (rn) << 16 \   argument
    166  #define ARM_LDRH_R(rt, rn, rm) (ARM_INST_LDRH_R | (rt) << 12 | (rn) << 16 \   argument
    204  #define ARM_STR_I(rt, rn, off) (ARM_INST_STR_I | (rt) << 12 | (rn) << 16 \   argument
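These macros assemble A32 load/store encodings for the BPF JIT by OR-ing register numbers into an opcode template: Rt lands in bits [15:12] and the base register Rn in bits [19:16]. A simplified sketch of that packing; the offset handling is abbreviated here, and the real macros also deal with the immediate's add/subtract form:

#include <stdint.h>

/* Sketch: pack an A32 "LDR Rt, [Rn, #off]"-style encoding from an opcode
 * template. Only a positive 12-bit immediate is handled. */
static inline uint32_t arm_ldst_i(uint32_t inst_template, int rt, int rn,
				  unsigned int off)
{
	return inst_template
		| ((uint32_t)rt << 12)	/* Rt: bits [15:12] */
		| ((uint32_t)rn << 16)	/* Rn: bits [19:16] */
		| (off & 0xfff);	/* imm12            */
}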
|
/arch/powerpc/kernel/ |
D | kvm.c |
    86   static void kvm_patch_ins_ll(u32 *inst, long addr, u32 rt)   in kvm_patch_ins_ll() argument
    89   kvm_patch_ins(inst, KVM_INST_LD | rt | (addr & 0x0000fffc));   in kvm_patch_ins_ll()
    91   kvm_patch_ins(inst, KVM_INST_LWZ | rt | (addr & 0x0000fffc));   in kvm_patch_ins_ll()
    95   static void kvm_patch_ins_ld(u32 *inst, long addr, u32 rt)   in kvm_patch_ins_ld() argument
    98   kvm_patch_ins(inst, KVM_INST_LD | rt | (addr & 0x0000fffc));   in kvm_patch_ins_ld()
    100  kvm_patch_ins(inst, KVM_INST_LWZ | rt | ((addr + 4) & 0x0000fffc));   in kvm_patch_ins_ld()
    104  static void kvm_patch_ins_lwz(u32 *inst, long addr, u32 rt)   in kvm_patch_ins_lwz() argument
    106  kvm_patch_ins(inst, KVM_INST_LWZ | rt | (addr & 0x0000ffff));   in kvm_patch_ins_lwz()
    109  static void kvm_patch_ins_std(u32 *inst, long addr, u32 rt)   in kvm_patch_ins_std() argument
    112  kvm_patch_ins(inst, KVM_INST_STD | rt | (addr & 0x0000fffc));   in kvm_patch_ins_std()
    [all …]
|
/arch/x86/pci/ |
D | irq.c |
    66   struct irq_routing_table *rt;   in pirq_check_routing_table() local
    70   rt = (struct irq_routing_table *) addr;   in pirq_check_routing_table()
    71   if (rt->signature != PIRQ_SIGNATURE ||   in pirq_check_routing_table()
    72   rt->version != PIRQ_VERSION ||   in pirq_check_routing_table()
    73   rt->size % 16 ||   in pirq_check_routing_table()
    74   rt->size < sizeof(struct irq_routing_table))   in pirq_check_routing_table()
    77   for (i = 0; i < rt->size; i++)   in pirq_check_routing_table()
    81   rt);   in pirq_check_routing_table()
    82   return rt;   in pirq_check_routing_table()
    96   struct irq_routing_table *rt;   in pirq_find_routing_table() local
    [all …]
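pirq_check_routing_table() accepts a candidate $PIR interrupt routing table only if the signature, version, and size fields look sane and all of its bytes sum to zero modulo 256. A stand-alone sketch of that validation, with a simplified header struct under a hypothetical name:

#include <stdint.h>
#include <stddef.h>

#define PIRQ_SIGNATURE	(('$' << 0) + ('P' << 8) + ('I' << 16) + ('R' << 24))
#define PIRQ_VERSION	0x0100

/* Sketch: only the header fields the checks below look at. */
struct pirq_table_hdr {
	uint32_t signature;	/* "$PIR" */
	uint16_t version;	/* 0x0100 */
	uint16_t size;		/* total table size, multiple of 16 */
};

static int pirq_table_valid(const uint8_t *addr)
{
	const struct pirq_table_hdr *rt = (const struct pirq_table_hdr *)addr;
	uint8_t sum = 0;
	uint16_t i;

	if (rt->signature != PIRQ_SIGNATURE || rt->version != PIRQ_VERSION ||
	    rt->size % 16 || rt->size < sizeof(*rt))
		return 0;

	for (i = 0; i < rt->size; i++)
		sum += addr[i];		/* the whole table must checksum to 0 */

	return sum == 0;
}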
|
D | pcbios.c |
    353  struct irq_routing_table *rt = NULL;   in pcibios_get_irq_routing_table() local
    388  rt = kmalloc(sizeof(struct irq_routing_table) + opt.size, GFP_KERNEL);   in pcibios_get_irq_routing_table()
    389  if (rt) {   in pcibios_get_irq_routing_table()
    390  memset(rt, 0, sizeof(struct irq_routing_table));   in pcibios_get_irq_routing_table()
    391  rt->size = opt.size + sizeof(struct irq_routing_table);   in pcibios_get_irq_routing_table()
    392  rt->exclusive_irqs = map;   in pcibios_get_irq_routing_table()
    393  memcpy(rt->slots, (void *) page, opt.size);   in pcibios_get_irq_routing_table()
    398  return rt;   in pcibios_get_irq_routing_table()
|
/arch/mips/kvm/ |
D | dyntrans.c |
    78   synci_inst.i_format.rt = synci_op;   in kvm_mips_trans_cache_va()
    98   mfc0_inst.r_format.rd = inst.c0r_format.rt;   in kvm_mips_trans_mfc0()
    102  mfc0_inst.i_format.rt = inst.c0r_format.rt;   in kvm_mips_trans_mfc0()
    124  mtc0_inst.i_format.rt = inst.c0r_format.rt;   in kvm_mips_trans_mtc0()
|
D | emulate.c |
    78    switch (insn.i_format.rt) {   in kvm_compute_return_epc()
    146   arch->gprs[insn.i_format.rt])   in kvm_compute_return_epc()
    156   arch->gprs[insn.i_format.rt])   in kvm_compute_return_epc()
    167   if (insn.i_format.rt != 0)   in kvm_compute_return_epc()
    180   if (insn.i_format.rt != 0)   in kvm_compute_return_epc()
    199   if (insn.i_format.rt != 0)   in kvm_compute_return_epc()
    205   if (insn.i_format.rs != 0 || insn.i_format.rt != 0)   in kvm_compute_return_epc()
    1060  u32 rt, rd, sel;   in kvm_mips_emulate_CP0() local
    1098  rt = inst.c0r_format.rt;   in kvm_mips_emulate_CP0()
    1109  vcpu->arch.gprs[rt] =   in kvm_mips_emulate_CP0()
    [all …]
|
/arch/arm/probes/kprobes/ |
D | actions-arm.c |
    81   int rt = (insn >> 12) & 0xf;   in emulate_ldrdstrd() local
    85   register unsigned long rtv asm("r0") = regs->uregs[rt];   in emulate_ldrdstrd()
    86   register unsigned long rt2v asm("r1") = regs->uregs[rt+1];   in emulate_ldrdstrd()
    99   regs->uregs[rt] = rtv;   in emulate_ldrdstrd()
    100  regs->uregs[rt+1] = rt2v;   in emulate_ldrdstrd()
    110  int rt = (insn >> 12) & 0xf;   in emulate_ldr() local
    126  if (rt == 15)   in emulate_ldr()
    129  regs->uregs[rt] = rtv;   in emulate_ldr()
    141  int rt = (insn >> 12) & 0xf;   in emulate_str() local
    145  register unsigned long rtv asm("r0") = (rt == 15) ? rtpc   in emulate_str()
    [all …]
|
D | actions-thumb.c |
    113  int rt = (insn >> 12) & 0xf;   in t32_simulate_ldr_literal() local
    125  if (rt == 15) {   in t32_simulate_ldr_literal()
    143  regs->uregs[rt] = rtv;   in t32_simulate_ldr_literal()
    191  int rt = (insn >> 12) & 0xf;   in t32_emulate_ldrstr() local
    195  register unsigned long rtv asm("r0") = regs->uregs[rt];   in t32_emulate_ldrstr()
    207  if (rt == 15) /* Can't be true for a STR as they aren't allowed */   in t32_emulate_ldrstr()
    210  regs->uregs[rt] = rtv;   in t32_emulate_ldrstr()
    328  int rt = (insn >> 8) & 0x7;   in t16_simulate_ldr_literal() local
    329  regs->uregs[rt] = base[index];   in t16_simulate_ldr_literal()
    338  int rt = (insn >> 8) & 0x7;   in t16_simulate_ldrstr_sp_relative() local
    [all …]
|
/arch/arm/lib/ |
D | ecard.S |
    16   #define CPSR2SPSR(rt) \   argument
    17   mrs rt, cpsr; \
    18   msr spsr_cxsf, rt
|
/arch/powerpc/include/asm/book3s/64/ |
D | mmu-hash.h |
    567  #define ASM_VSID_SCRAMBLE(rt, rx, size) \   argument
    570  mulld rt,rt,rx; /* rt = rt * MULTIPLIER */ \
    572  srdi rx,rt,VSID_BITS_##size; \
    573  clrldi rt,rt,(64-VSID_BITS_##size); \
    574  add rt,rt,rx; /* add high and low bits */ \
    582  addi rx,rt,1; \
    584  add rt,rt,rx
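ASM_VSID_SCRAMBLE multiplies the proto-VSID by a large multiplier and reduces the product modulo 2^VSID_BITS - 1 without a divide: because 2^N mod (2^N - 1) == 1, the high N bits of the product can simply be added to the low N bits, and a possible carry is folded once more. A C sketch of the same arithmetic with placeholder constants (not the kernel's configured VSID_BITS/VSID_MULTIPLIER values):

#include <stdint.h>

#define N_BITS		37
#define MULTIPLIER	12538237ULL	/* illustrative prime multiplier */

static uint64_t vsid_scramble(uint64_t proto_vsid)
{
	const uint64_t mask = (1ULL << N_BITS) - 1;
	uint64_t x = proto_vsid * MULTIPLIER;	/* low 64 bits, like mulld */

	x = (x >> N_BITS) + (x & mask);	/* add high and low bits */
	x = (x >> N_BITS) + (x & mask);	/* fold the carry, if any */

	return x & mask;	/* may return the all-ones value, which is == 0
				 * mod 2^N - 1; the real macro folds that case
				 * with one extra step */
}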
|
/arch/mips/include/uapi/asm/ |
D | inst.h |
    626  __BITFIELD_FIELD(unsigned int rt : 5,
    634  __BITFIELD_FIELD(unsigned int rt : 5,
    651  __BITFIELD_FIELD(unsigned int rt : 5,
    661  __BITFIELD_FIELD(unsigned int rt : 5,
    671  __BITFIELD_FIELD(unsigned int rt : 5,
    691  __BITFIELD_FIELD(unsigned int rt : 5,
    702  __BITFIELD_FIELD(unsigned int rt : 5,
    761  __BITFIELD_FIELD(unsigned int rt:5,
    807  __BITFIELD_FIELD(unsigned int rt : 5,
    816  __BITFIELD_FIELD(unsigned int rt : 5,
    [all …]
|
/arch/arm64/kernel/ |
D | traps.c |
    466  int rt = (esr & ESR_ELx_SYS64_ISS_RT_MASK) >> ESR_ELx_SYS64_ISS_RT_SHIFT;   in user_cache_maint_handler() local
    470  address = (rt == 31) ? 0 : untagged_addr(regs->regs[rt]);   in user_cache_maint_handler()
    498  int rt = (esr & ESR_ELx_SYS64_ISS_RT_MASK) >> ESR_ELx_SYS64_ISS_RT_SHIFT;   in ctr_read_handler() local
    500  regs->regs[rt] = arm64_ftr_reg_ctrel0.sys_val;   in ctr_read_handler()
    506  int rt = (esr & ESR_ELx_SYS64_ISS_RT_MASK) >> ESR_ELx_SYS64_ISS_RT_SHIFT;   in cntvct_read_handler() local
    509  if (rt != 31)   in cntvct_read_handler()
    510  regs->regs[rt] = arch_counter_get_cntvct();   in cntvct_read_handler()
    516  int rt = (esr & ESR_ELx_SYS64_ISS_RT_MASK) >> ESR_ELx_SYS64_ISS_RT_SHIFT;   in cntfrq_read_handler() local
    518  if (rt != 31)   in cntfrq_read_handler()
    519  regs->regs[rt] = read_sysreg(cntfrq_el0);   in cntfrq_read_handler()
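All four handlers pull the target register index out of the syndrome register the same way: the Rt field of a trapped system instruction sits in ESR_ELx ISS bits [9:5], and register number 31 denotes XZR, so reads from it are treated as zero and writes to it are discarded. A condensed sketch of that decode:

#include <stdint.h>

/* ESR_ELx_SYS64_ISS_RT_* equivalents: Rt occupies ISS bits [9:5]. */
#define SYS64_ISS_RT_SHIFT	5
#define SYS64_ISS_RT_MASK	(0x1fUL << SYS64_ISS_RT_SHIFT)

static inline int esr_sys64_rt(unsigned long esr)
{
	return (esr & SYS64_ISS_RT_MASK) >> SYS64_ISS_RT_SHIFT;
}

/* Register 31 is the zero register: reads yield 0, writes are dropped. */
static inline uint64_t gpr_read(const uint64_t *regs, int rt)
{
	return (rt == 31) ? 0 : regs[rt];
}

static inline void gpr_write(uint64_t *regs, int rt, uint64_t val)
{
	if (rt != 31)
		regs[rt] = val;
}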
|