Searched refs:v0 (Results 1 – 25 of 122) sorted by relevance

/arch/arm64/crypto/
aes-ce-core.S
13 ld1 {v0.16b}, [x2]
22 1: aese v0.16b, v2.16b
23 aesmc v0.16b, v0.16b
25 aese v0.16b, v3.16b
26 aesmc v0.16b, v0.16b
29 aese v0.16b, v1.16b
30 aesmc v0.16b, v0.16b
33 aese v0.16b, v2.16b
34 eor v0.16b, v0.16b, v3.16b
35 st1 {v0.16b}, [x1]
[all …]
aes-ce-ccm-core.S
20 ld1 {v0.16b}, [x0] /* load mac */
31 eor v0.16b, v0.16b, v1.16b
43 3: aese v0.16b, v4.16b
44 aesmc v0.16b, v0.16b
46 aese v0.16b, v5.16b
47 aesmc v0.16b, v0.16b
50 aese v0.16b, v3.16b
51 aesmc v0.16b, v0.16b
54 aese v0.16b, v4.16b
56 eor v0.16b, v0.16b, v5.16b /* final round */
[all …]
aes-modes.S
26 encrypt_block4x v0, v1, v2, v3, w3, x2, x8, w7
31 decrypt_block4x v0, v1, v2, v3, w3, x2, x8, w7
37 encrypt_block5x v0, v1, v2, v3, v4, w3, x2, x8, w7
42 decrypt_block5x v0, v1, v2, v3, v4, w3, x2, x8, w7
63 ld1 {v0.16b-v3.16b}, [x1], #64 /* get 4 pt blocks */
67 st1 {v0.16b-v3.16b}, [x0], #64
74 ld1 {v0.16b}, [x1], #16 /* get next pt block */
75 encrypt_block v0, w3, x2, x5, w6
76 st1 {v0.16b}, [x0], #16
94 ld1 {v0.16b-v3.16b}, [x1], #64 /* get 4 ct blocks */
[all …]
sm3-ce-core.S
88 0: ld1 {v0.16b-v3.16b}, [x1], #64
94 CPU_LE( rev32 v0.16b, v0.16b )
101 qround a, v0, v1, v2, v3, v4
102 qround a, v1, v2, v3, v4, v0
103 qround a, v2, v3, v4, v0, v1
104 qround a, v3, v4, v0, v1, v2
108 qround b, v4, v0, v1, v2, v3
109 qround b, v0, v1, v2, v3, v4
110 qround b, v1, v2, v3, v4, v0
111 qround b, v2, v3, v4, v0, v1
[all …]
crct10dif-ce-core.S
281 CPU_LE( rev64 v0.16b, v0.16b )
289 CPU_LE( ext v0.16b, v0.16b, v0.16b, #8 )
301 eor v0.16b, v0.16b, v8.16b
314 fold_32_bytes \p, v0, v1
347 fold_16_bytes \p, v0, v4
371 CPU_LE( rev64 v0.16b, v0.16b )
372 CPU_LE( ext v0.16b, v0.16b, v0.16b, #8 )
373 eor v7.16b, v7.16b, v0.16b
393 CPU_LE( rev64 v0.16b, v0.16b )
394 CPU_LE( ext v0.16b, v0.16b, v0.16b, #8 )
[all …]
chacha-neon-core.S
46 add v0.4s, v0.4s, v1.4s
47 eor v3.16b, v3.16b, v0.16b
57 add v0.4s, v0.4s, v1.4s
58 eor v3.16b, v3.16b, v0.16b
75 add v0.4s, v0.4s, v1.4s
76 eor v3.16b, v3.16b, v0.16b
86 add v0.4s, v0.4s, v1.4s
87 eor v3.16b, v3.16b, v0.16b
119 ld1 {v0.4s-v3.4s}, [x0]
127 add v0.4s, v0.4s, v8.4s
[all …]
/arch/mips/include/asm/mach-cavium-octeon/
kernel-entry-init.h
30 dmfc0 v0, CP0_CVMMEMCTL_REG
32 dins v0, $0, 0, 6
33 ori v0, CONFIG_CAVIUM_OCTEON_CVMSEG_SIZE
34 dmtc0 v0, CP0_CVMMEMCTL_REG # Write the cavium mem control register
35 dmfc0 v0, CP0_CVMCTL_REG # Read the cavium control register
38 or v0, v0, 0x5001
39 xor v0, v0, 0x1001
43 and v0, v0, v1
44 ori v0, v0, (6 << 7)
64 or v0, v0, 0x2000 # Set IPREF bit.
[all …]
/arch/mips/lib/
strncpy_user.S
34 LONG_L v0, TI_ADDR_LIMIT($28) # pointer ok?
35 and v0, a1
36 bnez v0, .Lfault\@
41 1: EX(lbu, v0, (v1), .Lfault\@)
43 1: EX(lbue, v0, (v1), .Lfault\@)
47 sb v0, (a0)
48 beqz v0, 2f
52 2: PTR_ADDU v0, a1, t0
53 xor v0, a1
54 bltz v0, .Lfault\@
[all …]
strnlen_user.S
31 LONG_L v0, TI_ADDR_LIMIT($28) # pointer ok?
32 and v0, a0
33 bnez v0, .Lfault\@
35 move v0, a0
42 beq v0, a1, 1f # limit reached?
44 EX(lb, t0, (v0), .Lfault\@)
46 EX(lbe, t0, (v0), .Lfault\@)
52 PTR_ADDIU v0, 1
54 PTR_ADDU v0, AT
58 PTR_SUBU v0, a0
[all …]
/arch/arm64/lib/
xor-neon.c
19 register uint64x2_t v0, v1, v2, v3; in xor_arm64_neon_2() local
24 v0 = veorq_u64(vld1q_u64(dp1 + 0), vld1q_u64(dp2 + 0)); in xor_arm64_neon_2()
30 vst1q_u64(dp1 + 0, v0); in xor_arm64_neon_2()
47 register uint64x2_t v0, v1, v2, v3; in xor_arm64_neon_3() local
52 v0 = veorq_u64(vld1q_u64(dp1 + 0), vld1q_u64(dp2 + 0)); in xor_arm64_neon_3()
58 v0 = veorq_u64(v0, vld1q_u64(dp3 + 0)); in xor_arm64_neon_3()
64 vst1q_u64(dp1 + 0, v0); in xor_arm64_neon_3()
83 register uint64x2_t v0, v1, v2, v3; in xor_arm64_neon_4() local
88 v0 = veorq_u64(vld1q_u64(dp1 + 0), vld1q_u64(dp2 + 0)); in xor_arm64_neon_4()
94 v0 = veorq_u64(v0, vld1q_u64(dp3 + 0)); in xor_arm64_neon_4()
[all …]
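
The xor-neon.c hits differ from the rest of these results: here v0 is a C variable that the compiler maps onto a NEON register, not a register named in hand-written assembly. Below is a minimal sketch of the pattern the matches come from, assuming only the standard arm_neon.h intrinsics; the kernel routine unrolls across v0-v3 and handles up to five source buffers, so this is an illustration, not the actual implementation.

#include <arm_neon.h>
#include <stddef.h>

/* XOR 'bytes' bytes of src into dst, 16 bytes per iteration, through a
 * single NEON value (the v0 of the search hits above). 'bytes' is
 * assumed to be a multiple of 16 in this sketch. */
static void xor_neon_2_sketch(size_t bytes, unsigned long *dst,
			      const unsigned long *src)
{
	uint64_t *dp1 = (uint64_t *)dst;
	const uint64_t *dp2 = (const uint64_t *)src;
	size_t i;

	for (i = 0; i < bytes / 16; i++) {
		uint64x2_t v0 = veorq_u64(vld1q_u64(dp1), vld1q_u64(dp2));
		vst1q_u64(dp1, v0);
		dp1 += 2;
		dp2 += 2;
	}
}
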
/arch/powerpc/crypto/
crc32-vpmsum_core.S
101 vspltisw v0,-1
103 vsldoi mask_32bit,zeroes,v0,4
104 vsldoi mask_64bit,zeroes,v0,8
160 vxor v0,v0,v0
259 vxor v0,v0,v8
316 vxor v0,v0,v8
350 vxor v0,v0,v8
366 vsldoi v0,v0,zeroes,4
396 vxor v16,v0,v8
425 lvx v0,0,r3
[all …]
/arch/mips/kernel/
scall32-o32.S
88 subu v0, v0, __NR_O32_Linux # check syscall number
89 sltiu t0, v0, __NR_O32_Linux_syscalls
92 sll t0, v0, 2
102 sltu t0, t0, v0
107 negu v0 # error
109 1: sw v0, PT_R2(sp) # result
124 move a1, v0
125 subu t2, v0, __NR_O32_Linux
131 bltz v0, 1f # seccomp failed? Skip syscall
134 lw v0, PT_R2(sp) # Restore syscall (maybe modified)
[all …]
scall64-o32.S
36 dsubu t0, v0, __NR_O32_Linux # check syscall number
43 move a1, v0
88 dsll t0, v0, 3 # offset into table
94 sltu t0, t0, v0
99 dnegu v0 # error
101 1: sd v0, PT_R2(sp) # result
125 subu t1, v0, __NR_O32_Linux
126 move a1, v0
133 bltz v0, 1f # seccomp failed? Skip syscall
136 ld v0, PT_R2(sp) # Restore syscall (maybe modified)
[all …]
scall64-n32.S
35 dsubu t0, v0, __NR_N32_Linux # check syscall number
53 dsll t0, v0, 3 # offset into table
59 sltu t0, t0, v0
64 dnegu v0 # error
66 1: sd v0, PT_R2(sp) # result
75 move a1, v0
78 bltz v0, 1f # seccomp failed? Skip syscall
81 ld v0, PT_R2(sp) # Restore syscall (maybe modified)
89 dsubu t2, v0, __NR_N32_Linux # check (new) syscall number
scall64-n64.S
56 dsubu t2, v0, __NR_64_Linux
69 sltu t0, t0, v0
74 dnegu v0 # error
76 1: sd v0, PT_R2(sp) # result
86 move a1, v0
89 bltz v0, 1f # seccomp failed? Skip syscall
92 ld v0, PT_R2(sp) # Restore syscall (maybe modified)
105 li v0, ENOSYS # error
106 sd v0, PT_R2(sp)
bmips_5xxx_init.S
139 li v0, 0x40
140 sllv v0, v0, a0
168 sll v0, v0, a0
188 multu v0, a0 /*multu is interlocked, so no need to insert nops */
189 mflo v0
194 move v0, zero
232 li v0, 0x40
233 sllv v0, v0, a0
260 sll v0, v0, a0
279 multu v0, a0 /*multu is interlocked, so no need to insert nops */
[all …]
/arch/mips/include/asm/mach-malta/
kernel-entry-init.h
113 PTR_LA v0, 0x9fc00534 /* YAMON print */
114 lw v0, (v0)
117 jal v0
119 PTR_LA v0, 0x9fc00520 /* YAMON exit */
120 lw v0, (v0)
122 jal v0
/arch/alpha/include/asm/
pal.h
123 register unsigned long v0 __asm__("$0"); in qemu_get_walltime()
127 : "=r"(v0), "+r"(a0) in qemu_get_walltime()
131 return v0; in qemu_get_walltime()
137 register unsigned long v0 __asm__("$0"); in qemu_get_alarm()
141 : "=r"(v0), "+r"(a0) in qemu_get_alarm()
145 return v0; in qemu_get_alarm()
175 register unsigned long v0 __asm__("$0"); in qemu_get_vmtime()
179 : "=r"(v0), "+r"(a0) in qemu_get_vmtime()
183 return v0; in qemu_get_vmtime()
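
In these alpha pal.h hits, v0 is again a C variable, pinned to the Alpha return-value register $0 so that the PALcode call's result lands in it. The following is a rough sketch of that idiom with a made-up PAL function number (0x27) and a simplified clobber list; it is not the kernel's exact wrapper.

/* Bind locals to fixed Alpha registers around a call_pal instruction.
 * The function number 0x27 and the "memory" clobber are illustrative. */
static inline unsigned long pal_call_sketch(unsigned long arg)
{
	register unsigned long v0 __asm__("$0");        /* result comes back in $0 (v0) */
	register unsigned long a0 __asm__("$16") = arg; /* first argument goes in $16 (a0) */

	__asm__ __volatile__("call_pal %2"
			     : "=r"(v0), "+r"(a0)
			     : "i"(0x27)
			     : "memory");
	return v0;
}
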
/arch/alpha/lib/
strchr.S
25 andnot a0, 7, v0 # .. e1 : align our loop pointer
41 $loop: ldq t0, 8(v0) # e0 :
42 addq v0, 8, v0 # .. e1 :
63 addq v0, t4, v0 # .. e1 :
64 addq v0, t2, v0 # e0 :
68 mov zero, v0 # e0 :
strrchr.S
29 andnot a0, 7, v0 # .. e1 : align source addr
45 ldq t0, 8(v0) # e0 : load next quadword
46 cmovne t3, v0, t6 # .. e1 : save previous comparisons match
48 addq v0, 8, v0 # .. e1 :
63 cmovne t3, v0, t6 # e0 :
80 addq t6, t0, v0 # .. e1 : add our aligned base ptr to the mix
81 addq v0, t1, v0 # e0 :
85 mov zero, v0 # e0 :
ev67-strchr.S
39 andnot a0, 7, v0 # E : align our loop pointer
69 $loop: ldq t0, 8(v0) # L : Latency=3
70 addq v0, 8, v0 # E :
82 addq v0, a2, v0 # E : Add in the bit number from above
84 cmoveq t1, $31, v0 # E : Two mapping slots, latency = 2
/arch/ia64/include/asm/
pal.h
781 u64 v0; member
889 features_avail->pal_bus_features_val = iprv.v0; in ia64_pal_bus_get_features()
916 conf->pcci_info_1.pcci1_data = iprv.v0; in ia64_pal_cache_config_info()
934 prot->pcp_info[0].pcpi_data = iprv.v0 & 0xffffffff; in ia64_pal_cache_prot_info()
935 prot->pcp_info[1].pcpi_data = iprv.v0 >> 32; in ia64_pal_cache_prot_info()
954 *vector = iprv.v0; in ia64_pal_cache_flush()
999 *cache_levels = iprv.v0; in ia64_pal_cache_summary()
1024 *buffer_size = iprv.v0; in ia64_pal_copy_info()
1037 *pal_proc_offset = iprv.v0; in ia64_pal_copy_pal()
1048 *inst_regs = iprv.v0; in ia64_pal_debug_info()
[all …]
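
The ia64 pal.h matches are different again: v0 is a struct member. PAL call wrappers there return their results in a small struct, and each wrapper copies the value it needs out of the v0 field. Here is a simplified sketch of that shape with made-up names and field types; the real struct and call macros live in the ia64 headers.

/* Hypothetical, simplified return-value struct mirroring the pattern above. */
struct pal_retval_sketch {
	long status;                 /* 0 on success, negative on error */
	unsigned long v0, v1, v2;    /* up to three returned values */
};

/* A wrapper in the style of the hits above: hand the caller the value
 * that the PAL call returned in v0, and propagate the status code. */
static long cache_summary_sketch(struct pal_retval_sketch iprv,
				 unsigned long *cache_levels)
{
	if (iprv.status == 0)
		*cache_levels = iprv.v0;
	return iprv.status;
}
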
/arch/powerpc/lib/
memcpy_power7.S
316 lvx v0,r4,r9
319 stvx v0,r3,r9
326 lvx v0,r4,r11
331 stvx v0,r3,r11
361 lvx v0,r4,r16
370 stvx v0,r3,r16
387 lvx v0,r4,r11
392 stvx v0,r3,r11
397 lvx v0,r4,r9
400 stvx v0,r3,r9
[all …]
/arch/s390/crypto/
crc32le-vx.S
126 VZERO %v0 /* Clear V0 */
127 VLVGF %v0,%r2,3 /* Load CRC into rightmost word */
136 VX %v1,%v0,%v1 /* V1 ^= CRC */
208 VSRLB %v0,CONST_R4R3,%v9
209 VLEIG %v0,1,0
217 VGFMG %v1,%v0,%v1
/arch/mips/netlogic/common/
reset.S
106 sll v0, t2, 5
108 ori v1, v0, 0x3 /* way0 | write_enable | write_active */
116 ori v1, v0, 0x7 /* way1 | write_enable | write_active */
234 mfc0 v0, CP0_EBASE
235 andi v0, 0x3ff /* v0 <- node/core */
241 andi v1, v0, 0x3 /* v1 <- thread id */
257 beqz v0, 4f /* boot cpu (cpuid == 0)? */
272 sll v1, v0, 2
