/kernel/linux/linux-5.10/arch/arm/mach-at91/ |
D | pm_suspend.S |
    19  tmp1    .req    r4                                 label
    27  1:  ldr  tmp1, [pmc, #AT91_PMC_SR]
    28      tst  tmp1, #AT91_PMC_MCKRDY
    36  1:  ldr  tmp1, [pmc, #AT91_PMC_SR]
    37      tst  tmp1, #AT91_PMC_MOSCS
    45  1:  ldr  tmp1, [pmc, #AT91_PMC_SR]
    46      tst  tmp1, #AT91_PMC_MOSCSELS
    56      mov  tmp1, #AT91_PMC_PCK
    57      str  tmp1, [pmc, #AT91_PMC_SCDR]
    63      mcr  p15, 0, tmp1, c7, c0, 4
    [all …]
|
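The 1:/ldr/tst triplets above are the PMC status-poll idiom: spin on AT91_PMC_SR until the ready bit for the clock being reprogrammed appears. A minimal C rendition of that loop, assuming a memory-mapped PMC base; the register offset and bit value mirror at91_pmc.h but should be treated as illustrative here:

    #include <stdint.h>

    #define AT91_PMC_SR      0x68          /* status register offset */
    #define AT91_PMC_MCKRDY  (1 << 3)      /* master clock ready     */

    /* Spin until 'mask' is set in PMC_SR, as the 1: ldr/tst loops do. */
    static void pmc_wait(volatile uint8_t *pmc_base, uint32_t mask)
    {
            while (!(*(volatile uint32_t *)(pmc_base + AT91_PMC_SR) & mask))
                    ;
    }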
/kernel/linux/linux-5.10/arch/arm64/include/asm/ |
D | asm_pointer_auth.h |
    16  .macro ptrauth_keys_install_user tsk, tmp1, tmp2, tmp3
    17      mov  \tmp1, #THREAD_KEYS_USER
    18      add  \tmp1, \tsk, \tmp1
    22      ldp  \tmp2, \tmp3, [\tmp1, #PTRAUTH_USER_KEY_APIA]
    25      ldp  \tmp2, \tmp3, [\tmp1, #PTRAUTH_USER_KEY_APIB]
    28      ldp  \tmp2, \tmp3, [\tmp1, #PTRAUTH_USER_KEY_APDA]
    31      ldp  \tmp2, \tmp3, [\tmp1, #PTRAUTH_USER_KEY_APDB]
    36      ldp  \tmp2, \tmp3, [\tmp1, #PTRAUTH_USER_KEY_APGA]
    42  .macro __ptrauth_keys_install_kernel_nosync tsk, tmp1, tmp2, tmp3
    43      mov  \tmp1, #THREAD_KEYS_KERNEL
    [all …]
|
D | asm-uaccess.h |
    15  .macro __uaccess_ttbr0_disable, tmp1
    16      mrs  \tmp1, ttbr1_el1               // swapper_pg_dir
    17      bic  \tmp1, \tmp1, #TTBR_ASID_MASK
    18      sub  \tmp1, \tmp1, #PAGE_SIZE       // reserved_pg_dir just before swapper_pg_dir
    19      msr  ttbr0_el1, \tmp1               // set reserved TTBR0_EL1
    21      add  \tmp1, \tmp1, #PAGE_SIZE
    22      msr  ttbr1_el1, \tmp1               // set reserved ASID
    26  .macro __uaccess_ttbr0_enable, tmp1, tmp2
    27      get_current_task \tmp1
    28      ldr  \tmp1, [\tmp1, #TSK_TI_TTBR0]  // load saved TTBR0_EL1
    [all …]
|
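The disable path above is arm64's software-PAN trick: TTBR0_EL1 is pointed at an all-invalid table (reserved_pg_dir) found by pointer arithmetic, one page below swapper_pg_dir, rather than by a symbol load. A sketch of just that arithmetic, assuming 4 KiB pages and the 16-bit ASID field in bits 63:48; arm64-only, EL1, and it omits the reserved-ASID write to TTBR1 that lines 21-22 perform:

    #include <stdint.h>

    #define PAGE_SIZE       4096UL
    #define TTBR_ASID_MASK  (0xffffUL << 48)

    static inline void uaccess_ttbr0_disable_sketch(void)
    {
            uint64_t t;

            asm volatile("mrs %0, ttbr1_el1" : "=r" (t)); /* swapper_pg_dir */
            t &= ~TTBR_ASID_MASK;     /* drop the ASID bits                 */
            t -= PAGE_SIZE;           /* reserved_pg_dir sits just below    */
            asm volatile("msr ttbr0_el1, %0" : : "r" (t));
            asm volatile("isb");
    }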
/kernel/linux/linux-5.10/arch/sparc/include/asm/ |
D | head_64.h |
    35  #define BRANCH_IF_SUN4V(tmp1,label) \              argument
    36      sethi   %hi(is_sun4v), %tmp1; \
    37      lduw    [%tmp1 + %lo(is_sun4v)], %tmp1; \
    38      brnz,pn %tmp1, label; \
    41  #define BRANCH_IF_CHEETAH_BASE(tmp1,tmp2,label) \  argument
    42      rdpr    %ver, %tmp1; \
    44      srlx    %tmp1, 32, %tmp1; \
    46      cmp     %tmp1, %tmp2; \
    50  #define BRANCH_IF_JALAPENO(tmp1,tmp2,label) \      argument
    51      rdpr    %ver, %tmp1; \
    [all …]
|
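BRANCH_IF_SUN4V just tests a kernel variable, but the CPU-model macros key off the privileged %ver register: srlx %tmp1, 32 discards the per-chip mask/maxtl fields so only the manufacturer and implementation halves remain for the compare. The same test as C, with the ID left abstract since the __CHEETAH_ID-style constants are not shown above:

    #include <stdint.h>

    /* %ver >> 32 == (manufacturer << 16) | implementation */
    static inline int cpu_ver_matches(uint64_t ver, uint32_t id)
    {
            return (uint32_t)(ver >> 32) == id;
    }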
/kernel/linux/linux-5.10/arch/arm/mach-tegra/ |
D | sleep.h |
    81  .macro check_cpu_part_num part_num, tmp1, tmp2
    82      mrc   p15, 0, \tmp1, c0, c0, 0
    83      ubfx  \tmp1, \tmp1, #4, #12
    85      cmp   \tmp1, \tmp2
    89  .macro exit_smp, tmp1, tmp2
    90      mrc   p15, 0, \tmp1, c1, c0, 1        @ ACTLR
    91      bic   \tmp1, \tmp1, #(1<<6) | (1<<0)  @ clear ACTLR.SMP | ACTLR.FW
    92      mcr   p15, 0, \tmp1, c1, c0, 1        @ ACTLR
    95      check_cpu_part_num 0xc09, \tmp1, \tmp2
    96      mrceq p15, 0, \tmp1, c0, c0, 5
    [all …]
|
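check_cpu_part_num reads MIDR (CP15 c0,c0,0), and the ubfx extracts the 12-bit primary part number from bits [15:4]; 0xc09 is the Cortex-A9. The extraction, as a small C sketch:

    #include <stdint.h>

    static inline int cpu_part_is(uint32_t midr, uint32_t part_num)
    {
            return ((midr >> 4) & 0xfff) == part_num;  /* ubfx #4, #12 */
    }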
/kernel/linux/linux-5.10/arch/arm64/lib/ |
D | csum.c |
    63  __uint128_t tmp1, tmp2, tmp3, tmp4;            in do_csum() local
    65  tmp1 = *(__uint128_t *)ptr;                    in do_csum()
    74  tmp1 += (tmp1 >> 64) | (tmp1 << 64);           in do_csum()
    78  tmp1 = ((tmp1 >> 64) << 64) | (tmp2 >> 64);    in do_csum()
    79  tmp1 += (tmp1 >> 64) | (tmp1 << 64);           in do_csum()
    82  tmp1 = ((tmp1 >> 64) << 64) | (tmp3 >> 64);    in do_csum()
    83  tmp1 += (tmp1 >> 64) | (tmp1 << 64);           in do_csum()
    84  tmp1 = ((tmp1 >> 64) << 64) | sum64;           in do_csum()
    85  tmp1 += (tmp1 >> 64) | (tmp1 << 64);           in do_csum()
    86  sum64 = tmp1 >> 64;                            in do_csum()
|
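Lines 74-86 are all the same fold: adding a __uint128_t to itself rotated by 64 bits leaves, in the upper half, the 64-bit sum of its two halves with end-around carry, which is exactly the reduction a ones'-complement checksum needs. Isolated as a sketch (gcc/clang on 64-bit targets provide __uint128_t):

    static inline unsigned long fold128(__uint128_t t)
    {
            t += (t >> 64) | (t << 64);  /* hi + lo, end-around carry */
            return (unsigned long)(t >> 64);
    }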
D | strcmp.S |
    45  tmp1 .req x7                                   label
    52  eor   tmp1, src1, src2
    54  tst   tmp1, #7
    56  ands  tmp1, src1, #7
    68  sub   tmp1, data1, zeroones
    71  bic   has_nul, tmp1, tmp2       /* Non-zero if NUL terminator. */
    84  lsl   tmp1, tmp1, #3            /* Bytes beyond alignment -> bits. */
    86  neg   tmp1, tmp1                /* Bits to alignment -64. */
    90  CPU_BE( lsl tmp2, tmp2, tmp1 )  /* Shift (tmp1 & 63). */
    92  CPU_LE( lsr tmp2, tmp2, tmp1 )  /* Shift (tmp1 & 63). */
    [all …]
|
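The sub/bic pair at lines 68/71 is the classic SWAR NUL detector: with zeroones = 0x0101…01 and tmp2 = data1 | 0x7f7f…7f, a byte of the result has its top bit set iff the corresponding byte of data1 was zero. The same check in C (bic is and-not):

    #include <stdint.h>

    static inline uint64_t has_zero_byte(uint64_t x)
    {
            const uint64_t zeroones = 0x0101010101010101ULL;
            const uint64_t sevenf   = 0x7f7f7f7f7f7f7f7fULL;

            return (x - zeroones) & ~(x | sevenf);
    }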
D | strlen.S |
    36  tmp1 .req x7                                   label
    50  ands  tmp1, srcin, #15
    66  sub   tmp1, data1, zeroones
    70  bic   has_nul1, tmp1, tmp2
    88  CPU_BE( sub tmp1, data2, zeroones )
    90  CPU_BE( bic has_nul2, tmp1, tmp2 )
    99  cmp   tmp1, #8
    100 neg   tmp1, tmp1
    102 lsl   tmp1, tmp1, #3            /* Bytes beyond alignment -> bits. */
    105 CPU_BE( lsl tmp2, tmp2, tmp1 )  /* Shift (tmp1 & 63). */
    [all …]
|
D | strncmp.S |
    46  tmp1 .req x8                                   label
    57  eor   tmp1, src1, src2
    59  tst   tmp1, #7
    61  ands  tmp1, src1, #7
    81  sub   tmp1, data1, zeroones
    85  bics  has_nul, tmp1, tmp2       /* Non-zero if NUL terminator. */
    121 neg   tmp3, tmp1, lsl #3        /* 64 - bits(bytes beyond align). */
    133 add   limit, limit, tmp1
    134 add   tmp3, tmp3, tmp1
    147 and   tmp1, src1, #7
    [all …]
|
D | memcmp.S |
    42  tmp1 .req x8                                   label
    51  eor   tmp1, src1, src2
    52  tst   tmp1, #7
    54  ands  tmp1, src1, #7
    108 add   tmp3, tmp3, tmp1
    110 add   limit, limit, tmp1        /* Adjust the limit for the extra. */
    112 lsl   tmp1, tmp1, #3            /* Bytes beyond alignment -> bits. */
    113 neg   tmp1, tmp1                /* Bits to alignment -64. */
    116 CPU_BE( lsl tmp2, tmp2, tmp1 )  /* Big-endian. Early bytes are at MSB. */
    118 CPU_LE( lsr tmp2, tmp2, tmp1 )
    [all …]
|
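The lsl #3/neg pair that memcmp.S shares with strcmp.S and strlen.S converts the count of bytes read before the true start (after rounding the pointer down to 8) into a shift amount for an all-ones mask; OR-ing that mask into both loaded words makes the pre-start bytes equal and non-NUL, so they cannot produce a spurious mismatch or terminator. A little-endian sketch of the mask those shifts produce:

    #include <stdint.h>

    /* Ones in the 'off' low-order bytes, i.e. the bytes that precede
     * the requested start in an aligned little-endian 8-byte load. */
    static inline uint64_t pre_start_mask(unsigned off)  /* 0..7 */
    {
            return ~(~0ULL << (off * 8));
    }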
D | strnlen.S |
    38  tmp1 .req x8                                   label
    54  ands  tmp1, srcin, #15
    74  sub   tmp1, data1, zeroones
    78  bic   has_nul1, tmp1, tmp2
    81  orr   tmp1, has_nul1, has_nul2
    82  ccmp  tmp1, #0, #0, pl          /* NZCV = 0000 */
    85  cbz   tmp1, .Lhit_limit         /* No null in final Qword. */
    106 CPU_BE( sub tmp1, data2, zeroones )
    108 CPU_BE( bic has_nul2, tmp1, tmp2 )
    135 add   tmp3, tmp3, tmp1
    [all …]
|
/kernel/linux/linux-5.10/arch/mips/crypto/ |
D | poly1305-mips.pl |
    76  my ($in0,$in1,$tmp0,$tmp1,$tmp2,$tmp3,$tmp4) = ($a4,$a5,$a6,$a7,$at,$t0,$t1);
    139 subu   $tmp1,$zero,$tmp0
    142 dsrlv  $tmp3,$in1,$tmp1
    144 dsrlv  $tmp2,$tmp2,$tmp1
    147 dsllv  $tmp3,$in1,$tmp1
    149 dsllv  $tmp2,$tmp2,$tmp1
    171 and    $tmp1,$in0,$tmp0         # byte swap
    175 dsll   $tmp1,24
    180 or     $tmp1,$tmp2
    190 or     $tmp1,$tmp2
    [all …]
|
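The subu/dsrlv/dsllv cluster (lines 139-149) is the usual MIPS64 idiom for reading a 64-bit value that straddles an alignment boundary: two aligned loads merged with complementary variable shifts (the later and/dsll/or run, per its comment, is a shift-and-mask byte swap for big-endian hosts). A portable sketch of the merge, assuming sh is the misalignment in bits with 0 < sh < 64:

    #include <stdint.h>

    static inline uint64_t merge_unaligned(uint64_t lo, uint64_t hi,
                                           unsigned sh)
    {
            return (lo >> sh) | (hi << (64 - sh));  /* little-endian view */
    }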
/kernel/linux/linux-5.10/tools/testing/selftests/bpf/prog_tests/ |
D | mmap.c |
    24  void *bss_mmaped = NULL, *map_mmaped = NULL, *tmp0, *tmp1, *tmp2;            in test_mmap() local
    41  tmp1 = mmap(NULL, 4096, PROT_READ | PROT_WRITE, MAP_SHARED, rdmap_fd, 0);    in test_mmap()
    42  if (CHECK(tmp1 != MAP_FAILED, "rdonly_write_mmap", "unexpected success\n")) {    in test_mmap()
    43      munmap(tmp1, 4096);                                                      in test_mmap()
    47  tmp1 = mmap(NULL, 4096, PROT_READ, MAP_SHARED, rdmap_fd, 0);                 in test_mmap()
    48  if (CHECK(tmp1 == MAP_FAILED, "rdonly_read_mmap", "failed: %d\n", errno))    in test_mmap()
    166 tmp1 = mmap(NULL, map_sz, PROT_READ | PROT_WRITE, MAP_SHARED,                in test_mmap()
    168 if (CHECK(tmp1 != MAP_FAILED, "data_mmap", "mmap succeeded\n")) {            in test_mmap()
    169     munmap(tmp1, map_sz);                                                    in test_mmap()
    192 tmp1 = mmap(tmp0, 3 * page_size, PROT_READ, MAP_SHARED | MAP_FIXED,          in test_mmap()
    [all …]
|
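The rdonly_write_mmap/rdonly_read_mmap checks are a negative/positive pair: mapping the read-only map fd with PROT_WRITE must fail, while PROT_READ must succeed (CHECK is the selftest harness's assert macro). The expected-failure half, reduced to a standalone helper as a sketch:

    #include <sys/mman.h>

    /* Returns 0 if the kernel correctly refuses a writable mapping. */
    static int rdonly_write_refused(int rdmap_fd)
    {
            void *tmp1 = mmap(NULL, 4096, PROT_READ | PROT_WRITE,
                              MAP_SHARED, rdmap_fd, 0);

            if (tmp1 == MAP_FAILED)
                    return 0;        /* the failure we wanted      */
            munmap(tmp1, 4096);      /* unexpected success: undo it */
            return -1;
    }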
/kernel/linux/linux-5.10/arch/s390/lib/ |
D | uaccess.c |
    108 unsigned long tmp1, tmp2;                      in copy_from_user_mvcos() local
    110 tmp1 = -4096UL;                                in copy_from_user_mvcos()
    129 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)    in copy_from_user_mvcos()
    137 unsigned long tmp1, tmp2;                      in copy_from_user_mvcp() local
    141 tmp1 = -256UL;                                 in copy_from_user_mvcp()
    165 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)    in copy_from_user_mvcp()
    183 unsigned long tmp1, tmp2;                      in copy_to_user_mvcos() local
    185 tmp1 = -4096UL;                                in copy_to_user_mvcos()
    204 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2)    in copy_to_user_mvcos()
    212 unsigned long tmp1, tmp2;                      in copy_to_user_mvcs() local
    [all …]
|
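tmp1 = -4096UL (and -256UL for the MVCP variant, whose instruction moves at most 256 bytes at a time) parameterizes the exception fix-up: when a copy faults, the retry is clipped at the next page (or 256-byte) boundary so only the genuinely inaccessible bytes are skipped. A sketch of the boundary arithmetic such a fix-up relies on, under that reading of the code:

    /* Bytes from 'addr' to the end of its 4 KiB page, in 1..4096. */
    static inline unsigned long bytes_to_page_end(unsigned long addr)
    {
            return 4096UL - (addr & 4095UL);
    }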
/kernel/linux/linux-5.10/tools/lib/ |
D | rbtree.c |
    230 struct rb_node *node = NULL, *sibling, *tmp1, *tmp2;    in ____rb_erase_color() local
    252 tmp1 = sibling->rb_left;                       in ____rb_erase_color()
    253 WRITE_ONCE(parent->rb_right, tmp1);            in ____rb_erase_color()
    255 rb_set_parent_color(tmp1, parent, RB_BLACK);   in ____rb_erase_color()
    259 sibling = tmp1;                                in ____rb_erase_color()
    261 tmp1 = sibling->rb_right;                      in ____rb_erase_color()
    262 if (!tmp1 || rb_is_black(tmp1)) {              in ____rb_erase_color()
    319 tmp1 = tmp2->rb_right;                         in ____rb_erase_color()
    320 WRITE_ONCE(sibling->rb_left, tmp1);            in ____rb_erase_color()
    323 if (tmp1)                                      in ____rb_erase_color()
    [all …]
|
/kernel/linux/linux-5.10/lib/ |
D | rbtree.c |
    230 struct rb_node *node = NULL, *sibling, *tmp1, *tmp2;    in ____rb_erase_color() local
    252 tmp1 = sibling->rb_left;                       in ____rb_erase_color()
    253 WRITE_ONCE(parent->rb_right, tmp1);            in ____rb_erase_color()
    255 rb_set_parent_color(tmp1, parent, RB_BLACK);   in ____rb_erase_color()
    259 sibling = tmp1;                                in ____rb_erase_color()
    261 tmp1 = sibling->rb_right;                      in ____rb_erase_color()
    262 if (!tmp1 || rb_is_black(tmp1)) {              in ____rb_erase_color()
    319 tmp1 = tmp2->rb_right;                         in ____rb_erase_color()
    320 WRITE_ONCE(sibling->rb_left, tmp1);            in ____rb_erase_color()
    323 if (tmp1)                                      in ____rb_erase_color()
    [all …]
|
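Lines 252-259 (identical in the tools/lib mirror above) are case 1 of the erase rebalance: the sibling is red, so a left rotation at parent makes it the subtree root, with tmp1, the sibling's inner child, re-parented onto parent. Stripped of WRITE_ONCE, parent back-pointers, colors, and augment callbacks, the pointer surgery is just:

    struct node {
            struct node *left, *right;
            /* color, parent, ... elided */
    };

    /* Rotate left at 'parent'; returns the new subtree root. */
    static struct node *rotate_left(struct node *parent)
    {
            struct node *sibling = parent->right;
            struct node *tmp1 = sibling->left;  /* inner child */

            parent->right = tmp1;
            sibling->left = parent;
            return sibling;
    }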
/kernel/linux/linux-5.10/arch/arc/include/asm/ |
D | uaccess.h |
    172 unsigned long tmp1, tmp2, tmp3, tmp4;              in raw_copy_from_user() local
    247 "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)     in raw_copy_from_user()
    271 "=r"(tmp1), "=r"(tmp2)                             in raw_copy_from_user()
    291 : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)     in raw_copy_from_user()
    311 : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)     in raw_copy_from_user()
    329 : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)     in raw_copy_from_user()
    386 "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)     in raw_copy_from_user()
    399 unsigned long tmp1, tmp2, tmp3, tmp4;              in raw_copy_to_user() local
    469 "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)     in raw_copy_to_user()
    493 "=r"(tmp1), "=r"(tmp2)                             in raw_copy_to_user()
    [all …]
|
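The pattern worth noting is that tmp1..tmp4 appear in the output lists as "=r" operands rather than in the clobber list: the compiler then chooses the scratch registers, instead of the asm pinning specific ones. A minimal sketch of that shape, written for arm64 rather than ARC (the ARC asm bodies are elided above) purely for concreteness:

    static inline unsigned long fold_hi16(unsigned long x)
    {
            unsigned long tmp1;  /* compiler-allocated scratch */

            asm ("lsr %1, %0, #16\n\t"
                 "add %0, %0, %1"
                 : "+r" (x), "=&r" (tmp1));  /* & = early-clobber */
            return x;
    }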
/kernel/linux/linux-5.10/arch/ia64/lib/ |
D | do_csum.S |
    105 #define tmp1 r26                               macro
    136 add    tmp1=buf,len         // last byte's address
    149 adds   tmp2=-1,tmp1         // last-1
    150 and    lastoff=7,tmp1       // how many bytes off for last element
    152 sub    tmp1=8,lastoff       // complement to lastoff
    161 and    tmp1=7, tmp1         // make sure that if tmp1==8 -> tmp1=0
    165 shl    tmp1=tmp1,3          // number of bits
    170 shr.u  tmask=tmask,tmp1     // build tail mask, mask off ]8,lastoff]
    263 zxt4   tmp1=result1[0]
    266 add    result1[0]=tmp1,tmp2
    [all …]
|
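Lines 150-170 build the mask for the final, possibly partial, 8-byte word: lastoff is how far the last byte sits into its word, and tmask is shifted so that only the valid low-order bytes survive (ia64 is little-endian). The same construction in C:

    #include <stdint.h>

    static inline uint64_t tail_mask(unsigned lastoff)
    {
            unsigned drop = (8 - lastoff) & 7;  /* the 'and tmp1=7,tmp1' step */

            return ~(uint64_t)0 >> (drop * 8);
    }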
/kernel/linux/linux-5.10/arch/hexagon/mm/ |
D | strnlen_user.S |
    12  #define tmp1 r3                                macro
    47  tmp1 = memb(start++#1);                        define
    50  P0 = cmp.eq(tmp1,#0);
    74  tmp1 = P0;                                     define
    78  tmp1 = ct0(tmp1);                              define
    83  P0 = cmp.eq(tmp1,#32);
    85  if (!P0.new) start = add(obo,tmp1);
    95  P0 = cmp.gt(tmp1,mod8);
    97  start = add(obo,tmp1);
|
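tmp1 = P0 materializes the vector-compare predicate as a bitmask, and ct0 (count trailing zeros) turns it into the index of the first matching (NUL) byte; 32 means no lane matched, which line 83 tests for. A portable rendering of that step:

    #include <stdint.h>

    static inline unsigned first_match(uint32_t mask)
    {
            return mask ? (unsigned)__builtin_ctz(mask) : 32;  /* ct0(0) == 32 */
    }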
/kernel/linux/linux-5.10/arch/arm/include/asm/ |
D | tls.h |
    10  .macro switch_tls_none, base, tp, tpuser, tmp1, tmp2
    13  .macro switch_tls_v6k, base, tp, tpuser, tmp1, tmp2
    20  .macro switch_tls_v6, base, tp, tpuser, tmp1, tmp2
    21      ldr  \tmp1, =elf_hwcap
    22      ldr  \tmp1, [\tmp1, #0]
    24      tst  \tmp1, #HWCAP_TLS          @ hardware TLS available?
    32  .macro switch_tls_software, base, tp, tpuser, tmp1, tmp2
    33      mov  \tmp1, #0xffff0fff
    34      str  \tp, [\tmp1, #-15]         @ set TLS value at 0xffff0ff0
|
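switch_tls_v6 picks between the two mechanisms at run time: if elf_hwcap has HWCAP_TLS the value goes into the hardware thread register, otherwise into the kuser software word at 0xffff0ff0 (0xffff0fff - 15, as line 34 computes). A hedged C rendering of that decision; the HWCAP_TLS value and CP15 encoding are my reading of the ARM headers, and the asm is ARM32-only:

    #include <stdint.h>

    #define HWCAP_TLS (1 << 15)
    extern unsigned long elf_hwcap;

    static void set_tls(uint32_t tp)
    {
            if (elf_hwcap & HWCAP_TLS)  /* hardware TLS register (TPIDRURO) */
                    asm volatile("mcr p15, 0, %0, c13, c0, 3" : : "r" (tp));
            else                        /* kuser software slot */
                    *(volatile uint32_t *)0xffff0ff0 = tp;
    }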
/kernel/linux/linux-5.10/arch/x86/crypto/ |
D | curve25519-x86_64.c |
    768 u64 *tmp1 = p01_tmp1 + (u32)16U;               in point_add_and_double() local
    773 u64 *a = tmp1;                                 in point_add_and_double()
    774 u64 *b = tmp1 + (u32)4U;                       in point_add_and_double()
    775 u64 *ab = tmp1;                                in point_add_and_double()
    776 u64 *dc = tmp1 + (u32)8U;                      in point_add_and_double()
    798 a1 = tmp1;                                     in point_add_and_double()
    799 b1 = tmp1 + (u32)4U;                           in point_add_and_double()
    800 d = tmp1 + (u32)8U;                            in point_add_and_double()
    801 c = tmp1 + (u32)12U;                           in point_add_and_double()
    802 ab1 = tmp1;                                    in point_add_and_double()
    [all …]
|
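point_add_and_double() never allocates: tmp1 is a 16-limb slice of the caller's scratch buffer, carved into 4-limb field elements by offset, with ab deliberately aliasing a and b so a single 8-limb multiply/square call can process both halves at once. The carving, reduced to its shape as a sketch (u64 as in the kernel source):

    #include <stdint.h>
    typedef uint64_t u64;

    static void carve_scratch(u64 *tmp1 /* 16 limbs */, u64 **a, u64 **b,
                              u64 **ab, u64 **dc)
    {
            *a  = tmp1;      /* limbs 0..3                 */
            *b  = tmp1 + 4;  /* limbs 4..7                 */
            *ab = tmp1;      /* a and b, viewed as 8 limbs */
            *dc = tmp1 + 8;  /* limbs 8..15                */
    }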
D | glue_helper-asm-avx2.S |
    47  #define add2_le128(x, minus_one, minus_two, tmp1, tmp2) \    argument
    48      vpcmpeqq minus_one, x, tmp1; \
    51      vpor     tmp2, tmp1, tmp1; \
    52      vpslldq  $8, tmp1, tmp1; \
    53      vpsubq   tmp1, x, x;
    106 #define gf128mul_x2_ble(iv, mask1, mask2, tmp0, tmp1) \      argument
    108     vpaddq   iv, iv, tmp1; \
    111     vpsrad   $31, tmp1, tmp1; \
    113     vpshufd  $0x13, tmp1, tmp1; \
    115     vpand    mask1, tmp1, tmp1; \
    [all …]
|
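add2_le128 bumps a 128-bit little-endian counter by 2 without a 128-bit adder: vpcmpeqq against -1 (and, in the elided lines, -2) yields an all-ones qword mask exactly when the +2 will wrap the low qword, and subtracting that mask after shifting it into the high-qword position adds the carry. A scalar equivalent of the carry logic:

    #include <stdint.h>

    static void le128_add2(uint64_t ctr[2])  /* ctr[0] is the low qword */
    {
            uint64_t old = ctr[0];

            ctr[0] += 2;
            if (ctr[0] < old)  /* wrapped: low qword was -1 or -2 */
                    ctr[1]++;
    }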
/kernel/linux/linux-5.10/arch/m68k/lib/ |
D | checksum.c |
    40  unsigned long tmp1, tmp2;                      in csum_partial() local
    118 "=&d" (tmp1), "=&d" (tmp2)                     in csum_partial()
    139 unsigned long tmp1, tmp2;                      in csum_and_copy_from_user() local
    261 "=&d" (tmp1), "=d" (tmp2)                      in csum_and_copy_from_user()
    278 unsigned long tmp1, tmp2;                      in csum_partial_copy_nocheck() local
    366 "=&d" (tmp1), "=&d" (tmp2)                     in csum_partial_copy_nocheck()
|
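All three routines accumulate 32-bit partial sums in tmp1/tmp2 inside inline asm ("=&d" allocates early-clobber m68k data registers). What makes the result usable as an Internet checksum is the end-around-carry fold applied when reducing to 16 bits, which happens later in csum_fold(); for reference, that fold looks like this (two rounds, since the first add can itself carry):

    #include <stdint.h>

    static inline uint16_t fold_to_16(uint32_t sum)
    {
            sum = (sum & 0xffff) + (sum >> 16);
            sum = (sum & 0xffff) + (sum >> 16);
            return (uint16_t)sum;  /* csum_fold() additionally complements */
    }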
/kernel/linux/linux-5.10/arch/arm/mach-iop32x/include/mach/ |
D | entry-macro.S |
    26  .macro arch_ret_to_user, tmp1, tmp2
    27      mrc   p15, 0, \tmp1, c15, c1, 0
    28      ands  \tmp2, \tmp1, #(1 << 6)
    29      bicne \tmp1, \tmp1, #(1 << 6)
    30      mcrne p15, 0, \tmp1, c15, c1, 0     @ Disable cp6 access
|
/kernel/linux/linux-5.10/arch/alpha/lib/ |
D | divide.S |
    58  #define tmp1 $3                                macro
    110 stq    tmp1,24($30)
    142 subq   modulus,divisor,tmp1
    145 cmovne compare,tmp1,modulus
    151 ldq    tmp1,24($30)
    184 stq    tmp1,24($30)
    191 subq   $31,$27,tmp1
    194 cmovlt $28,tmp1,$27
    195 ldq    tmp1,24($30)
|
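The subq/cmovne pair at lines 142-145 is one branch-free step of restoring division: compute modulus - divisor speculatively into tmp1, and keep it only when the earlier compare said the subtraction does not underflow. The step as C:

    #include <stdint.h>

    static inline uint64_t div_step(uint64_t modulus, uint64_t divisor)
    {
            uint64_t tmp1 = modulus - divisor;  /* speculative */

            return modulus >= divisor ? tmp1 : modulus;  /* cmov, no branch */
    }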