| /kernel/linux/linux-5.10/arch/c6x/lib/ |
| D | csum_64plus.S | 32 AND .S1 3,A4,A1 38 [!A1] B .S1 L8 39 [B0] BNOP .S1 L6,5 47 || EXTU .S1 A7,0,16,A16 59 [!A1] BNOP .S1 L8,5 70 || EXTU .S1 A7,0,16,A16 80 [!B0] BNOP .S1 L82,4 91 SHL .S1 A7,8,A0 92 ADD .S1 A8,A9,A9 94 || ADD .S1 A0,A9,A9 [all …]
|
| D | divremu.S | 28 ||[!b1] zero .s1 A5 37 || xor .s1 1, A2, A2 39 shl .s1 A2, 31, A2 52 || [b0] b .s1 __divremu0 66 || mvk .s1 32, A1 68 || extu .s1 A4, A6, A5 69 shl .s1 A4, A6, A4 70 shru .s1 A4, 1, A4 73 shru .s1 A4, A6, A4
|
| D | llshr.S | 14 mvk .s1 32,A0 17 [A2] shl .s1 A5,A0,A0 20 || [A2] shru .s1 A4,A1,A4 21 [!A2] shr .s1 A5,A4,A4 23 [!A2] shr .s1 A5,0x1f,A5 24 [A2] shr .s1 A5,A1,A5
|
| D | llshl.S | 14 mvk .s1 32,A0 17 [A2] shru .s1 A4,A0,A0 19 || [A2] shl .s1 A5,A1,A5 20 [!A2] shl .s1 A4,A5,A5 23 [A2] shl .s1 A4,A1,A4
|
| D | llshru.S | 14 mvk .s1 32,A0 17 [A2] shl .s1 A5,A0,A0 20 || [A2] shru .s1 A4,A1,A4 21 [!A2] shru .s1 A5,A4,A4 24 [A2] shru .s1 A5,A1,A5
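The llshr/llshl/llshru helpers above build 64-bit shifts out of two 32-bit register halves. A minimal C sketch of the same idea, with illustrative names (not the c6x routines), assuming the value is split into hi/lo 32-bit words and the shift count is below 64:

#include <stdint.h>

/* Hypothetical helper: 64-bit logical shift left composed from two
 * 32-bit halves, valid for 0 <= n < 64. */
static uint64_t llshl_sketch(uint32_t hi, uint32_t lo, unsigned int n)
{
	if (n == 0)
		return ((uint64_t)hi << 32) | lo;
	if (n >= 32) {
		/* the whole result comes from the low word */
		hi = lo << (n - 32);
		lo = 0;
	} else {
		/* bits shifted out of lo carry into hi */
		hi = (hi << n) | (lo >> (32 - n));
		lo <<= n;
	}
	return ((uint64_t)hi << 32) | lo;
}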
|
| /kernel/linux/linux-5.10/arch/c6x/kernel/ |
| D | entry.S | 41 SHL .S1 reg,THREAD_SHIFT,reg 173 || MVKL .S1 current_ksp,A15 174 MVKH .S1 current_ksp,A15 192 MVKL .S1 schedule,A0 193 MVKH .S1 schedule,A0 196 B .S1 schedule 205 MVKL .S1 syscall_trace_entry,A0 206 MVKH .S1 syscall_trace_entry,A0 209 B .S1 syscall_trace_entry 221 || MVK .S1 -ENOSYS,A4 [all …]
|
| D | head.S | 15 MV .S1 A4,A10 56 MVKL .S1 OF_DT_HEADER,A0 57 MVKH .S1 OF_DT_HEADER,A0 60 [!A0] MVK .S1 0,A4 63 MVKL .S1 machine_init,A0 64 MVKH .S1 machine_init,A0 74 MVKL .S1 start_kernel,A0 75 MVKH .S1 start_kernel,A0
|
| /kernel/linux/linux-5.10/arch/powerpc/crypto/ |
| D | sha256-spe-asm.S | 103 rotrwi rT0,e,6; /* 1: S1 = e rotr 6 */ \ 104 rotrwi rT1,e,11; /* 1: S1' = e rotr 11 */ \ 105 rotrwi rT2,e,25; /* 1: S1" = e rotr 25 */ \ 106 xor rT0,rT0,rT1; /* 1: S1 = S1 xor S1' */ \ 108 xor rT0,rT0,rT2; /* 1: S1 = S1 xor S1" */ \ 112 add h,h,rT0; /* 1: temp1 = h + S1 */ \ 128 rotrwi rT0,d,6; /* 2: S1 = e rotr 6 */ \ 130 rotrwi rT1,d,11; /* 2: S1' = e rotr 11 */ \ 132 rotrwi rT2,d,25; /* 2: S1" = e rotr 25 */ \ 133 xor rT0,rT0,rT1; /* 2: S1 = S1 xor S1' */ \ [all …]
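The S1 computed in these rounds is the standard SHA-256 Sigma1 term. For reference, a plain C sketch of the quantities the comments name (the spec-level math only, not the SPE register scheduling above):

#include <stdint.h>

static inline uint32_t rotr32(uint32_t x, unsigned int r)
{
	return (x >> r) | (x << (32 - r));
}

/* One round's "temp1" as the comments describe it:
 * S1 = rotr(e,6) ^ rotr(e,11) ^ rotr(e,25), Ch = (e & f) ^ (~e & g),
 * temp1 = h + S1 + Ch + k[i] + w[i]. */
static uint32_t sha256_temp1(uint32_t e, uint32_t f, uint32_t g,
			     uint32_t h, uint32_t k, uint32_t w)
{
	uint32_t s1 = rotr32(e, 6) ^ rotr32(e, 11) ^ rotr32(e, 25);
	uint32_t ch = (e & f) ^ (~e & g);

	return h + s1 + ch + k + w;
}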
|
| /kernel/linux/linux-5.10/arch/ia64/lib/ |
| D | idiv64.S | 29 # define INT_TO_FP(a,b) fcvt.xuf.s1 a=b 30 # define FP_TO_INT(a,b) fcvt.fxu.trunc.s1 a=b 34 # define FP_TO_INT(a,b) fcvt.fx.trunc.s1 a=b 51 frcpa.s1 f11, p6 = f8, f9 // y0 = frcpa(b) 53 (p6) fmpy.s1 f7 = f8, f11 // q0 = a*y0 54 (p6) fnma.s1 f6 = f9, f11, f1 // e0 = -b*y0 + 1 56 (p6) fma.s1 f10 = f7, f6, f7 // q1 = q0*e0 + q0 57 (p6) fmpy.s1 f7 = f6, f6 // e1 = e0*e0 62 (p6) fma.s1 f10 = f10, f7, f10 // q2 = q1*e1 + q1 63 (p6) fma.s1 f6 = f11, f6, f11 // y1 = y0*e0 + y0 [all …]
|
| D | idiv32.S | 30 # define INT_TO_FP(a,b) fcvt.xuf.s1 a=b 31 # define FP_TO_INT(a,b) fcvt.fxu.trunc.s1 a=b 36 # define FP_TO_INT(a,b) fcvt.fx.trunc.s1 a=b 61 frcpa.s1 f6, p6 = f8, f9 // y0 = frcpa(b) 63 (p6) fmpy.s1 f8 = f8, f6 // q0 = a*y0 64 (p6) fnma.s1 f6 = f9, f6, f1 // e0 = -b*y0 + 1 69 (p6) fma.s1 f8 = f6, f8, f8 // q1 = e0*q0 + q0 70 (p6) fma.s1 f6 = f6, f6, f7 // e1 = e0*e0 + 2^-34 75 (p6) fma.s1 f6 = f6, f8, f8 // q2 = e1*q1 + q1
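The idiv64/idiv32 comments above spell out a Newton-Raphson refinement of a reciprocal seed (frcpa). A hedged sketch of that refinement in plain double arithmetic, ignoring the ia64 fused multiply-add and rounding-correction details:

/* q0 = a*y0 with y0 ~ 1/b; each step squares the relative error e0,
 * so two refinements give q2 = (a/b) * (1 - e0^4). Illustrative only. */
static double refine_quotient(double a, double b, double y0)
{
	double q0 = a * y0;		/* first quotient estimate	*/
	double e0 = 1.0 - b * y0;	/* relative error of the seed	*/
	double q1 = q0 * e0 + q0;	/* q1 = q0*(1 + e0)		*/
	double e1 = e0 * e0;
	double q2 = q1 * e1 + q1;	/* q2 = q1*(1 + e0^2)		*/

	return q2;			/* caller truncates toward zero	*/
}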
|
| D | xor.S | 33 .rotr s1[6+1], s2[6+1], d[2] 36 (p[0]) ld8.nta s1[0] = [r16], 8 38 (p[6]) xor d[0] = s1[6], s2[6] 72 .rotr s1[6+1], s2[6+1], s3[6+1], d[2] 75 (p[0]) ld8.nta s1[0] = [r16], 8 77 (p[6]) xor d[0] = s1[6], s2[6] 114 .rotr s1[6+1], s2[6+1], s3[6+1], s4[6+1], d[2] 117 (p[0]) ld8.nta s1[0] = [r16], 8 119 (p[6]) xor d[0] = s1[6], s2[6] 159 .rotr s1[6+1], s2[6+1], s3[6+1], s4[6+1], s5[6+1], d[2] [all …]
|
| /kernel/linux/linux-5.10/net/dccp/ccids/lib/ |
| D | packet_history.c | 155 s1 = DCCP_SKB_CB(skb)->dccpd_seq; in __do_track_loss() local 157 if (!dccp_loss_free(s0, s1, n1)) { /* gap between S0 and S1 */ in __do_track_loss() 166 s1 = tfrc_rx_hist_entry(h, 1)->tfrchrx_seqno, in __one_after_loss() local 169 if (likely(dccp_delta_seqno(s1, s2) > 0)) { /* S1 < S2 */ in __one_after_loss() 175 /* S0 < S2 < S1 */ in __one_after_loss() 180 if (dccp_loss_free(s2, s1, n1)) { in __one_after_loss() 181 /* hole is filled: S0, S2, and S1 are consecutive */ in __one_after_loss() 185 /* gap between S2 and S1: just update loss_prev */ in __one_after_loss() 190 * Reorder history to insert S2 between S0 and S1 in __one_after_loss() 203 s1 = tfrc_rx_hist_entry(h, 1)->tfrchrx_seqno, in __two_after_loss() local [all …]
|
| /kernel/linux/linux-5.10/tools/testing/selftests/powerpc/stringloops/ |
| D | memcmp.c | 34 int test_memcmp(const void *s1, const void *s2, size_t n); 37 static void test_one(char *s1, char *s2, unsigned long max_offset, in test_one() argument 47 y = memcmp(s1+offset, s2+offset, size); in test_one() 48 x = test_memcmp(s1+offset, s2+offset, size); in test_one() 55 printf("%02x ", s1[i]); in test_one() 65 printf("vmx enter/exit not paired.(offset:%ld size:%ld s1:%p s2:%p vc:%d\n", in test_one() 66 offset, size, s1, s2, vmx_count); in test_one() 77 char *p, *s1, *s2; in testcase() local 88 /* Put s1/s2 at the end of a page */ in testcase() 89 s1 = p + MAP_SIZE - alloc_size; in testcase() [all …]
|
| /kernel/linux/linux-5.10/tools/perf/tests/ |
| D | sample-parsing.c | 20 if (s1->m != s2->m) { \ 27 if (memcmp(&s1->m, &s2->m, sizeof(s1->m))) { \ 33 static bool samples_same(const struct perf_sample *s1, in samples_same() argument 79 for (i = 0; i < s1->read.group.nr; i++) in samples_same() 88 for (i = 0; i < s1->callchain->nr; i++) in samples_same() 94 if (memcmp(s1->raw_data, s2->raw_data, s1->raw_size)) { in samples_same() 103 for (i = 0; i < s1->branch_stack->nr; i++) in samples_same() 108 size_t sz = hweight_long(s1->user_regs.mask) * sizeof(u64); in samples_same() 112 if (s1->user_regs.abi && in samples_same() 113 (!s1->user_regs.regs || !s2->user_regs.regs || in samples_same() [all …]
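The COMP/MCOMP lines above are macro-generated field comparisons between two sample structs. A self-contained sketch of that pattern with made-up fields (the struct and members below are illustrative, not the perf test's):

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

struct sample { unsigned long ip; unsigned long regs[4]; };

#define COMP(m)							\
	if (s1->m != s2->m) {					\
		fprintf(stderr, "Samples differ at '" #m "'\n");\
		return false;					\
	}

#define MCOMP(m)						\
	if (memcmp(&s1->m, &s2->m, sizeof(s1->m))) {		\
		fprintf(stderr, "Samples differ at '" #m "'\n");\
		return false;					\
	}

static bool samples_same(const struct sample *s1, const struct sample *s2)
{
	COMP(ip);	/* scalar member: direct compare */
	MCOMP(regs);	/* aggregate member: memcmp      */
	return true;
}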
|
| /kernel/linux/linux-5.10/arch/x86/crypto/ |
| D | sha256-ssse3-asm.S | 148 ## compute s0 four at a time and s1 two at a time 169 ror $6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) 173 add y0, y2 # y2 = S1 + CH 174 add _XFER(%rsp) , y2 # y2 = k + w + S1 + CH 177 add y2, h # h = h + S1 + CH + k + w 181 add h, d # d = d + h + S1 + CH + k + w 185 add y1, h # h = h + S1 + CH + k + w + S0 188 add y0, h # h = h + S1 + CH + k + w + S0 + MAJ 207 ror $6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) 212 add y0, y2 # y2 = S1 + CH [all …]
|
| D | sha256-avx-asm.S | 154 ## compute s0 four at a time and s1 two at a time 174 MY_ROR 6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) 177 add y0, y2 # y2 = S1 + CH 178 add _XFER(%rsp), y2 # y2 = k + w + S1 + CH 180 add y2, h # h = h + S1 + CH + k + w 184 add h, d # d = d + h + S1 + CH + k + w 188 add y1, h # h = h + S1 + CH + k + w + S0 191 add y0, h # h = h + S1 + CH + k + w + S0 + MAJ 207 MY_ROR 6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25) 212 add y0, y2 # y2 = S1 + CH [all …]
|
| D | sha256-avx2-asm.S | 168 xor y1, y0 # y0 = (e>>25) ^ (e>>11) # S1 170 vpaddd X0, XTMP0, XTMP0 # XTMP0 = W[-7] + W[-16]# y1 = (e >> 6)# S1 171 rorx $6, e, y1 # y1 = (e >> 6) # S1 174 xor y1, y0 # y0 = (e>>25) ^ (e>>11) ^ (e>>6) # S1 189 add y0, y2 # y2 = S1 + CH # -- 194 add y2, d # d = k + w + h + d + S1 + CH = d + t1 # -- 198 add y2, h # h = k + w + h + S0 + S1 + CH = t1 + S0# -- 217 xor y1, y0 # y0 = (e>>25) ^ (e>>11) # S1 221 rorx $6, e, y1 # y1 = (e >> 6) # S1 222 xor y1, y0 # y0 = (e>>25) ^ (e>>11) ^ (e>>6) # S1 [all …]
|
| D | sha512-avx2-asm.S | 193 xor y1, y0 # y0 = (e>>41) ^ (e>>18) # S1 195 rorx $14, e, y1 # y1 = (e >> 14) # S1 198 xor y1, y0 # y0 = (e>>41) ^ (e>>18) ^ (e>>14) # S1 211 add y0, y2 # y2 = S1 + CH # -- 215 add y2, d # d = k + w + h + d + S1 + CH = d + t1 # -- 217 add y2, h # h = k + w + h + S0 + S1 + CH = t1 + S0# -- 256 xor y1, y0 # y0 = (e>>41) ^ (e>>18) # S1 260 rorx $14, e, y1 # y1 = (e >> 14) # S1 261 xor y1, y0 # y0 = (e>>41) ^ (e>>18) ^ (e>>14) # S1 275 add y0, y2 # y2 = S1 + CH # -- [all …]
|
| /kernel/linux/linux-5.10/arch/s390/lib/ |
| D | string.c | 214 * @s1: One string 217 * returns 0 if @s1 and @s2 are equal, 218 * < 0 if @s1 is less than @s2 219 * > 0 if @s1 is greater than @s2 222 int strcmp(const char *s1, const char *s2) in strcmp() argument 234 : "+d" (ret), "+d" (r0), "+a" (s1), "+a" (s2) in strcmp() 260 static inline int clcle(const char *s1, unsigned long l1, in clcle() argument 263 register unsigned long r2 asm("2") = (unsigned long) s1; in clcle() 280 * @s1: The string to be searched 284 char *strstr(const char *s1, const char *s2) in strstr() argument [all …]
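The kernel-doc above states the usual strcmp contract. A trivial usage check of that contract (generic C semantics, nothing s390-specific):

#include <assert.h>
#include <string.h>

int main(void)
{
	assert(strcmp("abc", "abc") == 0);	/* equal		*/
	assert(strcmp("abc", "abd") < 0);	/* s1 less than s2	*/
	assert(strcmp("abe", "abd") > 0);	/* s1 greater than s2	*/
	return 0;
}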
|
| /kernel/linux/linux-5.10/Documentation/devicetree/bindings/regulator/ |
| D | qcom,smd-rpm-regulator.yaml | 27 For pm8841, s1, s2, s3, s4, s5, s6, s7, s8 29 For pm8916, s1, s2, s3, s4, l1, l2, l3, l4, l5, l6, l7, l8, l9, l10, l11, 32 For pm8941, s1, s2, s3, s4, l1, l2, l3, l4, l5, l6, l7, l8, l9, l10, l11, 36 For pm8950 and pm8953, s1, s2, s3, s4, s5, s6, s7, l1, l2, l3, l4, l5, l6, 40 For pm8994, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, l1, l2, l3, 44 For pm8998, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, l1, l2, 48 For pm660, s1, s2, s3, s4, s5, s6, l1, l2, l3, l5, l6, l7, l8, l9, l10, l22, 51 For pm660l s1, s2, s3, s5, l1, l2, l3, l4, l5, l6, l7, l8, l9, l10, bob 53 For pma8084, s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, l1, l2, l3, 57 For pmi8994, s1, s2, s3, boost-bypass [all …]
|
| /kernel/liteos_a/lib/libc/musl/src/ |
| D | memcmp.c | 40 const unsigned char *s1 = str1; in memcmp() local 45 if (*(const uint64_t *)(s1) != *(const uint64_t *)(s2)) { in memcmp() 48 s1 += SIZE_U64; in memcmp() 58 if (*(const uint32_t *)(s1) != *(const uint32_t *)(s2)) { in memcmp() 61 s1 += SIZE_U32; in memcmp() 69 for (; num && (*s1 == *s2); num--, s1++, s2++) { in memcmp() 71 return num ? *s1 - *s2 : 0; in memcmp()
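The memcmp above compares word-sized blocks first and only falls back to a byte loop for the tail or for a block that differs. A simplified sketch of that strategy (memcpy is used here to sidestep alignment, and only the 8-byte stage is kept; the snippet also has a 4-byte stage):

#include <stddef.h>
#include <stdint.h>
#include <string.h>

static int memcmp_sketch(const void *str1, const void *str2, size_t num)
{
	const unsigned char *s1 = str1;
	const unsigned char *s2 = str2;

	while (num >= sizeof(uint64_t)) {
		uint64_t a, b;

		memcpy(&a, s1, sizeof(a));
		memcpy(&b, s2, sizeof(b));
		if (a != b)
			break;		/* locate the differing byte below */
		s1 += sizeof(uint64_t);
		s2 += sizeof(uint64_t);
		num -= sizeof(uint64_t);
	}
	for (; num && (*s1 == *s2); num--, s1++, s2++)
		;
	return num ? *s1 - *s2 : 0;
}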
|
| /kernel/linux/linux-5.10/scripts/coccinelle/null/ |
| D | deref_null.cocci | 44 statement S1,S2; 48 if@p1 ((E == NULL && ...) || ...) S1 else S2 57 statement S1,S2,S3,S4; 65 ... when != if (...) S1 else S2 118 statement S1,S2,S3,S4; 126 ... when != if (...) S1 else S2 178 statement S1,S2,S3,S4; 186 ... when != if (...) S1 else S2 240 statement S1,S2; 244 if@p1 ((E == NULL && ...) || ...) S1 else S2 [all …]
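The deref_null rules above flag dereferences of a pointer on a path where it is known to be NULL. A minimal C illustration of the bug shape being matched (hypothetical struct, for illustration only):

struct item { int len; };

int broken(struct item *e)
{
	if (e == NULL)
		return e->len;	/* dereference on the branch where e is NULL */
	return 0;
}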
|
| /kernel/linux/linux-5.10/drivers/firmware/efi/libstub/ |
| D | string.c | 17 * @s1: The string to be searched 20 char *strstr(const char *s1, const char *s2) in strstr() argument 26 return (char *)s1; in strstr() 27 l1 = strlen(s1); in strstr() 30 if (!memcmp(s1, s2, l2)) in strstr() 31 return (char *)s1; in strstr() 32 s1++; in strstr()
|
| /kernel/linux/linux-5.10/arch/sparc/include/asm/ |
| D | prom.h | 23 #define of_compat_cmp(s1, s2, l) strncmp((s1), (s2), (l)) argument 24 #define of_prop_cmp(s1, s2) strcasecmp((s1), (s2)) argument 25 #define of_node_cmp(s1, s2) strcmp((s1), (s2)) argument
|
| /kernel/linux/linux-5.10/arch/x86/boot/ |
| D | string.c | 32 int memcmp(const void *s1, const void *s2, size_t len) in memcmp() argument 36 : CC_OUT(nz) (diff), "+D" (s1), "+S" (s2), "+c" (len)); in memcmp() 43 int bcmp(const void *s1, const void *s2, size_t len) in bcmp() argument 45 return memcmp(s1, s2, len); in bcmp() 50 const unsigned char *s1 = (const unsigned char *)str1; in strcmp() local 54 while (*s1 || *s2) { in strcmp() 55 delta = *s1 - *s2; in strcmp() 58 s1++; in strcmp() 168 * @s1: The string to be searched 171 char *strstr(const char *s1, const char *s2) in strstr() argument [all …]
|