Searched full:s1 (Results 1 – 25 of 1004) sorted by relevance

/kernel/linux/linux-4.19/arch/c6x/lib/
csum_64plus.S
36 AND .S1 3,A4,A1
42 [!A1] B .S1 L8
43 [B0] BNOP .S1 L6,5
51 || EXTU .S1 A7,0,16,A16
63 [!A1] BNOP .S1 L8,5
74 || EXTU .S1 A7,0,16,A16
84 [!B0] BNOP .S1 L82,4
95 SHL .S1 A7,8,A0
96 ADD .S1 A8,A9,A9
98 || ADD .S1 A0,A9,A9
[all …]
/kernel/linux/linux-5.10/arch/c6x/lib/
csum_64plus.S
32 AND .S1 3,A4,A1
38 [!A1] B .S1 L8
39 [B0] BNOP .S1 L6,5
47 || EXTU .S1 A7,0,16,A16
59 [!A1] BNOP .S1 L8,5
70 || EXTU .S1 A7,0,16,A16
80 [!B0] BNOP .S1 L82,4
91 SHL .S1 A7,8,A0
92 ADD .S1 A8,A9,A9
94 || ADD .S1 A0,A9,A9
[all …]
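
The EXTU/ADD pairs in the csum_64plus.S hits above split a running sum into 16-bit halves and add them back together, which is the folding step of the Internet ones'-complement checksum. A minimal C sketch of that fold, offered only as an illustration of the idea (csum_fold is a name chosen here, not the c6x routine itself):

    #include <stdint.h>

    /* Fold a 32-bit partial sum down to a 16-bit ones'-complement checksum. */
    static uint16_t csum_fold(uint32_t sum)
    {
            sum = (sum & 0xffff) + (sum >> 16);   /* add the high half into the low half */
            sum = (sum & 0xffff) + (sum >> 16);   /* absorb any carry from the first add */
            return (uint16_t)~sum;
    }
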
/kernel/linux/linux-4.19/arch/c6x/kernel/
entry.S
44 SHL .S1 reg,THREAD_SHIFT,reg
176 || MVKL .S1 current_ksp,A15
177 MVKH .S1 current_ksp,A15
195 MVKL .S1 schedule,A0
196 MVKH .S1 schedule,A0
199 B .S1 schedule
208 MVKL .S1 syscall_trace_entry,A0
209 MVKH .S1 syscall_trace_entry,A0
212 B .S1 syscall_trace_entry
224 || MVK .S1 -ENOSYS,A4
[all …]
/kernel/linux/linux-5.10/arch/c6x/kernel/
entry.S
41 SHL .S1 reg,THREAD_SHIFT,reg
173 || MVKL .S1 current_ksp,A15
174 MVKH .S1 current_ksp,A15
192 MVKL .S1 schedule,A0
193 MVKH .S1 schedule,A0
196 B .S1 schedule
205 MVKL .S1 syscall_trace_entry,A0
206 MVKH .S1 syscall_trace_entry,A0
209 B .S1 syscall_trace_entry
221 || MVK .S1 -ENOSYS,A4
[all …]
head.S
15 MV .S1 A4,A10
56 MVKL .S1 OF_DT_HEADER,A0
57 MVKH .S1 OF_DT_HEADER,A0
60 [!A0] MVK .S1 0,A4
63 MVKL .S1 machine_init,A0
64 MVKH .S1 machine_init,A0
74 MVKL .S1 start_kernel,A0
75 MVKH .S1 start_kernel,A0
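
The MVKL/MVKH pairs in the entry.S and head.S hits above are how C6x code materialises a full 32-bit constant or address: MVKL writes the low 16 bits (sign-extended) and MVKH then fills in the high 16 bits, leaving the low half intact. The net effect, sketched in C purely for illustration (mvkl_mvkh is a name invented here):

    #include <stdint.h>

    /* Net effect of "MVKL .S1 sym,reg" followed by "MVKH .S1 sym,reg". */
    static uint32_t mvkl_mvkh(uint32_t sym)
    {
            uint32_t reg;

            reg = sym & 0xffffu;                          /* MVKL: low 16 bits (sign-extended on real hw) */
            reg = (reg & 0xffffu) | (sym & 0xffff0000u);  /* MVKH: overwrite the high 16 bits             */
            return reg;                                   /* net result: reg == sym                       */
    }
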
/kernel/linux/linux-4.19/arch/powerpc/crypto/
sha256-spe-asm.S
108 rotrwi rT0,e,6; /* 1: S1 = e rotr 6 */ \
109 rotrwi rT1,e,11; /* 1: S1' = e rotr 11 */ \
110 rotrwi rT2,e,25; /* 1: S1" = e rotr 25 */ \
111 xor rT0,rT0,rT1; /* 1: S1 = S1 xor S1' */ \
113 xor rT0,rT0,rT2; /* 1: S1 = S1 xor S1" */ \
117 add h,h,rT0; /* 1: temp1 = h + S1 */ \
133 rotrwi rT0,d,6; /* 2: S1 = e rotr 6 */ \
135 rotrwi rT1,d,11; /* 2: S1' = e rotr 11 */ \
137 rotrwi rT2,d,25; /* 2: S1" = e rotr 25 */ \
138 xor rT0,rT0,rT1; /* 2: S1 = S1 xor S1' */ \
[all …]
/kernel/linux/linux-5.10/arch/powerpc/crypto/
sha256-spe-asm.S
103 rotrwi rT0,e,6; /* 1: S1 = e rotr 6 */ \
104 rotrwi rT1,e,11; /* 1: S1' = e rotr 11 */ \
105 rotrwi rT2,e,25; /* 1: S1" = e rotr 25 */ \
106 xor rT0,rT0,rT1; /* 1: S1 = S1 xor S1' */ \
108 xor rT0,rT0,rT2; /* 1: S1 = S1 xor S1" */ \
112 add h,h,rT0; /* 1: temp1 = h + S1 */ \
128 rotrwi rT0,d,6; /* 2: S1 = e rotr 6 */ \
130 rotrwi rT1,d,11; /* 2: S1' = e rotr 11 */ \
132 rotrwi rT2,d,25; /* 2: S1" = e rotr 25 */ \
133 xor rT0,rT0,rT1; /* 2: S1 = S1 xor S1' */ \
[all …]
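
The S1 in the sha256-spe-asm.S comments above is the SHA-256 Sigma1 function of the working variable e, built from three rotate-rights and two XORs; the rounds then feed it into temp1 = h + S1 + CH + K[i] + W[i]. A small C sketch of just that step, kept deliberately generic (rotr32 and sha256_s1 are names picked here, not the kernel's):

    #include <stdint.h>

    static uint32_t rotr32(uint32_t x, unsigned int n)
    {
            return (x >> n) | (x << (32 - n));
    }

    /* S1 = (e rotr 6) ^ (e rotr 11) ^ (e rotr 25), exactly as the comments say;
     * each round then adds it into temp1 = h + S1 + CH(e,f,g) + K[i] + W[i]. */
    static uint32_t sha256_s1(uint32_t e)
    {
            return rotr32(e, 6) ^ rotr32(e, 11) ^ rotr32(e, 25);
    }
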
/kernel/linux/linux-5.10/arch/ia64/lib/
idiv64.S
29 # define INT_TO_FP(a,b) fcvt.xuf.s1 a=b
30 # define FP_TO_INT(a,b) fcvt.fxu.trunc.s1 a=b
34 # define FP_TO_INT(a,b) fcvt.fx.trunc.s1 a=b
51 frcpa.s1 f11, p6 = f8, f9 // y0 = frcpa(b)
53 (p6) fmpy.s1 f7 = f8, f11 // q0 = a*y0
54 (p6) fnma.s1 f6 = f9, f11, f1 // e0 = -b*y0 + 1
56 (p6) fma.s1 f10 = f7, f6, f7 // q1 = q0*e0 + q0
57 (p6) fmpy.s1 f7 = f6, f6 // e1 = e0*e0
62 (p6) fma.s1 f10 = f10, f7, f10 // q2 = q1*e1 + q1
63 (p6) fma.s1 f6 = f11, f6, f11 // y1 = y0*e0 + y0
[all …]
/kernel/linux/linux-4.19/arch/ia64/lib/
idiv64.S
29 # define INT_TO_FP(a,b) fcvt.xuf.s1 a=b
30 # define FP_TO_INT(a,b) fcvt.fxu.trunc.s1 a=b
34 # define FP_TO_INT(a,b) fcvt.fx.trunc.s1 a=b
51 frcpa.s1 f11, p6 = f8, f9 // y0 = frcpa(b)
53 (p6) fmpy.s1 f7 = f8, f11 // q0 = a*y0
54 (p6) fnma.s1 f6 = f9, f11, f1 // e0 = -b*y0 + 1
56 (p6) fma.s1 f10 = f7, f6, f7 // q1 = q0*e0 + q0
57 (p6) fmpy.s1 f7 = f6, f6 // e1 = e0*e0
62 (p6) fma.s1 f10 = f10, f7, f10 // q2 = q1*e1 + q1
63 (p6) fma.s1 f6 = f11, f6, f11 // y1 = y0*e0 + y0
[all …]
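
The commented lines from idiv64.S spell out the ia64 division recipe: frcpa supplies a rough reciprocal y0 of b, and a short chain of fused multiply-adds then sharpens the quotient. The same refinement written in plain double arithmetic, purely as a sketch of the math (the real code stays in the 82-bit fp registers and converts back to integer with fcvt):

    /* Refine q = a/b from an initial reciprocal estimate y0 ~ 1/b, following
     * the step names used in the idiv64.S comments above. */
    static double refine_quotient(double a, double b, double y0)
    {
            double q0 = a * y0;          /* q0 = a*y0       */
            double e0 = 1.0 - b * y0;    /* e0 = -b*y0 + 1  */
            double q1 = q0 + q0 * e0;    /* q1 = q0*e0 + q0 */
            double e1 = e0 * e0;         /* e1 = e0*e0      */
            double q2 = q1 + q1 * e1;    /* q2 = q1*e1 + q1 */

            return q2;                   /* later steps keep shrinking the error */
    }
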
/kernel/linux/linux-4.19/tools/testing/selftests/powerpc/stringloops/
memcmp.c
29 int test_memcmp(const void *s1, const void *s2, size_t n);
32 static void test_one(char *s1, char *s2, unsigned long max_offset, in test_one() argument
42 y = memcmp(s1+offset, s2+offset, size); in test_one()
43 x = test_memcmp(s1+offset, s2+offset, size); in test_one()
50 printf("%02x ", s1[i]); in test_one()
60 printf("vmx enter/exit not paired.(offset:%ld size:%ld s1:%p s2:%p vc:%d\n", in test_one()
61 offset, size, s1, s2, vmx_count); in test_one()
71 char *s1; in testcase() local
79 s1 = memalign(128, alloc_size); in testcase()
80 if (!s1) { in testcase()
[all …]
/kernel/linux/linux-5.10/net/dccp/ccids/lib/
packet_history.c
155 s1 = DCCP_SKB_CB(skb)->dccpd_seq; in __do_track_loss() local
157 if (!dccp_loss_free(s0, s1, n1)) { /* gap between S0 and S1 */ in __do_track_loss()
166 s1 = tfrc_rx_hist_entry(h, 1)->tfrchrx_seqno, in __one_after_loss() local
169 if (likely(dccp_delta_seqno(s1, s2) > 0)) { /* S1 < S2 */ in __one_after_loss()
175 /* S0 < S2 < S1 */ in __one_after_loss()
180 if (dccp_loss_free(s2, s1, n1)) { in __one_after_loss()
181 /* hole is filled: S0, S2, and S1 are consecutive */ in __one_after_loss()
185 /* gap between S2 and S1: just update loss_prev */ in __one_after_loss()
190 * Reorder history to insert S2 between S0 and S1 in __one_after_loss()
203 s1 = tfrc_rx_hist_entry(h, 1)->tfrchrx_seqno, in __two_after_loss() local
[all …]
/kernel/linux/linux-5.10/tools/testing/selftests/powerpc/stringloops/
memcmp.c
34 int test_memcmp(const void *s1, const void *s2, size_t n);
37 static void test_one(char *s1, char *s2, unsigned long max_offset, in test_one() argument
47 y = memcmp(s1+offset, s2+offset, size); in test_one()
48 x = test_memcmp(s1+offset, s2+offset, size); in test_one()
55 printf("%02x ", s1[i]); in test_one()
65 printf("vmx enter/exit not paired.(offset:%ld size:%ld s1:%p s2:%p vc:%d\n", in test_one()
66 offset, size, s1, s2, vmx_count); in test_one()
77 char *p, *s1, *s2; in testcase() local
88 /* Put s1/s2 at the end of a page */ in testcase()
89 s1 = p + MAP_SIZE - alloc_size; in testcase()
[all …]
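
Both copies of the stringloops memcmp.c selftest follow the same pattern: run libc memcmp() and the optimised test_memcmp() over identical buffers at every offset and flag any disagreement. A condensed sketch of that cross-check; test_memcmp() is only the prototype taken from the listing, standing in for whatever implementation the test links in:

    #include <assert.h>
    #include <stddef.h>
    #include <string.h>

    int test_memcmp(const void *s1, const void *s2, size_t n);  /* implementation under test */

    static void cross_check(const char *s1, const char *s2,
                            size_t max_offset, size_t size)
    {
            for (size_t offset = 0; offset < max_offset; offset++) {
                    int y = memcmp(s1 + offset, s2 + offset, size);
                    int x = test_memcmp(s1 + offset, s2 + offset, size);

                    /* memcmp-style results only promise the sign, so compare signs */
                    assert((x == 0) == (y == 0) && (x < 0) == (y < 0));
            }
    }
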
/kernel/linux/linux-4.19/arch/x86/boot/
string.c
29 int memcmp(const void *s1, const void *s2, size_t len) in memcmp() argument
33 : CC_OUT(nz) (diff), "+D" (s1), "+S" (s2), "+c" (len)); in memcmp()
40 int bcmp(const void *s1, const void *s2, size_t len) in bcmp() argument
42 return memcmp(s1, s2, len); in bcmp()
47 const unsigned char *s1 = (const unsigned char *)str1; in strcmp() local
51 while (*s1 || *s2) { in strcmp()
52 delta = *s1 - *s2; in strcmp()
55 s1++; in strcmp()
166 * @s1: The string to be searched
169 char *strstr(const char *s1, const char *s2) in strstr() argument
[all …]
/kernel/linux/linux-4.19/net/dccp/ccids/lib/
packet_history.c
168 s1 = DCCP_SKB_CB(skb)->dccpd_seq; in __do_track_loss() local
170 if (!dccp_loss_free(s0, s1, n1)) { /* gap between S0 and S1 */ in __do_track_loss()
179 s1 = tfrc_rx_hist_entry(h, 1)->tfrchrx_seqno, in __one_after_loss() local
182 if (likely(dccp_delta_seqno(s1, s2) > 0)) { /* S1 < S2 */ in __one_after_loss()
188 /* S0 < S2 < S1 */ in __one_after_loss()
193 if (dccp_loss_free(s2, s1, n1)) { in __one_after_loss()
194 /* hole is filled: S0, S2, and S1 are consecutive */ in __one_after_loss()
198 /* gap between S2 and S1: just update loss_prev */ in __one_after_loss()
203 * Reorder history to insert S2 between S0 and S1 in __one_after_loss()
216 s1 = tfrc_rx_hist_entry(h, 1)->tfrchrx_seqno, in __two_after_loss() local
[all …]
/kernel/linux/linux-5.10/tools/perf/tests/
sample-parsing.c
20 if (s1->m != s2->m) { \
27 if (memcmp(&s1->m, &s2->m, sizeof(s1->m))) { \
33 static bool samples_same(const struct perf_sample *s1, in samples_same() argument
79 for (i = 0; i < s1->read.group.nr; i++) in samples_same()
88 for (i = 0; i < s1->callchain->nr; i++) in samples_same()
94 if (memcmp(s1->raw_data, s2->raw_data, s1->raw_size)) { in samples_same()
103 for (i = 0; i < s1->branch_stack->nr; i++) in samples_same()
108 size_t sz = hweight_long(s1->user_regs.mask) * sizeof(u64); in samples_same()
112 if (s1->user_regs.abi && in samples_same()
113 (!s1->user_regs.regs || !s2->user_regs.regs || in samples_same()
[all …]
/kernel/linux/linux-5.10/arch/x86/crypto/
sha256-avx-asm.S
154 ## compute s0 four at a time and s1 two at a time
174 MY_ROR 6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25)
177 add y0, y2 # y2 = S1 + CH
178 add _XFER(%rsp), y2 # y2 = k + w + S1 + CH
180 add y2, h # h = h + S1 + CH + k + w
184 add h, d # d = d + h + S1 + CH + k + w
188 add y1, h # h = h + S1 + CH + k + w + S0
191 add y0, h # h = h + S1 + CH + k + w + S0 + MAJ
207 MY_ROR 6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25)
212 add y0, y2 # y2 = S1 + CH
[all …]
sha256-ssse3-asm.S
148 ## compute s0 four at a time and s1 two at a time
169 ror $6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25)
173 add y0, y2 # y2 = S1 + CH
174 add _XFER(%rsp) , y2 # y2 = k + w + S1 + CH
177 add y2, h # h = h + S1 + CH + k + w
181 add h, d # d = d + h + S1 + CH + k + w
185 add y1, h # h = h + S1 + CH + k + w + S0
188 add y0, h # h = h + S1 + CH + k + w + S0 + MAJ
207 ror $6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25)
212 add y0, y2 # y2 = S1 + CH
[all …]
sha256-avx2-asm.S
168 xor y1, y0 # y0 = (e>>25) ^ (e>>11) # S1
170 vpaddd X0, XTMP0, XTMP0 # XTMP0 = W[-7] + W[-16]# y1 = (e >> 6)# S1
171 rorx $6, e, y1 # y1 = (e >> 6) # S1
174 xor y1, y0 # y0 = (e>>25) ^ (e>>11) ^ (e>>6) # S1
189 add y0, y2 # y2 = S1 + CH # --
194 add y2, d # d = k + w + h + d + S1 + CH = d + t1 # --
198 add y2, h # h = k + w + h + S0 + S1 + CH = t1 + S0# --
217 xor y1, y0 # y0 = (e>>25) ^ (e>>11) # S1
221 rorx $6, e, y1 # y1 = (e >> 6) # S1
222 xor y1, y0 # y0 = (e>>25) ^ (e>>11) ^ (e>>6) # S1
[all …]
/kernel/linux/linux-4.19/arch/x86/crypto/
sha256-ssse3-asm.S
148 ## compute s0 four at a time and s1 two at a time
169 ror $6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25)
173 add y0, y2 # y2 = S1 + CH
174 add _XFER(%rsp) , y2 # y2 = k + w + S1 + CH
177 add y2, h # h = h + S1 + CH + k + w
181 add h, d # d = d + h + S1 + CH + k + w
185 add y1, h # h = h + S1 + CH + k + w + S0
188 add y0, h # h = h + S1 + CH + k + w + S0 + MAJ
207 ror $6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25)
212 add y0, y2 # y2 = S1 + CH
[all …]
sha256-avx-asm.S
155 ## compute s0 four at a time and s1 two at a time
175 MY_ROR 6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25)
178 add y0, y2 # y2 = S1 + CH
179 add _XFER(%rsp), y2 # y2 = k + w + S1 + CH
181 add y2, h # h = h + S1 + CH + k + w
185 add h, d # d = d + h + S1 + CH + k + w
189 add y1, h # h = h + S1 + CH + k + w + S0
192 add y0, h # h = h + S1 + CH + k + w + S0 + MAJ
208 MY_ROR 6, y0 # y0 = S1 = (e>>6) & (e>>11) ^ (e>>25)
213 add y0, y2 # y2 = S1 + CH
[all …]
sha256-avx2-asm.S
169 xor y1, y0 # y0 = (e>>25) ^ (e>>11) # S1
171 vpaddd X0, XTMP0, XTMP0 # XTMP0 = W[-7] + W[-16]# y1 = (e >> 6)# S1
172 rorx $6, e, y1 # y1 = (e >> 6) # S1
175 xor y1, y0 # y0 = (e>>25) ^ (e>>11) ^ (e>>6) # S1
190 add y0, y2 # y2 = S1 + CH # --
195 add y2, d # d = k + w + h + d + S1 + CH = d + t1 # --
199 add y2, h # h = k + w + h + S0 + S1 + CH = t1 + S0# --
218 xor y1, y0 # y0 = (e>>25) ^ (e>>11) # S1
222 rorx $6, e, y1 # y1 = (e >> 6) # S1
223 xor y1, y0 # y0 = (e>>25) ^ (e>>11) ^ (e>>6) # S1
[all …]
/kernel/linux/linux-5.10/arch/s390/lib/
string.c
214 * @s1: One string
217 * returns 0 if @s1 and @s2 are equal,
218 * < 0 if @s1 is less than @s2
219 * > 0 if @s1 is greater than @s2
222 int strcmp(const char *s1, const char *s2) in strcmp() argument
234 : "+d" (ret), "+d" (r0), "+a" (s1), "+a" (s2) in strcmp()
260 static inline int clcle(const char *s1, unsigned long l1, in clcle() argument
263 register unsigned long r2 asm("2") = (unsigned long) s1; in clcle()
280 * @s1: The string to be searched
284 char *strstr(const char *s1, const char *s2) in strstr() argument
[all …]
/kernel/linux/linux-4.19/arch/s390/lib/
string.c
198 * @s1: One string
201 * returns 0 if @s1 and @s2 are equal,
202 * < 0 if @s1 is less than @s2
203 * > 0 if @s1 is greater than @s2
205 int strcmp(const char *s1, const char *s2) in strcmp() argument
217 : "+d" (ret), "+d" (r0), "+a" (s1), "+a" (s2) in strcmp()
241 static inline int clcle(const char *s1, unsigned long l1, in clcle() argument
244 register unsigned long r2 asm("2") = (unsigned long) s1; in clcle()
261 * @s1: The string to be searched
264 char *strstr(const char *s1, const char *s2) in strstr() argument
[all …]
/kernel/linux/linux-4.19/tools/perf/tests/
sample-parsing.c
15 if (s1->m != s2->m) { \
22 if (memcmp(&s1->m, &s2->m, sizeof(s1->m))) { \
28 static bool samples_same(const struct perf_sample *s1, in samples_same() argument
74 for (i = 0; i < s1->read.group.nr; i++) in samples_same()
83 for (i = 0; i < s1->callchain->nr; i++) in samples_same()
89 if (memcmp(s1->raw_data, s2->raw_data, s1->raw_size)) { in samples_same()
97 for (i = 0; i < s1->branch_stack->nr; i++) in samples_same()
102 size_t sz = hweight_long(s1->user_regs.mask) * sizeof(u64); in samples_same()
106 if (s1->user_regs.abi && in samples_same()
107 (!s1->user_regs.regs || !s2->user_regs.regs || in samples_same()
[all …]
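
The s1->m / s2->m checks at the top of both sample-parsing.c hits come from member-comparison helper macros: one compares a scalar member with !=, the other compares an embedded struct with memcmp(). A stripped-down sketch of the same macro pattern; struct sample, its fields, and the COMP name here are illustrative stand-ins, not the real perf_sample layout:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct sample {                 /* illustrative stand-in for perf_sample */
            uint64_t ip;
            uint64_t time;
    };

    /* Compare one scalar member of s1 and s2, reporting which field differed. */
    #define COMP(m)                                                          \
            do {                                                             \
                    if (s1->m != s2->m) {                                    \
                            fprintf(stderr, "samples differ at '%s'\n", #m); \
                            return false;                                    \
                    }                                                        \
            } while (0)

    static bool samples_same(const struct sample *s1, const struct sample *s2)
    {
            COMP(ip);
            COMP(time);
            return true;
    }
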
/kernel/liteos_a/lib/libc/src/
memcmp.c
37 const unsigned char *s1 = str1; in memcmp() local
42 if (*(const uint64_t *)(s1) != *(const uint64_t *)(s2)) { in memcmp()
45 s1 += 8; /* 8, compare size, the number of chars of one uint64_t data */ in memcmp()
55 if (*(const uint32_t *)(s1) != *(const uint32_t *)(s2)) { in memcmp()
58 s1 += 4; /* 4, compare size, the number of chars of one uint32_t data */ in memcmp()
66 for (; num && (*s1 == *s2); num--, s1++, s2++) { in memcmp()
68 return num ? *s1 - *s2 : 0; in memcmp()
71 if (*(const uint32_t *)(s1) != *(const uint32_t *)(s2)) { in memcmp()
74 s1 += 4; /* 4, compare size, the number of chars of one uint32_t data */ in memcmp()
