
Searched refs:q (Results 1 – 25 of 30) sorted by relevance

/lib/raid6/
avx2.c
45 u8 *p, *q; in raid6_avx21_gen_syndrome() local
50 q = dptr[z0+2]; /* RS syndrome */ in raid6_avx21_gen_syndrome()
82 asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d])); in raid6_avx21_gen_syndrome()
94 u8 *p, *q; in raid6_avx21_xor_syndrome() local
99 q = dptr[disks-1]; /* RS syndrome */ in raid6_avx21_xor_syndrome()
128 asm volatile("vpxor %0,%%ymm4,%%ymm4" : : "m" (q[d])); in raid6_avx21_xor_syndrome()
130 asm volatile("vmovdqa %%ymm4,%0" : "=m" (q[d])); in raid6_avx21_xor_syndrome()
152 u8 *p, *q; in raid6_avx22_gen_syndrome() local
157 q = dptr[z0+2]; /* RS syndrome */ in raid6_avx22_gen_syndrome()
192 asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d])); in raid6_avx22_gen_syndrome()
[all …]
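All of the gen_syndrome hits above (and the sse2.c, avx512.c, mmx.c and sse1.c ones below) store the Reed-Solomon Q syndrome into q[d]; the inner step is a multiply-by-2 in GF(2^8) over the RAID-6 polynomial 0x11d, applied a whole vector register at a time. A minimal scalar sketch of that step (helper name is illustrative):

#include <stdint.h>

/* Multiply one GF(2^8) byte by 2 modulo the RAID-6 generator polynomial
 * 0x11d: shift left and, if the top bit was set, fold in 0x1d. */
static inline uint8_t gf2_mul2(uint8_t v)
{
	return (uint8_t)((v << 1) ^ ((v & 0x80) ? 0x1d : 0x00));
}

The SIMD loops express the same thing lane-wise: a signed byte compare builds a mask of the lanes whose top bit is set, adding the register to itself performs the shift, and the mask ANDed with a vector of 0x1d bytes is XORed back in.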
sse2.c
44 u8 *p, *q; in raid6_sse21_gen_syndrome() local
49 q = dptr[z0+2]; /* RS syndrome */ in raid6_sse21_gen_syndrome()
83 asm volatile("movntdq %%xmm4,%0" : "=m" (q[d])); in raid6_sse21_gen_syndrome()
96 u8 *p, *q; in raid6_sse21_xor_syndrome() local
101 q = dptr[disks-1]; /* RS syndrome */ in raid6_sse21_xor_syndrome()
130 asm volatile("pxor %0,%%xmm4" : : "m" (q[d])); in raid6_sse21_xor_syndrome()
132 asm volatile("movdqa %%xmm4,%0" : "=m" (q[d])); in raid6_sse21_xor_syndrome()
154 u8 *p, *q; in raid6_sse22_gen_syndrome() local
159 q = dptr[z0+2]; /* RS syndrome */ in raid6_sse22_gen_syndrome()
195 asm volatile("movntdq %%xmm4,%0" : "=m" (q[d])); in raid6_sse22_gen_syndrome()
[all …]
avx512.c
52 u8 *p, *q; in raid6_avx5121_gen_syndrome() local
57 q = dptr[z0+2]; /* RS syndrome */ in raid6_avx5121_gen_syndrome()
99 : "m" (p[d]), "m" (q[d])); in raid6_avx5121_gen_syndrome()
110 u8 *p, *q; in raid6_avx5121_xor_syndrome() local
115 q = dptr[disks-1]; /* RS syndrome */ in raid6_avx5121_xor_syndrome()
158 : "m" (q[d]), "m" (p[d])); in raid6_avx5121_xor_syndrome()
179 u8 *p, *q; in raid6_avx5122_gen_syndrome() local
184 q = dptr[z0+2]; /* RS syndrome */ in raid6_avx5122_gen_syndrome()
230 : "m" (p[d]), "m" (p[d+64]), "m" (q[d]), in raid6_avx5122_gen_syndrome()
231 "m" (q[d+64])); in raid6_avx5122_gen_syndrome()
[all …]
recov_avx2.c
25 u8 *p, *q, *dp, *dq; in raid6_2data_recov_avx2() local
31 q = (u8 *)ptrs[disks-1]; in raid6_2data_recov_avx2()
49 ptrs[disks-1] = q; in raid6_2data_recov_avx2()
63 asm volatile("vmovdqa %0, %%ymm1" : : "m" (q[0])); in raid6_2data_recov_avx2()
64 asm volatile("vmovdqa %0, %%ymm9" : : "m" (q[32])); in raid6_2data_recov_avx2()
136 q += 64; in raid6_2data_recov_avx2()
140 asm volatile("vmovdqa %0, %%ymm1" : : "m" (*q)); in raid6_2data_recov_avx2()
183 q += 32; in raid6_2data_recov_avx2()
195 u8 *p, *q, *dq; in raid6_datap_recov_avx2() local
200 q = (u8 *)ptrs[disks-1]; in raid6_datap_recov_avx2()
[all …]
recov_avx512.c
32 u8 *p, *q, *dp, *dq; in raid6_2data_recov_avx512() local
38 q = (u8 *)ptrs[disks-1]; in raid6_2data_recov_avx512()
59 ptrs[disks-1] = q; in raid6_2data_recov_avx512()
82 : "m" (q[0]), "m" (q[64]), "m" (p[0]), in raid6_2data_recov_avx512()
162 q += 128; in raid6_2data_recov_avx512()
171 : "m" (*q), "m" (*p), "m"(*dq), "m" (*dp)); in raid6_2data_recov_avx512()
223 q += 64; in raid6_2data_recov_avx512()
235 u8 *p, *q, *dq; in raid6_datap_recov_avx512() local
240 q = (u8 *)ptrs[disks-1]; in raid6_datap_recov_avx512()
255 ptrs[disks-1] = q; in raid6_datap_recov_avx512()
[all …]
recov_ssse3.c
25 u8 *p, *q, *dp, *dq; in raid6_2data_recov_ssse3() local
33 q = (u8 *)ptrs[disks-1]; in raid6_2data_recov_ssse3()
51 ptrs[disks-1] = q; in raid6_2data_recov_ssse3()
73 asm volatile("movdqa %0,%%xmm1" : : "m" (q[0])); in raid6_2data_recov_ssse3()
74 asm volatile("movdqa %0,%%xmm9" : : "m" (q[16])); in raid6_2data_recov_ssse3()
140 q += 32; in raid6_2data_recov_ssse3()
144 asm volatile("movdqa %0,%%xmm1" : : "m" (*q)); in raid6_2data_recov_ssse3()
187 q += 16; in raid6_2data_recov_ssse3()
200 u8 *p, *q, *dq; in raid6_datap_recov_ssse3() local
207 q = (u8 *)ptrs[disks-1]; in raid6_datap_recov_ssse3()
[all …]
recov_neon.c
26 void __raid6_2data_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dp,
30 void __raid6_datap_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dq,
36 u8 *p, *q, *dp, *dq; in raid6_2data_recov_neon() local
41 q = (u8 *)ptrs[disks - 1]; in raid6_2data_recov_neon()
61 ptrs[disks - 1] = q; in raid6_2data_recov_neon()
69 __raid6_2data_recov_neon(bytes, p, q, dp, dq, pbmul, qmul); in raid6_2data_recov_neon()
76 u8 *p, *q, *dq; in raid6_datap_recov_neon() local
80 q = (u8 *)ptrs[disks - 1]; in raid6_datap_recov_neon()
94 ptrs[disks - 1] = q; in raid6_datap_recov_neon()
100 __raid6_datap_recov_neon(bytes, p, q, dq, qmul); in raid6_datap_recov_neon()
recov.c
28 u8 *p, *q, *dp, *dq; in raid6_2data_recov_intx1() local
34 q = (u8 *)ptrs[disks-1]; in raid6_2data_recov_intx1()
52 ptrs[disks-1] = q; in raid6_2data_recov_intx1()
61 qx = qmul[*q ^ *dq]; in raid6_2data_recov_intx1()
64 p++; q++; in raid6_2data_recov_intx1()
72 u8 *p, *q, *dq; in raid6_datap_recov_intx1() local
76 q = (u8 *)ptrs[disks-1]; in raid6_datap_recov_intx1()
88 ptrs[disks-1] = q; in raid6_datap_recov_intx1()
95 *p++ ^= *dq = qmul[*q ^ *dq]; in raid6_datap_recov_intx1()
96 q++; dq++; in raid6_datap_recov_intx1()
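recov.c is the generic (intx1) two-failed-data-disk recovery; the recov_ssse3.c, recov_avx2.c, recov_avx512.c and recov_neon.c hits above implement the same arithmetic with in-register table lookups. A self-contained scalar sketch of that arithmetic, assuming failed data disks a < b and that dp/dq enter holding P and Q recomputed with those two disks treated as zero; the kernel precomputes 256-byte pbmul/qmul tables rather than calling a gfmul() helper per byte, and all names here are illustrative:

#include <stddef.h>
#include <stdint.h>

/* GF(2^8) multiply over the RAID-6 polynomial 0x11d. */
static uint8_t gfmul(uint8_t a, uint8_t b)
{
	uint8_t r = 0;

	while (b) {
		if (b & 1)
			r ^= a;
		a = (uint8_t)((a << 1) ^ ((a & 0x80) ? 0x1d : 0x00));
		b >>= 1;
	}
	return r;
}

static uint8_t gfinv(uint8_t a)		/* brute force, fine for a sketch */
{
	for (unsigned int x = 1; x < 256; x++)
		if (gfmul(a, (uint8_t)x) == 1)
			return (uint8_t)x;
	return 0;
}

static uint8_t gfexp(unsigned int n)	/* g^n with generator g = 2 */
{
	uint8_t r = 1;

	while (n--)
		r = gfmul(r, 2);
	return r;
}

/* Recover failed data disks a and b (a < b), one byte at a time.
 * p, q hold the P/Q read from disk; dp, dq hold P/Q recomputed with
 * disks a and b treated as all-zero. On return dp holds D_a, dq D_b. */
static void recov_2data_sketch(size_t bytes, unsigned int a, unsigned int b,
			       const uint8_t *p, const uint8_t *q,
			       uint8_t *dp, uint8_t *dq)
{
	uint8_t A = gfinv(gfexp(b - a) ^ 1);	/* the pbmul factor */
	uint8_t B = gfinv(gfexp(a) ^ gfexp(b));	/* the qmul factor  */

	while (bytes--) {
		uint8_t px = *p++ ^ *dp;		/* D_a ^ D_b           */
		uint8_t qx = gfmul(B, *q++ ^ *dq);	/* (Q^Qab) / (g^a+g^b) */
		uint8_t db = gfmul(A, px) ^ qx;		/* reconstructed D_b   */

		*dq++ = db;
		*dp++ = db ^ px;			/* reconstructed D_a   */
	}
}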
recov_s390xc.c
26 u8 *p, *q, *dp, *dq; in raid6_2data_recov_s390xc() local
32 q = (u8 *)ptrs[disks-1]; in raid6_2data_recov_s390xc()
50 ptrs[disks-1] = q; in raid6_2data_recov_s390xc()
59 xor_block(dq, q); in raid6_2data_recov_s390xc()
64 q += 256; in raid6_2data_recov_s390xc()
75 u8 *p, *q, *dq; in raid6_datap_recov_s390xc() local
80 q = (u8 *)ptrs[disks-1]; in raid6_datap_recov_s390xc()
92 ptrs[disks-1] = q; in raid6_datap_recov_s390xc()
99 xor_block(dq, q); in raid6_datap_recov_s390xc()
104 q += 256; in raid6_datap_recov_s390xc()
mmx.c
43 u8 *p, *q; in raid6_mmx1_gen_syndrome() local
48 q = dptr[z0+2]; /* RS syndrome */ in raid6_mmx1_gen_syndrome()
70 asm volatile("movq %%mm4,%0" : "=m" (q[d])); in raid6_mmx1_gen_syndrome()
91 u8 *p, *q; in raid6_mmx2_gen_syndrome() local
96 q = dptr[z0+2]; /* RS syndrome */ in raid6_mmx2_gen_syndrome()
129 asm volatile("movq %%mm4,%0" : "=m" (q[d])); in raid6_mmx2_gen_syndrome()
130 asm volatile("movq %%mm6,%0" : "=m" (q[d+8])); in raid6_mmx2_gen_syndrome()
sse1.c
48 u8 *p, *q; in raid6_sse11_gen_syndrome() local
53 q = dptr[z0+2]; /* RS syndrome */ in raid6_sse11_gen_syndrome()
86 asm volatile("movntq %%mm4,%0" : "=m" (q[d])); in raid6_sse11_gen_syndrome()
107 u8 *p, *q; in raid6_sse12_gen_syndrome() local
112 q = dptr[z0+2]; /* RS syndrome */ in raid6_sse12_gen_syndrome()
148 asm volatile("movntq %%mm4,%0" : "=m" (q[d])); in raid6_sse12_gen_syndrome()
149 asm volatile("movntq %%mm6,%0" : "=m" (q[d+8])); in raid6_sse12_gen_syndrome()
recov_neon_inner.c
36 void __raid6_2data_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dp, in __raid6_2data_recov_neon() argument
61 vx = veorq_u8(vld1q_u8(q), vld1q_u8(dq)); in __raid6_2data_recov_neon()
79 q += 16; in __raid6_2data_recov_neon()
85 void __raid6_datap_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dq, in __raid6_datap_recov_neon() argument
101 vx = veorq_u8(vld1q_u8(q), vld1q_u8(dq)); in __raid6_datap_recov_neon()
114 q += 16; in __raid6_datap_recov_neon()
s390vx.uc
84 u8 **dptr, *p, *q;
93 q = dptr[z0 + 2]; /* RS syndrome */
108 STORE_DATA(8,$#,&q[d]);
117 u8 **dptr, *p, *q;
123 q = dptr[disks - 1]; /* RS syndrome */
151 LOAD_DATA(16,$#,&q[d]);
153 STORE_DATA(16,$#,&q[d]);
neon.uc
60 uint8_t *p, *q;
68 q = dptr[z0+2]; /* RS syndrome */
83 vst1q_u8(&q[d+NSIZE*$$], wq$$);
91 uint8_t *p, *q;
99 q = dptr[disks-1]; /* RS syndrome */
147 w1$$ = vld1q_u8(&q[d+NSIZE*$$]);
151 vst1q_u8(&q[d+NSIZE*$$], wq$$);
int.uc
85 u8 *p, *q;
92 q = dptr[z0+2]; /* RS syndrome */
106 *(unative_t *)&q[d+NSIZE*$$] = wq$$;
114 u8 *p, *q;
121 q = dptr[disks-1]; /* RS syndrome */
143 *(unative_t *)&q[d+NSIZE*$$] ^= wq$$;
tilegx.uc
53 u64 *p, *q;
62 q = (u64 *)dptr[z0+2]; /* RS syndrome */
77 *q++ = wq$$;
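The .uc files above are templates (the $$ markers are expanded per unroll) and int.uc is the portable version of the syndrome generator. A compact scalar sketch of the whole gen_syndrome pass, with the P and Q blocks at dptr[z0+1] and dptr[z0+2] as in the hits; other names are illustrative:

#include <stddef.h>
#include <stdint.h>

static uint8_t mul2(uint8_t v)	/* GF(2^8) multiply by 2, poly 0x11d */
{
	return (uint8_t)((v << 1) ^ ((v & 0x80) ? 0x1d : 0x00));
}

/* P is plain XOR parity; Q is built by Horner's rule so that data disk z
 * ends up weighted by g^z in GF(2^8). */
static void gen_syndrome_sketch(int disks, size_t bytes, uint8_t **dptr)
{
	int z0 = disks - 3;		/* index of the highest data disk */
	uint8_t *p = dptr[z0 + 1];	/* XOR parity */
	uint8_t *q = dptr[z0 + 2];	/* RS syndrome */

	for (size_t d = 0; d < bytes; d++) {
		uint8_t wp = dptr[z0][d];
		uint8_t wq = wp;

		for (int z = z0 - 1; z >= 0; z--) {
			wp ^= dptr[z][d];
			wq = (uint8_t)(mul2(wq) ^ dptr[z][d]);
		}
		p[d] = wp;
		q[d] = wq;
	}
}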
/lib/
ts_kmp.c
49 unsigned int i, q = 0, text_len, consumed = state->offset; in kmp_find() local
60 while (q > 0 && kmp->pattern[q] in kmp_find()
62 q = kmp->prefix_tbl[q - 1]; in kmp_find()
63 if (kmp->pattern[q] in kmp_find()
65 q++; in kmp_find()
66 if (unlikely(q == kmp->pattern_len)) { in kmp_find()
81 unsigned int k, q; in compute_prefix_tbl() local
84 for (k = 0, q = 1; q < len; q++) { in compute_prefix_tbl()
86 != (icase ? toupper(pattern[q]) : pattern[q])) in compute_prefix_tbl()
89 == (icase ? toupper(pattern[q]) : pattern[q])) in compute_prefix_tbl()
[all …]
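The ts_kmp.c hits are the textbook Knuth-Morris-Pratt matcher: q counts how many pattern characters are currently matched, and prefix_tbl tells how far to fall back on a mismatch. A standalone sketch without the kernel's textsearch state or case-folding (names illustrative):

#include <stddef.h>

/* Build the KMP prefix (failure) table: tbl[q] is the length of the
 * longest proper prefix of pattern[0..q] that is also a suffix of it. */
static void kmp_prefix(const char *pat, size_t len, size_t *tbl)
{
	size_t k = 0;

	tbl[0] = 0;
	for (size_t q = 1; q < len; q++) {
		while (k > 0 && pat[k] != pat[q])
			k = tbl[k - 1];
		if (pat[k] == pat[q])
			k++;
		tbl[q] = k;
	}
}

/* Return the offset of the first match of pat in text, or (size_t)-1. */
static size_t kmp_find_sketch(const char *text, size_t tlen,
			      const char *pat, size_t plen, const size_t *tbl)
{
	size_t q = 0;	/* number of pattern characters currently matched */

	for (size_t i = 0; i < tlen; i++) {
		while (q > 0 && pat[q] != text[i])
			q = tbl[q - 1];
		if (pat[q] == text[i])
			q++;
		if (q == plen)
			return i + 1 - plen;
	}
	return (size_t)-1;
}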
crc32.c
61 # define DO_CRC4 (t3[(q) & 255] ^ t2[(q >> 8) & 255] ^ \
62 t1[(q >> 16) & 255] ^ t0[(q >> 24) & 255])
63 # define DO_CRC8 (t7[(q) & 255] ^ t6[(q >> 8) & 255] ^ \
64 t5[(q >> 16) & 255] ^ t4[(q >> 24) & 255])
67 # define DO_CRC4 (t0[(q) & 255] ^ t1[(q >> 8) & 255] ^ \
68 t2[(q >> 16) & 255] ^ t3[(q >> 24) & 255])
69 # define DO_CRC8 (t4[(q) & 255] ^ t5[(q >> 8) & 255] ^ \
70 t6[(q >> 16) & 255] ^ t7[(q >> 24) & 255])
81 u32 q; local
105 q = crc ^ *++b; /* use pre increment for speed */
[all …]
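The crc32.c hits are the sliced-by-4/8 implementation, where q is the 32-bit word crc ^ *++b looked up in four (or eight) tables per step. A single-table, byte-at-a-time sketch of the underlying idea (not the kernel's API; call crc32_init() once before use):

#include <stddef.h>
#include <stdint.h>

static uint32_t crc_table[256];

/* Precompute the reflected-polynomial (0xEDB88320) CRC32 table. */
static void crc32_init(void)
{
	for (uint32_t i = 0; i < 256; i++) {
		uint32_t c = i;

		for (int k = 0; k < 8; k++)
			c = (c & 1) ? (c >> 1) ^ 0xEDB88320u : c >> 1;
		crc_table[i] = c;
	}
}

/* One table lookup per input byte. */
static uint32_t crc32_le_sketch(uint32_t crc, const uint8_t *buf, size_t len)
{
	crc = ~crc;
	while (len--)
		crc = crc_table[(crc ^ *buf++) & 0xff] ^ (crc >> 8);
	return ~crc;
}

Slicing-by-4/8 extends this by consuming four or eight input bytes per iteration against independently precomputed tables, which is what the DO_CRC4/DO_CRC8 macros above combine.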
string_helpers.c
134 char *p = *dst, *q = *src; in unescape_space() local
136 switch (*q) { in unescape_space()
162 char *p = *dst, *q = *src; in unescape_octal() local
165 if (isodigit(*q) == 0) in unescape_octal()
168 num = (*q++) & 7; in unescape_octal()
169 while (num < 32 && isodigit(*q) && (q - *src < 3)) { in unescape_octal()
171 num += (*q++) & 7; in unescape_octal()
175 *src = q; in unescape_octal()
181 char *p = *dst, *q = *src; in unescape_hex() local
185 if (*q++ != 'x') in unescape_hex()
[all …]
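In the string_helpers.c hits, q walks the source string while p writes the unescaped output. A standalone sketch of the octal case, following the shape visible in unescape_octal() above (at most three digits and a value below 256, as the num < 32 guard enforces; names illustrative):

#include <stdbool.h>

/* Consume a "\NNN" octal escape from *src and write the decoded byte
 * through *dst; returns false if *src does not start with an octal digit. */
static bool unescape_octal_sketch(char **src, char **dst)
{
	char *q = *src;
	unsigned int num;

	if (*q < '0' || *q > '7')
		return false;
	num = (unsigned int)(*q++ - '0');
	while (num < 32 && *q >= '0' && *q <= '7' && (q - *src < 3))
		num = (num << 3) + (unsigned int)(*q++ - '0');
	*(*dst)++ = (char)num;
	*src = q;
	return true;
}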
cordic.c
64 coord.q = 0; in cordic_calc_iq()
82 valtmp = coord.i - (coord.q >> iter); in cordic_calc_iq()
83 coord.q += (coord.i >> iter); in cordic_calc_iq()
86 valtmp = coord.i + (coord.q >> iter); in cordic_calc_iq()
87 coord.q -= (coord.i >> iter); in cordic_calc_iq()
94 coord.q *= signx; in cordic_calc_iq()
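cordic.c computes an (i, q) pair, i.e. cosine and sine of the requested angle, with shift-and-add rotations in fixed point. A floating-point sketch of the same recurrence, with the gain divided out up front (names and the use of doubles are illustrative; the kernel works in scaled integers):

#include <math.h>

/* Each iteration rotates (i, q) by +/- atan(2^-n) using only
 * multiplications by 2^-n (shifts in the fixed-point original).
 * Converges for |theta| up to about 1.74 rad. */
static void cordic_sketch(double theta, int iters,
			  double *cos_out, double *sin_out)
{
	double k = 1.0;		/* CORDIC gain, divided out up front */

	for (int n = 0; n < iters; n++)
		k *= sqrt(1.0 + ldexp(1.0, -2 * n));

	double i = 1.0 / k, q = 0.0, angle = 0.0;

	for (int n = 0; n < iters; n++) {
		double step = atan(ldexp(1.0, -n));
		double it;

		if (angle < theta) {		/* rotate counter-clockwise */
			it = i - ldexp(q, -n);
			q += ldexp(i, -n);
			angle += step;
		} else {			/* rotate clockwise */
			it = i + ldexp(q, -n);
			q -= ldexp(i, -n);
			angle -= step;
		}
		i = it;
	}
	*cos_out = i;
	*sin_out = q;
}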
vsprintf.c
185 unsigned q; in put_dec_trunc8() local
192 q = (r * (u64)0x28f5c29) >> 32; in put_dec_trunc8()
193 *((u16 *)buf) = decpair[r - 100*q]; in put_dec_trunc8()
197 if (q < 100) in put_dec_trunc8()
201 r = (q * (u64)0x28f5c29) >> 32; in put_dec_trunc8()
202 *((u16 *)buf) = decpair[q - 100*r]; in put_dec_trunc8()
210 q = (r * 0x147b) >> 19; in put_dec_trunc8()
211 *((u16 *)buf) = decpair[r - 100*q]; in put_dec_trunc8()
215 r = q; in put_dec_trunc8()
227 unsigned q; in put_dec_full8() local
[all …]
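put_dec_trunc8() and put_dec_full8() in the vsprintf.c hits avoid hardware division: 0x28f5c29 is ceil(2^32/100), so (r * 0x28f5c29) >> 32 yields r/100, and the decpair[] lookups store both ASCII digits of the 0..99 remainder at once. A tiny sketch of the trick for a four-digit value (formatting digits directly instead of via decpair[]; names illustrative):

#include <stdint.h>

/* Format 0 <= r <= 9999 as four ASCII digits using the multiply-by-
 * inverse divide-by-100; lo is the remainder without a second divide. */
static void emit_4digits(unsigned int r, char out[5])
{
	unsigned int q  = (unsigned int)(((uint64_t)r * 0x28f5c29u) >> 32);
	unsigned int lo = r - 100 * q;		/* r % 100 */

	out[0] = (char)('0' + q / 10);
	out[1] = (char)('0' + q % 10);
	out[2] = (char)('0' + lo / 10);
	out[3] = (char)('0' + lo % 10);
	out[4] = '\0';
}

For r = 1234 this computes q = 12 and lo = 34, producing "1234"; the multiply-shift quotient is exact for r well below 2^30, far more range than an eight-digit helper needs.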
inflate.c
346 register struct huft *q; /* points to current table */ in huft_build() local
449 q = (struct huft *)NULL; /* ditto */ in huft_build()
488 if ((q = (struct huft *)malloc((z + 1)*sizeof(struct huft))) == in huft_build()
498 *t = q + 1; /* link to list for huft_free() */ in huft_build()
499 *(t = &(q->v.t)) = (struct huft *)NULL; in huft_build()
500 u[h] = ++q; /* table starts after link */ in huft_build()
509 r.v.t = q; /* pointer to this table */ in huft_build()
537 q[j] = r; in huft_build()
574 register struct huft *p, *q; in huft_free() local
581 q = (--p)->v.t; in huft_free()
[all …]
test_hexdump.c
81 const char *q = *result++; in test_hexdump_prepare_test() local
82 size_t amount = strlen(q); in test_hexdump_prepare_test()
84 memcpy(p, q, amount); in test_hexdump_prepare_test()
/lib/mpi/
mpih-div.c
117 mpi_limb_t q; in mpihelp_divrem() local
129 q = ~(mpi_limb_t) 0; in mpihelp_divrem()
135 qp[i] = q; in mpihelp_divrem()
141 udiv_qrnnd(q, r, n1, n0, d1); in mpihelp_divrem()
142 umul_ppmm(n1, n0, d0, q); in mpihelp_divrem()
149 q--; in mpihelp_divrem()
156 qp[i] = q; in mpihelp_divrem()
184 mpi_limb_t q; in mpihelp_divrem() local
200 q = ~(mpi_limb_t) 0; in mpihelp_divrem()
204 udiv_qrnnd(q, r, n0, np[dsize - 1], dX); in mpihelp_divrem()
[all …]
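mpihelp_divrem() estimates one quotient limb at a time with udiv_qrnnd() and then corrects it downward (the q-- hit above). The simplest shape of that loop is the single-limb-divisor case, sketched here with 32-bit limbs and a plain 64-by-32 division standing in for udiv_qrnnd() (names are illustrative, not the mpi library's API; d must be non-zero):

#include <stddef.h>
#include <stdint.h>

/* Divide a little-endian array of 32-bit limbs by a single limb,
 * writing quotient limbs to qp and returning the remainder. */
static uint32_t div_limbs_by_1(uint32_t *qp, const uint32_t *np,
			       size_t nlimbs, uint32_t d)
{
	uint32_t r = 0;

	for (size_t i = nlimbs; i-- > 0; ) {
		uint64_t n = ((uint64_t)r << 32) | np[i];

		qp[i] = (uint32_t)(n / d);
		r = (uint32_t)(n % d);
	}
	return r;
}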
longlong.h
144 #define udiv_qrnnd(q, r, n1, n0, d) \ argument
146 : "=r" ((USItype)(q)), \
162 #define udiv_qrnnd(q, r, n1, n0, d) \ argument
164 (q) = __udiv_qrnnd(&__r, (n1), (n0), (d)); \
287 #define udiv_qrnnd(q, r, nh, nl, d) \ argument
289 : "=g" ((USItype)(q)), \
339 #define udiv_qrnnd(q, r, n1, n0, d) \
341 (q) = __udiv_qrnnd(&__r, (n1), (n0), (d)); \
379 #define sdiv_qrnnd(q, r, n1, n0, d) \ argument
388 (q) = __xx.__i.__l; (r) = __xx.__i.__h; \
[all …]
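For reference, the contract the udiv_qrnnd(q, r, n1, n0, d) macros implement, written portably for 32-bit limbs (the longlong.h variants above do this in one or two machine instructions per architecture; n1 < d is required so the quotient fits in one limb):

#include <stdint.h>

/* Divide the double-limb value (n1:n0) by d, producing a one-limb
 * quotient and remainder. */
static void udiv_qrnnd_sketch(uint32_t *q, uint32_t *r,
			      uint32_t n1, uint32_t n0, uint32_t d)
{
	uint64_t n = ((uint64_t)n1 << 32) | n0;

	*q = (uint32_t)(n / d);
	*r = (uint32_t)(n % d);
}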
