
Searched refs:t (Results 1 – 15 of 15) sorted by relevance

/crypto/
gf128mul.c:262 struct gf128mul_64k *t; in gf128mul_init_64k_bbe() local
265 t = kzalloc(sizeof(*t), GFP_KERNEL); in gf128mul_init_64k_bbe()
266 if (!t) in gf128mul_init_64k_bbe()
270 t->t[i] = kzalloc(sizeof(*t->t[i]), GFP_KERNEL); in gf128mul_init_64k_bbe()
271 if (!t->t[i]) { in gf128mul_init_64k_bbe()
272 gf128mul_free_64k(t); in gf128mul_init_64k_bbe()
273 t = NULL; in gf128mul_init_64k_bbe()
278 t->t[0]->t[1] = *g; in gf128mul_init_64k_bbe()
280 gf128mul_x_bbe(&t->t[0]->t[j + j], &t->t[0]->t[j]); in gf128mul_init_64k_bbe()
285 be128_xor(&t->t[i]->t[j + k], in gf128mul_init_64k_bbe()
[all …]
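
The allocation loop above builds the 64 KB table behind gf128mul_64k_bbe(): t->t[0]->t[1] starts at the key element g, repeated doubling with gf128mul_x_bbe() fills in the powers of x, and XORs fill the remaining byte multiples. A minimal standalone sketch of that doubling step, on raw 16-byte blocks instead of the kernel's be128 (gf128_double_bbe is a hypothetical name):

    #include <stdint.h>

    /* Multiply a GF(2^128) element by x in big-endian bit order ("bbe"):
     * shift left one bit and, if a bit fell off the top, fold it back in
     * via the reduction polynomial x^128 + x^7 + x^2 + x + 1 (0x87). */
    static void gf128_double_bbe(uint8_t r[16], const uint8_t x[16])
    {
        int carry = x[0] >> 7;           /* top bit about to fall off */
        int i;

        for (i = 0; i < 15; i++)
            r[i] = (uint8_t)((x[i] << 1) | (x[i + 1] >> 7));
        r[15] = (uint8_t)(x[15] << 1);
        if (carry)
            r[15] ^= 0x87;               /* reduction tail */
    }
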
cast5_generic.c:307 u32 l, r, t; in __cast5_encrypt() local
329 t = l; l = r; r = t ^ F1(r, Km[0], Kr[0]); in __cast5_encrypt()
330 t = l; l = r; r = t ^ F2(r, Km[1], Kr[1]); in __cast5_encrypt()
331 t = l; l = r; r = t ^ F3(r, Km[2], Kr[2]); in __cast5_encrypt()
332 t = l; l = r; r = t ^ F1(r, Km[3], Kr[3]); in __cast5_encrypt()
333 t = l; l = r; r = t ^ F2(r, Km[4], Kr[4]); in __cast5_encrypt()
334 t = l; l = r; r = t ^ F3(r, Km[5], Kr[5]); in __cast5_encrypt()
335 t = l; l = r; r = t ^ F1(r, Km[6], Kr[6]); in __cast5_encrypt()
336 t = l; l = r; r = t ^ F2(r, Km[7], Kr[7]); in __cast5_encrypt()
337 t = l; l = r; r = t ^ F3(r, Km[8], Kr[8]); in __cast5_encrypt()
[all …]
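
Each line above is one Feistel round: save the left half, move the right half left, and replace the right half with the saved value XORed through the round function (F1/F2/F3 alternate). A sketch of the shared shape, with a hypothetical toy_f standing in for the real S-box based round functions:

    #include <stdint.h>

    /* Toy stand-in for CAST5's F1/F2/F3 (which mix through S-boxes);
     * here just an add and a key-dependent rotate. */
    static uint32_t toy_f(uint32_t half, uint32_t km, uint8_t kr)
    {
        uint32_t v = km + half;

        return (v << (kr & 31)) | (v >> ((32 - kr) & 31));
    }

    /* One Feistel step in the exact "t = l; l = r; r = t ^ F(r)" shape
     * used by the unrolled rounds above. */
    static void feistel_round(uint32_t *l, uint32_t *r, uint32_t km, uint8_t kr)
    {
        uint32_t t = *l;

        *l = *r;                      /* *l now holds the old right half */
        *r = t ^ toy_f(*l, km, kr);
    }
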
aegis.h:46 const u32 *t = crypto_ft_tab[0]; in crypto_aegis_aesenc() local
49 d0 = t[s[ 0]] ^ rol32(t[s[ 5]], 8) ^ rol32(t[s[10]], 16) ^ rol32(t[s[15]], 24); in crypto_aegis_aesenc()
50 d1 = t[s[ 4]] ^ rol32(t[s[ 9]], 8) ^ rol32(t[s[14]], 16) ^ rol32(t[s[ 3]], 24); in crypto_aegis_aesenc()
51 d2 = t[s[ 8]] ^ rol32(t[s[13]], 8) ^ rol32(t[s[ 2]], 16) ^ rol32(t[s[ 7]], 24); in crypto_aegis_aesenc()
52 d3 = t[s[12]] ^ rol32(t[s[ 1]], 8) ^ rol32(t[s[ 6]], 16) ^ rol32(t[s[11]], 24); in crypto_aegis_aesenc()
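
crypto_aegis_aesenc() above performs one AES round with a single 1 KB lookup table: the classic four-table form T0[a] ^ T1[b] ^ T2[c] ^ T3[d] collapses to one table plus rotations, because T1[x] = rol32(T0[x], 8) and so on, which is exactly the rol32 pattern in the lines above. A sketch of one output column under that identity (ft stands in for crypto_ft_tab[0]; any 256-entry round table has the same shape):

    #include <stdint.h>

    static inline uint32_t rol32(uint32_t v, unsigned int s)
    {
        return (v << (s & 31)) | (v >> ((32 - s) & 31));
    }

    /* One column of an AES-style table round from four state bytes. */
    static uint32_t aes_column(const uint32_t *ft,
                               uint8_t a, uint8_t b, uint8_t c, uint8_t d)
    {
        return ft[a] ^ rol32(ft[b], 8) ^ rol32(ft[c], 16) ^ rol32(ft[d], 24);
    }
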
sha3_generic.c:49 u64 t[5], tt, bc[5]; in keccakf_round() local
58 t[0] = bc[4] ^ rol64(bc[1], 1); in keccakf_round()
59 t[1] = bc[0] ^ rol64(bc[2], 1); in keccakf_round()
60 t[2] = bc[1] ^ rol64(bc[3], 1); in keccakf_round()
61 t[3] = bc[2] ^ rol64(bc[4], 1); in keccakf_round()
62 t[4] = bc[3] ^ rol64(bc[0], 1); in keccakf_round()
64 st[0] ^= t[0]; in keccakf_round()
68 st[ 1] = rol64(st[ 6] ^ t[1], 44); in keccakf_round()
69 st[ 6] = rol64(st[ 9] ^ t[4], 20); in keccakf_round()
70 st[ 9] = rol64(st[22] ^ t[2], 61); in keccakf_round()
[all …]
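
The lines above are the theta step of the Keccak-f permutation, unrolled: bc[] holds the five column parities and t[i] = bc[(i+4)%5] ^ rol64(bc[(i+1)%5], 1) is XORed into every lane of column i (the rol64(..., 44/20/61) lines belong to the fused rho/pi step that follows). The same theta step in loop form, as a sketch:

    #include <stdint.h>

    static inline uint64_t rol64(uint64_t v, unsigned int s)
    {
        return (v << (s & 63)) | (v >> ((64 - s) & 63));
    }

    /* Keccak theta; st is the 5x5 lane state, indexed st[x + 5*y]. */
    static void keccak_theta(uint64_t st[25])
    {
        uint64_t bc[5], t;
        int x, y;

        for (x = 0; x < 5; x++)        /* column parities */
            bc[x] = st[x] ^ st[x + 5] ^ st[x + 10] ^ st[x + 15] ^ st[x + 20];

        for (x = 0; x < 5; x++) {
            t = bc[(x + 4) % 5] ^ rol64(bc[(x + 1) % 5], 1);
            for (y = 0; y < 25; y += 5)
                st[x + y] ^= t;
        }
    }
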
xts.c:34 le128 t; member
94 le128 t = rctx->t; in xor_tweak() local
117 rctx->t = t; in xor_tweak()
118 gf128mul_x_ble(&t, &t); in xor_tweak()
120 le128_xor(wdst, &t, wsrc); in xor_tweak()
122 gf128mul_x_ble(&rctx->t, &t); in xor_tweak()
127 le128_xor(wdst++, &t, wsrc++); in xor_tweak()
128 gf128mul_x_ble(&t, &t); in xor_tweak()
156 le128_xor(&b, &rctx->t, &b); in cts_done()
181 le128_xor(b, &rctx->t, b); in cts_final()
[all …]
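
xor_tweak() above walks the request one 16-byte block at a time: XOR the running tweak t into the block, then advance t with gf128mul_x_ble(), a doubling in GF(2^128) under the little-endian ("ble") block convention; rctx->t checkpoints the tweak so the walk can resume. A self-contained sketch of the pattern (the le128_t layout here is an assumption for illustration):

    #include <stdint.h>

    /* Sketch layout only: low 64 bits in .b, high 64 bits in .a. */
    typedef struct { uint64_t b, a; } le128_t;

    /* Multiply by x in GF(2^128), "ble" convention, reducing with
     * x^128 + x^7 + x^2 + x + 1. Safe to call in place. */
    static void gf128_double_ble(le128_t *r, const le128_t *x)
    {
        uint64_t carry = x->a >> 63;

        r->a = (x->a << 1) | (x->b >> 63);
        r->b = (x->b << 1) ^ (carry ? 0x87 : 0);
    }

    /* Per-block pattern from xor_tweak(): XOR the running tweak into
     * the block, then double the tweak for the next block. */
    static void xts_xor_tweak(le128_t *blocks, unsigned int nblocks, le128_t t)
    {
        unsigned int i;

        for (i = 0; i < nblocks; i++) {
            blocks[i].a ^= t.a;
            blocks[i].b ^= t.b;
            gf128_double_ble(&t, &t);
        }
    }
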
tgr192.c:556 u32 t, msb, lsb; in tgr192_final() local
561 t = tctx->nblocks; in tgr192_final()
562 if ((lsb = t << 6) < t) { /* multiply by 64 to make a byte count */ in tgr192_final()
565 msb += t >> 26; in tgr192_final()
566 t = lsb; in tgr192_final()
567 if ((lsb = t + tctx->count) < t) { /* add the count */ in tgr192_final()
570 t = lsb; in tgr192_final()
571 if ((lsb = t << 3) < t) { /* multiply by 8 to make a bit count */ in tgr192_final()
574 msb += t >> 29; in tgr192_final()
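
tgr192_final() above assembles the 64-bit message bit count needed for MD-style padding out of two u32 halves; each "(lsb = ...) < t" comparison detects a wrapped low word so the carry can be folded into msb. With a 64-bit type the whole computation collapses, as this sketch shows:

    #include <stdint.h>

    /* Total message length in bits: nblocks full 64-byte blocks plus
     * `count` residual bytes, times 8. The kernel keeps this as an
     * msb/lsb pair of u32s because the hash targets 32-bit math. */
    static uint64_t tgr192_bit_count(uint32_t nblocks, uint32_t count)
    {
        uint64_t bytes = (uint64_t)nblocks * 64 + count;

        return bytes * 8;
    }
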
vmac.c:255 u64 t1, t2, m1, m2, t; \
257 rh = rl = t = 0; \
267 t += (u64)(u32)m1 + (u32)m2; \
269 ADD128(rh, rl, (t >> 32), (t << 32)); \
286 u64 p, q, t; in poly_step_func() local
295 t = (u32)(p); in poly_step_func()
301 t |= ((u64)((u32)p & 0x7fffffff)) << 32; in poly_step_func()
321 *(u64 *)(ahi) = p + t; in poly_step_func()
360 u64 rh, rl, t, z = 0; in l3hash() local
363 t = p1 >> 63; in l3hash()
[all …]
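
The m1/m2/t juggling above comes from VMAC's portable 128-bit macros (MUL64/ADD128 and friends), which synthesize a 64x64 -> 128-bit multiply-accumulate out of 32-bit pieces on machines without a native 128-bit type. A sketch of the core widening multiply written out as a function:

    #include <stdint.h>

    /* Portable 64x64 -> 128-bit multiply: split each operand into
     * 32-bit halves, form the four partial products, and recombine,
     * tracking the carry out of the middle terms. */
    static void mul64x64(uint64_t a, uint64_t b, uint64_t *hi, uint64_t *lo)
    {
        uint64_t a_lo = (uint32_t)a, a_hi = a >> 32;
        uint64_t b_lo = (uint32_t)b, b_hi = b >> 32;

        uint64_t ll = a_lo * b_lo;
        uint64_t lh = a_lo * b_hi;
        uint64_t hl = a_hi * b_lo;
        uint64_t hh = a_hi * b_hi;

        uint64_t mid = (ll >> 32) + (uint32_t)lh + (uint32_t)hl;

        *lo = (mid << 32) | (uint32_t)ll;
        *hi = hh + (lh >> 32) + (hl >> 32) + (mid >> 32);
    }
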
sm4_generic.c:119 u32 rk[4], t; in crypto_sm4_expand_key() local
130 t = rk[0] ^ sm4_key_sub(rk[1] ^ rk[2] ^ rk[3] ^ ck[i]); in crypto_sm4_expand_key()
131 ctx->rkey_enc[i] = t; in crypto_sm4_expand_key()
135 rk[3] = t; in crypto_sm4_expand_key()
174 u32 x[4], i, t; in sm4_do_crypt() local
180 t = sm4_round(x, rk[i]); in sm4_do_crypt()
184 x[3] = t; in sm4_do_crypt()
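
Both snippets above use the same sliding-window recurrence: each new word depends on the previous four, then the 4-word window shifts by one. A sketch of the key-expansion variant, with a hypothetical toy_sub standing in for the real sm4_key_sub() (the S-box based T' transform):

    #include <stdint.h>

    /* Placeholder mixing only; not the real SM4 transform. */
    static uint32_t toy_sub(uint32_t x)
    {
        return (x * 0x9e3779b9u) ^ (x >> 16);
    }

    /* Sliding-window recurrence from crypto_sm4_expand_key(); SM4
     * itself runs nrounds = 32 with the CK round constants. */
    static void expand_keys(uint32_t rk[4], const uint32_t *ck,
                            uint32_t *rkey_out, int nrounds)
    {
        int i;

        for (i = 0; i < nrounds; i++) {
            uint32_t t = rk[0] ^ toy_sub(rk[1] ^ rk[2] ^ rk[3] ^ ck[i]);

            rkey_out[i] = t;
            rk[0] = rk[1];
            rk[1] = rk[2];
            rk[2] = rk[3];
            rk[3] = t;
        }
    }
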
keywrap.c:129 u64 t = 6 * ((req->cryptlen) >> 3); in crypto_kw_decrypt() local
163 block.A ^= cpu_to_be64(t); in crypto_kw_decrypt()
164 t--; in crypto_kw_decrypt()
198 u64 t = 1; in crypto_kw_encrypt() local
240 block.A ^= cpu_to_be64(t); in crypto_kw_encrypt()
241 t++; in crypto_kw_encrypt()
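
The counter above is the RFC 3394 step index: crypto_kw_encrypt() folds t = 1, 2, ... into the big-endian integrity half A after each block cipher call, and crypto_kw_decrypt() replays the same values in reverse starting from 6 * n. A sketch of just the counter discipline (the cipher call and exact XOR ordering are elided):

    #include <stdint.h>

    /* 6 passes over n semiblocks, t counting up while wrapping and
     * down while unwrapping. The real code XORs cpu_to_be64(t) into
     * block.A around the AES call, which is omitted here. */
    static void kw_counter_demo(uint64_t *A, unsigned int n, int unwrap)
    {
        uint64_t t = unwrap ? 6ULL * n : 1;
        unsigned int step;

        for (step = 0; step < 6 * n; step++) {
            /* ... AES over (A | R[i]) would run here ... */
            *A ^= t;
            if (unwrap)
                t--;
            else
                t++;
        }
    }
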
lrw.c:53 be128 t; member
148 be128 t = rctx->t; in xor_tweak() local
179 be128_xor(wdst++, &t, wsrc++); in xor_tweak()
183 be128_xor(&t, &t, &ctx->mulinc[next_index(counter)]); in xor_tweak()
236 memcpy(&rctx->t, req->iv, sizeof(rctx->t)); in init_crypt()
239 gf128mul_64k_bbe(&rctx->t, ctx->table); in init_crypt()
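
init_crypt() above derives the first tweak by multiplying the IV through the precomputed 64k table, and xor_tweak() then advances it incrementally: stepping the block counter flips a run of low bits, so the next tweak is one XOR with a precomputed mulinc[] entry selected by the lowest zero bit of the counter. A single-word sketch of that index computation (the kernel's next_index() chains four u32s):

    #include <stdint.h>

    /* Return the bit position selecting which precomputed tweak
     * increment to XOR in, advancing the counter as a side effect.
     * Assumes the counter is not all-ones (ctz(0) is undefined). */
    static int next_index(uint32_t *counter)
    {
        int pos = __builtin_ctz(~*counter);  /* lowest zero bit */

        (*counter)++;
        return pos;
    }

    /* usage, mirroring xor_tweak(): t ^= mulinc[next_index(&counter)]; */
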
sm3_generic.c:49 static inline u32 t(unsigned int n) in t() function
54 static void sm3_expand(u32 *t, u32 *w, u32 *wt) in sm3_expand() argument
61 w[i] = get_unaligned_be32((__u32 *)t + i); in sm3_expand()
92 ss1 = rol32((rol32(a, 12) + e + rol32(t(i), i & 31)), 7); in sm3_compress()
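
t() above selects between SM3's two round constants (0x79cc4519 for rounds 0-15, 0x7a879d8a afterwards), and the compression step rotates the result by the round number, as the ss1 line shows. A sketch of the constant schedule on its own:

    #include <stdint.h>

    static inline uint32_t rol32(uint32_t v, unsigned int s)
    {
        return (v << (s & 31)) | (v >> ((32 - s) & 31));
    }

    /* SM3 base constant T_j, per the spec. */
    static uint32_t sm3_t(unsigned int n)
    {
        return n < 16 ? 0x79cc4519u : 0x7a879d8au;
    }

    /* The rotated per-round constant used inside ss1. */
    static uint32_t sm3_round_const(unsigned int i)
    {
        return rol32(sm3_t(i), i & 31);
    }
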
ecc.c:520 u64 t[ECC_MAX_DIGITS * 2]; in vli_mmod_special() local
525 vli_umult(t, r + ndigits, c, ndigits); in vli_mmod_special()
527 vli_add(r, r, t, ndigits * 2); in vli_mmod_special()
529 vli_set(t, mod, ndigits); in vli_mmod_special()
530 vli_clear(t + ndigits, ndigits); in vli_mmod_special()
531 while (vli_cmp(r, t, ndigits * 2) >= 0) in vli_mmod_special()
532 vli_sub(r, r, t, ndigits * 2); in vli_mmod_special()
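
vli_mmod_special() above reduces modulo primes of the special form p = 2^k - c: since 2^k ≡ c (mod p), the limbs above the k-bit boundary can be folded back in as (high part) * c, followed by conditional subtractions. The same idea on a single 64-bit value with a 32-bit modulus, as a sketch:

    #include <stdint.h>

    /* Reduce x modulo p = 2^32 - c, assuming c is small (as it is for
     * the special-form curve primes this fast path serves). */
    static uint32_t mod_special32(uint64_t x, uint32_t c)
    {
        uint32_t p = (uint32_t)(0x100000000ULL - c);

        while (x >> 32)                  /* fold the high part back in */
            x = (uint32_t)x + (x >> 32) * c;
        while (x >= p)                   /* conditional final subtracts */
            x -= p;
        return (uint32_t)x;
    }
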
fcrypt.c:60 u32 t = lo & ((1 << n) - 1); \
62 hi = (hi >> n) | (t << (24-n)); \
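
The two macro lines above are part of fcrypt's 56-bit rotate-right, performed on a value split across a 24-bit hi word and a 32-bit lo word: t saves the low bits of lo so they can wrap around into the top of hi. With a 64-bit type the whole rotation is one expression, as this sketch shows:

    #include <stdint.h>

    /* Rotate a 56-bit value right by n, 1 <= n <= 55 (the hi/lo split
     * in fcrypt.c exists only to suit 32-bit arithmetic). */
    static uint64_t ror56(uint64_t v, unsigned int n)
    {
        const uint64_t mask = (1ULL << 56) - 1;

        v &= mask;
        return ((v >> n) | (v << (56 - n))) & mask;
    }
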
Kconfig:436 can't handle a sectorsize which is not a multiple of 16 bytes.
676 Unless you are testing these algorithms, you don't need this.
/crypto/asymmetric_keys/
Kconfig:83 just the payload. If it isn't, adding the key will fail with an