
Searched refs: w (Results 1 – 9 of 9) sorted by relevance

/crypto/
aegis128-neon-inner.c
52 uint8x16_t aegis_aes_round(uint8x16_t w) in aegis_aes_round() argument
69 w = vqtbl1q_u8(w, vld1q_u8(shift_rows)); in aegis_aes_round()
73 v = vqtbl4q_u8(vld1q_u8_x4(crypto_aes_sbox), w); in aegis_aes_round()
74 v = vqtbx4q_u8(v, vld1q_u8_x4(crypto_aes_sbox + 0x40), w - 0x40); in aegis_aes_round()
75 v = vqtbx4q_u8(v, vld1q_u8_x4(crypto_aes_sbox + 0x80), w - 0x80); in aegis_aes_round()
76 v = vqtbx4q_u8(v, vld1q_u8_x4(crypto_aes_sbox + 0xc0), w - 0xc0); in aegis_aes_round()
78 asm("tbl %0.16b, {v16.16b-v19.16b}, %1.16b" : "=w"(v) : "w"(w)); in aegis_aes_round()
79 w -= 0x40; in aegis_aes_round()
80 asm("tbx %0.16b, {v20.16b-v23.16b}, %1.16b" : "+w"(v) : "w"(w)); in aegis_aes_round()
81 w -= 0x40; in aegis_aes_round()
[all …]
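The tbl/tbx sequence above implements AES SubBytes as four chained 64-byte lookups into the 256-entry S-box; subtracting 0x40 between lookups pushes already-handled indices out of range, so tbx leaves those lanes untouched. A minimal portable sketch of the combined ShiftRows + SubBytes step, assuming shift_rows and aes_sbox match the kernel's tables:

#include <stdint.h>

/* Standard AES ShiftRows as a byte permutation of the column-major state. */
static const uint8_t shift_rows[16] = {
	0x0, 0x5, 0xa, 0xf, 0x4, 0x9, 0xe, 0x3,
	0x8, 0xd, 0x2, 0x7, 0xc, 0x1, 0x6, 0xb,
};

/* ShiftRows followed by SubBytes on one 16-byte AES state;
 * aes_sbox is assumed to be the 256-byte AES S-box (crypto_aes_sbox). */
static void sub_shift_rows(uint8_t out[16], const uint8_t in[16],
			   const uint8_t aes_sbox[256])
{
	int i;

	for (i = 0; i < 16; i++)
		out[i] = aes_sbox[in[shift_rows[i]]];
}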
sm3_generic.c
54 static void sm3_expand(u32 *t, u32 *w, u32 *wt) in sm3_expand() argument
61 w[i] = get_unaligned_be32((__u32 *)t + i); in sm3_expand()
64 tmp = w[i - 16] ^ w[i - 9] ^ rol32(w[i - 3], 15); in sm3_expand()
65 w[i] = p1(tmp) ^ (rol32(w[i - 13], 7)) ^ w[i - 6]; in sm3_expand()
69 wt[i] = w[i] ^ w[i + 4]; in sm3_expand()
72 static void sm3_compress(u32 *w, u32 *wt, u32 *m) in sm3_compress() argument
99 tt2 = gg(i, e, f, g) + h + ss1 + *w; in sm3_compress()
100 w++; in sm3_compress()
126 unsigned int w[68]; in sm3_transform() local
129 sm3_expand((u32 *)src, w, wt); in sm3_transform()
[all …]
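The expansion above follows the SM3 message schedule from GB/T 32905-2016: W[16..67] are derived from the 16 big-endian block words through the P1 permutation, and W'[i] = W[i] ^ W[i+4] for i in 0..63. A self-contained sketch, with rol32 and P1 defined locally (not the kernel's exact code):

#include <stdint.h>

#define rol32(x, n)	(((x) << (n)) | ((x) >> (32 - (n))))
#define P1(x)		((x) ^ rol32((x), 15) ^ rol32((x), 23))

static void sm3_expand_sketch(const uint8_t *block, uint32_t w[68],
			      uint32_t wt[64])
{
	int i;

	/* load the 512-bit block as 16 big-endian 32-bit words */
	for (i = 0; i < 16; i++)
		w[i] = (uint32_t)block[4 * i] << 24 |
		       (uint32_t)block[4 * i + 1] << 16 |
		       (uint32_t)block[4 * i + 2] << 8 |
		       (uint32_t)block[4 * i + 3];

	for (i = 16; i < 68; i++)
		w[i] = P1(w[i - 16] ^ w[i - 9] ^ rol32(w[i - 3], 15)) ^
		       rol32(w[i - 13], 7) ^ w[i - 6];

	for (i = 0; i < 64; i++)
		wt[i] = w[i] ^ w[i + 4];
}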
lrw.c
149 struct skcipher_walk w; in xor_tweak() local
160 err = skcipher_walk_virt(&w, req, false); in xor_tweak()
164 iv = (__be32 *)w.iv; in xor_tweak()
170 while (w.nbytes) { in xor_tweak()
171 unsigned int avail = w.nbytes; in xor_tweak()
175 wsrc = w.src.virt.addr; in xor_tweak()
176 wdst = w.dst.virt.addr; in xor_tweak()
186 if (second_pass && w.nbytes == w.total) { in xor_tweak()
193 err = skcipher_walk_done(&w, avail); in xor_tweak()
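lrw.c and xts.c share the same skcipher_walk idiom: map the request in virtually addressable chunks, process whole blocks in place, and report the unprocessed residue back to skcipher_walk_done(). A condensed sketch of that loop, with process_block() as a hypothetical per-block transform:

#include <crypto/internal/skcipher.h>

/* Hedged sketch of the walk loop above, not the kernel's exact code;
 * bs is the cipher block size, process_block() is hypothetical. */
static int walk_and_process(struct skcipher_request *req, unsigned int bs)
{
	struct skcipher_walk w;
	int err;

	err = skcipher_walk_virt(&w, req, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		const u8 *src = w.src.virt.addr;
		u8 *dst = w.dst.virt.addr;

		do {
			process_block(dst, src);	/* hypothetical */
			src += bs;
			dst += bs;
		} while ((avail -= bs) >= bs);

		/* hand back the bytes left unprocessed in this chunk */
		err = skcipher_walk_done(&w, avail);
	}

	return err;
}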
xts.c
93 struct skcipher_walk w; in xor_tweak() local
102 err = skcipher_walk_virt(&w, req, false); in xor_tweak()
104 while (w.nbytes) { in xor_tweak()
105 unsigned int avail = w.nbytes; in xor_tweak()
109 wsrc = w.src.virt.addr; in xor_tweak()
110 wdst = w.dst.virt.addr; in xor_tweak()
114 w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) { in xor_tweak()
123 skcipher_walk_done(&w, avail - bs); in xor_tweak()
131 err = skcipher_walk_done(&w, avail); in xor_tweak()
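The size check above triggers ciphertext stealing when fewer than two blocks remain, which is why the walk stops one block early (avail - bs). Between full blocks, XTS multiplies its 128-bit tweak by x in GF(2^128); a sketch of that doubling, assuming the tweak is held as two little-endian 64-bit halves as in the kernel's gf128mul_x_ble():

#include <stdint.h>

/* Multiply the tweak t by x in GF(2^128), reducing by
 * x^128 + x^7 + x^2 + x + 1 (the 0x87 constant).
 * t[0] is the low half, t[1] the high half. */
static void xts_mul_x(uint64_t t[2])
{
	uint64_t carry = t[1] >> 63;	/* bit shifted out of the top */

	t[1] = (t[1] << 1) | (t[0] >> 63);
	t[0] = (t[0] << 1) ^ (carry * 0x87);
}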
twofish_common.c
468 #define CALC_S(a, b, c, d, i, w, x, y, z) \ argument
471 (a) ^= exp_to_poly[tmp + (w)]; \
652 CALC_K256 (w, i, q0[i], q1[i], q0[i+1], q1[i+1]); in __twofish_setkey()
665 CALC_K192 (w, i, q0[i], q1[i], q0[i+1], q1[i+1]); in __twofish_setkey()
678 CALC_K (w, i, q0[i], q1[i], q0[i+1], q1[i+1]); in __twofish_setkey()
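CALC_S multiplies field elements for the RS matrix with discrete-log tables: the product of two nonzero GF(2^8) elements is exp of the sum of their logs. A generic sketch of that trick, assuming tables built for Twofish's RS field polynomial x^8 + x^6 + x^3 + x^2 + 1 (the indexing and table sizes here are illustrative, not the kernel's exact layout):

#include <stdint.h>

/* log[] maps a nonzero element to its discrete log (log[0] unused);
 * exp[] is the inverse table with 255 entries. */
static uint8_t gf256_mul(uint8_t a, uint8_t b,
			 const uint8_t log[256], const uint8_t exp[255])
{
	if (!a || !b)
		return 0;
	return exp[(log[a] + log[b]) % 255];
}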
md5.c
40 #define MD5STEP(f, w, x, y, z, in, s) \ argument
41 (w += f(x, y, z) + in, w = (w<<s | w>>(32-s)) + x)
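MD5STEP packs one round operation into a macro: add the round function of the other three state words plus the message word and constant into w, rotate left by s, then add x. A self-contained example of the very first round-1 step, with F1 as conventionally defined in the same file:

#include <stdint.h>

#define F1(x, y, z)	((z) ^ ((x) & ((y) ^ (z))))
#define MD5STEP(f, w, x, y, z, in, s) \
	((w) += f(x, y, z) + (in), (w) = ((w) << (s) | (w) >> (32 - (s))) + (x))

/* First step of MD5 round 1: a = b + rol(a + F1(b,c,d) + in0 + K, 7). */
static uint32_t md5_step0(uint32_t a, uint32_t b, uint32_t c, uint32_t d,
			  uint32_t in0)
{
	MD5STEP(F1, a, b, c, d, in0 + 0xd76aa478, 7);
	return a;
}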
twofish_generic.c
86 x = le32_to_cpu(src[n]) ^ ctx->w[m]
89 x ^= ctx->w[m]; \
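These two lines are Twofish's input and output whitening: each 32-bit data word is XORed with one of the key-dependent words ctx->w[0..7] before the first round and after the last. Schematically, for the input side (le32_to_cpu() omitted, assuming a little-endian host):

#include <stdint.h>

/* XOR the four plaintext words with whitening subkeys w[0..3];
 * w[4..7] are used symmetrically on output. */
static void twofish_whiten_in(uint32_t x[4], const uint32_t src[4],
			      const uint32_t w[8])
{
	int n;

	for (n = 0; n < 4; n++)
		x[n] = src[n] ^ w[n];
}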
ecc.c
1426 u64 yy[ECC_MAX_DIGITS], xxx[ECC_MAX_DIGITS], w[ECC_MAX_DIGITS]; in ecc_is_pubkey_valid_partial() local
1445 vli_mod_mult_fast(w, curve->a, pk->x, curve->p, pk->ndigits); /* a·x */ in ecc_is_pubkey_valid_partial()
1446 vli_mod_add(w, w, curve->b, curve->p, pk->ndigits); /* a·x + b */ in ecc_is_pubkey_valid_partial()
1447 vli_mod_add(w, w, xxx, curve->p, pk->ndigits); /* x^3 + a·x + b */ in ecc_is_pubkey_valid_partial()
1448 if (vli_cmp(yy, w, pk->ndigits) != 0) /* Equation */ in ecc_is_pubkey_valid_partial()
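The comparison above validates a public key by checking the short Weierstrass equation y^2 ≡ x^3 + a·x + b (mod p). A toy-scale standalone illustration of the same check, using the small example curve y^2 = x^3 + 2x + 2 over GF(17) and the point (5, 1); the kernel performs the identical comparison with multi-digit vli arithmetic:

#include <stdint.h>
#include <stdio.h>

static uint64_t mod_mul(uint64_t a, uint64_t b, uint64_t p)
{
	return (a * b) % p;	/* no overflow at toy sizes */
}

int main(void)
{
	const uint64_t p = 17, a = 2, b = 2, x = 5, y = 1;
	uint64_t yy  = mod_mul(y, y, p);			/* y^2 */
	uint64_t xxx = mod_mul(mod_mul(x, x, p), x, p);		/* x^3 */
	uint64_t w   = (xxx + mod_mul(a, x, p) + b) % p;	/* x^3 + a·x + b */

	printf("on curve: %s\n", yy == w ? "yes" : "no");	/* prints "yes" */
	return 0;
}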
Kconfig
516 combined with ESSIV the only feasible mode for h/w accelerated