
Searched full:xts (Results 1 – 25 of 171) sorted by relevance


/kernel/linux/linux-5.10/drivers/crypto/ccp/
ccp-crypto-aes-xts.c:3 * AMD Cryptographic Coprocessor (CCP) AES XTS crypto API support
16 #include <crypto/xts.h>
29 .name = "xts(aes)",
30 .drv_name = "xts-aes-ccp",
170 rctx->cmd.u.xts.type = CCP_AES_TYPE_128; in ccp_aes_xts_crypt()
171 rctx->cmd.u.xts.action = (encrypt) ? CCP_AES_ACTION_ENCRYPT in ccp_aes_xts_crypt()
173 rctx->cmd.u.xts.unit_size = unit_size; in ccp_aes_xts_crypt()
174 rctx->cmd.u.xts.key = &ctx->u.aes.key_sg; in ccp_aes_xts_crypt()
175 rctx->cmd.u.xts.key_len = ctx->u.aes.key_len; in ccp_aes_xts_crypt()
176 rctx->cmd.u.xts.iv = &rctx->iv_sg; in ccp_aes_xts_crypt()
[all …]
ccp-ops.c:1092 struct ccp_xts_aes_engine *xts = &cmd->u.xts; in ccp_run_xts_aes_cmd() local
1102 switch (xts->unit_size) { in ccp_run_xts_aes_cmd()
1123 if (xts->key_len == AES_KEYSIZE_128) in ccp_run_xts_aes_cmd()
1125 else if (xts->key_len == AES_KEYSIZE_256) in ccp_run_xts_aes_cmd()
1130 if (!xts->final && (xts->src_len & (AES_BLOCK_SIZE - 1))) in ccp_run_xts_aes_cmd()
1133 if (xts->iv_len != AES_BLOCK_SIZE) in ccp_run_xts_aes_cmd()
1136 if (!xts->key || !xts->iv || !xts->src || !xts->dst) in ccp_run_xts_aes_cmd()
1149 op.u.xts.type = aestype; in ccp_run_xts_aes_cmd()
1150 op.u.xts.action = xts->action; in ccp_run_xts_aes_cmd()
1151 op.u.xts.unit_size = xts->unit_size; in ccp_run_xts_aes_cmd()
[all …]
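
The ccp-ops.c hits above show the parameter checks ccp_run_xts_aes_cmd() performs before programming the coprocessor: the unit size must be one the engine supports, the key must be AES-128 or AES-256, any chunk other than the final one must be a multiple of the AES block size, the tweak (IV) must be exactly one AES block, and the key, IV, source and destination buffers must all be present. A minimal, userspace-style sketch of those checks follows; struct my_xts_request, my_xts_check() and the MY_* constants are hypothetical stand-ins, not the driver's types.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define MY_AES_BLOCK_SIZE   16
#define MY_AES_KEYSIZE_128  16
#define MY_AES_KEYSIZE_256  32

/* Hypothetical request descriptor mirroring the fields checked above. */
struct my_xts_request {
	const uint8_t *key, *iv, *src;
	uint8_t *dst;
	size_t key_len, iv_len, src_len;
	bool final;
};

/* Returns 0 when the request is acceptable, -1 otherwise. */
static int my_xts_check(const struct my_xts_request *r)
{
	if (r->key_len != MY_AES_KEYSIZE_128 && r->key_len != MY_AES_KEYSIZE_256)
		return -1;   /* only AES-128 / AES-256 data keys */
	if (!r->final && (r->src_len & (MY_AES_BLOCK_SIZE - 1)))
		return -1;   /* intermediate chunks must be block aligned */
	if (r->iv_len != MY_AES_BLOCK_SIZE)
		return -1;   /* the tweak is exactly one AES block */
	if (!r->key || !r->iv || !r->src || !r->dst)
		return -1;   /* all buffers must be supplied */
	return 0;
}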
/kernel/linux/linux-4.19/drivers/crypto/ccp/
ccp-crypto-aes-xts.c:2 * AMD Cryptographic Coprocessor (CCP) AES XTS crypto API support
19 #include <crypto/xts.h>
32 .name = "xts(aes)",
33 .drv_name = "xts-aes-ccp",
176 rctx->cmd.u.xts.type = CCP_AES_TYPE_128; in ccp_aes_xts_crypt()
177 rctx->cmd.u.xts.action = (encrypt) ? CCP_AES_ACTION_ENCRYPT in ccp_aes_xts_crypt()
179 rctx->cmd.u.xts.unit_size = unit_size; in ccp_aes_xts_crypt()
180 rctx->cmd.u.xts.key = &ctx->u.aes.key_sg; in ccp_aes_xts_crypt()
181 rctx->cmd.u.xts.key_len = ctx->u.aes.key_len; in ccp_aes_xts_crypt()
182 rctx->cmd.u.xts.iv = &rctx->iv_sg; in ccp_aes_xts_crypt()
[all …]
ccp-ops.c:1098 struct ccp_xts_aes_engine *xts = &cmd->u.xts; in ccp_run_xts_aes_cmd() local
1108 switch (xts->unit_size) { in ccp_run_xts_aes_cmd()
1129 if (xts->key_len == AES_KEYSIZE_128) in ccp_run_xts_aes_cmd()
1131 else if (xts->key_len == AES_KEYSIZE_256) in ccp_run_xts_aes_cmd()
1136 if (!xts->final && (xts->src_len & (AES_BLOCK_SIZE - 1))) in ccp_run_xts_aes_cmd()
1139 if (xts->iv_len != AES_BLOCK_SIZE) in ccp_run_xts_aes_cmd()
1142 if (!xts->key || !xts->iv || !xts->src || !xts->dst) in ccp_run_xts_aes_cmd()
1155 op.u.xts.type = aestype; in ccp_run_xts_aes_cmd()
1156 op.u.xts.action = xts->action; in ccp_run_xts_aes_cmd()
1157 op.u.xts.unit_size = xts->unit_size; in ccp_run_xts_aes_cmd()
[all …]
/kernel/linux/linux-5.10/arch/x86/crypto/
serpent_avx2_glue.c:15 #include <crypto/xts.h>
77 .fn_u = { .xts = serpent_xts_enc_16way }
80 .fn_u = { .xts = serpent_xts_enc_8way_avx }
83 .fn_u = { .xts = serpent_xts_enc }
125 .fn_u = { .xts = serpent_xts_dec_16way }
128 .fn_u = { .xts = serpent_xts_dec_8way_avx }
131 .fn_u = { .xts = serpent_xts_dec }
cast6_avx_glue.c:18 #include <crypto/xts.h>
96 .fn_u = { .xts = cast6_xts_enc_8way }
99 .fn_u = { .xts = cast6_xts_enc }
135 .fn_u = { .xts = cast6_xts_dec_8way }
138 .fn_u = { .xts = cast6_xts_dec }
182 /* first half of xts-key is for crypt */ in xts_cast6_setkey()
187 /* second half of xts-key is for tweak */ in xts_cast6_setkey()
camellia_aesni_avx2_glue.c:12 #include <crypto/xts.h>
78 .fn_u = { .xts = camellia_xts_enc_32way }
81 .fn_u = { .xts = camellia_xts_enc_16way }
84 .fn_u = { .xts = camellia_xts_enc }
132 .fn_u = { .xts = camellia_xts_dec_32way }
135 .fn_u = { .xts = camellia_xts_dec_16way }
138 .fn_u = { .xts = camellia_xts_dec }
twofish_avx_glue.c:18 #include <crypto/xts.h>
73 /* first half of xts-key is for crypt */ in xts_twofish_setkey()
78 /* second half of xts-key is for tweak */ in xts_twofish_setkey()
120 .fn_u = { .xts = twofish_xts_enc_8way }
123 .fn_u = { .xts = twofish_xts_enc }
165 .fn_u = { .xts = twofish_xts_dec_8way }
168 .fn_u = { .xts = twofish_xts_dec }
camellia_aesni_avx_glue.c:12 #include <crypto/xts.h>
92 .fn_u = { .xts = camellia_xts_enc_16way }
95 .fn_u = { .xts = camellia_xts_enc }
137 .fn_u = { .xts = camellia_xts_dec_16way }
140 .fn_u = { .xts = camellia_xts_dec }
185 /* first half of xts-key is for crypt */ in xts_camellia_setkey()
190 /* second half of xts-key is for tweak */ in xts_camellia_setkey()
serpent_avx_glue.c:18 #include <crypto/xts.h>
89 /* first half of xts-key is for crypt */ in xts_serpent_setkey()
94 /* second half of xts-key is for tweak */ in xts_serpent_setkey()
131 .fn_u = { .xts = serpent_xts_enc_8way_avx }
134 .fn_u = { .xts = serpent_xts_enc }
170 .fn_u = { .xts = serpent_xts_dec_8way_avx }
173 .fn_u = { .xts = serpent_xts_dec }
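
Several of the *_glue.c setkey hits above repeat the same two comments: the first half of the XTS key keys the data cipher and the second half keys the tweak cipher (the kernel also validates the key with xts_verify_key() from <crypto/xts.h> before splitting it). Below is a self-contained sketch of that splitting under assumed names; struct my_xts_ctx, struct my_cipher_ctx and my_cipher_expand_key() are hypothetical placeholders for a real cipher's context and key schedule.

#include <string.h>

/* Hypothetical contexts for the two underlying cipher instances. */
struct my_cipher_ctx { unsigned char expanded[240]; };

struct my_xts_ctx {
	struct my_cipher_ctx crypt_ctx;  /* keyed with the first half */
	struct my_cipher_ctx tweak_ctx;  /* keyed with the second half */
};

/* Hypothetical key-schedule routine for the underlying block cipher. */
static int my_cipher_expand_key(struct my_cipher_ctx *ctx,
				const unsigned char *key, size_t keylen)
{
	if (keylen > sizeof(ctx->expanded))
		return -1;
	memcpy(ctx->expanded, key, keylen);  /* placeholder for a real schedule */
	return 0;
}

static int my_xts_setkey(struct my_xts_ctx *ctx,
			 const unsigned char *key, size_t keylen)
{
	size_t half = keylen / 2;

	if (keylen & 1)
		return -1;           /* an XTS key is two equal-length halves */
	if (!memcmp(key, key + half, half))
		return -1;           /* reject identical halves, as FIPS mode does */

	/* first half of xts-key is for crypt */
	if (my_cipher_expand_key(&ctx->crypt_ctx, key, half))
		return -1;
	/* second half of xts-key is for tweak */
	return my_cipher_expand_key(&ctx->tweak_ctx, key + half, half);
}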
/kernel/linux/linux-5.10/drivers/crypto/vmx/
aes_xts.c:3 * AES XTS routines supporting VMX In-core instructions on Power 8
15 #include <crypto/xts.h>
31 fallback = crypto_alloc_skcipher("xts(aes)", 0, in p8_aes_xts_init()
35 pr_err("Failed to allocate xts(aes) fallback: %ld\n", in p8_aes_xts_init()
147 .base.cra_name = "xts(aes)",
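
The vmx hits show p8_aes_xts_init() allocating a generic xts(aes) skcipher to fall back to when the VMX path cannot be used (the flag argument is truncated in the hit above). A hedged sketch of that allocation pattern follows; struct my_p8_xts_ctx and my_xts_init() are hypothetical, and passing CRYPTO_ALG_NEED_FALLBACK as the mask is an assumption rather than the driver's exact call.

#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/printk.h>

/* Hypothetical per-tfm context holding the fallback handle. */
struct my_p8_xts_ctx {
	struct crypto_skcipher *fallback;
};

static int my_xts_init(struct crypto_skcipher *tfm)
{
	struct my_p8_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

	/* Ask the crypto API for any xts(aes) implementation usable as a fallback
	 * (assumed mask; the real driver's flags are truncated in the hit above). */
	fallback = crypto_alloc_skcipher("xts(aes)", 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate xts(aes) fallback: %ld\n",
		       PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	ctx->fallback = fallback;
	return 0;
}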
/kernel/linux/linux-4.19/arch/x86/crypto/
serpent_avx2_glue.c:20 #include <crypto/xts.h>
84 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_16way) }
87 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
90 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
132 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_16way) }
135 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
138 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
camellia_aesni_avx2_glue.c:17 #include <crypto/xts.h>
86 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_32way) }
89 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
92 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
140 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_32way) }
143 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
146 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
camellia_aesni_avx_glue.c:17 #include <crypto/xts.h>
102 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
105 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
147 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
150 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
198 /* first half of xts-key is for crypt */ in xts_camellia_setkey()
203 /* second half of xts-key is for tweak */ in xts_camellia_setkey()
twofish_avx_glue.c:33 #include <crypto/xts.h>
95 /* first half of xts-key is for crypt */ in xts_twofish_setkey()
100 /* second half of xts-key is for tweak */ in xts_twofish_setkey()
143 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc_8way) }
146 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc) }
188 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec_8way) }
191 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec) }
serpent_avx_glue.c:33 #include <crypto/xts.h>
104 /* first half of xts-key is for crypt */ in xts_serpent_setkey()
109 /* second half of xts-key is for tweak */ in xts_serpent_setkey()
146 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
149 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
185 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
188 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
cast6_avx_glue.c:33 #include <crypto/xts.h>
114 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way) }
117 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc) }
153 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way) }
156 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec) }
202 /* first half of xts-key is for crypt */ in xts_cast6_setkey()
207 /* second half of xts-key is for tweak */ in xts_cast6_setkey()
/kernel/linux/linux-4.19/arch/arm/crypto/
Kconfig:80 CTR and XTS modes
83 and for XTS mode encryption, CBC and XTS mode decryption speedup is
94 Use an implementation of AES in CBC, CTR and XTS modes that uses
/kernel/linux/linux-5.10/arch/powerpc/crypto/
aes-spe-regs.h:14 #define rIP r8 /* pointer to IV (CBC/CTR/XTS modes) */
15 #define rKT r9 /* pointer to tweak key (XTS mode) */
34 #define rG0 r28 /* endian reversed tweak (XTS mode) */
/kernel/linux/linux-4.19/arch/powerpc/crypto/
aes-spe-regs.h:19 #define rIP r8 /* pointer to IV (CBC/CTR/XTS modes) */
20 #define rKT r9 /* pointer to tweak key (XTS mode) */
39 #define rG0 r28 /* endian reversed tweak (XTS mode) */
/kernel/linux/linux-5.10/arch/arm/crypto/
Kconfig:89 CTR and XTS modes
92 and for XTS mode encryption, CBC and XTS mode decryption speedup is
104 Use an implementation of AES in CBC, CTR and XTS modes that uses
/kernel/linux/linux-5.10/Documentation/crypto/
api-samples.rst:7 This code encrypts some data with AES-256-XTS. For sake of example,
21 u8 iv[16]; /* AES-256-XTS takes a 16-byte IV */
22 u8 key[64]; /* AES-256-XTS takes a 64-byte key */
33 tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
35 pr_err("Error allocating xts(aes) handle: %ld\n", PTR_ERR(tfm));
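
api-samples.rst walks through exactly this flow, so a condensed sketch modeled on it is shown below: allocate an xts(aes) skcipher, set the 64-byte key, attach a scatterlist and the 16-byte IV to a request, and wait for the (possibly asynchronous) encryption to complete. The function name my_xts_encrypt_buf() is hypothetical; the crypto API calls are the ones the sample itself uses. Consult the documentation file for the complete, fully error-checked version.

#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/printk.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int my_xts_encrypt_buf(u8 *data, unsigned int datasize)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	DECLARE_CRYPTO_WAIT(wait);
	struct scatterlist sg;
	u8 iv[16];   /* AES-256-XTS takes a 16-byte IV */
	u8 key[64];  /* AES-256-XTS takes a 64-byte key */
	int err;

	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
	if (IS_ERR(tfm)) {
		pr_err("Error allocating xts(aes) handle: %ld\n", PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	get_random_bytes(key, sizeof(key));  /* example key only */
	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out;
	}

	get_random_bytes(iv, sizeof(iv));    /* example IV (tweak) */
	sg_init_one(&sg, data, datasize);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, datasize, iv);

	/* Wait for the (possibly asynchronous) encryption to complete. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
out:
	skcipher_request_free(req);
	crypto_free_skcipher(tfm);
	return err;
}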
/kernel/linux/linux-4.19/arch/arm64/crypto/
Kconfig:93 tristate "AES in ECB/CBC/CTR/XTS modes using ARMv8 Crypto Extensions"
101 tristate "AES in ECB/CBC/CTR/XTS modes using NEON instructions"
115 tristate "AES in ECB/CBC/CTR/XTS modes using bit-sliced NEON algorithm"
/kernel/linux/linux-5.10/arch/arm64/crypto/
Kconfig:87 tristate "AES in ECB/CBC/CTR/XTS modes using ARMv8 Crypto Extensions"
95 tristate "AES in ECB/CBC/CTR/XTS modes using NEON instructions"
121 tristate "AES in ECB/CBC/CTR/XTS modes using bit-sliced NEON algorithm"
/kernel/linux/linux-5.10/crypto/
xts.c:2 /* XTS: as defined in IEEE1619/D16
19 #include <crypto/xts.h>
386 err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts", in xts_create()
410 "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) { in xts_create()
446 .name = "xts",
465 MODULE_DESCRIPTION("XTS block cipher mode");
466 MODULE_ALIAS_CRYPTO("xts");
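
xts.c implements the mode defined in IEEE 1619: each 16-byte block is XORed with a per-block tweak before and after the underlying cipher call, and the tweak for block i+1 is the tweak for block i multiplied by the primitive element alpha in GF(2^128) (the kernel performs this with gf128mul_x_ble()). A self-contained sketch of that tweak doubling follows, assuming the tweak is held as two little-endian 64-bit words; struct xts_tweak and xts_tweak_double() are illustrative names, not the kernel's.

#include <stdint.h>

/* One 128-bit XTS tweak, stored as a little-endian pair of 64-bit words
 * (lo holds bytes 0-7 of the block, hi holds bytes 8-15). */
struct xts_tweak {
	uint64_t lo;
	uint64_t hi;
};

/* Multiply the tweak by alpha (i.e. by x) in GF(2^128) using the
 * x^128 + x^7 + x^2 + x + 1 reduction polynomial from IEEE 1619.
 * This advances the tweak from one 16-byte block to the next. */
static void xts_tweak_double(struct xts_tweak *t)
{
	uint64_t carry = t->hi >> 63;            /* bit that overflows past x^128 */

	t->hi = (t->hi << 1) | (t->lo >> 63);
	t->lo = (t->lo << 1) ^ (carry * 0x87);   /* fold the overflow back in */
}

With this in place, block i is processed as C_i = E_K1(P_i XOR T_i) XOR T_i, with the doubling above advancing T between consecutive blocks.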
