/arch/x86/crypto/
glue_helper.c
    41  unsigned int nbytes, i, func_bytes;  in __glue_ecb_crypt_128bit() [local]
    47  while ((nbytes = walk->nbytes)) {  in __glue_ecb_crypt_128bit()
    52  desc, fpu_enabled, nbytes);  in __glue_ecb_crypt_128bit()
    58  if (nbytes >= func_bytes) {  in __glue_ecb_crypt_128bit()
    65  nbytes -= func_bytes;  in __glue_ecb_crypt_128bit()
    66  } while (nbytes >= func_bytes);  in __glue_ecb_crypt_128bit()
    68  if (nbytes < bsize)  in __glue_ecb_crypt_128bit()
    74  err = blkcipher_walk_done(desc, walk, nbytes);  in __glue_ecb_crypt_128bit()
    83  struct scatterlist *src, unsigned int nbytes)  in glue_ecb_crypt_128bit() [argument]
    87  blkcipher_walk_init(&walk, dst, src, nbytes);  in glue_ecb_crypt_128bit()
    [all …]
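All of the glue_helper.c hits belong to one loop: __glue_ecb_crypt_128bit() walks the request and, for each chunk the walker hands back, tries the widest parallel routine first and falls back to narrower ones until less than a block remains. A minimal standalone sketch of that control flow (struct func_entry, ecb_walk_model, and the 8/3/1 widths are illustrative stand-ins, not the kernel's types):

    #include <stdio.h>

    #define BSIZE 16                     /* 128-bit block size */

    struct func_entry {
        unsigned int num_blocks;         /* blocks one call processes */
    };

    /* Widest routine first, plain one-block routine last. */
    static const struct func_entry funcs[] = { { 8 }, { 3 }, { 1 } };

    static void ecb_walk_model(unsigned int nbytes)
    {
        for (unsigned int i = 0; i < 3; i++) {
            unsigned int func_bytes = BSIZE * funcs[i].num_blocks;

            if (nbytes < func_bytes)
                continue;                /* too little left for this width */

            do {                         /* consume chunks of this width */
                printf("%u-block call eats %u bytes\n",
                       funcs[i].num_blocks, func_bytes);
                nbytes -= func_bytes;
            } while (nbytes >= func_bytes);

            if (nbytes < BSIZE)
                break;                   /* only a partial block remains */
        }
    }

    int main(void)
    {
        ecb_walk_model(200);             /* 8+3+1 blocks, 8 bytes left over */
        return 0;
    }

This same dispatch is what the camellia, serpent, twofish and cast6 wrappers below reach through glue_ecb_crypt_128bit().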
cast5_avx_glue.c
    49  static inline bool cast5_fpu_begin(bool fpu_enabled, unsigned int nbytes)  in cast5_fpu_begin() [argument]
    52  NULL, fpu_enabled, nbytes);  in cast5_fpu_begin()
    66  unsigned int nbytes;  in ecb_crypt() [local]
    75  while ((nbytes = walk->nbytes)) {  in ecb_crypt()
    79  fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes);  in ecb_crypt()
    82  if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {  in ecb_crypt()
    88  nbytes -= bsize * CAST5_PARALLEL_BLOCKS;  in ecb_crypt()
    89  } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS);  in ecb_crypt()
    91  if (nbytes < bsize)  in ecb_crypt()
    103  nbytes -= bsize;  in ecb_crypt()
    [all …]
blowfish_glue.c
    86  unsigned int nbytes;  in ecb_crypt() [local]
    91  while ((nbytes = walk->nbytes)) {  in ecb_crypt()
    96  if (nbytes >= bsize * 4) {  in ecb_crypt()
    102  nbytes -= bsize * 4;  in ecb_crypt()
    103  } while (nbytes >= bsize * 4);  in ecb_crypt()
    105  if (nbytes < bsize)  in ecb_crypt()
    115  nbytes -= bsize;  in ecb_crypt()
    116  } while (nbytes >= bsize);  in ecb_crypt()
    119  err = blkcipher_walk_done(desc, walk, nbytes);  in ecb_crypt()
    126  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    [all …]
des3_ede_glue.c
    90  unsigned int nbytes;  in ecb_crypt() [local]
    95  while ((nbytes = walk->nbytes)) {  in ecb_crypt()
    100  if (nbytes >= bsize * 3) {  in ecb_crypt()
    107  nbytes -= bsize * 3;  in ecb_crypt()
    108  } while (nbytes >= bsize * 3);  in ecb_crypt()
    110  if (nbytes < bsize)  in ecb_crypt()
    120  nbytes -= bsize;  in ecb_crypt()
    121  } while (nbytes >= bsize);  in ecb_crypt()
    124  err = blkcipher_walk_done(desc, walk, nbytes);  in ecb_crypt()
    131  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    [all …]
camellia_aesni_avx2_glue.c
    155  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    157  return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);  in ecb_encrypt()
    161  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    163  return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);  in ecb_decrypt()
    167  struct scatterlist *src, unsigned int nbytes)  in cbc_encrypt() [argument]
    170  dst, src, nbytes);  in cbc_encrypt()
    174  struct scatterlist *src, unsigned int nbytes)  in cbc_decrypt() [argument]
    177  nbytes);  in cbc_decrypt()
    181  struct scatterlist *src, unsigned int nbytes)  in ctr_crypt() [argument]
    183  return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);  in ctr_crypt()
    [all …]
serpent_avx2_glue.c
    141  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    143  return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);  in ecb_encrypt()
    147  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    149  return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);  in ecb_decrypt()
    153  struct scatterlist *src, unsigned int nbytes)  in cbc_encrypt() [argument]
    156  dst, src, nbytes);  in cbc_encrypt()
    160  struct scatterlist *src, unsigned int nbytes)  in cbc_decrypt() [argument]
    163  nbytes);  in cbc_decrypt()
    167  struct scatterlist *src, unsigned int nbytes)  in ctr_crypt() [argument]
    169  return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);  in ctr_crypt()
    [all …]
twofish_avx_glue.c
    176  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    178  return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes);  in ecb_encrypt()
    182  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    184  return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes);  in ecb_decrypt()
    188  struct scatterlist *src, unsigned int nbytes)  in cbc_encrypt() [argument]
    191  dst, src, nbytes);  in cbc_encrypt()
    195  struct scatterlist *src, unsigned int nbytes)  in cbc_decrypt() [argument]
    198  nbytes);  in cbc_decrypt()
    202  struct scatterlist *src, unsigned int nbytes)  in ctr_crypt() [argument]
    204  return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes);  in ctr_crypt()
    [all …]
camellia_aesni_avx_glue.c
    159  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    161  return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);  in ecb_encrypt()
    165  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    167  return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);  in ecb_decrypt()
    171  struct scatterlist *src, unsigned int nbytes)  in cbc_encrypt() [argument]
    174  dst, src, nbytes);  in cbc_encrypt()
    178  struct scatterlist *src, unsigned int nbytes)  in cbc_decrypt() [argument]
    181  nbytes);  in cbc_decrypt()
    185  struct scatterlist *src, unsigned int nbytes)  in ctr_crypt() [argument]
    187  return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);  in ctr_crypt()
    [all …]
twofish_glue_3way.c
    155  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    157  return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes);  in ecb_encrypt()
    161  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    163  return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes);  in ecb_decrypt()
    167  struct scatterlist *src, unsigned int nbytes)  in cbc_encrypt() [argument]
    170  dst, src, nbytes);  in cbc_encrypt()
    174  struct scatterlist *src, unsigned int nbytes)  in cbc_decrypt() [argument]
    177  nbytes);  in cbc_decrypt()
    181  struct scatterlist *src, unsigned int nbytes)  in ctr_crypt() [argument]
    183  return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes);  in ctr_crypt()
    [all …]
salsa20_glue.c
    50  unsigned int nbytes)  in encrypt() [argument]
    57  blkcipher_walk_init(&walk, dst, src, nbytes);  in encrypt()
    62  while (walk.nbytes >= 64) {  in encrypt()
    65  walk.nbytes - (walk.nbytes % 64));  in encrypt()
    66  err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64);  in encrypt()
    69  if (walk.nbytes) {  in encrypt()
    71  walk.dst.virt.addr, walk.nbytes);  in encrypt()
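salsa20_glue.c shows the walk protocol in its modulo spelling: each pass encrypts walk.nbytes rounded down to the cipher's 64-byte granularity and reports the remainder to blkcipher_walk_done(), which carries it into the next pass; a final partial chunk is handled once at the end. The arithmetic in isolation (150 is a made-up walk.nbytes value):

    #include <stdio.h>

    int main(void)
    {
        unsigned int nbytes = 150;       /* hypothetical walk.nbytes */

        /* Body the cipher core processes in 64-byte multiples... */
        unsigned int body = nbytes - (nbytes % 64);
        /* ...and the tail handed back to blkcipher_walk_done(). */
        unsigned int tail = nbytes % 64;

        printf("process %u now, %u left for the final pass\n", body, tail);
        return 0;
    }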
serpent_sse2_glue.c
    143  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    145  return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);  in ecb_encrypt()
    149  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    151  return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);  in ecb_decrypt()
    155  struct scatterlist *src, unsigned int nbytes)  in cbc_encrypt() [argument]
    158  dst, src, nbytes);  in cbc_encrypt()
    162  struct scatterlist *src, unsigned int nbytes)  in cbc_decrypt() [argument]
    165  nbytes);  in cbc_decrypt()
    169  struct scatterlist *src, unsigned int nbytes)  in ctr_crypt() [argument]
    171  return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);  in ctr_crypt()
    [all …]
serpent_avx_glue.c
    175  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    177  return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);  in ecb_encrypt()
    181  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    183  return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);  in ecb_decrypt()
    187  struct scatterlist *src, unsigned int nbytes)  in cbc_encrypt() [argument]
    190  dst, src, nbytes);  in cbc_encrypt()
    194  struct scatterlist *src, unsigned int nbytes)  in cbc_decrypt() [argument]
    197  nbytes);  in cbc_decrypt()
    201  struct scatterlist *src, unsigned int nbytes)  in ctr_crypt() [argument]
    203  return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);  in ctr_crypt()
    [all …]
cast6_avx_glue.c
    162  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    164  return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes);  in ecb_encrypt()
    168  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    170  return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes);  in ecb_decrypt()
    174  struct scatterlist *src, unsigned int nbytes)  in cbc_encrypt() [argument]
    177  dst, src, nbytes);  in cbc_encrypt()
    181  struct scatterlist *src, unsigned int nbytes)  in cbc_decrypt() [argument]
    184  nbytes);  in cbc_decrypt()
    188  struct scatterlist *src, unsigned int nbytes)  in ctr_crypt() [argument]
    190  return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes);  in ctr_crypt()
    [all …]
/arch/s390/crypto/
des_s390.c
    89  unsigned int nbytes;  in ecb_desall_crypt() [local]
    91  while ((nbytes = walk->nbytes)) {  in ecb_desall_crypt()
    93  unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);  in ecb_desall_crypt()
    101  nbytes &= DES_BLOCK_SIZE - 1;  in ecb_desall_crypt()
    102  ret = blkcipher_walk_done(desc, walk, nbytes);  in ecb_desall_crypt()
    113  unsigned int nbytes = walk->nbytes;  in cbc_desall_crypt() [local]
    119  if (!nbytes)  in cbc_desall_crypt()
    126  unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);  in cbc_desall_crypt()
    134  nbytes &= DES_BLOCK_SIZE - 1;  in cbc_desall_crypt()
    135  ret = blkcipher_walk_done(desc, walk, nbytes);  in cbc_desall_crypt()
    [all …]
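des_s390.c spells the same round-down with a mask: because DES_BLOCK_SIZE (8) is a power of two, nbytes & ~(DES_BLOCK_SIZE - 1) is the largest whole-block byte count and nbytes &= DES_BLOCK_SIZE - 1 keeps the remainder, which is also what the sparc glue below does with its *_BLOCK_MASK constants. A quick standalone check of the equivalence:

    #include <assert.h>
    #include <stdio.h>

    #define DES_BLOCK_SIZE 8             /* power of two, so masking works */

    int main(void)
    {
        for (unsigned int nbytes = 0; nbytes < 64; nbytes++) {
            unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);

            /* Mask and modulo agree for power-of-two block sizes. */
            assert(n == nbytes - (nbytes % DES_BLOCK_SIZE));
            assert((nbytes & (DES_BLOCK_SIZE - 1)) ==
                   nbytes % DES_BLOCK_SIZE);
        }
        printf("mask round-down matches modulo for 0..63\n");
        return 0;
    }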
aes_s390.c
    253  unsigned int nbytes)  in fallback_blk_dec() [argument]
    262  ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);  in fallback_blk_dec()
    270  unsigned int nbytes)  in fallback_blk_enc() [argument]
    279  ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);  in fallback_blk_enc()
    319  unsigned int nbytes;  in ecb_aes_crypt() [local]
    321  while ((nbytes = walk->nbytes)) {  in ecb_aes_crypt()
    323  unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);  in ecb_aes_crypt()
    331  nbytes &= AES_BLOCK_SIZE - 1;  in ecb_aes_crypt()
    332  ret = blkcipher_walk_done(desc, walk, nbytes);  in ecb_aes_crypt()
    340  unsigned int nbytes)  in ecb_aes_encrypt() [argument]
    [all …]
prng.c
    69  static void prng_seed(int nbytes)  in prng_seed() [argument]
    74  BUG_ON(nbytes > 16);  in prng_seed()
    75  get_random_bytes(buf, nbytes);  in prng_seed()
    78  while (nbytes >= 8) {  in prng_seed()
    82  nbytes -= 8;  in prng_seed()
    87  static ssize_t prng_read(struct file *file, char __user *ubuf, size_t nbytes,  in prng_read() [argument]
    95  while (nbytes) {  in prng_read()
    110  chunk = min_t(int, nbytes, prng_chunk_size);  in prng_read()
    143  nbytes -= chunk;  in prng_read()
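prng_read() is a different consumer of nbytes: there it is the read(2) byte count, served in chunks of at most prng_chunk_size per pass. A reduced model of that loop, with generation, copy_to_user() and error handling collapsed to a stub (PRNG_CHUNK_SIZE and min_sz() stand in for the driver's tunable and min_t()):

    #include <stddef.h>
    #include <stdio.h>

    #define PRNG_CHUNK_SIZE 256          /* illustrative value */

    static size_t min_sz(size_t a, size_t b) { return a < b ? a : b; }

    static size_t prng_read_model(size_t nbytes)
    {
        size_t total = 0;

        while (nbytes) {
            /* Never hand out more than one chunk per pass. */
            size_t chunk = min_sz(nbytes, PRNG_CHUNK_SIZE);

            /* (real code: generate bytes, copy to the user buffer,
             * bail out on errors) */
            total += chunk;
            nbytes -= chunk;
        }
        return total;
    }

    int main(void)
    {
        printf("served %zu bytes\n", prng_read_model(1000));
        return 0;
    }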
/arch/sparc/crypto/
des_glue.c
    95  unsigned int nbytes, bool encrypt)  in __ecb_crypt() [argument]
    101  blkcipher_walk_init(&walk, dst, src, nbytes);  in __ecb_crypt()
    109  while ((nbytes = walk.nbytes)) {  in __ecb_crypt()
    110  unsigned int block_len = nbytes & DES_BLOCK_MASK;  in __ecb_crypt()
    117  nbytes &= DES_BLOCK_SIZE - 1;  in __ecb_crypt()
    118  err = blkcipher_walk_done(desc, &walk, nbytes);  in __ecb_crypt()
    126  unsigned int nbytes)  in ecb_encrypt() [argument]
    128  return __ecb_crypt(desc, dst, src, nbytes, true);  in ecb_encrypt()
    133  unsigned int nbytes)  in ecb_decrypt() [argument]
    135  return __ecb_crypt(desc, dst, src, nbytes, false);  in ecb_decrypt()
    [all …]
camellia_glue.c
    87  unsigned int nbytes, bool encrypt)  in __ecb_crypt() [argument]
    99  blkcipher_walk_init(&walk, dst, src, nbytes);  in __ecb_crypt()
    108  while ((nbytes = walk.nbytes)) {  in __ecb_crypt()
    109  unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;  in __ecb_crypt()
    119  nbytes &= CAMELLIA_BLOCK_SIZE - 1;  in __ecb_crypt()
    120  err = blkcipher_walk_done(desc, &walk, nbytes);  in __ecb_crypt()
    128  unsigned int nbytes)  in ecb_encrypt() [argument]
    130  return __ecb_crypt(desc, dst, src, nbytes, true);  in ecb_encrypt()
    135  unsigned int nbytes)  in ecb_decrypt() [argument]
    137  return __ecb_crypt(desc, dst, src, nbytes, false);  in ecb_decrypt()
    [all …]
aes_glue.c
    217  unsigned int nbytes)  in ecb_encrypt() [argument]
    223  blkcipher_walk_init(&walk, dst, src, nbytes);  in ecb_encrypt()
    228  while ((nbytes = walk.nbytes)) {  in ecb_encrypt()
    229  unsigned int block_len = nbytes & AES_BLOCK_MASK;  in ecb_encrypt()
    237  nbytes &= AES_BLOCK_SIZE - 1;  in ecb_encrypt()
    238  err = blkcipher_walk_done(desc, &walk, nbytes);  in ecb_encrypt()
    246  unsigned int nbytes)  in ecb_decrypt() [argument]
    253  blkcipher_walk_init(&walk, dst, src, nbytes);  in ecb_decrypt()
    259  while ((nbytes = walk.nbytes)) {  in ecb_decrypt()
    260  unsigned int block_len = nbytes & AES_BLOCK_MASK;  in ecb_decrypt()
    [all …]
/arch/arm/kernel/
process.c
    307  static void show_data(unsigned long addr, int nbytes, const char *name)  in show_data() [argument]
    327  nbytes += (addr & (sizeof(u32) - 1));  in show_data()
    328  nlines = (nbytes + 31) / 32;  in show_data()
    350  static void show_extra_register_data(struct pt_regs *regs, int nbytes)  in show_extra_register_data() [argument]
    356  show_data(regs->ARM_pc - nbytes, nbytes * 2, "PC");  in show_extra_register_data()
    357  show_data(regs->ARM_lr - nbytes, nbytes * 2, "LR");  in show_extra_register_data()
    358  show_data(regs->ARM_sp - nbytes, nbytes * 2, "SP");  in show_extra_register_data()
    359  show_data(regs->ARM_ip - nbytes, nbytes * 2, "IP");  in show_extra_register_data()
    360  show_data(regs->ARM_fp - nbytes, nbytes * 2, "FP");  in show_extra_register_data()
    361  show_data(regs->ARM_r0 - nbytes, nbytes * 2, "R0");  in show_extra_register_data()
    [all …]
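The two arithmetic hits in show_data() widen the requested dump by the word misalignment of addr and then count 32-byte output lines, rounding up; show_extra_register_data() feeds it a window of nbytes * 2 bytes around each register. The rounding with made-up inputs (plain unsigned int stands in for the kernel's u32):

    #include <stdio.h>

    int main(void)
    {
        unsigned long addr = 0x1003;     /* hypothetical, not word aligned */
        int nbytes = 61;

        /* Grow the count by the misalignment so the dump can start
         * at the rounded-down word address. */
        nbytes += (int)(addr & (sizeof(unsigned int) - 1));
        addr &= ~(sizeof(unsigned int) - 1UL);

        /* One dump line shows 32 bytes; round up. */
        int nlines = (nbytes + 31) / 32;

        printf("dump %#lx: %d bytes over %d lines\n", addr, nbytes, nlines);
        return 0;
    }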
/arch/x86/lib/
insn.c
    108  prefixes->nbytes++;  in insn_get_prefixes()
    132  insn->rex_prefix.nbytes = 1;  in insn_get_prefixes()
    160  insn->vex_prefix.nbytes = 3;  in insn_get_prefixes()
    166  insn->vex_prefix.nbytes = 2;  in insn_get_prefixes()
    202  opcode->nbytes = 1;  in insn_get_opcode()
    219  opcode->bytes[opcode->nbytes++] = op;  in insn_get_opcode()
    252  modrm->nbytes = 1;  in insn_get_modrm()
    290  return (modrm->nbytes && (modrm->value & 0xc7) == 0x5);  in insn_rip_relative()
    308  if (insn->modrm.nbytes) {  in insn_get_sib()
    313  insn->sib.nbytes = 1;  in insn_get_sib()
    [all …]
/arch/arm/crypto/
aesbs-glue.c
    106  struct scatterlist *src, unsigned int nbytes)  in aesbs_cbc_encrypt() [argument]
    112  blkcipher_walk_init(&walk, dst, src, nbytes);  in aesbs_cbc_encrypt()
    115  while (walk.nbytes) {  in aesbs_cbc_encrypt()
    116  u32 blocks = walk.nbytes / AES_BLOCK_SIZE;  in aesbs_cbc_encrypt()
    140  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);  in aesbs_cbc_encrypt()
    147  struct scatterlist *src, unsigned int nbytes)  in aesbs_cbc_decrypt() [argument]
    153  blkcipher_walk_init(&walk, dst, src, nbytes);  in aesbs_cbc_decrypt()
    156  while ((walk.nbytes / AES_BLOCK_SIZE) >= 8) {  in aesbs_cbc_decrypt()
    159  walk.nbytes, &ctx->dec, walk.iv);  in aesbs_cbc_decrypt()
    161  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);  in aesbs_cbc_decrypt()
    [all …]
aes-ce-glue.c
    172  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    180  blkcipher_walk_init(&walk, dst, src, nbytes);  in ecb_encrypt()
    184  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_encrypt()
    188  walk.nbytes % AES_BLOCK_SIZE);  in ecb_encrypt()
    195  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    203  blkcipher_walk_init(&walk, dst, src, nbytes);  in ecb_decrypt()
    207  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_decrypt()
    211  walk.nbytes % AES_BLOCK_SIZE);  in ecb_decrypt()
    218  struct scatterlist *src, unsigned int nbytes)  in cbc_encrypt() [argument]
    226  blkcipher_walk_init(&walk, dst, src, nbytes);  in cbc_encrypt()
    [all …]
/arch/arm64/crypto/
aes-glue.c
    94  struct scatterlist *src, unsigned int nbytes)  in ecb_encrypt() [argument]
    102  blkcipher_walk_init(&walk, dst, src, nbytes);  in ecb_encrypt()
    106  for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {  in ecb_encrypt()
    109  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_encrypt()
    116  struct scatterlist *src, unsigned int nbytes)  in ecb_decrypt() [argument]
    124  blkcipher_walk_init(&walk, dst, src, nbytes);  in ecb_decrypt()
    128  for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {  in ecb_decrypt()
    131  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_decrypt()
    [all …]
/arch/x86/include/asm/
insn.h
    33  unsigned char nbytes;  [member]
    143  if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */  in insn_vex_m_bits()
    151  if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */  in insn_vex_p_bits()
    172  return insn->prefixes.nbytes;  in insn_offset_rex_prefix()
    176  return insn_offset_rex_prefix(insn) + insn->rex_prefix.nbytes;  in insn_offset_vex_prefix()
    180  return insn_offset_vex_prefix(insn) + insn->vex_prefix.nbytes;  in insn_offset_opcode()
    184  return insn_offset_opcode(insn) + insn->opcode.nbytes;  in insn_offset_modrm()
    188  return insn_offset_modrm(insn) + insn->modrm.nbytes;  in insn_offset_sib()
    192  return insn_offset_sib(insn) + insn->sib.nbytes;  in insn_offset_displacement()
    196  return insn_offset_displacement(insn) + insn->displacement.nbytes;  in insn_offset_immediate()
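The insn.h accessors at lines 172..196 chain: each insn_offset_*() is the previous offset plus that field's nbytes, so a field the decoder never populated (nbytes == 0) adds nothing and the later offsets stay correct. A reduced model of the chain (struct insn_model is a simplification, not the real struct insn):

    #include <stdio.h>

    struct field { unsigned char nbytes; };

    /* Simplified stand-in for the decoder state in struct insn. */
    struct insn_model {
        struct field prefixes, rex_prefix, vex_prefix, opcode, modrm, sib;
    };

    static int off_rex(const struct insn_model *i)
    { return i->prefixes.nbytes; }
    static int off_vex(const struct insn_model *i)
    { return off_rex(i) + i->rex_prefix.nbytes; }
    static int off_opcode(const struct insn_model *i)
    { return off_vex(i) + i->vex_prefix.nbytes; }
    static int off_modrm(const struct insn_model *i)
    { return off_opcode(i) + i->opcode.nbytes; }
    static int off_sib(const struct insn_model *i)
    { return off_modrm(i) + i->modrm.nbytes; }

    int main(void)
    {
        /* e.g. "66 48 0f b7 ..": one legacy prefix, one REX byte,
         * no VEX, a two-byte opcode, then ModRM. */
        struct insn_model insn = {
            .prefixes = { 1 }, .rex_prefix = { 1 }, .vex_prefix = { 0 },
            .opcode = { 2 }, .modrm = { 1 }, .sib = { 0 },
        };

        printf("opcode at +%d, modrm at +%d, sib at +%d\n",
               off_opcode(&insn), off_modrm(&insn), off_sib(&insn));
        return 0;
    }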