/arch/x86/crypto/ |
D | blowfish_glue.c |
    86  unsigned int nbytes; in ecb_crypt() local
    91  while ((nbytes = walk->nbytes)) { in ecb_crypt()
    96  if (nbytes >= bsize * 4) { in ecb_crypt()
    102  nbytes -= bsize * 4; in ecb_crypt()
    103  } while (nbytes >= bsize * 4); in ecb_crypt()
    105  if (nbytes < bsize) in ecb_crypt()
    115  nbytes -= bsize; in ecb_crypt()
    116  } while (nbytes >= bsize); in ecb_crypt()
    119  err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
    126  struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument
    [all …]
|
D | twofish_glue_3way.c |
    69  unsigned int nbytes; in ecb_crypt() local
    74  while ((nbytes = walk->nbytes)) { in ecb_crypt()
    79  if (nbytes >= bsize * 3) { in ecb_crypt()
    85  nbytes -= bsize * 3; in ecb_crypt()
    86  } while (nbytes >= bsize * 3); in ecb_crypt()
    88  if (nbytes < bsize) in ecb_crypt()
    98  nbytes -= bsize; in ecb_crypt()
    99  } while (nbytes >= bsize); in ecb_crypt()
    102  err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
    109  struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument
    [all …]
|
D | serpent_sse2_glue.c |
    54  static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes) in serpent_fpu_begin() argument
    62  if (nbytes < SERPENT_BLOCK_SIZE * SERPENT_PARALLEL_BLOCKS) in serpent_fpu_begin()
    81  unsigned int nbytes; in ecb_crypt() local
    87  while ((nbytes = walk->nbytes)) { in ecb_crypt()
    91  fpu_enabled = serpent_fpu_begin(fpu_enabled, nbytes); in ecb_crypt()
    94  if (nbytes >= bsize * SERPENT_PARALLEL_BLOCKS) { in ecb_crypt()
    103  nbytes -= bsize * SERPENT_PARALLEL_BLOCKS; in ecb_crypt()
    104  } while (nbytes >= bsize * SERPENT_PARALLEL_BLOCKS); in ecb_crypt()
    106  if (nbytes < bsize) in ecb_crypt()
    119  nbytes -= bsize; in ecb_crypt()
    [all …]
|
D | salsa20_glue.c |
    55  unsigned int nbytes) in encrypt() argument
    62  blkcipher_walk_init(&walk, dst, src, nbytes); in encrypt()
    67  if (likely(walk.nbytes == nbytes)) in encrypt()
    70  walk.dst.virt.addr, nbytes); in encrypt()
    74  while (walk.nbytes >= 64) { in encrypt()
    77  walk.nbytes - (walk.nbytes % 64)); in encrypt()
    78  err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64); in encrypt()
    81  if (walk.nbytes) { in encrypt()
    83  walk.dst.virt.addr, walk.nbytes); in encrypt()
|
D | camellia_glue.c |
    1321  unsigned int nbytes; in ecb_crypt() local
    1326  while ((nbytes = walk->nbytes)) { in ecb_crypt()
    1331  if (nbytes >= bsize * 2) { in ecb_crypt()
    1337  nbytes -= bsize * 2; in ecb_crypt()
    1338  } while (nbytes >= bsize * 2); in ecb_crypt()
    1340  if (nbytes < bsize) in ecb_crypt()
    1350  nbytes -= bsize; in ecb_crypt()
    1351  } while (nbytes >= bsize); in ecb_crypt()
    1354  err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
    1361  struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument
    [all …]
|
D | aesni-intel_glue.c |
    283  unsigned int nbytes) in ecb_encrypt() argument
    289  blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
    294  while ((nbytes = walk.nbytes)) { in ecb_encrypt()
    296  nbytes & AES_BLOCK_MASK); in ecb_encrypt()
    297  nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
    298  err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt()
    307  unsigned int nbytes) in ecb_decrypt() argument
    313  blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
    318  while ((nbytes = walk.nbytes)) { in ecb_decrypt()
    320  nbytes & AES_BLOCK_MASK); in ecb_decrypt()
    [all …]
|
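The ecb_crypt()/ecb_encrypt() hits above (blowfish, twofish, serpent, camellia, aesni) all share one shape: walk the scatterlists, run the arch-specific primitive over every complete block in the currently mapped segment, then hand the sub-block remainder back to blkcipher_walk_done(). The following minimal sketch is modeled on the aesni-intel_glue.c lines listed above; my_ecb_encrypt, my_cipher_do_ecb and MY_BLOCK_SIZE are illustrative placeholders, not symbols from the tree.

#include <crypto/algapi.h>
#include <linux/crypto.h>
#include <linux/string.h>

#define MY_BLOCK_SIZE	16
#define MY_BLOCK_MASK	(~(MY_BLOCK_SIZE - 1))

/*
 * Stand-in for the arch-specific bulk primitive (the real drivers call an
 * assembler routine here); it just copies data through so the sketch is
 * self-contained.
 */
static void my_cipher_do_ecb(void *ctx, u8 *dst, const u8 *src,
			     unsigned int len)
{
	memcpy(dst, src, len);
}

static int my_ecb_encrypt(struct blkcipher_desc *desc,
			  struct scatterlist *dst, struct scatterlist *src,
			  unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		/* Process every complete block in this mapped segment. */
		my_cipher_do_ecb(crypto_blkcipher_ctx(desc->tfm),
				 walk.dst.virt.addr, walk.src.virt.addr,
				 nbytes & MY_BLOCK_MASK);
		/* Hand the sub-block remainder back to the walker. */
		nbytes &= MY_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

blkcipher_walk_done() consumes what was processed and maps the next segment (or finishes), which is why the while ((nbytes = walk.nbytes)) re-check at the top of the loop terminates once the whole request has been walked.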
D | fpu.c |
    44  unsigned int nbytes) in crypto_fpu_encrypt() argument
    56  err = crypto_blkcipher_crt(desc.tfm)->encrypt(&desc, dst, src, nbytes); in crypto_fpu_encrypt()
    63  unsigned int nbytes) in crypto_fpu_decrypt() argument
    75  err = crypto_blkcipher_crt(desc.tfm)->decrypt(&desc, dst, src, nbytes); in crypto_fpu_decrypt()
|
/arch/arm/kernel/ |
D | process.c |
    349  static void show_data(unsigned long addr, int nbytes, const char *name) in show_data() argument
    369  nbytes += (addr & (sizeof(u32) - 1)); in show_data()
    370  nlines = (nbytes + 31) / 32; in show_data()
    392  static void show_extra_register_data(struct pt_regs *regs, int nbytes) in show_extra_register_data() argument
    398  show_data(regs->ARM_pc - nbytes, nbytes * 2, "PC"); in show_extra_register_data()
    399  show_data(regs->ARM_lr - nbytes, nbytes * 2, "LR"); in show_extra_register_data()
    400  show_data(regs->ARM_sp - nbytes, nbytes * 2, "SP"); in show_extra_register_data()
    401  show_data(regs->ARM_ip - nbytes, nbytes * 2, "IP"); in show_extra_register_data()
    402  show_data(regs->ARM_fp - nbytes, nbytes * 2, "FP"); in show_extra_register_data()
    403  show_data(regs->ARM_r0 - nbytes, nbytes * 2, "R0"); in show_extra_register_data()
    [all …]
|
/arch/s390/crypto/ |
D | des_s390.c |
    89  unsigned int nbytes; in ecb_desall_crypt() local
    91  while ((nbytes = walk->nbytes)) { in ecb_desall_crypt()
    93  unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1); in ecb_desall_crypt()
    100  nbytes &= DES_BLOCK_SIZE - 1; in ecb_desall_crypt()
    101  ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_desall_crypt()
    111  unsigned int nbytes = walk->nbytes; in cbc_desall_crypt() local
    113  if (!nbytes) in cbc_desall_crypt()
    119  unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1); in cbc_desall_crypt()
    126  nbytes &= DES_BLOCK_SIZE - 1; in cbc_desall_crypt()
    127  ret = blkcipher_walk_done(desc, walk, nbytes); in cbc_desall_crypt()
    [all …]
|
D | aes_s390.c |
    254  unsigned int nbytes) in fallback_blk_dec() argument
    263  ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes); in fallback_blk_dec()
    271  unsigned int nbytes) in fallback_blk_enc() argument
    280  ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes); in fallback_blk_enc()
    320  unsigned int nbytes; in ecb_aes_crypt() local
    322  while ((nbytes = walk->nbytes)) { in ecb_aes_crypt()
    324  unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_aes_crypt()
    331  nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_crypt()
    332  ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_aes_crypt()
    340  unsigned int nbytes) in ecb_aes_encrypt()
    [all …]
|
D | prng.c |
    69  static void prng_seed(int nbytes) in prng_seed() argument
    74  BUG_ON(nbytes > 16); in prng_seed()
    75  get_random_bytes(buf, nbytes); in prng_seed()
    78  while (nbytes >= 8) { in prng_seed()
    82  nbytes -= 8; in prng_seed()
    87  static ssize_t prng_read(struct file *file, char __user *ubuf, size_t nbytes, in prng_read() argument
    95  while (nbytes) { in prng_read()
    110  chunk = min_t(int, nbytes, prng_chunk_size); in prng_read()
    143  nbytes -= chunk; in prng_read()
|
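prng_read() above consumes the request in prng_chunk_size pieces: each pass clamps the chunk with min_t(int, nbytes, prng_chunk_size), produces that much output, and subtracts it from nbytes (lines 95, 110, 143). A stand-alone sketch of that chunked-read shape; fill_chunk() and CHUNK_SIZE are hypothetical stand-ins for the CPACF-backed generator and prng_chunk_size.

#include <stdio.h>
#include <string.h>

#define CHUNK_SIZE 16	/* stands in for prng_chunk_size */

/* Hypothetical generator; the real driver fills the buffer via CPACF. */
static void fill_chunk(unsigned char *buf, size_t len)
{
	memset(buf, 0xAA, len);
}

/* Produce nbytes of data into out, at most CHUNK_SIZE per iteration. */
static size_t chunked_read(unsigned char *out, size_t nbytes)
{
	unsigned char chunk[CHUNK_SIZE];
	size_t done = 0;

	while (nbytes) {
		size_t n = nbytes < CHUNK_SIZE ? nbytes : CHUNK_SIZE;

		fill_chunk(chunk, n);
		memcpy(out + done, chunk, n);
		done += n;
		nbytes -= n;	/* mirrors "nbytes -= chunk;" at line 143 */
	}
	return done;
}

int main(void)
{
	unsigned char buf[40];

	printf("read %zu bytes\n", chunked_read(buf, sizeof(buf)));
	return 0;
}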
/arch/x86/lib/ |
D | insn.c |
    104  prefixes->nbytes++; in insn_get_prefixes()
    128  insn->rex_prefix.nbytes = 1; in insn_get_prefixes()
    156  insn->vex_prefix.nbytes = 3; in insn_get_prefixes()
    162  insn->vex_prefix.nbytes = 2; in insn_get_prefixes()
    198  opcode->nbytes = 1; in insn_get_opcode()
    215  opcode->bytes[opcode->nbytes++] = op; in insn_get_opcode()
    248  modrm->nbytes = 1; in insn_get_modrm()
    286  return (modrm->nbytes && (modrm->value & 0xc7) == 0x5); in insn_rip_relative()
    304  if (insn->modrm.nbytes) { in insn_get_sib()
    309  insn->sib.nbytes = 1; in insn_get_sib()
    [all …]
|
/arch/arm/crypto/ |
D | aesbs-glue.c |
    106  struct scatterlist *src, unsigned int nbytes) in aesbs_cbc_encrypt() argument
    112  blkcipher_walk_init(&walk, dst, src, nbytes); in aesbs_cbc_encrypt()
    115  while (walk.nbytes) { in aesbs_cbc_encrypt()
    116  u32 blocks = walk.nbytes / AES_BLOCK_SIZE; in aesbs_cbc_encrypt()
    140  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in aesbs_cbc_encrypt()
    147  struct scatterlist *src, unsigned int nbytes) in aesbs_cbc_decrypt() argument
    153  blkcipher_walk_init(&walk, dst, src, nbytes); in aesbs_cbc_decrypt()
    156  while ((walk.nbytes / AES_BLOCK_SIZE) >= 8) { in aesbs_cbc_decrypt()
    159  walk.nbytes, &ctx->dec, walk.iv); in aesbs_cbc_decrypt()
    161  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in aesbs_cbc_decrypt()
    [all …]
|
/arch/x86/include/asm/ |
D | insn.h |
    33  unsigned char nbytes; member
    143  if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */ in insn_vex_m_bits()
    151  if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */ in insn_vex_p_bits()
    172  return insn->prefixes.nbytes; in insn_offset_rex_prefix()
    176  return insn_offset_rex_prefix(insn) + insn->rex_prefix.nbytes; in insn_offset_vex_prefix()
    180  return insn_offset_vex_prefix(insn) + insn->vex_prefix.nbytes; in insn_offset_opcode()
    184  return insn_offset_opcode(insn) + insn->opcode.nbytes; in insn_offset_modrm()
    188  return insn_offset_modrm(insn) + insn->modrm.nbytes; in insn_offset_sib()
    192  return insn_offset_sib(insn) + insn->sib.nbytes; in insn_offset_displacement()
    196  return insn_offset_displacement(insn) + insn->displacement.nbytes; in insn_offset_immediate()
|
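The insn.h helpers above compute each instruction field's byte offset by chaining the nbytes counters that insn.c fills in while decoding (prefixes, REX, VEX, opcode, ModRM, SIB, displacement, immediate). Below is a simplified stand-alone model of that chaining, using a toy struct rather than the kernel's struct insn.

#include <stdio.h>

/* Each decoded field records how many bytes it occupied. */
struct field { unsigned char nbytes; };

struct toy_insn {
	struct field prefixes, rex_prefix, vex_prefix, opcode, modrm, sib;
};

/* Offsets chain exactly as in insn_offset_*(): previous offset + its size. */
static int off_rex(const struct toy_insn *i)    { return i->prefixes.nbytes; }
static int off_vex(const struct toy_insn *i)    { return off_rex(i) + i->rex_prefix.nbytes; }
static int off_opcode(const struct toy_insn *i) { return off_vex(i) + i->vex_prefix.nbytes; }
static int off_modrm(const struct toy_insn *i)  { return off_opcode(i) + i->opcode.nbytes; }
static int off_sib(const struct toy_insn *i)    { return off_modrm(i) + i->modrm.nbytes; }

int main(void)
{
	/* One legacy prefix, a REX byte, a two-byte opcode and a ModRM byte. */
	struct toy_insn i = {
		.prefixes   = { 1 },
		.rex_prefix = { 1 },
		.vex_prefix = { 0 },
		.opcode     = { 2 },
		.modrm      = { 1 },
		.sib        = { 0 },
	};

	printf("opcode at +%d, modrm at +%d, sib at +%d\n",
	       off_opcode(&i), off_modrm(&i), off_sib(&i));	/* +2, +4, +5 */
	return 0;
}

The same counters double as "was this field present?" flags: insn_rip_relative() at line 286 above only inspects modrm.value once modrm.nbytes shows the byte was actually decoded.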
/arch/alpha/boot/ |
D | bootp.c |
    148  static long nbytes; in start_kernel() local
    179  nbytes = callback_getenv(ENV_BOOTED_OSFLAGS, envval, sizeof(envval)); in start_kernel()
    180  if (nbytes < 0 || nbytes >= sizeof(envval)) { in start_kernel()
    181  nbytes = 0; in start_kernel()
    183  envval[nbytes] = '\0'; in start_kernel()
|
D | main.c |
    156  int nbytes; in start_kernel() local
    179  nbytes = callback_getenv(ENV_BOOTED_OSFLAGS, envval, sizeof(envval)); in start_kernel()
    180  if (nbytes < 0) { in start_kernel()
    181  nbytes = 0; in start_kernel()
    183  envval[nbytes] = '\0'; in start_kernel()
|
D | bootpz.c |
    288  static long nbytes; in start_kernel() local
    312  nbytes = callback_getenv(ENV_BOOTED_OSFLAGS, envval, sizeof(envval)); in start_kernel()
    313  if (nbytes < 0 || nbytes >= sizeof(envval)) { in start_kernel()
    314  nbytes = 0; in start_kernel()
    316  envval[nbytes] = '\0'; in start_kernel()
|
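bootp.c and bootpz.c above clamp the callback_getenv() result on both failure and overflow (nbytes < 0 || nbytes >= sizeof(envval)) before NUL-terminating, whereas main.c checks only for a negative return. A stand-alone sketch of the defensive variant follows; get_env() is a hypothetical stand-in for the console callback.

#include <stdio.h>
#include <string.h>

/*
 * Hypothetical stand-in for the SRM console callback; returns the number
 * of bytes written into buf, or a negative value on failure.
 */
static long get_env(const char *name, char *buf, unsigned long len)
{
	const char *val = "root=/dev/sda2 console=ttyS0";

	(void)name;
	if (strlen(val) + 1 > len)
		return -1;
	strcpy(buf, val);
	return (long)strlen(val);
}

int main(void)
{
	char envval[256];
	long nbytes;

	nbytes = get_env("booted_osflags", envval, sizeof(envval));
	/* Treat both failure and an oversized result as "no flags". */
	if (nbytes < 0 || nbytes >= (long)sizeof(envval))
		nbytes = 0;
	envval[nbytes] = '\0';

	printf("boot flags: \"%s\"\n", envval);
	return 0;
}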
/arch/cris/arch-v32/drivers/ |
D | i2c.h |
    6  int i2c_write(unsigned char theSlave, void *data, size_t nbytes);
    7  int i2c_read(unsigned char theSlave, void *data, size_t nbytes);
|
D | i2c.c |
    391  i2c_write(unsigned char theSlave, void *data, size_t nbytes) in i2c_write() argument
    416  for (bytes_wrote = 0; bytes_wrote < nbytes; bytes_wrote++) { in i2c_write()
    447  i2c_read(unsigned char theSlave, void *data, size_t nbytes) in i2c_read() argument
    458  memset(data, 0, nbytes); in i2c_read()
    475  for (bytes_read = 0; bytes_read < nbytes; bytes_read++) { in i2c_read()
    479  if (bytes_read < (nbytes - 1)) in i2c_read()
|
/arch/powerpc/kernel/ |
D | proc_powerpc.c |
    55  static ssize_t page_map_read( struct file *file, char __user *buf, size_t nbytes, in page_map_read() argument
    59  return simple_read_from_buffer(buf, nbytes, ppos, dp->data, dp->size); in page_map_read()
|
/arch/mips/alchemy/common/ |
D | dbdma.c |
    591  u32 au1xxx_dbdma_put_source(u32 chanid, dma_addr_t buf, int nbytes, u32 flags) in au1xxx_dbdma_put_source() argument
    618  dp->dscr_cmd1 = nbytes; in au1xxx_dbdma_put_source()
    632  dma_cache_wback_inv((unsigned long)buf, nbytes); in au1xxx_dbdma_put_source()
    642  return nbytes; in au1xxx_dbdma_put_source()
    650  u32 au1xxx_dbdma_put_dest(u32 chanid, dma_addr_t buf, int nbytes, u32 flags) in au1xxx_dbdma_put_dest() argument
    681  dp->dscr_cmd1 = nbytes; in au1xxx_dbdma_put_dest()
    694  dma_cache_inv((unsigned long)buf, nbytes); in au1xxx_dbdma_put_dest()
    704  return nbytes; in au1xxx_dbdma_put_dest()
    714  u32 au1xxx_dbdma_get_dest(u32 chanid, void **buf, int *nbytes) in au1xxx_dbdma_get_dest() argument
    742  *nbytes = dp->dscr_cmd1; in au1xxx_dbdma_get_dest()
    [all …]
|
/arch/alpha/kernel/ |
D | osf_sys.c |
    451  long nbytes; member
    456  long nbytes; member
    463  long nbytes; member
    470  long nbytes; member
    499  if (get_user(error, &args->set.nbytes)) in SYSCALL_DEFINE2()
    503  if (get_user(error, &args->fset.nbytes)) in SYSCALL_DEFINE2()
    609  unsigned long, nbytes, int __user *, start, void __user *, arg) in SYSCALL_DEFINE5() argument
    634  if (nbytes < sizeof(unsigned int)) in SYSCALL_DEFINE5()
    643  if (nbytes < sizeof(unsigned long)) in SYSCALL_DEFINE5()
    653  if (nbytes > sizeof(*hwrpb)) in SYSCALL_DEFINE5()
    [all …]
|
/arch/ia64/include/asm/sn/ |
D | tioce_provider.h |
    51  u64 nbytes; /* # bytes mapped */ member
|
/arch/m32r/kernel/ |
D | sys_m32r.c |
    86  asmlinkage int sys_cachectl(char *addr, int nbytes, int op) in sys_cachectl() argument
|
/arch/mips/include/asm/mach-au1x00/ |
D | au1xxx_dbdma.h |
    361  u32 au1xxx_dbdma_put_source(u32 chanid, dma_addr_t buf, int nbytes, u32 flags);
    362  u32 au1xxx_dbdma_put_dest(u32 chanid, dma_addr_t buf, int nbytes, u32 flags);
    365  u32 au1xxx_dbdma_get_dest(u32 chanid, void **buf, int *nbytes);
|