/drivers/crypto/vmx/ |
D | aes_ctr.c |
    96  struct blkcipher_walk *walk)  in p8_aes_ctr_final() argument
    98  u8 *ctrblk = walk->iv;  in p8_aes_ctr_final()
    100  u8 *src = walk->src.virt.addr;  in p8_aes_ctr_final()
    101  u8 *dst = walk->dst.virt.addr;  in p8_aes_ctr_final()
    102  unsigned int nbytes = walk->nbytes;  in p8_aes_ctr_final()
    123  struct blkcipher_walk walk;  in p8_aes_ctr_crypt() local
    136  blkcipher_walk_init(&walk, dst, src, nbytes);  in p8_aes_ctr_crypt()
    137  ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);  in p8_aes_ctr_crypt()
    138  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {  in p8_aes_ctr_crypt()
    142  aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,  in p8_aes_ctr_crypt()
    [all …]
|
D | aes_cbc.c |
    103  struct blkcipher_walk walk;  in p8_aes_cbc_encrypt() local
    120  blkcipher_walk_init(&walk, dst, src, nbytes);  in p8_aes_cbc_encrypt()
    121  ret = blkcipher_walk_virt(desc, &walk);  in p8_aes_cbc_encrypt()
    122  while ((nbytes = walk.nbytes)) {  in p8_aes_cbc_encrypt()
    123  aes_p8_cbc_encrypt(walk.src.virt.addr,  in p8_aes_cbc_encrypt()
    124  walk.dst.virt.addr,  in p8_aes_cbc_encrypt()
    126  &ctx->enc_key, walk.iv, 1);  in p8_aes_cbc_encrypt()
    128  ret = blkcipher_walk_done(desc, &walk, nbytes);  in p8_aes_cbc_encrypt()
    144  struct blkcipher_walk walk;  in p8_aes_cbc_decrypt() local
    161  blkcipher_walk_init(&walk, dst, src, nbytes);  in p8_aes_cbc_decrypt()
    [all …]
|
D | aes_xts.c |
    113  struct blkcipher_walk walk;  in p8_aes_xts_crypt() local
    130  blkcipher_walk_init(&walk, dst, src, nbytes);  in p8_aes_xts_crypt()
    132  ret = blkcipher_walk_virt(desc, &walk);  in p8_aes_xts_crypt()
    133  iv = walk.iv;  in p8_aes_xts_crypt()
    137  while ((nbytes = walk.nbytes)) {  in p8_aes_xts_crypt()
    139  aes_p8_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,  in p8_aes_xts_crypt()
    142  aes_p8_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,  in p8_aes_xts_crypt()
    146  ret = blkcipher_walk_done(desc, &walk, nbytes);  in p8_aes_xts_crypt()
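All three vmx entries above (and the padlock/geode ones further down) are built around the same legacy blkcipher_walk loop: initialise the walk over the request's scatterlists, map one contiguous chunk at a time, run the block cipher over it, and hand any unprocessed tail back to blkcipher_walk_done(). Below is a minimal sketch of that loop, not the actual p8 driver (which also toggles VSX and handles CTR tails); the blkcipher API shown is the old, since-removed kernel interface, and my_cbc_encrypt() plus struct my_aes_ctx are hypothetical stand-ins for the driver's own key schedule and assembler routine (e.g. aes_p8_cbc_encrypt).

    #include <linux/crypto.h>
    #include <crypto/aes.h>
    #include <crypto/algapi.h>

    struct my_aes_ctx {                        /* hypothetical per-tfm context */
            u8 enc_key[AES_MAX_KEYLENGTH];
    };

    /* Hypothetical stand-in for the driver's block routine, provided elsewhere. */
    void my_cbc_encrypt(const u8 *in, u8 *out, unsigned int len,
                        const void *key, u8 *iv);

    static int example_cbc_encrypt(struct blkcipher_desc *desc,
                                   struct scatterlist *dst,
                                   struct scatterlist *src,
                                   unsigned int nbytes)
    {
            struct my_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
            struct blkcipher_walk walk;
            int ret;

            blkcipher_walk_init(&walk, dst, src, nbytes);
            ret = blkcipher_walk_virt(desc, &walk);

            /* Each iteration maps one contiguous run of the src/dst pages. */
            while ((nbytes = walk.nbytes)) {
                    my_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
                                   nbytes & ~(AES_BLOCK_SIZE - 1),
                                   ctx->enc_key, walk.iv);
                    /* Report the unprocessed tail back to the walker. */
                    ret = blkcipher_walk_done(desc, &walk,
                                              nbytes & (AES_BLOCK_SIZE - 1));
            }
            return ret;
    }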
|
/drivers/atm/ |
D | idt77105.c |
    85  struct idt77105_priv *walk;  in idt77105_stats_timer_func() local
    90  for (walk = idt77105_all; walk; walk = walk->next) {  in idt77105_stats_timer_func()
    91  dev = walk->dev;  in idt77105_stats_timer_func()
    93  stats = &walk->stats;  in idt77105_stats_timer_func()
    114  struct idt77105_priv *walk;  in idt77105_restart_timer_func() local
    119  for (walk = idt77105_all; walk; walk = walk->next) {  in idt77105_restart_timer_func()
    120  dev = walk->dev;  in idt77105_restart_timer_func()
    134  PUT( walk->old_mcr ,MCR);  in idt77105_restart_timer_func()
    324  struct idt77105_priv *walk, *prev;  in idt77105_stop() local
    332  for (prev = NULL, walk = idt77105_all ;  in idt77105_stop()
    [all …]
|
D | suni.c |
    58  struct suni_priv *walk;  in suni_hz() local
    62  for (walk = sunis; walk; walk = walk->next) {  in suni_hz()
    63  dev = walk->dev;  in suni_hz()
    64  stats = &walk->sonet_stats;  in suni_hz()
    344  struct suni_priv **walk;  in suni_stop() local
    349  for (walk = &sunis; *walk != PRIV(dev);  in suni_stop()
    350  walk = &PRIV((*walk)->dev)->next);  in suni_stop()
    351  *walk = PRIV((*walk)->dev)->next;  in suni_stop()
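In these two ATM PHY drivers "walk" is nothing crypto related: it is just the cursor used to iterate the driver's private, module-global singly linked list of devices. suni_stop() uses the pointer-to-pointer form of that walk so the link that points at the node can be rewritten directly, without keeping a separate "prev" variable. A minimal, self-contained sketch of that unlink idiom, with a hypothetical struct my_priv standing in for struct suni_priv / idt77105_priv:

    /* Hypothetical node type standing in for the driver's private struct. */
    struct my_priv {
            struct my_priv *next;
            void *dev;
    };

    static struct my_priv *my_list;          /* head of the module-global list */

    /*
     * Unlink @victim using the pointer-to-pointer idiom seen in suni_stop():
     * "walk" holds the address of the link that currently points at each
     * node, so unlinking is a single store through *walk.  Assumes @victim
     * is on the list (as the original code does).
     */
    static void my_list_remove(struct my_priv *victim)
    {
            struct my_priv **walk;

            for (walk = &my_list; *walk != victim; walk = &(*walk)->next)
                    ;
            *walk = victim->next;
    }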
|
/drivers/crypto/ |
D | padlock-aes.c |
    347  struct blkcipher_walk walk;  in ecb_aes_encrypt() local
    353  blkcipher_walk_init(&walk, dst, src, nbytes);  in ecb_aes_encrypt()
    354  err = blkcipher_walk_virt(desc, &walk);  in ecb_aes_encrypt()
    357  while ((nbytes = walk.nbytes)) {  in ecb_aes_encrypt()
    358  padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,  in ecb_aes_encrypt()
    362  err = blkcipher_walk_done(desc, &walk, nbytes);  in ecb_aes_encrypt()
    376  struct blkcipher_walk walk;  in ecb_aes_decrypt() local
    382  blkcipher_walk_init(&walk, dst, src, nbytes);  in ecb_aes_decrypt()
    383  err = blkcipher_walk_virt(desc, &walk);  in ecb_aes_decrypt()
    386  while ((nbytes = walk.nbytes)) {  in ecb_aes_decrypt()
    [all …]
|
D | geode-aes.c |
    309  struct blkcipher_walk walk;  in geode_cbc_decrypt() local
    315  blkcipher_walk_init(&walk, dst, src, nbytes);  in geode_cbc_decrypt()
    316  err = blkcipher_walk_virt(desc, &walk);  in geode_cbc_decrypt()
    317  op->iv = walk.iv;  in geode_cbc_decrypt()
    319  while ((nbytes = walk.nbytes)) {  in geode_cbc_decrypt()
    320  op->src = walk.src.virt.addr,  in geode_cbc_decrypt()
    321  op->dst = walk.dst.virt.addr;  in geode_cbc_decrypt()
    329  err = blkcipher_walk_done(desc, &walk, nbytes);  in geode_cbc_decrypt()
    341  struct blkcipher_walk walk;  in geode_cbc_encrypt() local
    347  blkcipher_walk_init(&walk, dst, src, nbytes);  in geode_cbc_encrypt()
    [all …]
|
D | n2_core.c |
    513  struct crypto_hash_walk walk;  in n2_do_async_digest() local
    536  nbytes = crypto_hash_walk_first(req, &walk);  in n2_do_async_digest()
    555  ent->src_addr = __pa(walk.data);  in n2_do_async_digest()
    563  nbytes = crypto_hash_walk_done(&walk, 0);  in n2_do_async_digest()
    568  ent->src_addr = __pa(walk.data);  in n2_do_async_digest()
    576  nbytes = crypto_hash_walk_done(&walk, 0);  in n2_do_async_digest()
    672  struct ablkcipher_walk walk;  member
    711  struct ablkcipher_walk walk;  member
    876  struct ablkcipher_walk *walk = &rctx->walk;  in n2_compute_chunks() local
    883  ablkcipher_walk_init(walk, req->dst, req->src, req->nbytes);  in n2_compute_chunks()
    [all …]
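n2_do_async_digest() (like hash_hw_update() in the ux500 driver further down) uses the generic crypto_hash_walk helpers to feed an ahash request's scatterlist to hardware one mapped chunk at a time. A minimal sketch of that loop follows; the process() callback is a hypothetical stand-in for whatever the driver does with each chunk (building a descriptor, writing a FIFO, and so on).

    #include <crypto/internal/hash.h>

    static int example_ahash_update(struct ahash_request *req, void *hw_ctx,
                                    void (*process)(void *hw_ctx,
                                                    const u8 *data,
                                                    unsigned int len))
    {
            struct crypto_hash_walk walk;
            int nbytes;

            /* Walk the request's scatterlist: walk.data is a mapped pointer
             * to the current chunk, nbytes its length. */
            for (nbytes = crypto_hash_walk_first(req, &walk);
                 nbytes > 0;
                 nbytes = crypto_hash_walk_done(&walk, 0))
                    process(hw_ctx, walk.data, nbytes);

            return nbytes;    /* 0 when done, negative errno on failure */
    }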
|
D | hifn_795x.c |
    632  struct hifn_cipher_walk walk;  member
    1348  t = &rctx->walk.cache[0];  in hifn_setup_dma()
    1351  if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  in hifn_setup_dma()
    1543  rctx->walk.flags = 0;  in hifn_setup_session()
    1551  rctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;  in hifn_setup_session()
    1557  if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  in hifn_setup_session()
    1558  err = hifn_cipher_walk_init(&rctx->walk, idx, GFP_ATOMIC);  in hifn_setup_session()
    1563  sg_num = hifn_cipher_walk(req, &rctx->walk);  in hifn_setup_session()
    1676  if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  in hifn_process_ready()
    1683  t = &rctx->walk.cache[idx];  in hifn_process_ready()
    [all …]
|
D | s5p-sss.c |
    263  struct scatter_walk walk;  in s5p_sg_copy_buf() local
    268  scatterwalk_start(&walk, sg);  in s5p_sg_copy_buf()
    269  scatterwalk_copychunks(buf, &walk, nbytes, out);  in s5p_sg_copy_buf()
    270  scatterwalk_done(&walk, out, 0);  in s5p_sg_copy_buf()
|
D | omap-des.c |
    376  struct scatter_walk walk;  in sg_copy_buf() local
    381  scatterwalk_start(&walk, sg);  in sg_copy_buf()
    382  scatterwalk_advance(&walk, start);  in sg_copy_buf()
    383  scatterwalk_copychunks(buf, &walk, nbytes, out);  in sg_copy_buf()
    384  scatterwalk_done(&walk, out, 0);  in sg_copy_buf()
|
D | omap-aes.c |
    393  struct scatter_walk walk;  in sg_copy_buf() local
    398  scatterwalk_start(&walk, sg);  in sg_copy_buf()
    399  scatterwalk_advance(&walk, start);  in sg_copy_buf()
    400  scatterwalk_copychunks(buf, &walk, nbytes, out);  in sg_copy_buf()
    401  scatterwalk_done(&walk, out, 0);  in sg_copy_buf()
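The s5p-sss and omap entries above are the same small helper repeated: copy between a linear buffer and a scatterlist and let scatter_walk do the page mapping. A sketch of that helper, essentially what sg_copy_buf() does (the omap versions additionally skip a start offset first, as shown here):

    #include <crypto/scatterwalk.h>

    /* Copy @nbytes between @buf and @sg, starting @start bytes into the
     * scatterlist.  @out selects the direction, exactly as it is passed
     * through to scatterwalk_copychunks(). */
    static void example_sg_copy_buf(void *buf, struct scatterlist *sg,
                                    unsigned int start, unsigned int nbytes,
                                    int out)
    {
            struct scatter_walk walk;

            if (!nbytes)
                    return;

            scatterwalk_start(&walk, sg);
            scatterwalk_advance(&walk, start);       /* skip to the offset */
            scatterwalk_copychunks(buf, &walk, nbytes, out);
            scatterwalk_done(&walk, out, 0);         /* finish/flush the last page */
    }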
|
/drivers/crypto/nx/ |
D | nx.c |
    167  struct scatter_walk walk;  in nx_walk_and_build() local
    174  scatterwalk_start(&walk, sg_src);  in nx_walk_and_build()
    185  scatterwalk_advance(&walk, start - offset);  in nx_walk_and_build()
    188  n = scatterwalk_clamp(&walk, len);  in nx_walk_and_build()
    192  scatterwalk_start(&walk, sg_next(walk.sg));  in nx_walk_and_build()
    193  n = scatterwalk_clamp(&walk, len);  in nx_walk_and_build()
    195  dst = scatterwalk_map(&walk);  in nx_walk_and_build()
    201  scatterwalk_advance(&walk, n);  in nx_walk_and_build()
    202  scatterwalk_done(&walk, SCATTERWALK_FROM_SG, len);  in nx_walk_and_build()
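Unlike the copychunks helpers above, nx_walk_and_build() walks the scatterlist by hand: clamp to the current page, map it, consume it, advance, and hop to the next scatterlist entry when the current one is exhausted. A sketch of that traversal; my_consume() is a hypothetical callback standing in for building the NX scatter/gather entries, and the direction and "more" arguments to scatterwalk_done() are simplified here.

    #include <linux/scatterlist.h>
    #include <crypto/scatterwalk.h>

    static void example_walk_sg(struct scatterlist *sg, unsigned int len,
                                void (*my_consume)(void *addr, unsigned int n))
    {
            struct scatter_walk walk;
            unsigned int n;
            void *addr;

            scatterwalk_start(&walk, sg);

            while (len) {
                    n = scatterwalk_clamp(&walk, len);   /* bytes usable in this page */
                    if (!n) {
                            /* current entry exhausted: move to the next one */
                            scatterwalk_start(&walk, sg_next(walk.sg));
                            n = scatterwalk_clamp(&walk, len);
                    }
                    addr = scatterwalk_map(&walk);
                    my_consume(addr, n);
                    scatterwalk_unmap(addr);

                    len -= n;
                    scatterwalk_advance(&walk, n);
            }
            scatterwalk_done(&walk, 0, 0);    /* reading from the sg list, nothing left */
    }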
|
D | nx-aes-gcm.c |
    117  struct scatter_walk walk;  in nx_gca() local
    124  scatterwalk_start(&walk, req->src);  in nx_gca()
    125  scatterwalk_copychunks(out, &walk, nbytes, SCATTERWALK_FROM_SG);  in nx_gca()
    126  scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0);  in nx_gca()
|
/drivers/vfio/pci/ |
D | vfio_pci.c |
    536  struct vfio_pci_walk_info *walk = data;  in vfio_pci_walk_wrapper() local
    538  if (!walk->slot || vfio_pci_dev_below_slot(pdev, walk->pdev->slot))  in vfio_pci_walk_wrapper()
    539  walk->ret = walk->fn(pdev, walk->data);  in vfio_pci_walk_wrapper()
    541  return walk->ret;  in vfio_pci_walk_wrapper()
    549  struct vfio_pci_walk_info walk = {  in vfio_pci_for_each_slot_or_bus() local
    553  pci_walk_bus(pdev->bus, vfio_pci_walk_wrapper, &walk);  in vfio_pci_for_each_slot_or_bus()
    555  return walk.ret;  in vfio_pci_for_each_slot_or_bus()
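Here "walk" is the small context struct vfio threads through pci_walk_bus(): the wrapper callback receives it via the void *data argument, and a nonzero return from the callback stops the bus walk early. A minimal sketch of the same pattern with hypothetical names (the real vfio code additionally filters by slot):

    #include <linux/pci.h>

    struct my_walk_info {                     /* hypothetical, cf. vfio_pci_walk_info */
            int (*fn)(struct pci_dev *pdev, void *data);
            void *data;
            int ret;
    };

    static int my_walk_wrapper(struct pci_dev *pdev, void *data)
    {
            struct my_walk_info *walk = data;

            walk->ret = walk->fn(pdev, walk->data);
            return walk->ret;                 /* nonzero stops pci_walk_bus() */
    }

    static int my_for_each_dev_on_bus(struct pci_dev *pdev,
                                      int (*fn)(struct pci_dev *, void *),
                                      void *data)
    {
            struct my_walk_info walk = { .fn = fn, .data = data, .ret = 0 };

            pci_walk_bus(pdev->bus, my_walk_wrapper, &walk);
            return walk.ret;
    }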
|
/drivers/crypto/ux500/cryp/ |
D | cryp_core.c |
    883  struct ablkcipher_walk walk;  in ablk_crypt() local
    898  ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);  in ablk_crypt()
    899  ret = ablkcipher_walk_phys(areq, &walk);  in ablk_crypt()
    907  while ((nbytes = walk.nbytes) > 0) {  in ablk_crypt()
    908  ctx->iv = walk.iv;  in ablk_crypt()
    909  src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);  in ablk_crypt()
    912  dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);  in ablk_crypt()
    922  ret = ablkcipher_walk_done(areq, &walk, nbytes);  in ablk_crypt()
    926  ablkcipher_walk_complete(&walk);  in ablk_crypt()
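ablk_crypt() uses the legacy ablkcipher_walk_phys() variant: instead of mapped virtual addresses, the walker hands back page/offset pairs, which are turned into physical addresses for the DMA engine. A sketch of that loop, with my_dma_xfer() as a hypothetical stand-in for programming and waiting on the hardware (the real driver also manages the IV and only processes whole blocks per chunk):

    #include <linux/crypto.h>
    #include <linux/io.h>
    #include <crypto/algapi.h>

    static int example_ablk_crypt(struct ablkcipher_request *areq,
                                  int (*my_dma_xfer)(phys_addr_t src,
                                                     phys_addr_t dst,
                                                     unsigned int len))
    {
            struct ablkcipher_walk walk;
            unsigned int nbytes;
            int ret;

            ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
            ret = ablkcipher_walk_phys(areq, &walk);
            if (ret)
                    return ret;

            while ((nbytes = walk.nbytes) > 0) {
                    phys_addr_t src = page_to_phys(walk.src.page) + walk.src.offset;
                    phys_addr_t dst = page_to_phys(walk.dst.page) + walk.dst.offset;

                    ret = my_dma_xfer(src, dst, nbytes);
                    if (ret)
                            break;

                    /* Everything in this chunk was consumed. */
                    ret = ablkcipher_walk_done(areq, &walk, 0);
                    if (ret)
                            break;
            }
            ablkcipher_walk_complete(&walk);
            return ret;
    }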
|
/drivers/crypto/ux500/hash/ |
D | hash_core.c |
    1071  struct crypto_hash_walk walk;  in hash_hw_update() local
    1072  int msg_length = crypto_hash_walk_first(req, &walk);  in hash_hw_update()
    1095  data_buffer = walk.data;  in hash_hw_update()
    1105  msg_length = crypto_hash_walk_done(&walk, 0);  in hash_hw_update()
|
/drivers/net/ethernet/sun/ |
D | sungem.c |
    662  int walk = entry;  in gem_tx() local
    667  walk = NEXT_TX(walk);  in gem_tx()
    668  if (walk == limit)  in gem_tx()
    670  if (walk == last)  in gem_tx()
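In gem_tx() the walk is over TX ring indices: a completed skb may span several descriptors, so the code steps forward from the frame's first entry to its last and checks whether the hardware's completion index ("limit") falls inside that span. A small sketch of that check; MY_TX_RING_SIZE and MY_NEXT_TX() mirror the driver's TX_RING_SIZE/NEXT_TX and are assumptions here.

    #include <linux/types.h>

    #define MY_TX_RING_SIZE 128
    #define MY_NEXT_TX(n)   (((n) + 1) & (MY_TX_RING_SIZE - 1))

    /* Return true when every descriptor of the frame ending at @last has
     * been completed, i.e. the completion index @limit does not fall
     * between @entry and @last (with ring wrap-around). */
    static bool my_tx_frame_complete(int entry, int last, int limit)
    {
            int walk = entry;
            bool incomplete = false;

            for (;;) {
                    walk = MY_NEXT_TX(walk);
                    if (walk == limit)
                            incomplete = true;
                    if (walk == last)
                            break;
            }
            return !incomplete;
    }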
|
/drivers/scsi/aic7xxx/ |
D | aic79xx.seq |
    556  * manually walk the list counting MAXCMDCNT elements
    722  * Brute force walk.
|