/drivers/atm/

D  idt77105.c
     86  struct idt77105_priv *walk;                           in idt77105_stats_timer_func() [local]
     91  for (walk = idt77105_all; walk; walk = walk->next) {  in idt77105_stats_timer_func()
     92  dev = walk->dev;                                      in idt77105_stats_timer_func()
     94  stats = &walk->stats;                                 in idt77105_stats_timer_func()
    115  struct idt77105_priv *walk;                           in idt77105_restart_timer_func() [local]
    120  for (walk = idt77105_all; walk; walk = walk->next) {  in idt77105_restart_timer_func()
    121  dev = walk->dev;                                      in idt77105_restart_timer_func()
    135  PUT( walk->old_mcr ,MCR);                             in idt77105_restart_timer_func()
    323  struct idt77105_priv *walk, *prev;                    in idt77105_stop() [local]
    331  for (prev = NULL, walk = idt77105_all ;               in idt77105_stop()
    [all …]
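
The idt77105 timer callbacks traverse a global singly linked list of PHY instances. A minimal plain-C sketch of that traversal; phy_priv and phy_list are hypothetical stand-ins for the driver's idt77105_priv and idt77105_all:

    /* Hedged sketch of the timer-callback list walk; the types and
     * names here are hypothetical, not the driver's own. */
    struct phy_priv {
            struct phy_priv *next;  /* link in the global list */
            int stats;              /* per-device counters */
    };

    static struct phy_priv *phy_list;  /* head: all registered PHYs */

    static void poll_all_phys(void)
    {
            struct phy_priv *walk;

            /* visit every registered device, as the stats timer does */
            for (walk = phy_list; walk; walk = walk->next)
                    walk->stats++;  /* placeholder for real register I/O */
    }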
D  suni.c
     59  struct suni_priv *walk;                        in suni_hz() [local]
     63  for (walk = sunis; walk; walk = walk->next) {  in suni_hz()
     64  dev = walk->dev;                               in suni_hz()
     65  stats = &walk->sonet_stats;                    in suni_hz()
    343  struct suni_priv **walk;                       in suni_stop() [local]
    348  for (walk = &sunis; *walk != PRIV(dev);        in suni_stop()
    349  walk = &PRIV((*walk)->dev)->next);             in suni_stop()
    350  *walk = PRIV((*walk)->dev)->next;              in suni_stop()
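
suni_stop() (lines 348-350) walks with a pointer-to-pointer, so unlinking the head of the list needs no special case. A hedged plain-C sketch of that idiom; node and list_head are hypothetical names:

    /* Double-pointer unlink, as in suni_stop(); assumes target is
     * actually on the list, as the driver does. */
    struct node {
            struct node *next;
    };

    static struct node *list_head;

    static void unlink_node(struct node *target)
    {
            struct node **walk;

            /* advance until *walk is the node to remove ... */
            for (walk = &list_head; *walk != target; walk = &(*walk)->next)
                    ;
            /* ... then splice it out through the back-pointer */
            *walk = target->next;
    }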
/drivers/crypto/vmx/

D  aes_ctr.c
     70  struct skcipher_walk *walk)                         in p8_aes_ctr_final() [argument]
     72  u8 *ctrblk = walk->iv;                              in p8_aes_ctr_final()
     74  u8 *src = walk->src.virt.addr;                      in p8_aes_ctr_final()
     75  u8 *dst = walk->dst.virt.addr;                      in p8_aes_ctr_final()
     76  unsigned int nbytes = walk->nbytes;                 in p8_aes_ctr_final()
     94  struct skcipher_walk walk;                          in p8_aes_ctr_crypt() [local]
    106  ret = skcipher_walk_virt(&walk, req, false);        in p8_aes_ctr_crypt()
    107  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {  in p8_aes_ctr_crypt()
    111  aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,     in p8_aes_ctr_crypt()
    112  walk.dst.virt.addr,                                 in p8_aes_ctr_crypt()
    [all …]
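
The p8_aes_ctr_crypt() excerpt above, and the aes_xts.c, aes_cbc.c, padlock-aes.c and geode-aes.c entries below, all share one idiom: skcipher_walk_virt() maps the request chunk by chunk, whole blocks are processed in place, and the unhandled tail is returned through skcipher_walk_done(). A hedged sketch of that loop; my_cipher_blocks() is a hypothetical per-driver primitive, while the walk calls are the real <crypto/internal/skcipher.h> API:

    #include <crypto/aes.h>
    #include <crypto/internal/skcipher.h>

    /* Hypothetical hardware primitive: processes len bytes (a multiple
     * of AES_BLOCK_SIZE) from src to dst, updating iv as it goes. */
    void my_cipher_blocks(const u8 *src, u8 *dst, unsigned int len, u8 *iv);

    static int my_skcipher_crypt(struct skcipher_request *req)
    {
            struct skcipher_walk walk;
            unsigned int nbytes;
            int ret;

            ret = skcipher_walk_virt(&walk, req, false);
            while ((nbytes = walk.nbytes) != 0) {
                    /* only whole blocks from this mapped chunk */
                    my_cipher_blocks(walk.src.virt.addr, walk.dst.virt.addr,
                                     nbytes - (nbytes % AES_BLOCK_SIZE),
                                     walk.iv);
                    /* hand back the tail; this maps the next chunk */
                    ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
            }
            return ret;
    }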
D  aes_xts.c
     82  struct skcipher_walk walk;                                 in p8_aes_xts_crypt() [local]
     99  ret = skcipher_walk_virt(&walk, req, false);               in p8_aes_xts_crypt()
    107  aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);           in p8_aes_xts_crypt()
    113  while ((nbytes = walk.nbytes) != 0) {                      in p8_aes_xts_crypt()
    118  aes_p8_xts_encrypt(walk.src.virt.addr,                     in p8_aes_xts_crypt()
    119  walk.dst.virt.addr,                                        in p8_aes_xts_crypt()
    123  aes_p8_xts_decrypt(walk.src.virt.addr,                     in p8_aes_xts_crypt()
    124  walk.dst.virt.addr,                                        in p8_aes_xts_crypt()
    131  ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in p8_aes_xts_crypt()
D  aes_cbc.c
     75  struct skcipher_walk walk;                                 in p8_aes_cbc_crypt() [local]
     88  ret = skcipher_walk_virt(&walk, req, false);               in p8_aes_cbc_crypt()
     89  while ((nbytes = walk.nbytes) != 0) {                      in p8_aes_cbc_crypt()
     93  aes_p8_cbc_encrypt(walk.src.virt.addr,                     in p8_aes_cbc_crypt()
     94  walk.dst.virt.addr,                                        in p8_aes_cbc_crypt()
     97  walk.iv, enc);                                             in p8_aes_cbc_crypt()
    102  ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in p8_aes_cbc_crypt()
/drivers/crypto/

D  padlock-aes.c
    348  struct skcipher_walk walk;                                   in ecb_aes_encrypt() [local]
    354  err = skcipher_walk_virt(&walk, req, false);                 in ecb_aes_encrypt()
    356  while ((nbytes = walk.nbytes) != 0) {                        in ecb_aes_encrypt()
    357  padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,  in ecb_aes_encrypt()
    361  err = skcipher_walk_done(&walk, nbytes);                     in ecb_aes_encrypt()
    373  struct skcipher_walk walk;                                   in ecb_aes_decrypt() [local]
    379  err = skcipher_walk_virt(&walk, req, false);                 in ecb_aes_decrypt()
    381  while ((nbytes = walk.nbytes) != 0) {                        in ecb_aes_decrypt()
    382  padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,  in ecb_aes_decrypt()
    386  err = skcipher_walk_done(&walk, nbytes);                     in ecb_aes_decrypt()
    [all …]
D  geode-aes.c
    267  struct skcipher_walk walk;                                     in geode_skcipher_crypt() [local]
    282  err = skcipher_walk_virt(&walk, req, false);                   in geode_skcipher_crypt()
    284  while ((nbytes = walk.nbytes) != 0) {                          in geode_skcipher_crypt()
    285  geode_aes_crypt(tctx, walk.src.virt.addr, walk.dst.virt.addr, in geode_skcipher_crypt()
    287  walk.iv, mode, dir);                                           in geode_skcipher_crypt()
    288  err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);      in geode_skcipher_crypt()
D  n2_core.c
    525  struct crypto_hash_walk walk;                 in n2_do_async_digest() [local]
    548  nbytes = crypto_hash_walk_first(req, &walk);  in n2_do_async_digest()
    567  ent->src_addr = __pa(walk.data);              in n2_do_async_digest()
    575  nbytes = crypto_hash_walk_done(&walk, 0);     in n2_do_async_digest()
    580  ent->src_addr = __pa(walk.data);              in n2_do_async_digest()
    588  nbytes = crypto_hash_walk_done(&walk, 0);     in n2_do_async_digest()
    683  struct skcipher_walk walk;                    [member]
    722  struct skcipher_walk walk;                    [member]
    850  struct skcipher_walk *walk = &rctx->walk;     in n2_compute_chunks() [local]
    857  err = skcipher_walk_async(walk, req);         in n2_compute_chunks()
    [all …]
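
n2_do_async_digest() above drives the hash request with the crypto_hash_walk helpers, and hash_hw_update() in the ux500 entry further down follows the same shape: crypto_hash_walk_first() maps the first span, crypto_hash_walk_done(walk, 0) advances to the next, and a return of zero or a negative error ends the loop. A minimal hedged sketch; consume_chunk() is a hypothetical stand-in for queueing walk.data to hardware:

    #include <crypto/internal/hash.h>

    /* Hypothetical consumer of one mapped span of input. */
    void consume_chunk(const void *data, int len);

    static int my_hash_update(struct ahash_request *req)
    {
            struct crypto_hash_walk walk;
            int nbytes;

            for (nbytes = crypto_hash_walk_first(req, &walk);
                 nbytes > 0;
                 nbytes = crypto_hash_walk_done(&walk, 0)) {
                    /* walk.data points at nbytes of mapped input */
                    consume_chunk(walk.data, nbytes);
            }
            /* zero on completion, negative error otherwise */
            return nbytes;
    }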
D  hifn_795x.c
     624  struct hifn_cipher_walk walk;                                 [member]
    1342  t = &rctx->walk.cache[0];                                     in hifn_setup_dma()
    1345  if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) { in hifn_setup_dma()
    1537  rctx->walk.flags = 0;                                         in hifn_setup_session()
    1545  rctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;                   in hifn_setup_session()
    1551  if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {              in hifn_setup_session()
    1552  err = hifn_cipher_walk_init(&rctx->walk, idx, GFP_ATOMIC);    in hifn_setup_session()
    1557  sg_num = hifn_cipher_walk(req, &rctx->walk);                  in hifn_setup_session()
    1670  if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {              in hifn_process_ready()
    1677  t = &rctx->walk.cache[idx];                                   in hifn_process_ready()
    [all …]
D  s5p-sss.c
    470  struct scatter_walk walk;                         in s5p_sg_copy_buf() [local]
    475  scatterwalk_start(&walk, sg);                     in s5p_sg_copy_buf()
    476  scatterwalk_copychunks(buf, &walk, nbytes, out);  in s5p_sg_copy_buf()
    477  scatterwalk_done(&walk, out, 0);                  in s5p_sg_copy_buf()
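
s5p_sg_copy_buf() is the stock three-call recipe for copying linear memory into or out of a scatterlist; nx_gca() in the nx entry below uses the identical sequence. A hedged sketch of that helper shape; sg_copy_linear() is a hypothetical name, and the scatterwalk calls match the API of the kernel era these drivers come from:

    #include <crypto/scatterwalk.h>

    /* 'out' selects direction: nonzero copies buf -> sg,
     * zero copies sg -> buf. */
    static void sg_copy_linear(void *buf, struct scatterlist *sg,
                               unsigned int nbytes, int out)
    {
            struct scatter_walk walk;

            scatterwalk_start(&walk, sg);
            scatterwalk_copychunks(buf, &walk, nbytes, out);
            scatterwalk_done(&walk, out, 0);  /* 0: nothing further to come */
    }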
/drivers/crypto/axis/

D  artpec6_crypto.c
    797  struct artpec6_crypto_walk *walk, size_t size)            in setup_bounce_buffer_in() [argument]
    807  bbuf->sg = walk->sg;                                      in setup_bounce_buffer_in()
    808  bbuf->offset = walk->offset;                              in setup_bounce_buffer_in()
    816  pr_debug("BOUNCE %zu offset %zu\n", size, walk->offset);  in setup_bounce_buffer_in()
    823  struct artpec6_crypto_walk *walk,                         in artpec6_crypto_setup_sg_descrs_in() [argument]
    830  while (walk->sg && count) {                               in artpec6_crypto_setup_sg_descrs_in()
    831  chunk = min(count, artpec6_crypto_walk_chunklen(walk));   in artpec6_crypto_setup_sg_descrs_in()
    832  addr = artpec6_crypto_walk_chunk_phys(walk);              in artpec6_crypto_setup_sg_descrs_in()
    845  ret = setup_bounce_buffer_in(common, walk, chunk);        in artpec6_crypto_setup_sg_descrs_in()
    848  ret = setup_bounce_buffer_in(common, walk, chunk);        in artpec6_crypto_setup_sg_descrs_in()
    [all …]
/drivers/crypto/chelsio/

D  chcr_algo.c
    379  static inline void dsgl_walk_init(struct dsgl_walk *walk,        in dsgl_walk_init() [argument]
    382  walk->dsgl = dsgl;                                               in dsgl_walk_init()
    383  walk->nents = 0;                                                 in dsgl_walk_init()
    384  walk->to = (struct phys_sge_pairs *)(dsgl + 1);                  in dsgl_walk_init()
    387  static inline void dsgl_walk_end(struct dsgl_walk *walk, unsigned short qid,  in dsgl_walk_end() [argument]
    392  phys_cpl = walk->dsgl;                                           in dsgl_walk_end()
    402  CPL_RX_PHYS_DSGL_NOOFSGENTR_V(walk->nents));                     in dsgl_walk_end()
    409  static inline void dsgl_walk_add_page(struct dsgl_walk *walk,    in dsgl_walk_add_page() [argument]
    417  j = walk->nents;                                                 in dsgl_walk_add_page()
    418  walk->to->len[j % 8] = htons(size);                              in dsgl_walk_add_page()
    [all …]
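
chcr splits hardware gather-list construction into init/add/end helpers around a small cursor struct (walk->to, walk->nents). A generic plain-C sketch of that builder shape; every name here (gather_walk, gather_entry) is hypothetical and deliberately simpler than the chcr types:

    #include <stdint.h>

    struct gather_entry {
            uint64_t addr;
            uint16_t len;
    };

    struct gather_walk {
            struct gather_entry *to;  /* descriptor table being filled */
            unsigned int nents;       /* entries emitted so far */
    };

    static void gather_walk_init(struct gather_walk *walk,
                                 struct gather_entry *table)
    {
            walk->to = table;
            walk->nents = 0;
    }

    static void gather_walk_add(struct gather_walk *walk,
                                uint64_t addr, uint16_t len)
    {
            walk->to[walk->nents].addr = addr;
            walk->to[walk->nents].len = len;
            walk->nents++;
    }

    static unsigned int gather_walk_end(struct gather_walk *walk)
    {
            /* the real driver folds walk->nents into the CPL header here */
            return walk->nents;
    }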
/drivers/crypto/nx/

D  nx.c
    154  struct scatter_walk walk;                           in nx_walk_and_build() [local]
    161  scatterwalk_start(&walk, sg_src);                   in nx_walk_and_build()
    172  scatterwalk_advance(&walk, start - offset);         in nx_walk_and_build()
    175  n = scatterwalk_clamp(&walk, len);                  in nx_walk_and_build()
    179  scatterwalk_start(&walk, sg_next(walk.sg));         in nx_walk_and_build()
    180  n = scatterwalk_clamp(&walk, len);                  in nx_walk_and_build()
    182  dst = scatterwalk_map(&walk);                       in nx_walk_and_build()
    188  scatterwalk_advance(&walk, n);                      in nx_walk_and_build()
    189  scatterwalk_done(&walk, SCATTERWALK_FROM_SG, len);  in nx_walk_and_build()
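
nx_walk_and_build() shows the lower-level scatterwalk loop: clamp the length to what remains in the current span, map it, consume it, advance, and restart at the next scatterlist entry when the clamp returns zero. A hedged sketch of that loop under the same era's API; build_entry() is hypothetical:

    #include <crypto/scatterwalk.h>
    #include <linux/scatterlist.h>

    /* Hypothetical consumer of one mapped span. */
    void build_entry(u8 *data, unsigned int len);

    static void walk_sg_spans(struct scatterlist *sg, unsigned int len)
    {
            struct scatter_walk walk;
            unsigned int n;
            u8 *p;

            scatterwalk_start(&walk, sg);
            while (len) {
                    n = scatterwalk_clamp(&walk, len);
                    if (!n) {
                            /* current entry exhausted: move to the next */
                            scatterwalk_start(&walk, sg_next(walk.sg));
                            n = scatterwalk_clamp(&walk, len);
                    }
                    p = scatterwalk_map(&walk);
                    build_entry(p, n);
                    scatterwalk_unmap(p);
                    scatterwalk_advance(&walk, n);
                    len -= n;
            }
            scatterwalk_done(&walk, 0, 0);  /* 0,0: read-only, nothing left */
    }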
D  nx-aes-gcm.c
    106  struct scatter_walk walk;                                         in nx_gca() [local]
    113  scatterwalk_start(&walk, req->src);                               in nx_gca()
    114  scatterwalk_copychunks(out, &walk, nbytes, SCATTERWALK_FROM_SG);  in nx_gca()
    115  scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0);                  in nx_gca()
/drivers/crypto/ux500/cryp/

D  cryp_core.c
    883  struct skcipher_walk walk;                                              in ablk_crypt() [local]
    898  ret = skcipher_walk_async(&walk, areq);                                 in ablk_crypt()
    906  while ((nbytes = walk.nbytes) > 0) {                                    in ablk_crypt()
    907  ctx->iv = walk.iv;                                                      in ablk_crypt()
    908  src_paddr = (page_to_phys(walk.src.phys.page) + walk.src.phys.offset);  in ablk_crypt()
    911  dst_paddr = (page_to_phys(walk.dst.phys.page) + walk.dst.phys.offset);  in ablk_crypt()
    921  ret = skcipher_walk_done(&walk, nbytes);                                in ablk_crypt()
/drivers/vfio/pci/

D  vfio_pci.c
    735  struct vfio_pci_walk_info *walk = data;                              in vfio_pci_walk_wrapper() [local]
    737  if (!walk->slot || vfio_pci_dev_below_slot(pdev, walk->pdev->slot))  in vfio_pci_walk_wrapper()
    738  walk->ret = walk->fn(pdev, walk->data);                              in vfio_pci_walk_wrapper()
    740  return walk->ret;                                                    in vfio_pci_walk_wrapper()
    748  struct vfio_pci_walk_info walk = {                                   in vfio_pci_for_each_slot_or_bus() [local]
    752  pci_walk_bus(pdev->bus, vfio_pci_walk_wrapper, &walk);               in vfio_pci_for_each_slot_or_bus()
    754  return walk.ret;                                                     in vfio_pci_for_each_slot_or_bus()
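
vfio_pci_for_each_slot_or_bus() packs the callback, its opaque data, and a result slot into one struct so that pci_walk_bus(), whose own return type is void, can still surface the inner callback's return value. A sketch of that wrapper pattern; my_walk_info and the function names are hypothetical, pci_walk_bus() is the real PCI core API:

    #include <linux/pci.h>

    struct my_walk_info {
            int (*fn)(struct pci_dev *pdev, void *data);
            void *data;
            int ret;
    };

    static int my_walk_wrapper(struct pci_dev *pdev, void *data)
    {
            struct my_walk_info *walk = data;

            walk->ret = walk->fn(pdev, walk->data);
            /* nonzero stops pci_walk_bus() early */
            return walk->ret;
    }

    static int my_for_each_on_bus(struct pci_dev *pdev,
                                  int (*fn)(struct pci_dev *, void *),
                                  void *data)
    {
            struct my_walk_info walk = { .fn = fn, .data = data, .ret = 0 };

            pci_walk_bus(pdev->bus, my_walk_wrapper, &walk);
            return walk.ret;
    }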
/drivers/crypto/ux500/hash/

D  hash_core.c
    1075  struct crypto_hash_walk walk;                     in hash_hw_update() [local]
    1085  msg_length = crypto_hash_walk_first(req, &walk);  in hash_hw_update()
    1098  ret = crypto_hash_walk_done(&walk, -EPERM);       in hash_hw_update()
    1104  data_buffer = walk.data;                          in hash_hw_update()
    1111  crypto_hash_walk_done(&walk, ret);                in hash_hw_update()
    1115  msg_length = crypto_hash_walk_done(&walk, 0);     in hash_hw_update()
/drivers/net/ethernet/sun/

D  sungem.c
    650  int walk = entry;      in gem_tx() [local]
    655  walk = NEXT_TX(walk);  in gem_tx()
    656  if (walk == limit)     in gem_tx()
    658  if (walk == last)      in gem_tx()
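
gem_tx() walks TX descriptor indices rather than pointers; NEXT_TX() advances with a power-of-two mask so the index wraps around the ring. A hedged sketch of that masked walk between a completed entry and the ring head; the constant and names are stand-ins for the sungem macros:

    /* TX_RING_SIZE must be a power of two for the mask to wrap. */
    #define TX_RING_SIZE 128
    #define NEXT_TX(n)   (((n) + 1) & (TX_RING_SIZE - 1))

    /* count slots from 'entry' up to (but not including) 'limit' */
    static int tx_slots_between(int entry, int limit)
    {
            int walk = entry;
            int count = 0;

            while (walk != limit) {
                    count++;
                    walk = NEXT_TX(walk);
            }
            return count;
    }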
/drivers/fpga/

D  Kconfig
    150  Driver can walk through the feature headers to enumerate feature
/drivers/scsi/aic7xxx/

D  aic79xx.seq
    556  * manually walk the list counting MAXCMDCNT elements
    722  * Brute force walk.