/drivers/atm/

D | idt77105.c |
     86  struct idt77105_priv *walk;  in idt77105_stats_timer_func() local
     91  for (walk = idt77105_all; walk; walk = walk->next) {  in idt77105_stats_timer_func()
     92  dev = walk->dev;  in idt77105_stats_timer_func()
     94  stats = &walk->stats;  in idt77105_stats_timer_func()
    115  struct idt77105_priv *walk;  in idt77105_restart_timer_func() local
    120  for (walk = idt77105_all; walk; walk = walk->next) {  in idt77105_restart_timer_func()
    121  dev = walk->dev;  in idt77105_restart_timer_func()
    135  PUT( walk->old_mcr ,MCR);  in idt77105_restart_timer_func()
    323  struct idt77105_priv *walk, *prev;  in idt77105_stop() local
    331  for (prev = NULL, walk = idt77105_all ;  in idt77105_stop()
         [all …]
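The idt77105 hits above share one pattern: a global, NULL-terminated singly
linked list of per-PHY private structs that a periodic timer re-walks to poll
each device. A minimal sketch under that assumption; every name below is a
simplified stand-in for the driver's real types:

#include <linux/timer.h>
#include <linux/jiffies.h>

struct atm_dev;				/* opaque here; see <linux/atmdev.h> */

struct phy_priv {			/* stand-in for struct idt77105_priv */
	struct atm_dev *dev;		/* device this PHY serves */
	struct phy_priv *next;		/* next PHY on the global list */
};

static struct phy_priv *phy_all;	/* list head, cf. idt77105_all */
static struct timer_list stats_timer;

static void stats_timer_func(struct timer_list *unused)
{
	struct phy_priv *walk;

	/* visit every registered PHY, exactly as the loops above do */
	for (walk = phy_all; walk; walk = walk->next) {
		/* read hardware counters for walk->dev here */
	}
	mod_timer(&stats_timer, jiffies + HZ);	/* re-arm for the next pass */
}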
D | suni.c |
     58  struct suni_priv *walk;  in suni_hz() local
     62  for (walk = sunis; walk; walk = walk->next) {  in suni_hz()
     63  dev = walk->dev;  in suni_hz()
     64  stats = &walk->sonet_stats;  in suni_hz()
    342  struct suni_priv **walk;  in suni_stop() local
    347  for (walk = &sunis; *walk != PRIV(dev);  in suni_stop()
    348  walk = &PRIV((*walk)->dev)->next);  in suni_stop()
    349  *walk = PRIV((*walk)->dev)->next;  in suni_stop()
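Lines 347-349 of suni.c show the classic pointer-to-pointer unlink: walk holds
the address of the link that points at the current node, so no separate "prev"
pointer is needed. A generic sketch of the idiom (node and function names are
not the driver's):

struct node {
	struct node *next;
};

static void unlink(struct node **head, struct node *victim)
{
	struct node **walk;

	/* advance until *walk is the pointer that targets the victim;
	 * assumes the victim is on the list, as suni_stop() does */
	for (walk = head; *walk != victim; walk = &(*walk)->next)
		;
	*walk = victim->next;	/* splice it out with a single store */
}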
/drivers/crypto/vmx/

D | aes_ctr.c |
     70  struct skcipher_walk *walk)  in p8_aes_ctr_final() argument
     72  u8 *ctrblk = walk->iv;  in p8_aes_ctr_final()
     74  u8 *src = walk->src.virt.addr;  in p8_aes_ctr_final()
     75  u8 *dst = walk->dst.virt.addr;  in p8_aes_ctr_final()
     76  unsigned int nbytes = walk->nbytes;  in p8_aes_ctr_final()
     94  struct skcipher_walk walk;  in p8_aes_ctr_crypt() local
    106  ret = skcipher_walk_virt(&walk, req, false);  in p8_aes_ctr_crypt()
    107  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {  in p8_aes_ctr_crypt()
    111  aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,  in p8_aes_ctr_crypt()
    112  walk.dst.virt.addr,  in p8_aes_ctr_crypt()
         [all …]
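The vmx, padlock and geode entries in this listing all share one loop shape.
A minimal sketch of it, with cipher_blocks() and ctr_final() as hypothetical
stand-ins for the driver-specific primitives (aes_p8_ctr32_encrypt_blocks()
and p8_aes_ctr_final() above); the walk API calls themselves are real:

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

/* hypothetical driver primitives, not part of any kernel API */
static void cipher_blocks(const u8 *src, u8 *dst, size_t blocks, u8 *iv);
static void ctr_final(struct skcipher_walk *walk);

static int walk_and_crypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	unsigned int nbytes;
	int ret;

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		/* process whole blocks only; the walk maps src/dst for us */
		cipher_blocks(walk.src.virt.addr, walk.dst.virt.addr,
			      nbytes / AES_BLOCK_SIZE, walk.iv);
		/* report the tail we did not consume back to the walker */
		ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		/* CTR only: one final partial block, cf. p8_aes_ctr_final() */
		ctr_final(&walk);
		ret = skcipher_walk_done(&walk, 0);
	}
	return ret;
}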
D | aes_xts.c |
     82  struct skcipher_walk walk;  in p8_aes_xts_crypt() local
     99  ret = skcipher_walk_virt(&walk, req, false);  in p8_aes_xts_crypt()
    107  aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);  in p8_aes_xts_crypt()
    113  while ((nbytes = walk.nbytes) != 0) {  in p8_aes_xts_crypt()
    118  aes_p8_xts_encrypt(walk.src.virt.addr,  in p8_aes_xts_crypt()
    119  walk.dst.virt.addr,  in p8_aes_xts_crypt()
    123  aes_p8_xts_decrypt(walk.src.virt.addr,  in p8_aes_xts_crypt()
    124  walk.dst.virt.addr,  in p8_aes_xts_crypt()
    131  ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in p8_aes_xts_crypt()
D | aes_cbc.c |
     75  struct skcipher_walk walk;  in p8_aes_cbc_crypt() local
     88  ret = skcipher_walk_virt(&walk, req, false);  in p8_aes_cbc_crypt()
     89  while ((nbytes = walk.nbytes) != 0) {  in p8_aes_cbc_crypt()
     93  aes_p8_cbc_encrypt(walk.src.virt.addr,  in p8_aes_cbc_crypt()
     94  walk.dst.virt.addr,  in p8_aes_cbc_crypt()
     97  walk.iv, enc);  in p8_aes_cbc_crypt()
    102  ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in p8_aes_cbc_crypt()
/drivers/crypto/

D | padlock-aes.c |
    348  struct skcipher_walk walk;  in ecb_aes_encrypt() local
    354  err = skcipher_walk_virt(&walk, req, false);  in ecb_aes_encrypt()
    356  while ((nbytes = walk.nbytes) != 0) {  in ecb_aes_encrypt()
    357  padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,  in ecb_aes_encrypt()
    361  err = skcipher_walk_done(&walk, nbytes);  in ecb_aes_encrypt()
    373  struct skcipher_walk walk;  in ecb_aes_decrypt() local
    379  err = skcipher_walk_virt(&walk, req, false);  in ecb_aes_decrypt()
    381  while ((nbytes = walk.nbytes) != 0) {  in ecb_aes_decrypt()
    382  padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,  in ecb_aes_decrypt()
    386  err = skcipher_walk_done(&walk, nbytes);  in ecb_aes_decrypt()
         [all …]
D | geode-aes.c |
    267  struct skcipher_walk walk;  in geode_skcipher_crypt() local
    282  err = skcipher_walk_virt(&walk, req, false);  in geode_skcipher_crypt()
    284  while ((nbytes = walk.nbytes) != 0) {  in geode_skcipher_crypt()
    285  geode_aes_crypt(tctx, walk.src.virt.addr, walk.dst.virt.addr,  in geode_skcipher_crypt()
    287  walk.iv, mode, dir);  in geode_skcipher_crypt()
    288  err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in geode_skcipher_crypt()
D | n2_core.c |
    526  struct crypto_hash_walk walk;  in n2_do_async_digest() local
    549  nbytes = crypto_hash_walk_first(req, &walk);  in n2_do_async_digest()
    568  ent->src_addr = __pa(walk.data);  in n2_do_async_digest()
    576  nbytes = crypto_hash_walk_done(&walk, 0);  in n2_do_async_digest()
    581  ent->src_addr = __pa(walk.data);  in n2_do_async_digest()
    589  nbytes = crypto_hash_walk_done(&walk, 0);  in n2_do_async_digest()
    684  struct skcipher_walk walk;  member
    723  struct skcipher_walk walk;  member
    851  struct skcipher_walk *walk = &rctx->walk;  in n2_compute_chunks() local
    858  err = skcipher_walk_async(walk, req);  in n2_compute_chunks()
         [all …]
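n2_do_async_digest() (and hash_hw_update() in the ux500 entry further down)
uses the crypto_hash_walk helpers to visit the request's data one mapped span
at a time. A minimal sketch of that loop; consume() is a hypothetical
placeholder for feeding walk.data to the hardware:

#include <crypto/internal/hash.h>

static void consume(const void *data, unsigned int len);  /* placeholder */

static int hash_all(struct ahash_request *req)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk);
	     nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, 0)) {
		/* walk.data points at nbytes of linearly mapped input */
		consume(walk.data, nbytes);
	}
	return nbytes;	/* 0 when done, negative errno on failure */
}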
D | hifn_795x.c |
     624  struct hifn_cipher_walk walk;  member
    1342  t = &rctx->walk.cache[0];  in hifn_setup_dma()
    1345  if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  in hifn_setup_dma()
    1537  rctx->walk.flags = 0;  in hifn_setup_session()
    1545  rctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;  in hifn_setup_session()
    1551  if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  in hifn_setup_session()
    1552  err = hifn_cipher_walk_init(&rctx->walk, idx, GFP_ATOMIC);  in hifn_setup_session()
    1557  sg_num = hifn_cipher_walk(req, &rctx->walk);  in hifn_setup_session()
    1670  if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {  in hifn_process_ready()
    1677  t = &rctx->walk.cache[idx];  in hifn_process_ready()
          [all …]
D | s5p-sss.c |
    468  struct scatter_walk walk;  in s5p_sg_copy_buf() local
    473  scatterwalk_start(&walk, sg);  in s5p_sg_copy_buf()
    474  scatterwalk_copychunks(buf, &walk, nbytes, out);  in s5p_sg_copy_buf()
    475  scatterwalk_done(&walk, out, 0);  in s5p_sg_copy_buf()
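s5p_sg_copy_buf() is the scatter_walk API in its smallest useful form: copy
nbytes between a linear buffer and a scatterlist. A sketch of the same helper
(out selects the direction, nonzero meaning buffer to scatterlist):

#include <crypto/scatterwalk.h>

static void sg_copy_buf(void *buf, struct scatterlist *sg,
			unsigned int nbytes, int out)
{
	struct scatter_walk walk;

	if (!nbytes)
		return;

	scatterwalk_start(&walk, sg);			 /* position at sg[0] */
	scatterwalk_copychunks(buf, &walk, nbytes, out); /* map, copy, advance */
	scatterwalk_done(&walk, out, 0);		 /* flush and unmap */
}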
/drivers/crypto/axis/

D | artpec6_crypto.c |
    798  struct artpec6_crypto_walk *walk, size_t size)  in setup_bounce_buffer_in() argument
    808  bbuf->sg = walk->sg;  in setup_bounce_buffer_in()
    809  bbuf->offset = walk->offset;  in setup_bounce_buffer_in()
    817  pr_debug("BOUNCE %zu offset %zu\n", size, walk->offset);  in setup_bounce_buffer_in()
    824  struct artpec6_crypto_walk *walk,  in artpec6_crypto_setup_sg_descrs_in() argument
    831  while (walk->sg && count) {  in artpec6_crypto_setup_sg_descrs_in()
    832  chunk = min(count, artpec6_crypto_walk_chunklen(walk));  in artpec6_crypto_setup_sg_descrs_in()
    833  addr = artpec6_crypto_walk_chunk_phys(walk);  in artpec6_crypto_setup_sg_descrs_in()
    846  ret = setup_bounce_buffer_in(common, walk, chunk);  in artpec6_crypto_setup_sg_descrs_in()
    849  ret = setup_bounce_buffer_in(common, walk, chunk);  in artpec6_crypto_setup_sg_descrs_in()
         [all …]
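The artpec6_crypto_walk seen above is a (scatterlist, offset) cursor advanced
chunk by chunk, with bounce buffers for chunks the DMA engine cannot take
directly. A simplified sketch of just the cursor part; the struct and helpers
below are stand-ins, not the driver's own:

#include <linux/scatterlist.h>

struct sg_walk {
	struct scatterlist *sg;	/* current entry */
	size_t offset;		/* bytes already consumed within it */
};

static size_t sg_walk_chunklen(const struct sg_walk *w)
{
	return w->sg->length - w->offset;	/* contiguous bytes left here */
}

static void sg_walk_advance(struct sg_walk *w, size_t nbytes)
{
	w->offset += nbytes;
	if (w->offset >= w->sg->length) {	/* entry exhausted */
		w->sg = sg_next(w->sg);		/* NULL at end of the list */
		w->offset = 0;
	}
}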
/drivers/crypto/chelsio/

D | chcr_algo.c |
    375  static inline void dsgl_walk_init(struct dsgl_walk *walk,  in dsgl_walk_init() argument
    378  walk->dsgl = dsgl;  in dsgl_walk_init()
    379  walk->nents = 0;  in dsgl_walk_init()
    380  walk->to = (struct phys_sge_pairs *)(dsgl + 1);  in dsgl_walk_init()
    383  static inline void dsgl_walk_end(struct dsgl_walk *walk, unsigned short qid,  in dsgl_walk_end() argument
    388  phys_cpl = walk->dsgl;  in dsgl_walk_end()
    398  CPL_RX_PHYS_DSGL_NOOFSGENTR_V(walk->nents));  in dsgl_walk_end()
    405  static inline void dsgl_walk_add_page(struct dsgl_walk *walk,  in dsgl_walk_add_page() argument
    413  j = walk->nents;  in dsgl_walk_add_page()
    414  walk->to->len[j % 8] = htons(size);  in dsgl_walk_add_page()
         [all …]
/drivers/crypto/nx/

D | nx.c |
    154  struct scatter_walk walk;  in nx_walk_and_build() local
    161  scatterwalk_start(&walk, sg_src);  in nx_walk_and_build()
    172  scatterwalk_advance(&walk, start - offset);  in nx_walk_and_build()
    175  n = scatterwalk_clamp(&walk, len);  in nx_walk_and_build()
    179  scatterwalk_start(&walk, sg_next(walk.sg));  in nx_walk_and_build()
    180  n = scatterwalk_clamp(&walk, len);  in nx_walk_and_build()
    182  dst = scatterwalk_map(&walk);  in nx_walk_and_build()
    188  scatterwalk_advance(&walk, n);  in nx_walk_and_build()
    189  scatterwalk_done(&walk, SCATTERWALK_FROM_SG, len);  in nx_walk_and_build()
D | nx-aes-gcm.c |
    106  struct scatter_walk walk;  in nx_gca() local
    113  scatterwalk_start(&walk, req->src);  in nx_gca()
    114  scatterwalk_copychunks(out, &walk, nbytes, SCATTERWALK_FROM_SG);  in nx_gca()
    115  scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0);  in nx_gca()
/drivers/crypto/ux500/cryp/

D | cryp_core.c |
    884  struct skcipher_walk walk;  in ablk_crypt() local
    899  ret = skcipher_walk_async(&walk, areq);  in ablk_crypt()
    907  while ((nbytes = walk.nbytes) > 0) {  in ablk_crypt()
    908  ctx->iv = walk.iv;  in ablk_crypt()
    909  src_paddr = (page_to_phys(walk.src.phys.page) + walk.src.phys.offset);  in ablk_crypt()
    912  dst_paddr = (page_to_phys(walk.dst.phys.page) + walk.dst.phys.offset);  in ablk_crypt()
    922  ret = skcipher_walk_done(&walk, nbytes);  in ablk_crypt()
/drivers/vfio/pci/

D | vfio_pci_core.c |
    580  struct vfio_pci_walk_info *walk = data;  in vfio_pci_walk_wrapper() local
    582  if (!walk->slot || vfio_pci_dev_below_slot(pdev, walk->pdev->slot))  in vfio_pci_walk_wrapper()
    583  walk->ret = walk->fn(pdev, walk->data);  in vfio_pci_walk_wrapper()
    585  return walk->ret;  in vfio_pci_walk_wrapper()
    593  struct vfio_pci_walk_info walk = {  in vfio_pci_for_each_slot_or_bus() local
    597  pci_walk_bus(pdev->bus, vfio_pci_walk_wrapper, &walk);  in vfio_pci_for_each_slot_or_bus()
    599  return walk.ret;  in vfio_pci_for_each_slot_or_bus()
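vfio_pci_for_each_slot_or_bus() adapts a caller-supplied callback to
pci_walk_bus() by carrying it, its argument and a result slot through the
opaque data pointer. A sketch of that wrapper pattern, simplified to drop the
slot check (struct and function names are stand-ins for the vfio ones):

#include <linux/pci.h>

struct walk_info {
	int (*fn)(struct pci_dev *pdev, void *data);
	void *data;
	int ret;
};

static int walk_wrapper(struct pci_dev *pdev, void *data)
{
	struct walk_info *walk = data;

	walk->ret = walk->fn(pdev, walk->data);
	return walk->ret;	/* nonzero stops pci_walk_bus() early */
}

static int for_each_dev_on_bus(struct pci_dev *pdev,
			       int (*fn)(struct pci_dev *, void *),
			       void *data)
{
	struct walk_info walk = { .fn = fn, .data = data, .ret = 0 };

	pci_walk_bus(pdev->bus, walk_wrapper, &walk);
	return walk.ret;
}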
/drivers/crypto/ux500/hash/

D | hash_core.c |
    1077  struct crypto_hash_walk walk;  in hash_hw_update() local
    1087  msg_length = crypto_hash_walk_first(req, &walk);  in hash_hw_update()
    1100  ret = crypto_hash_walk_done(&walk, -EPERM);  in hash_hw_update()
    1106  data_buffer = walk.data;  in hash_hw_update()
    1113  crypto_hash_walk_done(&walk, ret);  in hash_hw_update()
    1117  msg_length = crypto_hash_walk_done(&walk, 0);  in hash_hw_update()
/drivers/net/ethernet/sun/

D | sungem.c |
    650  int walk = entry;  in gem_tx() local
    655  walk = NEXT_TX(walk);  in gem_tx()
    656  if (walk == limit)  in gem_tx()
    658  if (walk == last)  in gem_tx()
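gem_tx() walks TX descriptor ring indices with wraparound until it reaches a
limit entry. A sketch of that index walk; the mask in NEXT_TX() assumes a
power-of-two ring size, as sungem.h does, and covers() is a hypothetical name,
not the driver's logic verbatim:

#define TX_RING_SIZE	128	/* illustrative; must be a power of two */
#define NEXT_TX(n)	(((n) + 1) & (TX_RING_SIZE - 1))

/* does "last" fall inside the circular range [entry, limit)? */
static int covers(int entry, int limit, int last)
{
	int walk = entry;

	while (walk != limit) {
		if (walk == last)
			return 1;
		walk = NEXT_TX(walk);	/* advance with wraparound */
	}
	return 0;
}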
/drivers/fpga/

D | Kconfig |
    157  Driver can walk through the feature headers to enumerate feature
/drivers/scsi/aic7xxx/

D | aic79xx.seq |
    556  * manually walk the list counting MAXCMDCNT elements
    722  * Brute force walk.