
Searched refs:walk (Results 1 – 13 of 13) sorted by relevance

/arch/sparc/mm/
extable.c:19 const struct exception_table_entry *walk; in search_extable() local
39 for (walk = start; walk <= last; walk++) { in search_extable()
40 if (walk->fixup == 0) { in search_extable()
42 walk++; in search_extable()
47 if (walk->fixup == -1) in search_extable()
50 if (walk->insn == value) in search_extable()
51 return walk; in search_extable()
55 for (walk = start; walk <= (last - 1); walk++) { in search_extable()
56 if (walk->fixup) in search_extable()
59 if (walk[0].insn <= value && walk[1].insn > value) in search_extable()
[all …]
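
Note: the sparc search_extable() above walks a table that mixes single-instruction entries with two-entry ranges (an entry with fixup == 0 starts a range, fixup == -1 marks a deleted entry). The following is a minimal sketch, with names of our own choosing, of only the simpler exact-match pass; the second, range-matching pass of the real code is omitted.

/* Entry layout as used by the sparc code above. */
struct exception_table_entry {
        unsigned long insn, fixup;
};

/*
 * Hypothetical exact-match lookup: return the entry whose faulting
 * instruction address equals @value, or NULL if none matches.
 */
static const struct exception_table_entry *
find_exact_entry(const struct exception_table_entry *start,
                 const struct exception_table_entry *last,
                 unsigned long value)
{
        const struct exception_table_entry *walk;

        for (walk = start; walk <= last; walk++) {
                /* A fixup of 0 starts a two-entry range: skip the pair. */
                if (walk->fixup == 0) {
                        walk++;
                        continue;
                }
                /* A fixup of -1 marks a deleted entry. */
                if (walk->fixup == -1UL)
                        continue;
                if (walk->insn == value)
                        return walk;
        }
        return NULL;
}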
/arch/arm/crypto/
aesbs-glue.c:109 struct blkcipher_walk walk; in aesbs_cbc_encrypt() local
112 blkcipher_walk_init(&walk, dst, src, nbytes); in aesbs_cbc_encrypt()
113 err = blkcipher_walk_virt(desc, &walk); in aesbs_cbc_encrypt()
115 while (walk.nbytes) { in aesbs_cbc_encrypt()
116 u32 blocks = walk.nbytes / AES_BLOCK_SIZE; in aesbs_cbc_encrypt()
117 u8 *src = walk.src.virt.addr; in aesbs_cbc_encrypt()
119 if (walk.dst.virt.addr == walk.src.virt.addr) { in aesbs_cbc_encrypt()
120 u8 *iv = walk.iv; in aesbs_cbc_encrypt()
128 memcpy(walk.iv, iv, AES_BLOCK_SIZE); in aesbs_cbc_encrypt()
130 u8 *dst = walk.dst.virt.addr; in aesbs_cbc_encrypt()
[all …]
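
Note: the aesbs_cbc_encrypt() hit above follows the standard legacy-blkcipher walk pattern: map a span of the scatterlist, process whole blocks in place, then report leftovers to blkcipher_walk_done(), which maps the next span. A minimal sketch of that shape, with a hypothetical one-block primitive cbc_encrypt_one() standing in for the NEON bit-sliced core (this is not the actual arch/arm code), might look like this:

#include <crypto/algapi.h>      /* legacy struct blkcipher_walk helpers */
#include <crypto/aes.h>
#include <linux/crypto.h>

/* Hypothetical one-block primitive standing in for the NEON bit-sliced core. */
void cbc_encrypt_one(void *ctx, u8 *dst, const u8 *src, const u8 *iv);

static int cbc_encrypt_sketch(struct blkcipher_desc *desc,
                              struct scatterlist *dst, struct scatterlist *src,
                              unsigned int nbytes)
{
        void *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        /* walk.nbytes is how much contiguous data is mapped this round. */
        while (walk.nbytes) {
                u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
                u8 *s = walk.src.virt.addr;
                u8 *d = walk.dst.virt.addr;
                u8 *iv = walk.iv;

                while (blocks--) {
                        cbc_encrypt_one(ctx, d, s, iv);
                        iv = d;                 /* ciphertext chains into next block */
                        s += AES_BLOCK_SIZE;
                        d += AES_BLOCK_SIZE;
                }
                memcpy(walk.iv, iv, AES_BLOCK_SIZE);

                /* Hand leftovers back; this maps the next span (or finishes). */
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}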
/arch/x86/crypto/
salsa20_glue.c:57 struct blkcipher_walk walk; in encrypt() local
62 blkcipher_walk_init(&walk, dst, src, nbytes); in encrypt()
63 err = blkcipher_walk_virt_block(desc, &walk, 64); in encrypt()
65 salsa20_ivsetup(ctx, walk.iv); in encrypt()
67 if (likely(walk.nbytes == nbytes)) in encrypt()
69 salsa20_encrypt_bytes(ctx, walk.src.virt.addr, in encrypt()
70 walk.dst.virt.addr, nbytes); in encrypt()
71 return blkcipher_walk_done(desc, &walk, 0); in encrypt()
74 while (walk.nbytes >= 64) { in encrypt()
75 salsa20_encrypt_bytes(ctx, walk.src.virt.addr, in encrypt()
[all …]
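
Note: salsa20_glue.c varies the same walk: blkcipher_walk_virt_block() asks the walker for 64-byte multiples, and a fast path covers the common case where the whole request maps in one span. A hedged sketch, with stream_xor() as a hypothetical stand-in for salsa20_encrypt_bytes() and the IV setup omitted:

#include <crypto/algapi.h>
#include <linux/crypto.h>

#define CHUNK 64        /* Salsa20 keystream block size */

/* Hypothetical stream core standing in for salsa20_encrypt_bytes(). */
void stream_xor(void *ctx, const u8 *src, u8 *dst, unsigned int bytes);

static int stream_encrypt_sketch(struct blkcipher_desc *desc,
                                 struct scatterlist *dst, struct scatterlist *src,
                                 unsigned int nbytes)
{
        void *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        /* Ask the walker to hand out multiples of CHUNK where it can. */
        err = blkcipher_walk_virt_block(desc, &walk, CHUNK);

        /* Fast path: the whole request is mapped in one contiguous span. */
        if (likely(walk.nbytes == nbytes)) {
                stream_xor(ctx, walk.src.virt.addr, walk.dst.virt.addr, nbytes);
                return blkcipher_walk_done(desc, &walk, 0);
        }

        while (walk.nbytes >= CHUNK) {
                stream_xor(ctx, walk.src.virt.addr, walk.dst.virt.addr,
                           walk.nbytes - (walk.nbytes % CHUNK));
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % CHUNK);
        }
        if (walk.nbytes) {      /* final partial chunk */
                stream_xor(ctx, walk.src.virt.addr, walk.dst.virt.addr, walk.nbytes);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        return err;
}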
blowfish_glue.c:80 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
89 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
91 while ((nbytes = walk->nbytes)) { in ecb_crypt()
92 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
93 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
119 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
128 struct blkcipher_walk walk; in ecb_encrypt() local
130 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
131 return ecb_crypt(desc, &walk, blowfish_enc_blk, blowfish_enc_blk_4way); in ecb_encrypt()
137 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
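
Note: blowfish_glue.c, twofish_glue_3way.c, serpent_sse2_glue.c and camellia_glue.c (the next three results) all share this shape: a common ecb_crypt() helper receives an already-initialised walk plus a one-block and an n-way block function, and prefers the parallel path while enough data remains in the mapped span. A sketch under our own types and block-size assumptions:

#include <crypto/algapi.h>
#include <linux/crypto.h>

#define BSIZE   8       /* example: Blowfish block size */
#define NWAY    4       /* blocks handled per parallel call */

/* Hypothetical block functions, e.g. blowfish_enc_blk() / _4way() equivalents. */
typedef void (*block_fn_t)(void *ctx, u8 *dst, const u8 *src);

/* The caller has already done blkcipher_walk_init() on @walk. */
static int ecb_crypt_sketch(struct blkcipher_desc *desc,
                            struct blkcipher_walk *walk,
                            block_fn_t fn_1way, block_fn_t fn_nway)
{
        void *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int nbytes;
        int err;

        err = blkcipher_walk_virt(desc, walk);

        while ((nbytes = walk->nbytes)) {
                u8 *wsrc = walk->src.virt.addr;
                u8 *wdst = walk->dst.virt.addr;

                /* Prefer the n-way path while whole groups remain. */
                while (nbytes >= NWAY * BSIZE) {
                        fn_nway(ctx, wdst, wsrc);
                        wsrc += NWAY * BSIZE;
                        wdst += NWAY * BSIZE;
                        nbytes -= NWAY * BSIZE;
                }
                /* Then fall back to single blocks for this span. */
                while (nbytes >= BSIZE) {
                        fn_1way(ctx, wdst, wsrc);
                        wsrc += BSIZE;
                        wdst += BSIZE;
                        nbytes -= BSIZE;
                }
                err = blkcipher_walk_done(desc, walk, nbytes);
        }
        return err;
}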
twofish_glue_3way.c:63 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
72 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
74 while ((nbytes = walk->nbytes)) { in ecb_crypt()
75 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
76 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
102 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
111 struct blkcipher_walk walk; in ecb_encrypt() local
113 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
114 return ecb_crypt(desc, &walk, twofish_enc_blk, twofish_enc_blk_3way); in ecb_encrypt()
120 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
serpent_sse2_glue.c:75 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
84 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
87 while ((nbytes = walk->nbytes)) { in ecb_crypt()
88 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
89 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
123 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
133 struct blkcipher_walk walk; in ecb_encrypt() local
135 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
136 return ecb_crypt(desc, &walk, true); in ecb_encrypt()
142 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
camellia_glue.c:1315 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
1324 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
1326 while ((nbytes = walk->nbytes)) { in ecb_crypt()
1327 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
1328 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
1354 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
1363 struct blkcipher_walk walk; in ecb_encrypt() local
1365 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
1366 return ecb_crypt(desc, &walk, camellia_enc_blk, camellia_enc_blk_2way); in ecb_encrypt()
1372 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
aesni-intel_glue.c:286 struct blkcipher_walk walk; in ecb_encrypt() local
289 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
290 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
294 while ((nbytes = walk.nbytes)) { in ecb_encrypt()
295 aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
298 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt()
310 struct blkcipher_walk walk; in ecb_decrypt() local
313 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
314 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
318 while ((nbytes = walk.nbytes)) { in ecb_decrypt()
[all …]
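
Note: aesni-intel_glue.c adds one detail to the same loop: the AES-NI instructions use SSE/FPU state, so the walk is bracketed by kernel_fpu_begin()/kernel_fpu_end() and the request is marked non-sleeping first. A simplified sketch (the context-alignment handling of the real driver is omitted; the aesni_ecb_enc() prototype is assumed from the call shape visible above):

#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/linkage.h>
#include <asm/fpu/api.h>        /* kernel_fpu_begin/end; older trees: <asm/i387.h> */

/* Assembly helper from aesni-intel_asm.S, prototype assumed from the excerpt. */
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);

static int aesni_ecb_encrypt_sketch(struct blkcipher_desc *desc,
                                    struct scatterlist *dst,
                                    struct scatterlist *src,
                                    unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        /* We hold the FPU across the loop, so the walker must not sleep. */
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & ~(AES_BLOCK_SIZE - 1));
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}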
/arch/s390/crypto/
des_s390.c:86 u8 *key, struct blkcipher_walk *walk) in ecb_desall_crypt() argument
88 int ret = blkcipher_walk_virt(desc, walk); in ecb_desall_crypt()
91 while ((nbytes = walk->nbytes)) { in ecb_desall_crypt()
94 u8 *out = walk->dst.virt.addr; in ecb_desall_crypt()
95 u8 *in = walk->src.virt.addr; in ecb_desall_crypt()
101 ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_desall_crypt()
108 u8 *iv, struct blkcipher_walk *walk) in cbc_desall_crypt() argument
110 int ret = blkcipher_walk_virt(desc, walk); in cbc_desall_crypt()
111 unsigned int nbytes = walk->nbytes; in cbc_desall_crypt()
116 memcpy(iv, walk->iv, DES_BLOCK_SIZE); in cbc_desall_crypt()
[all …]
aes_s390.c:317 struct blkcipher_walk *walk) in ecb_aes_crypt() argument
319 int ret = blkcipher_walk_virt(desc, walk); in ecb_aes_crypt()
322 while ((nbytes = walk->nbytes)) { in ecb_aes_crypt()
325 u8 *out = walk->dst.virt.addr; in ecb_aes_crypt()
326 u8 *in = walk->src.virt.addr; in ecb_aes_crypt()
332 ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_aes_crypt()
343 struct blkcipher_walk walk; in ecb_aes_encrypt() local
348 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_encrypt()
349 return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk); in ecb_aes_encrypt()
357 struct blkcipher_walk walk; in ecb_aes_decrypt() local
[all …]
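
Note: the s390 drivers (des_s390.c and aes_s390.c) look different because the CPU's cipher facility consumes a whole buffer per call, so there is no inner per-block loop: each mapped span, rounded down to the block size, goes to the hardware wrapper in one shot. A sketch with a hypothetical hw_crypt() standing in for the crypt_s390 wrappers:

#include <crypto/algapi.h>
#include <linux/crypto.h>

#define BLOCK 16        /* AES block size, as in aes_s390.c */

/*
 * Hypothetical stand-in for the CPACF wrappers (crypt_s390_km() and
 * friends): processes @n bytes of @in into @out in a single call.
 */
int hw_crypt(long func, void *param, u8 *out, const u8 *in, unsigned int n);

static int ecb_hw_crypt_sketch(struct blkcipher_desc *desc, long func,
                               void *param, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* Only whole blocks go to the hardware in one call. */
                unsigned int n = nbytes & ~(BLOCK - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                if (hw_crypt(func, param, out, in, n) < 0)
                        return -EIO;

                nbytes &= BLOCK - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }
        return ret;
}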
/arch/openrisc/kernel/
dma.c:30 unsigned long next, struct mm_walk *walk) in page_set_nocache() argument
50 unsigned long next, struct mm_walk *walk) in page_clear_nocache() argument
76 struct mm_walk walk = { in or1k_dma_alloc_coherent() local
94 if (walk_page_range(va, va + size, &walk)) { in or1k_dma_alloc_coherent()
106 struct mm_walk walk = { in or1k_dma_free_coherent() local
112 WARN_ON(walk_page_range(va, va + size, &walk)); in or1k_dma_free_coherent()
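
Note: here "walk" is a page-table walk rather than a scatterlist walk: or1k_dma_alloc_coherent() builds an mm_walk with a pte_entry callback and runs walk_page_range() over the new mapping to change its caching attributes. A minimal sketch of that usage, assuming the callback-in-struct form of the API from this kernel era (the interface changed in later kernels), with a placeholder callback body:

#include <linux/mm.h>   /* struct mm_walk, walk_page_range() (callback-in-struct form) */

/*
 * Per-PTE callback; the real or1k callbacks rewrite the PTE with a
 * non-cached pgprot and flush the corresponding TLB entry.  The body
 * here is only a placeholder.
 */
static int pte_cb(pte_t *pte, unsigned long addr,
                  unsigned long next, struct mm_walk *walk)
{
        /* inspect or modify *pte for this address */
        return 0;
}

static int walk_region_sketch(unsigned long va, size_t size)
{
        struct mm_walk walk = {
                .pte_entry = pte_cb,
                .mm        = &init_mm,  /* kernel mapping */
        };

        /* Visit every PTE backing [va, va + size). */
        return walk_page_range(va, va + size, &walk);
}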
/arch/ia64/kernel/
efi.c:300 walk (efi_freemem_callback_t callback, void *arg, u64 attr) in walk() function
324 walk(callback, arg, EFI_MEMORY_WB); in efi_memmap_walk()
334 walk(callback, arg, EFI_MEMORY_UC); in efi_memmap_walk_uc()
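
Note: on ia64, walk() iterates the EFI memory map and invokes a callback on every region carrying a requested attribute (EFI_MEMORY_WB for efi_memmap_walk(), EFI_MEMORY_UC for the _uc variant). A hedged sketch over a plain descriptor array; the real code strides through the firmware-provided map and does additional granule trimming, and the callback typedef below is ours:

#include <linux/efi.h>  /* efi_memory_desc_t, EFI_MEMORY_WB/UC, EFI_PAGE_SHIFT */

/* Callback type matching the usage above: called once per matching region. */
typedef int (*region_cb_t)(u64 start, u64 end, void *arg);

static void efi_walk_sketch(efi_memory_desc_t *map, int nr_entries,
                            region_cb_t callback, void *arg, u64 attr)
{
        int i;

        for (i = 0; i < nr_entries; i++) {
                efi_memory_desc_t *md = &map[i];
                u64 start, end;

                /* Skip regions that do not carry the requested attribute. */
                if (!(md->attribute & attr))
                        continue;

                start = md->phys_addr;
                end   = start + (md->num_pages << EFI_PAGE_SHIFT);
                if (callback(start, end, arg))
                        break;  /* a non-zero return stops the walk */
        }
}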
/arch/arm/
Kconfig:1375 r3p*) erratum. A speculative memory access may cause a page table walk