Searched refs:walk (Results 1 – 25 of 28) sorted by relevance

/arch/sparc/mm/
extable.c
19 const struct exception_table_entry *walk; in search_extable() local
39 for (walk = start; walk <= last; walk++) { in search_extable()
40 if (walk->fixup == 0) { in search_extable()
42 walk++; in search_extable()
47 if (walk->fixup == -1) in search_extable()
50 if (walk->insn == value) in search_extable()
51 return walk; in search_extable()
55 for (walk = start; walk <= (last - 1); walk++) { in search_extable()
56 if (walk->fixup) in search_extable()
59 if (walk[0].insn <= value && walk[1].insn > value) in search_extable()
[all …]
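
Note on the extable.c hit: search_extable() makes two passes over a sorted exception table in which a fixup of 0 opens a two-entry range record and a fixup of -1 marks a deleted entry. A minimal sketch of that logic, reconstructed from the lines above with simplified types (kernel headers assumed):

/* Simplified model of sparc's exception table records: fixup == 0
 * opens a two-entry range record, fixup == -1 is a deleted entry. */
struct extable_entry {
	unsigned long insn;
	unsigned long fixup;
};

static const struct extable_entry *
search_extable_sketch(const struct extable_entry *start,
		      const struct extable_entry *last,
		      unsigned long value)
{
	const struct extable_entry *walk;

	/* Pass 1: exact single-instruction matches. */
	for (walk = start; walk <= last; walk++) {
		if (walk->fixup == 0) {		/* range record: skip both halves */
			walk++;
			continue;
		}
		if (walk->fixup == (unsigned long)-1)	/* deleted entry */
			continue;
		if (walk->insn == value)
			return walk;
	}

	/* Pass 2: range records, matched as [walk[0].insn, walk[1].insn). */
	for (walk = start; walk <= last - 1; walk++) {
		if (walk->fixup)	/* only fixup == 0 opens a range */
			continue;
		if (walk[0].insn <= value && walk[1].insn > value)
			return walk;
		walk++;			/* skip the record's second half */
	}

	return NULL;
}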
/arch/x86/crypto/
glue_helper.c
37 struct blkcipher_walk *walk) in __glue_ecb_crypt_128bit() argument
45 err = blkcipher_walk_virt(desc, walk); in __glue_ecb_crypt_128bit()
47 while ((nbytes = walk->nbytes)) { in __glue_ecb_crypt_128bit()
48 u8 *wsrc = walk->src.virt.addr; in __glue_ecb_crypt_128bit()
49 u8 *wdst = walk->dst.virt.addr; in __glue_ecb_crypt_128bit()
74 err = blkcipher_walk_done(desc, walk, nbytes); in __glue_ecb_crypt_128bit()
85 struct blkcipher_walk walk; in glue_ecb_crypt_128bit() local
87 blkcipher_walk_init(&walk, dst, src, nbytes); in glue_ecb_crypt_128bit()
88 return __glue_ecb_crypt_128bit(gctx, desc, &walk); in glue_ecb_crypt_128bit()
94 struct blkcipher_walk *walk) in __glue_cbc_encrypt_128bit() argument
[all …]
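
Note on the glue_helper.c hit: every blkcipher-based driver on this page follows the same walk protocol: blkcipher_walk_init() over the scatterlists, blkcipher_walk_virt() to map a contiguous span, a loop that consumes whole blocks, and blkcipher_walk_done() to report the unprocessed tail and advance to the next span. Below is a skeleton of that protocol, written against the legacy blkcipher API shown here (since removed in favor of skcipher); process_blocks() is a hypothetical stand-in for the per-algorithm cipher routine.

/* ECB walk skeleton (legacy blkcipher API, kernel headers assumed).
 * process_blocks() is hypothetical; bsize is the cipher block size. */
extern void process_blocks(struct blkcipher_desc *desc, u8 *dst, const u8 *src);

static int ecb_walk_sketch(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes, unsigned int bsize)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);		/* map first span */

	while ((nbytes = walk.nbytes)) {
		u8 *wsrc = walk.src.virt.addr;
		u8 *wdst = walk.dst.virt.addr;

		do {					/* whole blocks only */
			process_blocks(desc, wdst, wsrc);
			wsrc += bsize;
			wdst += bsize;
			nbytes -= bsize;
		} while (nbytes >= bsize);

		/* hand back the sub-block remainder; maps the next span */
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	return err;
}

The same skeleton recurs in blowfish_glue.c, cast5_avx_glue.c, and des3_ede_glue.c below, and again in the s390, powerpc, and sparc glue files; the drivers differ only in what runs inside the loop (aesni-intel_glue.c, for instance, hands the whole span to one multi-block assembly call, and aes-spe-glue.c appears to cap each step, reporting the rest as ubytes, to bound the time SPE state is held live).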
blowfish_glue.c
80 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
89 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
91 while ((nbytes = walk->nbytes)) { in ecb_crypt()
92 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
93 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
119 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
128 struct blkcipher_walk walk; in ecb_encrypt() local
130 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
131 return ecb_crypt(desc, &walk, blowfish_enc_blk, blowfish_enc_blk_4way); in ecb_encrypt()
137 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
cast5_avx_glue.c
59 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
69 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
72 while ((nbytes = walk->nbytes)) { in ecb_crypt()
73 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
74 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
105 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
115 struct blkcipher_walk walk; in ecb_encrypt() local
117 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
118 return ecb_crypt(desc, &walk, true); in ecb_encrypt()
124 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
des3_ede_glue.c
86 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
93 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
95 while ((nbytes = walk->nbytes)) { in ecb_crypt()
96 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
97 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
124 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
134 struct blkcipher_walk walk; in ecb_encrypt() local
136 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
137 return ecb_crypt(desc, &walk, ctx->enc_expkey); in ecb_encrypt()
144 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
salsa20_glue.c
52 struct blkcipher_walk walk; in encrypt() local
57 blkcipher_walk_init(&walk, dst, src, nbytes); in encrypt()
58 err = blkcipher_walk_virt_block(desc, &walk, 64); in encrypt()
60 salsa20_ivsetup(ctx, walk.iv); in encrypt()
62 while (walk.nbytes >= 64) { in encrypt()
63 salsa20_encrypt_bytes(ctx, walk.src.virt.addr, in encrypt()
64 walk.dst.virt.addr, in encrypt()
65 walk.nbytes - (walk.nbytes % 64)); in encrypt()
66 err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64); in encrypt()
69 if (walk.nbytes) { in encrypt()
[all …]
chacha20_glue.c
70 struct blkcipher_walk walk; in chacha20_simd() local
78 blkcipher_walk_init(&walk, dst, src, nbytes); in chacha20_simd()
79 err = blkcipher_walk_virt_block(desc, &walk, CHACHA20_BLOCK_SIZE); in chacha20_simd()
82 crypto_chacha20_init(state, crypto_blkcipher_ctx(desc->tfm), walk.iv); in chacha20_simd()
86 while (walk.nbytes >= CHACHA20_BLOCK_SIZE) { in chacha20_simd()
87 chacha20_dosimd(state, walk.dst.virt.addr, walk.src.virt.addr, in chacha20_simd()
88 rounddown(walk.nbytes, CHACHA20_BLOCK_SIZE)); in chacha20_simd()
89 err = blkcipher_walk_done(desc, &walk, in chacha20_simd()
90 walk.nbytes % CHACHA20_BLOCK_SIZE); in chacha20_simd()
93 if (walk.nbytes) { in chacha20_simd()
[all …]
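
Note on the salsa20_glue.c and chacha20_glue.c hits: stream ciphers use the block-granular variant blkcipher_walk_virt_block(), which keeps every mapped span a multiple of the requested chunk size until the final one, so a trailing branch handles the partial block. A sketch under that assumption; xor_keystream() is a hypothetical stand-in for salsa20_encrypt_bytes()/chacha20_dosimd():

#define CHUNK 64	/* salsa20/chacha20 keystream block size */

extern void xor_keystream(u32 *state, u8 *dst, const u8 *src, unsigned int n);

static int stream_walk_sketch(struct blkcipher_desc *desc,
			      struct scatterlist *dst, struct scatterlist *src,
			      unsigned int nbytes, u32 *state)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, CHUNK);

	while (walk.nbytes >= CHUNK) {		/* whole chunks */
		xor_keystream(state, walk.dst.virt.addr, walk.src.virt.addr,
			      walk.nbytes - (walk.nbytes % CHUNK));
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % CHUNK);
	}
	if (walk.nbytes) {			/* final partial block */
		xor_keystream(state, walk.dst.virt.addr, walk.src.virt.addr,
			      walk.nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	return err;
}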
aesni-intel_glue.c
379 struct blkcipher_walk walk; in ecb_encrypt() local
382 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
383 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
387 while ((nbytes = walk.nbytes)) { in ecb_encrypt()
388 aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
391 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt()
403 struct blkcipher_walk walk; in ecb_decrypt() local
406 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
407 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
411 while ((nbytes = walk.nbytes)) { in ecb_decrypt()
[all …]
/arch/arm/crypto/
aesbs-glue.c
109 struct blkcipher_walk walk; in aesbs_cbc_encrypt() local
112 blkcipher_walk_init(&walk, dst, src, nbytes); in aesbs_cbc_encrypt()
113 err = blkcipher_walk_virt(desc, &walk); in aesbs_cbc_encrypt()
115 while (walk.nbytes) { in aesbs_cbc_encrypt()
116 u32 blocks = walk.nbytes / AES_BLOCK_SIZE; in aesbs_cbc_encrypt()
117 u8 *src = walk.src.virt.addr; in aesbs_cbc_encrypt()
119 if (walk.dst.virt.addr == walk.src.virt.addr) { in aesbs_cbc_encrypt()
120 u8 *iv = walk.iv; in aesbs_cbc_encrypt()
128 memcpy(walk.iv, iv, AES_BLOCK_SIZE); in aesbs_cbc_encrypt()
130 u8 *dst = walk.dst.virt.addr; in aesbs_cbc_encrypt()
[all …]
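
Note on the aesbs-glue.c hit: the CBC-encrypt walk splits on whether the span is in place (walk.dst.virt.addr == walk.src.virt.addr). In place, chaining runs through the buffer itself and only the last ciphertext block is copied back into walk.iv; out of place, each block chains through walk.iv directly. A sketch of that branch structure; block_encrypt() is a hypothetical stand-in for the AES primitive:

extern void block_encrypt(u8 *out, const u8 *in);	/* hypothetical */

static void xor_block(u8 *dst, const u8 *src)
{
	int i;

	for (i = 0; i < AES_BLOCK_SIZE; i++)
		dst[i] ^= src[i];
}

static int cbc_encrypt_sketch(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk)
{
	int err = blkcipher_walk_virt(desc, walk);

	while (walk->nbytes) {
		u32 blocks = walk->nbytes / AES_BLOCK_SIZE;
		u8 *src = walk->src.virt.addr;

		if (walk->dst.virt.addr == walk->src.virt.addr) {
			u8 *iv = walk->iv;

			do {
				xor_block(src, iv);	  /* P ^= IV */
				block_encrypt(src, src);  /* encrypt in place */
				iv = src;		  /* chain on result */
				src += AES_BLOCK_SIZE;
			} while (--blocks);
			memcpy(walk->iv, iv, AES_BLOCK_SIZE);	/* next span */
		} else {
			u8 *dst = walk->dst.virt.addr;

			do {
				xor_block(walk->iv, src);	 /* IV ^= P */
				block_encrypt(dst, walk->iv);	 /* C = E(IV^P) */
				memcpy(walk->iv, dst, AES_BLOCK_SIZE);
				src += AES_BLOCK_SIZE;
				dst += AES_BLOCK_SIZE;
			} while (--blocks);
		}
		err = blkcipher_walk_done(desc, walk,
					  walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}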
aes-ce-glue.c
175 struct blkcipher_walk walk; in ecb_encrypt() local
180 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
181 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
184 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
185 ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
187 err = blkcipher_walk_done(desc, &walk, in ecb_encrypt()
188 walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
198 struct blkcipher_walk walk; in ecb_decrypt() local
203 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
204 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
[all …]
speck-neon-glue.c
56 struct blkcipher_walk walk; in __speck128_xts_crypt() local
60 blkcipher_walk_init(&walk, dst, src, nbytes); in __speck128_xts_crypt()
61 err = blkcipher_walk_virt_block(desc, &walk, SPECK_NEON_CHUNK_SIZE); in __speck128_xts_crypt()
63 crypto_speck128_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv); in __speck128_xts_crypt()
65 while (walk.nbytes > 0) { in __speck128_xts_crypt()
66 unsigned int nbytes = walk.nbytes; in __speck128_xts_crypt()
67 u8 *dst = walk.dst.virt.addr; in __speck128_xts_crypt()
68 const u8 *src = walk.src.virt.addr; in __speck128_xts_crypt()
95 err = blkcipher_walk_done(desc, &walk, nbytes); in __speck128_xts_crypt()
167 struct blkcipher_walk walk; in __speck64_xts_crypt() local
[all …]
/arch/arm64/crypto/
aes-glue.c
104 struct blkcipher_walk walk; in ecb_encrypt() local
108 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
109 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
112 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in ecb_encrypt()
113 aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
115 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
126 struct blkcipher_walk walk; in ecb_decrypt() local
130 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
131 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
134 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in ecb_decrypt()
[all …]
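
Note on the aes-ce-glue.c and aes-glue.c hits: the ARM/arm64 Crypto Extensions glue counts whole blocks per span and passes walk.nbytes % AES_BLOCK_SIZE straight back to blkcipher_walk_done(); arm64 additionally threads a first flag through the loop, which the snippets suggest gates one-time setup in the assembly routine (e.g. loading the round keys). A sketch; neon_ecb_encrypt() is hypothetical:

extern void neon_ecb_encrypt(u8 *dst, const u8 *src, const u8 *key,
			     int rounds, int blocks, int first);

static int ce_ecb_sketch(struct blkcipher_desc *desc,
			 struct scatterlist *dst, struct scatterlist *src,
			 unsigned int nbytes, const u8 *key_enc, int rounds)
{
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err, first;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	for (first = 1; (blocks = walk.nbytes / AES_BLOCK_SIZE); first = 0) {
		neon_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				 key_enc, rounds, blocks, first);
		/* report only the sub-block tail; the walk advances */
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}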
aes-ce-ccm-glue.c
111 struct scatter_walk walk; in ccm_calculate_auth_mac() local
127 scatterwalk_start(&walk, req->src); in ccm_calculate_auth_mac()
130 u32 n = scatterwalk_clamp(&walk, len); in ccm_calculate_auth_mac()
134 scatterwalk_start(&walk, sg_next(walk.sg)); in ccm_calculate_auth_mac()
135 n = scatterwalk_clamp(&walk, len); in ccm_calculate_auth_mac()
137 p = scatterwalk_map(&walk); in ccm_calculate_auth_mac()
143 scatterwalk_advance(&walk, n); in ccm_calculate_auth_mac()
144 scatterwalk_done(&walk, 0, len); in ccm_calculate_auth_mac()
153 struct blkcipher_walk walk; in ccm_encrypt() local
180 blkcipher_walk_init(&walk, dst, src, len); in ccm_encrypt()
[all …]
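
Note on the aes-ce-ccm-glue.c hit: ccm_calculate_auth_mac() uses the lower-level scatter_walk rather than a blkcipher walk to fold associated data into the MAC: clamp to the current scatterlist entry, map it, consume, advance, and hop to sg_next() when an entry is exhausted. A sketch built from the calls shown above; mac_update() is a hypothetical stand-in for the MAC step:

extern void mac_update(u8 *p, u32 n);	/* hypothetical */

static void auth_walk_sketch(struct scatterlist *src, u32 len)
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, src);
	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {	/* entry exhausted: hop to the next one */
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		mac_update(p, n);		/* fold n bytes into the MAC */
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);	/* 0: input only; len: more to come */
	} while (len);
}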
speck-neon-glue.c
50 struct blkcipher_walk walk; in __speck128_xts_crypt() local
54 blkcipher_walk_init(&walk, dst, src, nbytes); in __speck128_xts_crypt()
55 err = blkcipher_walk_virt_block(desc, &walk, SPECK_NEON_CHUNK_SIZE); in __speck128_xts_crypt()
57 crypto_speck128_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv); in __speck128_xts_crypt()
59 while (walk.nbytes > 0) { in __speck128_xts_crypt()
60 unsigned int nbytes = walk.nbytes; in __speck128_xts_crypt()
61 u8 *dst = walk.dst.virt.addr; in __speck128_xts_crypt()
62 const u8 *src = walk.src.virt.addr; in __speck128_xts_crypt()
89 err = blkcipher_walk_done(desc, &walk, nbytes); in __speck128_xts_crypt()
161 struct blkcipher_walk walk; in __speck64_xts_crypt() local
[all …]
/arch/s390/crypto/
des_s390.c
87 u8 *key, struct blkcipher_walk *walk) in ecb_desall_crypt() argument
89 int ret = blkcipher_walk_virt(desc, walk); in ecb_desall_crypt()
92 while ((nbytes = walk->nbytes)) { in ecb_desall_crypt()
95 u8 *out = walk->dst.virt.addr; in ecb_desall_crypt()
96 u8 *in = walk->src.virt.addr; in ecb_desall_crypt()
103 ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_desall_crypt()
110 struct blkcipher_walk *walk) in cbc_desall_crypt() argument
113 int ret = blkcipher_walk_virt(desc, walk); in cbc_desall_crypt()
114 unsigned int nbytes = walk->nbytes; in cbc_desall_crypt()
123 memcpy(param.iv, walk->iv, DES_BLOCK_SIZE); in cbc_desall_crypt()
[all …]
aes_s390.c
317 struct blkcipher_walk *walk) in ecb_aes_crypt() argument
319 int ret = blkcipher_walk_virt(desc, walk); in ecb_aes_crypt()
322 while ((nbytes = walk->nbytes)) { in ecb_aes_crypt()
325 u8 *out = walk->dst.virt.addr; in ecb_aes_crypt()
326 u8 *in = walk->src.virt.addr; in ecb_aes_crypt()
333 ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_aes_crypt()
344 struct blkcipher_walk walk; in ecb_aes_encrypt() local
349 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_encrypt()
350 return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk); in ecb_aes_encrypt()
358 struct blkcipher_walk walk; in ecb_aes_decrypt() local
[all …]
/arch/powerpc/crypto/
aes-spe-glue.c
183 struct blkcipher_walk walk; in ppc_ecb_encrypt() local
188 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_encrypt()
189 err = blkcipher_walk_virt(desc, &walk); in ppc_ecb_encrypt()
191 while ((nbytes = walk.nbytes)) { in ppc_ecb_encrypt()
197 ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr, in ppc_ecb_encrypt()
201 err = blkcipher_walk_done(desc, &walk, ubytes); in ppc_ecb_encrypt()
211 struct blkcipher_walk walk; in ppc_ecb_decrypt() local
216 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_decrypt()
217 err = blkcipher_walk_virt(desc, &walk); in ppc_ecb_decrypt()
219 while ((nbytes = walk.nbytes)) { in ppc_ecb_decrypt()
[all …]
/arch/sparc/crypto/
aes_glue.c
220 struct blkcipher_walk walk; in ecb_encrypt() local
223 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
224 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
228 while ((nbytes = walk.nbytes)) { in ecb_encrypt()
233 (const u64 *)walk.src.virt.addr, in ecb_encrypt()
234 (u64 *) walk.dst.virt.addr, in ecb_encrypt()
238 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt()
249 struct blkcipher_walk walk; in ecb_decrypt() local
253 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
254 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
[all …]
des_glue.c
98 struct blkcipher_walk walk; in __ecb_crypt() local
101 blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt()
102 err = blkcipher_walk_virt(desc, &walk); in __ecb_crypt()
109 while ((nbytes = walk.nbytes)) { in __ecb_crypt()
113 des_sparc64_ecb_crypt((const u64 *)walk.src.virt.addr, in __ecb_crypt()
114 (u64 *) walk.dst.virt.addr, in __ecb_crypt()
118 err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt()
146 struct blkcipher_walk walk; in cbc_encrypt() local
149 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_encrypt()
150 err = blkcipher_walk_virt(desc, &walk); in cbc_encrypt()
[all …]
camellia_glue.c
90 struct blkcipher_walk walk; in __ecb_crypt() local
99 blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt()
100 err = blkcipher_walk_virt(desc, &walk); in __ecb_crypt()
108 while ((nbytes = walk.nbytes)) { in __ecb_crypt()
115 src64 = (const u64 *)walk.src.virt.addr; in __ecb_crypt()
116 dst64 = (u64 *) walk.dst.virt.addr; in __ecb_crypt()
120 err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt()
153 struct blkcipher_walk walk; in cbc_encrypt() local
162 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_encrypt()
163 err = blkcipher_walk_virt(desc, &walk); in cbc_encrypt()
[all …]
/arch/openrisc/kernel/
dma.c
33 unsigned long next, struct mm_walk *walk) in page_set_nocache() argument
54 unsigned long next, struct mm_walk *walk) in page_clear_nocache() argument
90 struct mm_walk walk = { in or1k_dma_alloc() local
109 if (walk_page_range(va, va + size, &walk)) { in or1k_dma_alloc()
123 struct mm_walk walk = { in or1k_dma_free() local
130 WARN_ON(walk_page_range(va, va + size, &walk)); in or1k_dma_free()
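
Note on the dma.c hit: this one is the generic page-table walker, not a crypto walk: or1k_dma_alloc() fills in a struct mm_walk with a .pte_entry callback plus an mm, and walk_page_range() invokes the callback for every PTE in [va, va + size). The snippets show the older three-argument walk_page_range() with mm embedded in the struct (modern kernels pass mm and a const ops table separately). A sketch under that older API; pte_mknocache() is a hypothetical helper for whatever the architecture uses to mark a PTE uncacheable:

extern pte_t pte_mknocache(pte_t pte);	/* hypothetical */

static int page_set_nocache_sketch(pte_t *pte, unsigned long addr,
				   unsigned long next, struct mm_walk *walk)
{
	if (!pte_present(*pte))
		return -EINVAL;		/* nonzero return aborts the walk */

	set_pte(pte, pte_mknocache(*pte));	/* mark uncacheable */
	flush_tlb_kernel_range(addr, next);	/* drop the stale mapping */
	return 0;
}

static void *dma_alloc_sketch(unsigned long va, size_t size)
{
	struct mm_walk walk = {
		.pte_entry = page_set_nocache_sketch,
		.mm	   = &init_mm,
	};

	/* walk_page_range() propagates any callback error. */
	if (walk_page_range(va, va + size, &walk))
		return NULL;

	return (void *)va;
}

pgtable.c and subpage-prot.c below drive the same walker, with a .pte_entry and a .pmd_entry callback respectively.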
/arch/x86/crypto/sha-mb/
sha1_mb.c
385 nbytes = crypto_ahash_walk_done(&rctx->walk, 0); in sha_finish_walk()
391 if (crypto_ahash_walk_last(&rctx->walk)) { in sha_finish_walk()
399 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, flag); in sha_finish_walk()
510 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha1_mb_update()
517 if (crypto_ahash_walk_last(&rctx->walk)) in sha1_mb_update()
524 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, HASH_UPDATE); in sha1_mb_update()
568 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha1_mb_finup()
575 if (crypto_ahash_walk_last(&rctx->walk)) { in sha1_mb_finup()
587 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, flag); in sha1_mb_finup()
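
Note on the sha1_mb.c hits: hashing walks the request data with crypto_ahash_walk_first()/crypto_ahash_walk_done(), and crypto_ahash_walk_last() identifies the final span so the submit flag can be switched from HASH_UPDATE, as the snippets suggest. The real driver is asynchronous and resumes the walk in sha_finish_walk(); the sketch below flattens it into a synchronous loop, with submit_span() as a hypothetical stand-in for sha1_ctx_mgr_submit():

extern void submit_span(const u8 *data, int nbytes, int flag);	/* hypothetical */

static int mb_update_sketch(struct ahash_request *req,
			    struct mcryptd_hash_request_ctx *rctx)
{
	int nbytes = crypto_ahash_walk_first(req, &rctx->walk);

	if (nbytes < 0)
		return nbytes;

	while (nbytes) {
		int flag = HASH_UPDATE;

		if (crypto_ahash_walk_last(&rctx->walk))
			flag = HASH_LAST;	/* final span of the request */

		submit_span(rctx->walk.data, nbytes, flag);
		nbytes = crypto_ahash_walk_done(&rctx->walk, 0);
	}
	return 0;
}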
/arch/s390/mm/
pgtable.c
1156 unsigned long next, struct mm_walk *walk) in __s390_enable_skey() argument
1168 ptep_flush_direct(walk->mm, addr, pte); in __s390_enable_skey()
1183 struct mm_walk walk = { .pte_entry = __s390_enable_skey }; in s390_enable_skey() local
1203 walk.mm = mm; in s390_enable_skey()
1204 walk_page_range(0, TASK_SIZE, &walk); in s390_enable_skey()
1216 unsigned long next, struct mm_walk *walk) in __s390_reset_cmma() argument
1228 struct mm_walk walk = { .pte_entry = __s390_reset_cmma }; in s390_reset_cmma() local
1231 walk.mm = mm; in s390_reset_cmma()
1232 walk_page_range(0, TASK_SIZE, &walk); in s390_reset_cmma()
/arch/powerpc/mm/
subpage-prot.c
135 unsigned long end, struct mm_walk *walk) in subpage_walk_pmd_entry() argument
137 struct vm_area_struct *vma = walk->vma; in subpage_walk_pmd_entry()
/arch/ia64/kernel/
efi.c
305 walk (efi_freemem_callback_t callback, void *arg, u64 attr) in walk() function
329 walk(callback, arg, EFI_MEMORY_WB); in efi_memmap_walk()
339 walk(callback, arg, EFI_MEMORY_UC); in efi_memmap_walk_uc()
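
Note on the efi.c hit: here walk is a function rather than a variable. It filters the EFI memory map by attribute and applies a callback to each matching region; efi_memmap_walk() and efi_memmap_walk_uc() are thin wrappers selecting write-back versus uncached memory. A structural sketch; the flat region array is a hypothetical simplification of the kernel's parsed map:

typedef int (*efi_freemem_callback_t)(u64 start, u64 end, void *arg);

struct region {			/* hypothetical simplified map entry */
	u64 start, end, attr;
};
extern struct region memmap[];
extern int memmap_len;

static void walk(efi_freemem_callback_t callback, void *arg, u64 attr)
{
	int i;

	for (i = 0; i < memmap_len; i++) {
		if ((memmap[i].attr & attr) != attr)
			continue;	/* wrong cacheability: skip */
		if (callback(memmap[i].start, memmap[i].end, arg) < 0)
			return;		/* callback asked to stop early */
	}
}

void efi_memmap_walk(efi_freemem_callback_t callback, void *arg)
{
	walk(callback, arg, EFI_MEMORY_WB);	/* write-back cacheable */
}

void efi_memmap_walk_uc(efi_freemem_callback_t callback, void *arg)
{
	walk(callback, arg, EFI_MEMORY_UC);	/* uncached */
}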
