
Searched refs:walk (Results 1 – 25 of 30) sorted by relevance


/arch/sparc/mm/
extable.c
19 const struct exception_table_entry *walk; in search_extable() local
39 for (walk = start; walk <= last; walk++) { in search_extable()
40 if (walk->fixup == 0) { in search_extable()
42 walk++; in search_extable()
47 if (walk->fixup == -1) in search_extable()
50 if (walk->insn == value) in search_extable()
51 return walk; in search_extable()
55 for (walk = start; walk <= (last - 1); walk++) { in search_extable()
56 if (walk->fixup) in search_extable()
59 if (walk[0].insn <= value && walk[1].insn > value) in search_extable()
[all …]
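The extable.c hits above scan the sparc exception table entry by entry, comparing the faulting address against each entry's insn field and treating fixup values of 0 and -1 as special markers. A minimal sketch of just the single-entry matching pass, assuming the two-word insn/fixup layout visible in the snippet; find_fixup and extable_entry are illustrative names, and the range-entry handling of the second loop is omitted:

```c
/* Sketch of the linear extable scan pattern above; single-entry
 * matches only, special fixup markers and range entries omitted.
 */
struct extable_entry {			/* assumed layout, per the snippet */
	unsigned int insn, fixup;
};

static const struct extable_entry *
find_fixup(const struct extable_entry *start,
	   const struct extable_entry *last, unsigned long value)
{
	const struct extable_entry *walk;

	for (walk = start; walk <= last; walk++)
		if (walk->insn == value)	/* faulting address matches */
			return walk;

	return NULL;				/* no fixup registered */
}
```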
/arch/x86/crypto/
glue_helper.c
37 struct blkcipher_walk *walk) in __glue_ecb_crypt_128bit() argument
45 err = blkcipher_walk_virt(desc, walk); in __glue_ecb_crypt_128bit()
47 while ((nbytes = walk->nbytes)) { in __glue_ecb_crypt_128bit()
48 u8 *wsrc = walk->src.virt.addr; in __glue_ecb_crypt_128bit()
49 u8 *wdst = walk->dst.virt.addr; in __glue_ecb_crypt_128bit()
74 err = blkcipher_walk_done(desc, walk, nbytes); in __glue_ecb_crypt_128bit()
85 struct blkcipher_walk walk; in glue_ecb_crypt_128bit() local
87 blkcipher_walk_init(&walk, dst, src, nbytes); in glue_ecb_crypt_128bit()
88 return __glue_ecb_crypt_128bit(gctx, desc, &walk); in glue_ecb_crypt_128bit()
94 struct blkcipher_walk *walk) in __glue_cbc_encrypt_128bit() argument
[all …]
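The x86 glue files in this group all use the same ECB walk shape from the old blkcipher interface: initialise the walk over the scatterlists, map each contiguous chunk into virtual addresses, process whole blocks in place, and pass the unprocessed remainder back to blkcipher_walk_done(). A sketch of that recurring loop, assuming the pre-skcipher blkcipher API shown in the hits; ecb_walk_crypt, crypt_one_block and block_size are illustrative names rather than anything taken from the files above:

```c
/* Sketch of the recurring ECB walk loop, distilled from the snippets
 * above. Assumes the old blkcipher API (<crypto/algapi.h>);
 * crypt_one_block() stands in for the cipher-specific primitive.
 */
#include <crypto/algapi.h>

static int ecb_walk_crypt(struct blkcipher_desc *desc,
			  struct scatterlist *dst, struct scatterlist *src,
			  unsigned int nbytes,
			  void (*crypt_one_block)(void *ctx, u8 *dst,
						  const u8 *src),
			  void *ctx, unsigned int block_size)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		u8 *wsrc = walk.src.virt.addr;
		u8 *wdst = walk.dst.virt.addr;

		/* Process as many whole blocks as this chunk contains. */
		while (nbytes >= block_size) {
			crypt_one_block(ctx, wdst, wsrc);
			wsrc += block_size;
			wdst += block_size;
			nbytes -= block_size;
		}

		/* Hand the unprocessed remainder back to the walk. */
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}
```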
blowfish_glue.c
80 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
89 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
91 while ((nbytes = walk->nbytes)) { in ecb_crypt()
92 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
93 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
119 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
128 struct blkcipher_walk walk; in ecb_encrypt() local
130 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
131 return ecb_crypt(desc, &walk, blowfish_enc_blk, blowfish_enc_blk_4way); in ecb_encrypt()
137 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
cast5_avx_glue.c
59 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
69 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
72 while ((nbytes = walk->nbytes)) { in ecb_crypt()
73 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
74 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
105 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
115 struct blkcipher_walk walk; in ecb_encrypt() local
117 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
118 return ecb_crypt(desc, &walk, true); in ecb_encrypt()
124 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
des3_ede_glue.c
86 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk, in ecb_crypt() argument
93 err = blkcipher_walk_virt(desc, walk); in ecb_crypt()
95 while ((nbytes = walk->nbytes)) { in ecb_crypt()
96 u8 *wsrc = walk->src.virt.addr; in ecb_crypt()
97 u8 *wdst = walk->dst.virt.addr; in ecb_crypt()
124 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt()
134 struct blkcipher_walk walk; in ecb_encrypt() local
136 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
137 return ecb_crypt(desc, &walk, ctx->enc_expkey); in ecb_encrypt()
144 struct blkcipher_walk walk; in ecb_decrypt() local
[all …]
salsa20_glue.c
52 struct blkcipher_walk walk; in encrypt() local
57 blkcipher_walk_init(&walk, dst, src, nbytes); in encrypt()
58 err = blkcipher_walk_virt_block(desc, &walk, 64); in encrypt()
60 salsa20_ivsetup(ctx, walk.iv); in encrypt()
62 while (walk.nbytes >= 64) { in encrypt()
63 salsa20_encrypt_bytes(ctx, walk.src.virt.addr, in encrypt()
64 walk.dst.virt.addr, in encrypt()
65 walk.nbytes - (walk.nbytes % 64)); in encrypt()
66 err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64); in encrypt()
69 if (walk.nbytes) { in encrypt()
[all …]
chacha20_glue.c
70 struct blkcipher_walk walk; in chacha20_simd() local
78 blkcipher_walk_init(&walk, dst, src, nbytes); in chacha20_simd()
79 err = blkcipher_walk_virt_block(desc, &walk, CHACHA20_BLOCK_SIZE); in chacha20_simd()
81 crypto_chacha20_init(state, crypto_blkcipher_ctx(desc->tfm), walk.iv); in chacha20_simd()
85 while (walk.nbytes >= CHACHA20_BLOCK_SIZE) { in chacha20_simd()
86 chacha20_dosimd(state, walk.dst.virt.addr, walk.src.virt.addr, in chacha20_simd()
87 rounddown(walk.nbytes, CHACHA20_BLOCK_SIZE)); in chacha20_simd()
88 err = blkcipher_walk_done(desc, &walk, in chacha20_simd()
89 walk.nbytes % CHACHA20_BLOCK_SIZE); in chacha20_simd()
92 if (walk.nbytes) { in chacha20_simd()
[all …]
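The salsa20 and chacha20 hits use the walk in stream-cipher fashion: blkcipher_walk_virt_block() keeps each chunk at least one block long, the loop processes each chunk rounded down to a block multiple, and a trailing partial block is handled once the loop exits. A sketch of that shape, again against the old blkcipher API; stream_walk_crypt, process_bytes and CHUNK_BLOCK_SIZE are illustrative stand-ins for the cipher-specific pieces:

```c
/* Sketch of the stream-cipher walk shape seen in salsa20_glue.c and
 * chacha20_glue.c above; cipher state setup (IV, key stream) omitted.
 */
#include <crypto/algapi.h>

#define CHUNK_BLOCK_SIZE 64	/* assumed block granularity, as in the hits */

static int stream_walk_crypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst, struct scatterlist *src,
			     unsigned int nbytes,
			     void (*process_bytes)(u8 *dst, const u8 *src,
						   unsigned int bytes))
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, CHUNK_BLOCK_SIZE);

	/* Full blocks: round each chunk down to a block multiple. */
	while (walk.nbytes >= CHUNK_BLOCK_SIZE) {
		process_bytes(walk.dst.virt.addr, walk.src.virt.addr,
			      walk.nbytes - (walk.nbytes % CHUNK_BLOCK_SIZE));
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % CHUNK_BLOCK_SIZE);
	}

	/* Final partial block, if any. */
	if (walk.nbytes) {
		process_bytes(walk.dst.virt.addr, walk.src.virt.addr,
			      walk.nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}
```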
aesni-intel_glue.c
368 struct blkcipher_walk walk; in ecb_encrypt() local
371 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
372 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
376 while ((nbytes = walk.nbytes)) { in ecb_encrypt()
377 aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
380 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt()
392 struct blkcipher_walk walk; in ecb_decrypt() local
395 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
396 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
400 while ((nbytes = walk.nbytes)) { in ecb_decrypt()
[all …]
/arch/arm/crypto/
aesbs-glue.c
115 struct blkcipher_walk walk; in aesbs_cbc_encrypt() local
118 blkcipher_walk_init(&walk, dst, src, nbytes); in aesbs_cbc_encrypt()
119 err = blkcipher_walk_virt(desc, &walk); in aesbs_cbc_encrypt()
121 while (walk.nbytes) { in aesbs_cbc_encrypt()
122 u32 blocks = walk.nbytes / AES_BLOCK_SIZE; in aesbs_cbc_encrypt()
123 u8 *src = walk.src.virt.addr; in aesbs_cbc_encrypt()
125 if (walk.dst.virt.addr == walk.src.virt.addr) { in aesbs_cbc_encrypt()
126 u8 *iv = walk.iv; in aesbs_cbc_encrypt()
134 memcpy(walk.iv, iv, AES_BLOCK_SIZE); in aesbs_cbc_encrypt()
136 u8 *dst = walk.dst.virt.addr; in aesbs_cbc_encrypt()
[all …]
aes-ce-glue.c
180 struct blkcipher_walk walk; in ecb_encrypt() local
185 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
186 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
189 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
190 ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
192 err = blkcipher_walk_done(desc, &walk, in ecb_encrypt()
193 walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
203 struct blkcipher_walk walk; in ecb_decrypt() local
208 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
209 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
[all …]
speck-neon-glue.c
56 struct blkcipher_walk walk; in __speck128_xts_crypt() local
60 blkcipher_walk_init(&walk, dst, src, nbytes); in __speck128_xts_crypt()
61 err = blkcipher_walk_virt_block(desc, &walk, SPECK_NEON_CHUNK_SIZE); in __speck128_xts_crypt()
63 crypto_speck128_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv); in __speck128_xts_crypt()
65 while (walk.nbytes > 0) { in __speck128_xts_crypt()
66 unsigned int nbytes = walk.nbytes; in __speck128_xts_crypt()
67 u8 *dst = walk.dst.virt.addr; in __speck128_xts_crypt()
68 const u8 *src = walk.src.virt.addr; in __speck128_xts_crypt()
95 err = blkcipher_walk_done(desc, &walk, nbytes); in __speck128_xts_crypt()
167 struct blkcipher_walk walk; in __speck64_xts_crypt() local
[all …]
/arch/arm64/crypto/
aes-glue.c
109 struct blkcipher_walk walk; in ecb_encrypt() local
113 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
114 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
117 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in ecb_encrypt()
118 aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
120 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
131 struct blkcipher_walk walk; in ecb_decrypt() local
135 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
136 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
139 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in ecb_decrypt()
[all …]
aes-ce-ccm-glue.c
111 struct scatter_walk walk; in ccm_calculate_auth_mac() local
127 scatterwalk_start(&walk, req->src); in ccm_calculate_auth_mac()
130 u32 n = scatterwalk_clamp(&walk, len); in ccm_calculate_auth_mac()
134 scatterwalk_start(&walk, sg_next(walk.sg)); in ccm_calculate_auth_mac()
135 n = scatterwalk_clamp(&walk, len); in ccm_calculate_auth_mac()
137 p = scatterwalk_map(&walk); in ccm_calculate_auth_mac()
143 scatterwalk_advance(&walk, n); in ccm_calculate_auth_mac()
144 scatterwalk_done(&walk, 0, len); in ccm_calculate_auth_mac()
153 struct blkcipher_walk walk; in ccm_encrypt() local
180 blkcipher_walk_init(&walk, dst, src, len); in ccm_encrypt()
[all …]
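The ccm_calculate_auth_mac() hits show the lower-level scatter_walk being used to feed associated data into the MAC: each scatterlist entry is clamped, mapped, consumed, advanced past, and completed, moving to the next entry when the current one is exhausted. A rough sketch of that iteration; walk_assoc_data and consume are illustrative names, and the real code's MAC and preemption details are left out:

```c
/* Sketch of the scatter_walk iteration from the aes-ce-ccm-glue.c hits.
 * consume() is a placeholder for the per-chunk MAC update.
 */
#include <crypto/scatterwalk.h>

static void walk_assoc_data(struct scatterlist *sg, unsigned int len,
			    void (*consume)(const u8 *data, unsigned int n))
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg);

	while (len) {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			/* Current entry exhausted: move to the next one. */
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}

		p = scatterwalk_map(&walk);
		consume(p, n);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	}
}
```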
speck-neon-glue.c
50 struct blkcipher_walk walk; in __speck128_xts_crypt() local
54 blkcipher_walk_init(&walk, dst, src, nbytes); in __speck128_xts_crypt()
55 err = blkcipher_walk_virt_block(desc, &walk, SPECK_NEON_CHUNK_SIZE); in __speck128_xts_crypt()
57 crypto_speck128_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv); in __speck128_xts_crypt()
59 while (walk.nbytes > 0) { in __speck128_xts_crypt()
60 unsigned int nbytes = walk.nbytes; in __speck128_xts_crypt()
61 u8 *dst = walk.dst.virt.addr; in __speck128_xts_crypt()
62 const u8 *src = walk.src.virt.addr; in __speck128_xts_crypt()
89 err = blkcipher_walk_done(desc, &walk, nbytes); in __speck128_xts_crypt()
161 struct blkcipher_walk walk; in __speck64_xts_crypt() local
[all …]
/arch/s390/crypto/
des_s390.c
89 struct blkcipher_walk *walk) in ecb_desall_crypt() argument
95 ret = blkcipher_walk_virt(desc, walk); in ecb_desall_crypt()
96 while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) { in ecb_desall_crypt()
99 cpacf_km(fc, ctx->key, walk->dst.virt.addr, in ecb_desall_crypt()
100 walk->src.virt.addr, n); in ecb_desall_crypt()
101 ret = blkcipher_walk_done(desc, walk, nbytes - n); in ecb_desall_crypt()
107 struct blkcipher_walk *walk) in cbc_desall_crypt() argument
117 ret = blkcipher_walk_virt(desc, walk); in cbc_desall_crypt()
118 memcpy(param.iv, walk->iv, DES_BLOCK_SIZE); in cbc_desall_crypt()
120 while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) { in cbc_desall_crypt()
[all …]
aes_s390.c
245 struct blkcipher_walk *walk) in ecb_aes_crypt() argument
251 ret = blkcipher_walk_virt(desc, walk); in ecb_aes_crypt()
252 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in ecb_aes_crypt()
256 walk->dst.virt.addr, walk->src.virt.addr, n); in ecb_aes_crypt()
257 ret = blkcipher_walk_done(desc, walk, nbytes - n); in ecb_aes_crypt()
268 struct blkcipher_walk walk; in ecb_aes_encrypt() local
273 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_encrypt()
274 return ecb_aes_crypt(desc, 0, &walk); in ecb_aes_encrypt()
282 struct blkcipher_walk walk; in ecb_aes_decrypt() local
287 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_decrypt()
[all …]
/arch/powerpc/crypto/
aes-spe-glue.c
190 struct blkcipher_walk walk; in ppc_ecb_encrypt() local
195 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_encrypt()
196 err = blkcipher_walk_virt(desc, &walk); in ppc_ecb_encrypt()
198 while ((nbytes = walk.nbytes)) { in ppc_ecb_encrypt()
204 ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr, in ppc_ecb_encrypt()
208 err = blkcipher_walk_done(desc, &walk, ubytes); in ppc_ecb_encrypt()
218 struct blkcipher_walk walk; in ppc_ecb_decrypt() local
223 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_decrypt()
224 err = blkcipher_walk_virt(desc, &walk); in ppc_ecb_decrypt()
226 while ((nbytes = walk.nbytes)) { in ppc_ecb_decrypt()
[all …]
/arch/sparc/crypto/
aes_glue.c
220 struct blkcipher_walk walk; in ecb_encrypt() local
223 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt()
224 err = blkcipher_walk_virt(desc, &walk); in ecb_encrypt()
228 while ((nbytes = walk.nbytes)) { in ecb_encrypt()
233 (const u64 *)walk.src.virt.addr, in ecb_encrypt()
234 (u64 *) walk.dst.virt.addr, in ecb_encrypt()
238 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt()
249 struct blkcipher_walk walk; in ecb_decrypt() local
253 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt()
254 err = blkcipher_walk_virt(desc, &walk); in ecb_decrypt()
[all …]
des_glue.c
98 struct blkcipher_walk walk; in __ecb_crypt() local
101 blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt()
102 err = blkcipher_walk_virt(desc, &walk); in __ecb_crypt()
109 while ((nbytes = walk.nbytes)) { in __ecb_crypt()
113 des_sparc64_ecb_crypt((const u64 *)walk.src.virt.addr, in __ecb_crypt()
114 (u64 *) walk.dst.virt.addr, in __ecb_crypt()
118 err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt()
146 struct blkcipher_walk walk; in cbc_encrypt() local
149 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_encrypt()
150 err = blkcipher_walk_virt(desc, &walk); in cbc_encrypt()
[all …]
camellia_glue.c
90 struct blkcipher_walk walk; in __ecb_crypt() local
99 blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt()
100 err = blkcipher_walk_virt(desc, &walk); in __ecb_crypt()
108 while ((nbytes = walk.nbytes)) { in __ecb_crypt()
115 src64 = (const u64 *)walk.src.virt.addr; in __ecb_crypt()
116 dst64 = (u64 *) walk.dst.virt.addr; in __ecb_crypt()
120 err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt()
153 struct blkcipher_walk walk; in cbc_encrypt() local
162 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_encrypt()
163 err = blkcipher_walk_virt(desc, &walk); in cbc_encrypt()
[all …]
/arch/openrisc/kernel/
dma.c
32 unsigned long next, struct mm_walk *walk) in page_set_nocache() argument
53 unsigned long next, struct mm_walk *walk) in page_clear_nocache() argument
89 struct mm_walk walk = { in or1k_dma_alloc() local
108 if (walk_page_range(va, va + size, &walk)) { in or1k_dma_alloc()
122 struct mm_walk walk = { in or1k_dma_free() local
129 WARN_ON(walk_page_range(va, va + size, &walk)); in or1k_dma_free()
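The dma.c hits (and the gmap.c ones further down) use a different walker entirely: the mm page-table walk, where callbacks are attached to a struct mm_walk and walk_page_range() invokes them for every PTE in the range. A minimal sketch of that older callbacks-in-the-struct interface as it appears in these snippets; touch_pte and walk_kernel_range are illustrative names and the handler body is a placeholder:

```c
/* Sketch of the page-table-walk usage visible in the dma.c hits above.
 * Uses the older mm_walk interface where the callbacks live in the
 * struct itself; the pte_entry handler here does nothing useful.
 */
#include <linux/mm.h>

static int touch_pte(pte_t *pte, unsigned long addr,
		     unsigned long next, struct mm_walk *walk)
{
	/* Inspect or modify *pte for the page at addr here. */
	return 0;
}

static int walk_kernel_range(struct mm_struct *mm,
			     unsigned long va, unsigned long size)
{
	struct mm_walk walk = {
		.pte_entry = touch_pte,
		.mm = mm,
	};

	/* Returns non-zero if a callback aborted the walk. */
	return walk_page_range(va, va + size, &walk);
}
```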
/arch/x86/crypto/sha256-mb/
sha256_mb.c
415 nbytes = crypto_ahash_walk_done(&rctx->walk, 0); in sha_finish_walk()
421 if (crypto_ahash_walk_last(&rctx->walk)) { in sha_finish_walk()
431 rctx->walk.data, nbytes, flag); in sha_finish_walk()
540 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha256_mb_update()
547 if (crypto_ahash_walk_last(&rctx->walk)) in sha256_mb_update()
554 sha_ctx = sha256_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, in sha256_mb_update()
598 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha256_mb_finup()
605 if (crypto_ahash_walk_last(&rctx->walk)) { in sha256_mb_finup()
616 sha_ctx = sha256_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, in sha256_mb_finup()
/arch/x86/crypto/sha512-mb/
sha512_mb.c
427 nbytes = crypto_ahash_walk_done(&rctx->walk, 0); in sha_finish_walk()
433 if (crypto_ahash_walk_last(&rctx->walk)) { in sha_finish_walk()
443 rctx->walk.data, nbytes, flag); in sha_finish_walk()
554 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha512_mb_update()
561 if (crypto_ahash_walk_last(&rctx->walk)) in sha512_mb_update()
568 sha_ctx = sha512_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, in sha512_mb_update()
613 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha512_mb_finup()
620 if (crypto_ahash_walk_last(&rctx->walk)) { in sha512_mb_finup()
631 sha_ctx = sha512_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, in sha512_mb_finup()
/arch/x86/crypto/sha1-mb/
sha1_mb.c
417 nbytes = crypto_ahash_walk_done(&rctx->walk, 0); in sha_finish_walk()
423 if (crypto_ahash_walk_last(&rctx->walk)) { in sha_finish_walk()
433 rctx->walk.data, nbytes, flag); in sha_finish_walk()
543 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha1_mb_update()
550 if (crypto_ahash_walk_last(&rctx->walk)) in sha1_mb_update()
557 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, in sha1_mb_update()
601 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha1_mb_finup()
608 if (crypto_ahash_walk_last(&rctx->walk)) { in sha1_mb_finup()
619 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, in sha1_mb_finup()
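The multi-buffer SHA hits use the ahash request walker: crypto_ahash_walk_first() maps the first chunk of the request and returns its length, walk.data points at the mapped bytes, crypto_ahash_walk_last() flags the final chunk, and crypto_ahash_walk_done() advances to the next one. The real sha*-mb code drives this asynchronously across job submissions; the sketch below only shows the basic loop shape, with hash_request_walk and process as illustrative stand-ins:

```c
/* Sketch of the ahash walk loop shape from the sha*-mb hits above.
 * process() is a placeholder for submitting a chunk to the hash engine.
 */
#include <crypto/internal/hash.h>

static int hash_request_walk(struct ahash_request *req,
			     void (*process)(const void *data, unsigned int n,
					     bool last))
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_ahash_walk_first(req, &walk);  /* map first chunk */
	     nbytes > 0;
	     nbytes = crypto_ahash_walk_done(&walk, 0))      /* advance */
		process(walk.data, nbytes, crypto_ahash_walk_last(&walk));

	return nbytes;	/* 0 when the request is fully consumed, <0 on error */
}
```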
/arch/s390/mm/
gmap.c
2134 unsigned long end, struct mm_walk *walk) in __zap_zero_pages() argument
2142 ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in __zap_zero_pages()
2144 ptep_xchg_direct(walk->mm, addr, ptep, __pte(_PAGE_INVALID)); in __zap_zero_pages()
2152 struct mm_walk walk = { .pmd_entry = __zap_zero_pages }; in zap_zero_pages() local
2154 walk.mm = mm; in zap_zero_pages()
2155 walk_page_range(0, TASK_SIZE, &walk); in zap_zero_pages()
2186 unsigned long next, struct mm_walk *walk) in __s390_enable_skey() argument
2189 ptep_zap_key(walk->mm, addr, pte); in __s390_enable_skey()
2195 struct mm_walk walk = { .pte_entry = __s390_enable_skey }; in s390_enable_skey() local
2215 walk.mm = mm; in s390_enable_skey()
[all …]
