/kernel/linux/linux-5.10/crypto/ |
D | skcipher.c |
    42 static int skcipher_walk_next(struct skcipher_walk *walk);
    44 static inline void skcipher_unmap(struct scatter_walk *walk, void *vaddr) in skcipher_unmap() argument
    46 if (PageHighMem(scatterwalk_page(walk))) in skcipher_unmap()
    50 static inline void *skcipher_map(struct scatter_walk *walk) in skcipher_map() argument
    52 struct page *page = scatterwalk_page(walk); in skcipher_map()
    55 offset_in_page(walk->offset); in skcipher_map()
    58 static inline void skcipher_map_src(struct skcipher_walk *walk) in skcipher_map_src() argument
    60 walk->src.virt.addr = skcipher_map(&walk->in); in skcipher_map_src()
    63 static inline void skcipher_map_dst(struct skcipher_walk *walk) in skcipher_map_dst() argument
    65 walk->dst.virt.addr = skcipher_map(&walk->out); in skcipher_map_dst()
    [all …]
|
D | cfb.c |
    42 static void crypto_cfb_final(struct skcipher_walk *walk, in crypto_cfb_final() argument
    48 u8 *src = walk->src.virt.addr; in crypto_cfb_final()
    49 u8 *dst = walk->dst.virt.addr; in crypto_cfb_final()
    50 u8 *iv = walk->iv; in crypto_cfb_final()
    51 unsigned int nbytes = walk->nbytes; in crypto_cfb_final()
    57 static int crypto_cfb_encrypt_segment(struct skcipher_walk *walk, in crypto_cfb_encrypt_segment() argument
    61 unsigned int nbytes = walk->nbytes; in crypto_cfb_encrypt_segment()
    62 u8 *src = walk->src.virt.addr; in crypto_cfb_encrypt_segment()
    63 u8 *dst = walk->dst.virt.addr; in crypto_cfb_encrypt_segment()
    64 u8 *iv = walk->iv; in crypto_cfb_encrypt_segment()
    [all …]
|
D | cbc.c |
    16 static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk, in crypto_cbc_encrypt_segment() argument
    21 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_segment()
    22 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_segment()
    23 u8 *dst = walk->dst.virt.addr; in crypto_cbc_encrypt_segment()
    26 u8 *iv = walk->iv; in crypto_cbc_encrypt_segment()
    44 static int crypto_cbc_encrypt_inplace(struct skcipher_walk *walk, in crypto_cbc_encrypt_inplace() argument
    49 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_inplace()
    50 u8 *src = walk->src.virt.addr; in crypto_cbc_encrypt_inplace()
    53 u8 *iv = walk->iv; in crypto_cbc_encrypt_inplace()
    67 memcpy(walk->iv, iv, bsize); in crypto_cbc_encrypt_inplace()
    [all …]
|
D | pcbc.c |
    20 struct skcipher_walk *walk, in crypto_pcbc_encrypt_segment() argument
    24 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_segment()
    25 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_segment()
    26 u8 *dst = walk->dst.virt.addr; in crypto_pcbc_encrypt_segment()
    27 u8 * const iv = walk->iv; in crypto_pcbc_encrypt_segment()
    42 struct skcipher_walk *walk, in crypto_pcbc_encrypt_inplace() argument
    46 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_inplace()
    47 u8 *src = walk->src.virt.addr; in crypto_pcbc_encrypt_inplace()
    48 u8 * const iv = walk->iv; in crypto_pcbc_encrypt_inplace()
    67 struct skcipher_walk walk; in crypto_pcbc_encrypt() local
    [all …]
|
D | ofb.c |
    22 struct skcipher_walk walk; in crypto_ofb_crypt() local
    25 err = skcipher_walk_virt(&walk, req, false); in crypto_ofb_crypt()
    27 while (walk.nbytes >= bsize) { in crypto_ofb_crypt()
    28 const u8 *src = walk.src.virt.addr; in crypto_ofb_crypt()
    29 u8 *dst = walk.dst.virt.addr; in crypto_ofb_crypt()
    30 u8 * const iv = walk.iv; in crypto_ofb_crypt()
    31 unsigned int nbytes = walk.nbytes; in crypto_ofb_crypt()
    40 err = skcipher_walk_done(&walk, nbytes); in crypto_ofb_crypt()
    43 if (walk.nbytes) { in crypto_ofb_crypt()
    44 crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv); in crypto_ofb_crypt()
    [all …]
|
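The mode templates listed above (cbc.c, pcbc.c, cfb.c, ofb.c) all drive the same skcipher_walk loop: map the next contiguous span of the source and destination scatterlists, transform it, and report back how much was left over. A minimal sketch of that pattern follows; process_segment() is a hypothetical helper standing in for the per-mode block processing, and this is only an illustration of the walk API shown in the hits, not any one file's implementation.

```c
#include <crypto/internal/skcipher.h>

static int toy_crypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	/* Set up a virtual-address walk over req->src / req->dst. */
	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		/*
		 * walk.src.virt.addr / walk.dst.virt.addr point at a mapped,
		 * contiguous chunk of at most walk.nbytes bytes; walk.iv is
		 * the chaining value carried from one chunk to the next.
		 */
		unsigned int left = process_segment(walk.dst.virt.addr,
						    walk.src.virt.addr,
						    walk.iv, walk.nbytes);

		/* Report how many bytes remain unprocessed in this chunk. */
		err = skcipher_walk_done(&walk, left);
	}

	return err;
}
```

The per-mode files differ only in what process_segment() does with src, dst and iv; the walk setup and completion calls are identical across them.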
D | ahash.c |
    41 static int hash_walk_next(struct crypto_hash_walk *walk) in hash_walk_next() argument
    43 unsigned int alignmask = walk->alignmask; in hash_walk_next()
    44 unsigned int offset = walk->offset; in hash_walk_next()
    45 unsigned int nbytes = min(walk->entrylen, in hash_walk_next()
    48 walk->data = kmap_atomic(walk->pg); in hash_walk_next()
    49 walk->data += offset; in hash_walk_next()
    58 walk->entrylen -= nbytes; in hash_walk_next()
    62 static int hash_walk_new_entry(struct crypto_hash_walk *walk) in hash_walk_new_entry() argument
    66 sg = walk->sg; in hash_walk_new_entry()
    67 walk->offset = sg->offset; in hash_walk_new_entry()
    [all …]
|
/kernel/linux/linux-5.10/include/crypto/ |
D | scatterwalk.h |
    28 static inline unsigned int scatterwalk_pagelen(struct scatter_walk *walk) in scatterwalk_pagelen() argument
    30 unsigned int len = walk->sg->offset + walk->sg->length - walk->offset; in scatterwalk_pagelen()
    31 unsigned int len_this_page = offset_in_page(~walk->offset) + 1; in scatterwalk_pagelen()
    35 static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk, in scatterwalk_clamp() argument
    38 unsigned int len_this_page = scatterwalk_pagelen(walk); in scatterwalk_clamp()
    42 static inline void scatterwalk_advance(struct scatter_walk *walk, in scatterwalk_advance() argument
    45 walk->offset += nbytes; in scatterwalk_advance()
    48 static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk, in scatterwalk_aligned() argument
    51 return !(walk->offset & alignmask); in scatterwalk_aligned()
    54 static inline struct page *scatterwalk_page(struct scatter_walk *walk) in scatterwalk_page() argument
    [all …]
|
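These scatterwalk helpers are the lower-level primitive behind the skcipher walk: clamp to what is contiguous in the current scatterlist entry, map it, consume it, advance. The AEAD glue code further down (aegis128-aesni-glue.c, aes-ce-ccm-glue.c, ghash-ce-glue.c) uses them directly to hash associated data. A hedged sketch of that loop, with a hypothetical absorb() callback in place of the real MAC update:

```c
#include <crypto/scatterwalk.h>

/* Sketch: feed `len` bytes of a scatterlist through absorb() (hypothetical). */
static void toy_walk_sg(struct scatterlist *sg, unsigned int len,
			void (*absorb)(const u8 *p, unsigned int n))
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg);

	while (len) {
		/* Clamp to what is contiguous in the current entry/page. */
		unsigned int n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			/* Current entry exhausted: move to the next one. */
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}

		p = scatterwalk_map(&walk);
		absorb(p, n);
		scatterwalk_unmap(p);

		len -= n;
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	}
}
```

The shape mirrors the ccm_calculate_auth_mac()/gcm_calculate_auth_mac() hits listed below: start, clamp, map, consume, unmap, advance, done, repeated until the requested length is drained.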
D | ctr.h |
    27 struct skcipher_walk walk; in crypto_ctr_encrypt_walk() local
    34 err = skcipher_walk_virt(&walk, req, false); in crypto_ctr_encrypt_walk()
    36 while (walk.nbytes > 0) { in crypto_ctr_encrypt_walk()
    37 u8 *dst = walk.dst.virt.addr; in crypto_ctr_encrypt_walk()
    38 u8 *src = walk.src.virt.addr; in crypto_ctr_encrypt_walk()
    39 int nbytes = walk.nbytes; in crypto_ctr_encrypt_walk()
    42 if (nbytes < walk.total) { in crypto_ctr_encrypt_walk()
    43 tail = walk.nbytes & (blocksize - 1); in crypto_ctr_encrypt_walk()
    50 fn(tfm, walk.iv, buf); in crypto_ctr_encrypt_walk()
    53 crypto_inc(walk.iv, blocksize); in crypto_ctr_encrypt_walk()
    [all …]
|
/kernel/linux/linux-5.10/mm/ |
D | pagewalk.c |
    24 unsigned long end, struct mm_walk *walk) in walk_pte_range_inner() argument
    26 const struct mm_walk_ops *ops = walk->ops; in walk_pte_range_inner()
    30 err = ops->pte_entry(pte, addr, addr + PAGE_SIZE, walk); in walk_pte_range_inner()
    42 struct mm_walk *walk) in walk_pte_range() argument
    48 if (walk->no_vma) { in walk_pte_range()
    50 err = walk_pte_range_inner(pte, addr, end, walk); in walk_pte_range()
    53 pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in walk_pte_range()
    54 err = walk_pte_range_inner(pte, addr, end, walk); in walk_pte_range()
    62 struct mm_walk *walk) in walk_pmd_range() argument
    66 const struct mm_walk_ops *ops = walk->ops; in walk_pmd_range()
    [all …]
|
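On the mm/ side, struct mm_walk carries the per-walk state (mm, vma, ops, private) and the mm_walk_ops callbacks such as pte_entry() are invoked per level, as the walk_pte_range_inner() hit above shows. The following is a minimal hedged sketch of a caller, assuming the 5.10-era walk_page_range(mm, start, end, ops, private) entry point; count_pte/count_ops/count_present are invented names for illustration.

```c
#include <linux/mm.h>
#include <linux/pagewalk.h>

/* Sketch: count present PTEs in [start, end) of an address space. */
static int count_pte(pte_t *pte, unsigned long addr, unsigned long next,
		     struct mm_walk *walk)
{
	unsigned long *count = walk->private;

	if (pte_present(*pte))
		(*count)++;
	return 0;	/* returning non-zero would abort the walk */
}

static const struct mm_walk_ops count_ops = {
	.pte_entry = count_pte,
};

static unsigned long count_present(struct mm_struct *mm,
				   unsigned long start, unsigned long end)
{
	unsigned long count = 0;

	/* walk_page_range() expects the caller to hold the mmap lock. */
	mmap_read_lock(mm);
	walk_page_range(mm, start, end, &count_ops, &count);
	mmap_read_unlock(mm);

	return count;
}
```

The files below (mapping_dirty_helpers.c, ptdump.c, hmm.c) follow this same pattern, stashing their per-walk context in walk->private and reading walk->vma / walk->mm from the callbacks.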
D | mapping_dirty_helpers.c |
    33 struct mm_walk *walk) in wp_pte() argument
    35 struct wp_walk *wpwalk = walk->private; in wp_pte()
    39 pte_t old_pte = ptep_modify_prot_start(walk->vma, addr, pte); in wp_pte()
    42 ptep_modify_prot_commit(walk->vma, addr, pte, old_pte, ptent); in wp_pte()
    87 unsigned long end, struct mm_walk *walk) in clean_record_pte() argument
    89 struct wp_walk *wpwalk = walk->private; in clean_record_pte()
    94 pgoff_t pgoff = ((addr - walk->vma->vm_start) >> PAGE_SHIFT) + in clean_record_pte()
    95 walk->vma->vm_pgoff - cwalk->bitmap_pgoff; in clean_record_pte()
    96 pte_t old_pte = ptep_modify_prot_start(walk->vma, addr, pte); in clean_record_pte()
    99 ptep_modify_prot_commit(walk->vma, addr, pte, old_pte, ptent); in clean_record_pte()
    [all …]
|
D | ptdump.c |
    15 static inline int note_kasan_page_table(struct mm_walk *walk, in note_kasan_page_table() argument
    18 struct ptdump_state *st = walk->private; in note_kasan_page_table()
    22 walk->action = ACTION_CONTINUE; in note_kasan_page_table()
    29 unsigned long next, struct mm_walk *walk) in ptdump_pgd_entry() argument
    31 struct ptdump_state *st = walk->private; in ptdump_pgd_entry()
    36 return note_kasan_page_table(walk, addr); in ptdump_pgd_entry()
    49 unsigned long next, struct mm_walk *walk) in ptdump_p4d_entry() argument
    51 struct ptdump_state *st = walk->private; in ptdump_p4d_entry()
    56 return note_kasan_page_table(walk, addr); in ptdump_p4d_entry()
    69 unsigned long next, struct mm_walk *walk) in ptdump_pud_entry() argument
    [all …]
|
D | hmm.c |
    62 unsigned int required_fault, struct mm_walk *walk) in hmm_vma_fault() argument
    64 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_fault()
    65 struct vm_area_struct *vma = walk->vma; in hmm_vma_fault()
    146 __always_unused int depth, struct mm_walk *walk) in hmm_vma_walk_hole() argument
    148 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_walk_hole()
    159 if (!walk->vma) { in hmm_vma_walk_hole()
    165 return hmm_vma_fault(addr, end, required_fault, walk); in hmm_vma_walk_hole()
    185 static int hmm_vma_handle_pmd(struct mm_walk *walk, unsigned long addr, in hmm_vma_handle_pmd() argument
    189 struct hmm_vma_walk *hmm_vma_walk = walk->private; in hmm_vma_handle_pmd()
    200 return hmm_vma_fault(addr, end, required_fault, walk); in hmm_vma_handle_pmd()
    [all …]
|
/kernel/linux/linux-5.10/arch/x86/crypto/ |
D | glue_helper.c |
    26 struct skcipher_walk walk; in glue_ecb_req_128bit() local
    31 err = skcipher_walk_virt(&walk, req, false); in glue_ecb_req_128bit()
    33 while ((nbytes = walk.nbytes)) { in glue_ecb_req_128bit()
    34 const u8 *src = walk.src.virt.addr; in glue_ecb_req_128bit()
    35 u8 *dst = walk.dst.virt.addr; in glue_ecb_req_128bit()
    40 &walk, fpu_enabled, nbytes); in glue_ecb_req_128bit()
    58 err = skcipher_walk_done(&walk, nbytes); in glue_ecb_req_128bit()
    71 struct skcipher_walk walk; in glue_cbc_encrypt_req_128bit() local
    75 err = skcipher_walk_virt(&walk, req, false); in glue_cbc_encrypt_req_128bit()
    77 while ((nbytes = walk.nbytes)) { in glue_cbc_encrypt_req_128bit()
    [all …]
|
D | cast5_avx_glue.c |
    35 static inline bool cast5_fpu_begin(bool fpu_enabled, struct skcipher_walk *walk, in cast5_fpu_begin() argument
    39 walk, fpu_enabled, nbytes); in cast5_fpu_begin()
    52 struct skcipher_walk walk; in ecb_crypt() local
    58 err = skcipher_walk_virt(&walk, req, false); in ecb_crypt()
    60 while ((nbytes = walk.nbytes)) { in ecb_crypt()
    61 u8 *wsrc = walk.src.virt.addr; in ecb_crypt()
    62 u8 *wdst = walk.dst.virt.addr; in ecb_crypt()
    64 fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes); in ecb_crypt()
    93 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
    115 struct skcipher_walk walk; in cbc_encrypt() local
    [all …]
|
D | blowfish_glue.c |
    78 struct skcipher_walk walk; in ecb_crypt() local
    82 err = skcipher_walk_virt(&walk, req, false); in ecb_crypt()
    84 while ((nbytes = walk.nbytes)) { in ecb_crypt()
    85 u8 *wsrc = walk.src.virt.addr; in ecb_crypt()
    86 u8 *wdst = walk.dst.virt.addr; in ecb_crypt()
    112 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
    129 struct skcipher_walk *walk) in __cbc_encrypt() argument
    132 unsigned int nbytes = walk->nbytes; in __cbc_encrypt()
    133 u64 *src = (u64 *)walk->src.virt.addr; in __cbc_encrypt()
    134 u64 *dst = (u64 *)walk->dst.virt.addr; in __cbc_encrypt()
    [all …]
|
D | des3_ede_glue.c |
    79 struct skcipher_walk walk; in ecb_crypt() local
    83 err = skcipher_walk_virt(&walk, req, false); in ecb_crypt()
    85 while ((nbytes = walk.nbytes)) { in ecb_crypt()
    86 u8 *wsrc = walk.src.virt.addr; in ecb_crypt()
    87 u8 *wdst = walk.dst.virt.addr; in ecb_crypt()
    114 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
    137 struct skcipher_walk *walk) in __cbc_encrypt() argument
    140 unsigned int nbytes = walk->nbytes; in __cbc_encrypt()
    141 u64 *src = (u64 *)walk->src.virt.addr; in __cbc_encrypt()
    142 u64 *dst = (u64 *)walk->dst.virt.addr; in __cbc_encrypt()
    [all …]
|
D | aegis128-aesni-glue.c |
    60 int (*skcipher_walk_init)(struct skcipher_walk *walk,
    73 struct scatter_walk walk; in crypto_aegis128_aesni_process_ad() local
    77 scatterwalk_start(&walk, sg_src); in crypto_aegis128_aesni_process_ad()
    79 unsigned int size = scatterwalk_clamp(&walk, assoclen); in crypto_aegis128_aesni_process_ad()
    81 void *mapped = scatterwalk_map(&walk); in crypto_aegis128_aesni_process_ad()
    107 scatterwalk_advance(&walk, size); in crypto_aegis128_aesni_process_ad()
    108 scatterwalk_done(&walk, 0, assoclen); in crypto_aegis128_aesni_process_ad()
    118 struct aegis_state *state, struct skcipher_walk *walk, in crypto_aegis128_aesni_process_crypt() argument
    121 while (walk->nbytes >= AEGIS128_BLOCK_SIZE) { in crypto_aegis128_aesni_process_crypt()
    123 round_down(walk->nbytes, AEGIS128_BLOCK_SIZE), in crypto_aegis128_aesni_process_crypt()
    [all …]
|
/kernel/linux/linux-5.10/arch/arm/crypto/ |
D | aes-ce-glue.c |
    171 struct skcipher_walk walk; in ecb_encrypt() local
    175 err = skcipher_walk_virt(&walk, req, false); in ecb_encrypt()
    177 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
    179 ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
    182 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
    191 struct skcipher_walk walk; in ecb_decrypt() local
    195 err = skcipher_walk_virt(&walk, req, false); in ecb_decrypt()
    197 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
    199 ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_decrypt()
    202 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
    [all …]
|
D | aes-neonbs-glue.c |
    91 struct skcipher_walk walk; in __ecb_crypt() local
    94 err = skcipher_walk_virt(&walk, req, false); in __ecb_crypt()
    96 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
    97 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
    99 if (walk.nbytes < walk.total) in __ecb_crypt()
    101 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
    104 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, in __ecb_crypt()
    107 err = skcipher_walk_done(&walk, in __ecb_crypt()
    108 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
    165 struct skcipher_walk walk; in cbc_decrypt() local
    [all …]
|
/kernel/linux/linux-5.10/arch/arm64/crypto/ |
D | aes-neonbs-glue.c |
    103 struct skcipher_walk walk; in __ecb_crypt() local
    106 err = skcipher_walk_virt(&walk, req, false); in __ecb_crypt()
    108 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
    109 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
    111 if (walk.nbytes < walk.total) in __ecb_crypt()
    113 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
    116 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, in __ecb_crypt()
    119 err = skcipher_walk_done(&walk, in __ecb_crypt()
    120 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
    163 struct skcipher_walk walk; in cbc_encrypt() local
    [all …]
|
D | aes-glue.c |
    178 struct skcipher_walk walk; in ecb_encrypt() local
    181 err = skcipher_walk_virt(&walk, req, false); in ecb_encrypt()
    183 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
    185 aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_encrypt()
    188 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
    198 struct skcipher_walk walk; in ecb_decrypt() local
    201 err = skcipher_walk_virt(&walk, req, false); in ecb_decrypt()
    203 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
    205 aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in ecb_decrypt()
    208 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
    [all …]
|
D | aes-ce-ccm-glue.c |
    139 struct scatter_walk walk; in ccm_calculate_auth_mac() local
    154 scatterwalk_start(&walk, req->src); in ccm_calculate_auth_mac()
    157 u32 n = scatterwalk_clamp(&walk, len); in ccm_calculate_auth_mac()
    161 scatterwalk_start(&walk, sg_next(walk.sg)); in ccm_calculate_auth_mac()
    162 n = scatterwalk_clamp(&walk, len); in ccm_calculate_auth_mac()
    164 p = scatterwalk_map(&walk); in ccm_calculate_auth_mac()
    169 scatterwalk_advance(&walk, n); in ccm_calculate_auth_mac()
    170 scatterwalk_done(&walk, 0, len); in ccm_calculate_auth_mac()
    174 static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[], in ccm_crypt_fallback() argument
    180 while (walk->nbytes) { in ccm_crypt_fallback()
    [all …]
|
D | ghash-ce-glue.c |
    310 struct scatter_walk walk; in gcm_calculate_auth_mac() local
    314 scatterwalk_start(&walk, req->src); in gcm_calculate_auth_mac()
    317 u32 n = scatterwalk_clamp(&walk, len); in gcm_calculate_auth_mac()
    321 scatterwalk_start(&walk, sg_next(walk.sg)); in gcm_calculate_auth_mac()
    322 n = scatterwalk_clamp(&walk, len); in gcm_calculate_auth_mac()
    324 p = scatterwalk_map(&walk); in gcm_calculate_auth_mac()
    330 scatterwalk_advance(&walk, n); in gcm_calculate_auth_mac()
    331 scatterwalk_done(&walk, 0, len); in gcm_calculate_auth_mac()
    346 struct skcipher_walk walk; in gcm_encrypt() local
    363 err = skcipher_walk_aead_encrypt(&walk, req, false); in gcm_encrypt()
    [all …]
|
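The AEAD glue above combines both walkers: associated data is hashed through a scatter_walk (the clamp/map/advance loop sketched earlier), while the payload goes through an AEAD-aware skcipher walk started with skcipher_walk_aead_encrypt(). A hedged sketch of the payload side only; do_blocks() is a hypothetical stand-in for the real accelerated routine, and the round_down step reflects the whole-blocks-except-last pattern visible in the aes-neonbs and aegis128 hits, not any specific driver.

```c
#include <crypto/aes.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

static int toy_aead_encrypt_payload(struct aead_request *req)
{
	struct skcipher_walk walk;
	int err;

	/* Walks only the plaintext part of req->src/req->dst; AD is skipped. */
	err = skcipher_walk_aead_encrypt(&walk, req, false);

	while (walk.nbytes) {
		unsigned int n = walk.nbytes;

		/* Except for the final chunk, hand over whole blocks only. */
		if (n < walk.total)
			n = round_down(n, AES_BLOCK_SIZE);

		do_blocks(walk.dst.virt.addr, walk.src.virt.addr, n);	/* hypothetical */

		err = skcipher_walk_done(&walk, walk.nbytes - n);
	}

	return err;
}
```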
/kernel/linux/linux-5.10/arch/sparc/crypto/ |
D | aes_glue.c |
    223 struct skcipher_walk walk; in ecb_encrypt() local
    227 err = skcipher_walk_virt(&walk, req, true); in ecb_encrypt()
    232 while ((nbytes = walk.nbytes) != 0) { in ecb_encrypt()
    233 ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr, in ecb_encrypt()
    234 walk.dst.virt.addr, in ecb_encrypt()
    236 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
    247 struct skcipher_walk walk; in ecb_decrypt() local
    251 err = skcipher_walk_virt(&walk, req, true); in ecb_decrypt()
    257 while ((nbytes = walk.nbytes) != 0) { in ecb_decrypt()
    258 ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr, in ecb_decrypt()
    [all …]
|
/kernel/linux/linux-5.10/drivers/crypto/vmx/ |
D | aes_ctr.c |
    70 struct skcipher_walk *walk) in p8_aes_ctr_final() argument
    72 u8 *ctrblk = walk->iv; in p8_aes_ctr_final()
    74 u8 *src = walk->src.virt.addr; in p8_aes_ctr_final()
    75 u8 *dst = walk->dst.virt.addr; in p8_aes_ctr_final()
    76 unsigned int nbytes = walk->nbytes; in p8_aes_ctr_final()
    94 struct skcipher_walk walk; in p8_aes_ctr_crypt() local
    106 ret = skcipher_walk_virt(&walk, req, false); in p8_aes_ctr_crypt()
    107 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in p8_aes_ctr_crypt()
    111 aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr, in p8_aes_ctr_crypt()
    112 walk.dst.virt.addr, in p8_aes_ctr_crypt()
    [all …]
|
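The VMX CTR glue ends the same way the generic crypto_ctr_encrypt_walk() in ctr.h does: full blocks go to the accelerated routine, and a trailing partial block is finished by encrypting one counter block from walk->iv and XORing only the leftover bytes. A minimal sketch of that final step, assuming a plain crypto_cipher handle for the underlying block cipher (the real p8_aes_ctr_final() calls its own AES primitive instead):

```c
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

/* Sketch of CTR tail handling: fewer than AES_BLOCK_SIZE bytes remain. */
static void toy_ctr_final(struct crypto_cipher *cipher,
			  struct skcipher_walk *walk)
{
	u8 keystream[AES_BLOCK_SIZE];
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	/* E_k(counter) -> one block of keystream. */
	crypto_cipher_encrypt_one(cipher, keystream, walk->iv);

	/* XOR only the bytes that are actually left. */
	crypto_xor_cpy(dst, src, keystream, nbytes);

	/* Bump the counter; on the very last partial block this is optional. */
	crypto_inc(walk->iv, AES_BLOCK_SIZE);
}
```

Because CTR turns the block cipher into a stream cipher, this tail step is what lets the mode handle requests that are not a multiple of the block size, which is why both ctr.h and aes_ctr.c show a dedicated final/tail path alongside the main walk loop.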