| /kernel/linux/linux-6.6/fs/minix/ |
| D | itree_common.c |
|     158  Indirect *partial;   in get_block() local
|     166  partial = get_branch(inode, depth, offsets, chain, &err);   in get_block()
|     169  if (!partial) {   in get_block()
|     173  partial = chain+depth-1; /* the whole chain */   in get_block()
|     180  while (partial > chain) {   in get_block()
|     181  brelse(partial->bh);   in get_block()
|     182  partial--;   in get_block()
|     196  left = (chain + depth) - partial;   in get_block()
|     197  err = alloc_branch(inode, left, offsets+(partial-chain), partial);   in get_block()
|     201  if (splice_branch(inode, chain, partial, left) < 0)   in get_block()
|     [all …]
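The minix get_block() hits above (and the sysv, ext2, and ext4 variants further down) all revolve around the same bookkeeping: get_branch() fills an array of Indirect triples, `partial` ends up pointing at the first link that is missing (or NULL if the block is fully mapped), `partial - chain` is the depth already present, and `(chain + depth) - partial` is how many levels still have to be allocated before the new branch is spliced in. Below is a minimal user-space sketch of that pointer arithmetic, with invented types and a stub lookup in place of the kernel's buffer-head handling.

```c
#include <stdio.h>
#include <stdint.h>

/* Simplified stand-in for the kernel's Indirect triple. */
struct indirect {
	uint32_t key;    /* block number this level resolves to (0 = hole) */
	uint32_t *p;     /* where that number lives in the parent block    */
};

/*
 * Walk the lookup path and fill chain[]; return NULL if every level
 * resolved, otherwise a pointer to the first unresolved element --
 * the role played by "partial" in the snippets above.
 */
struct indirect *get_branch(uint32_t *path, int depth, struct indirect chain[])
{
	for (int i = 0; i < depth; i++) {
		chain[i].p = &path[i];
		chain[i].key = path[i];
		if (!chain[i].key)
			return &chain[i];
	}
	return NULL;
}

int main(void)
{
	uint32_t path[3] = { 12, 0, 0 };   /* only the first level is mapped */
	struct indirect chain[3];
	int depth = 3;

	struct indirect *partial = get_branch(path, depth, chain);
	if (!partial) {
		puts("already mapped");
		return 0;
	}

	/* Same arithmetic as "left = (chain + depth) - partial;" */
	long left = (chain + depth) - partial;
	printf("depth reached: %ld, levels left to allocate: %ld\n",
	       (long)(partial - chain), left);

	/* Release loop shape; in the kernel each step also does brelse(partial->bh). */
	while (partial > chain)
		partial--;

	return 0;
}
```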
|
| /kernel/linux/linux-5.10/fs/minix/ |
| D | itree_common.c |
|     158  Indirect *partial;   in get_block() local
|     166  partial = get_branch(inode, depth, offsets, chain, &err);   in get_block()
|     169  if (!partial) {   in get_block()
|     173  partial = chain+depth-1; /* the whole chain */   in get_block()
|     180  while (partial > chain) {   in get_block()
|     181  brelse(partial->bh);   in get_block()
|     182  partial--;   in get_block()
|     196  left = (chain + depth) - partial;   in get_block()
|     197  err = alloc_branch(inode, left, offsets+(partial-chain), partial);   in get_block()
|     201  if (splice_branch(inode, chain, partial, left) < 0)   in get_block()
|     [all …]
|
| /kernel/linux/linux-5.10/include/crypto/ |
| D | sha256_base.h |
|      43  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;   in sha256_base_do_update() local
|      47  if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {   in sha256_base_do_update()
|      50  if (partial) {   in sha256_base_do_update()
|      51  int p = SHA256_BLOCK_SIZE - partial;   in sha256_base_do_update()
|      53  memcpy(sctx->buf + partial, data, p);   in sha256_base_do_update()
|      67  partial = 0;   in sha256_base_do_update()
|      70  memcpy(sctx->buf + partial, data, len);   in sha256_base_do_update()
|      81  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;   in sha256_base_do_finalize() local
|      83  sctx->buf[partial++] = 0x80;   in sha256_base_do_finalize()
|      84  if (partial > bit_offset) {   in sha256_base_do_finalize()
|     [all …]
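The sha256/sha1/sm3/sha512 `*_base_do_update()` hits above and below all implement the same buffering pattern: `partial = count % BLOCK_SIZE` is how many bytes are already parked in the context buffer; when new data completes a block, the buffer is topped up and flushed, whole blocks are then hashed straight from the caller's buffer, and any unaligned tail is stashed for the next call. The following is a stand-alone sketch of that control flow, with an invented context and a stub in place of the arch-specific block function (the real helpers take a shash_desc and a block-function callback).

```c
#include <string.h>

#define BLOCK_SIZE 64    /* SHA-1/SHA-256/SM3 block size; SHA-512 uses 128 */

struct hash_ctx {
	unsigned long long count;        /* total bytes fed so far */
	unsigned char buf[BLOCK_SIZE];   /* bytes of an unfinished block */
};

/* Stand-in for the arch-specific compression function. */
void process_blocks(struct hash_ctx *ctx, const unsigned char *src, int nblocks)
{
	(void)ctx; (void)src; (void)nblocks;
}

void hash_update(struct hash_ctx *ctx, const unsigned char *data, unsigned int len)
{
	unsigned int partial = ctx->count % BLOCK_SIZE;

	ctx->count += len;

	if (partial + len >= BLOCK_SIZE) {
		if (partial) {
			unsigned int p = BLOCK_SIZE - partial;

			/* Top up the buffered block and flush it. */
			memcpy(ctx->buf + partial, data, p);
			process_blocks(ctx, ctx->buf, 1);
			data += p;
			len -= p;
		}

		/* Hash whole blocks directly from the caller's buffer. */
		if (len >= BLOCK_SIZE) {
			process_blocks(ctx, data, len / BLOCK_SIZE);
			data += (len / BLOCK_SIZE) * BLOCK_SIZE;
			len %= BLOCK_SIZE;
		}
		partial = 0;
	}

	/* Park the unaligned tail for the next call. */
	if (len)
		memcpy(ctx->buf + partial, data, len);
}
```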
|
| D | sha1_base.h |
|      40  unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;   in sha1_base_do_update() local
|      44  if (unlikely((partial + len) >= SHA1_BLOCK_SIZE)) {   in sha1_base_do_update()
|      47  if (partial) {   in sha1_base_do_update()
|      48  int p = SHA1_BLOCK_SIZE - partial;   in sha1_base_do_update()
|      50  memcpy(sctx->buffer + partial, data, p);   in sha1_base_do_update()
|      64  partial = 0;   in sha1_base_do_update()
|      67  memcpy(sctx->buffer + partial, data, len);   in sha1_base_do_update()
|      78  unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;   in sha1_base_do_finalize() local
|      80  sctx->buffer[partial++] = 0x80;   in sha1_base_do_finalize()
|      81  if (partial > bit_offset) {   in sha1_base_do_finalize()
|     [all …]
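The matching `*_base_do_finalize()` lines implement the usual Merkle-Damgård padding: write a 0x80 terminator after the buffered bytes, and if it lands past `bit_offset` (where the big-endian bit count has to go), flush an extra padding-only block first. Below is a sketch of that decision for a 64-byte block and a 64-bit length field; SHA-512 reserves 16 bytes for a 128-bit count, which is why its snippet uses `count[0]`. Types and the block function are invented for illustration.

```c
#include <string.h>
#include <stdint.h>

#define BLOCK_SIZE 64
/* The 64-bit big-endian bit count occupies the last 8 bytes of a block. */
#define BIT_OFFSET (BLOCK_SIZE - sizeof(uint64_t))

struct hash_ctx {
	uint64_t count;                  /* total bytes hashed */
	unsigned char buf[BLOCK_SIZE];
};

/* Stand-in for the compression step. */
void process_block(struct hash_ctx *ctx, const unsigned char *src)
{
	(void)ctx; (void)src;
}

void hash_finalize(struct hash_ctx *ctx)
{
	unsigned int partial = ctx->count % BLOCK_SIZE;
	uint64_t bits = ctx->count << 3;

	ctx->buf[partial++] = 0x80;              /* mandatory terminator bit */

	if (partial > BIT_OFFSET) {
		/* No room for the length: pad this block out, flush it,
		 * and start a fresh all-padding block. */
		memset(ctx->buf + partial, 0, BLOCK_SIZE - partial);
		process_block(ctx, ctx->buf);
		partial = 0;
	}

	memset(ctx->buf + partial, 0, BIT_OFFSET - partial);

	/* Message length in bits, stored big-endian, then the final flush. */
	for (int i = 0; i < 8; i++)
		ctx->buf[BIT_OFFSET + i] = (unsigned char)(bits >> (56 - 8 * i));
	process_block(ctx, ctx->buf);
}
```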
|
| D | sm3_base.h |
|      43  unsigned int partial = sctx->count % SM3_BLOCK_SIZE;   in sm3_base_do_update() local
|      47  if (unlikely((partial + len) >= SM3_BLOCK_SIZE)) {   in sm3_base_do_update()
|      50  if (partial) {   in sm3_base_do_update()
|      51  int p = SM3_BLOCK_SIZE - partial;   in sm3_base_do_update()
|      53  memcpy(sctx->buffer + partial, data, p);   in sm3_base_do_update()
|      67  partial = 0;   in sm3_base_do_update()
|      70  memcpy(sctx->buffer + partial, data, len);   in sm3_base_do_update()
|      81  unsigned int partial = sctx->count % SM3_BLOCK_SIZE;   in sm3_base_do_finalize() local
|      83  sctx->buffer[partial++] = 0x80;   in sm3_base_do_finalize()
|      84  if (partial > bit_offset) {   in sm3_base_do_finalize()
|     [all …]
|
| D | sha512_base.h |
|      61  unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;   in sha512_base_do_update() local
|      67  if (unlikely((partial + len) >= SHA512_BLOCK_SIZE)) {   in sha512_base_do_update()
|      70  if (partial) {   in sha512_base_do_update()
|      71  int p = SHA512_BLOCK_SIZE - partial;   in sha512_base_do_update()
|      73  memcpy(sctx->buf + partial, data, p);   in sha512_base_do_update()
|      87  partial = 0;   in sha512_base_do_update()
|      90  memcpy(sctx->buf + partial, data, len);   in sha512_base_do_update()
|     101  unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;   in sha512_base_do_finalize() local
|     103  sctx->buf[partial++] = 0x80;   in sha512_base_do_finalize()
|     104  if (partial > bit_offset) {   in sha512_base_do_finalize()
|     [all …]
|
| /kernel/linux/linux-6.6/include/crypto/ |
| D | sha1_base.h |
|      41  unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;   in sha1_base_do_update() local
|      45  if (unlikely((partial + len) >= SHA1_BLOCK_SIZE)) {   in sha1_base_do_update()
|      48  if (partial) {   in sha1_base_do_update()
|      49  int p = SHA1_BLOCK_SIZE - partial;   in sha1_base_do_update()
|      51  memcpy(sctx->buffer + partial, data, p);   in sha1_base_do_update()
|      65  partial = 0;   in sha1_base_do_update()
|      68  memcpy(sctx->buffer + partial, data, len);   in sha1_base_do_update()
|      79  unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;   in sha1_base_do_finalize() local
|      81  sctx->buffer[partial++] = 0x80;   in sha1_base_do_finalize()
|      82  if (partial > bit_offset) {   in sha1_base_do_finalize()
|     [all …]
|
| D | sm3_base.h |
|      44  unsigned int partial = sctx->count % SM3_BLOCK_SIZE;   in sm3_base_do_update() local
|      48  if (unlikely((partial + len) >= SM3_BLOCK_SIZE)) {   in sm3_base_do_update()
|      51  if (partial) {   in sm3_base_do_update()
|      52  int p = SM3_BLOCK_SIZE - partial;   in sm3_base_do_update()
|      54  memcpy(sctx->buffer + partial, data, p);   in sm3_base_do_update()
|      68  partial = 0;   in sm3_base_do_update()
|      71  memcpy(sctx->buffer + partial, data, len);   in sm3_base_do_update()
|      82  unsigned int partial = sctx->count % SM3_BLOCK_SIZE;   in sm3_base_do_finalize() local
|      84  sctx->buffer[partial++] = 0x80;   in sm3_base_do_finalize()
|      85  if (partial > bit_offset) {   in sm3_base_do_finalize()
|     [all …]
|
| D | sha256_base.h |
|      42  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;   in lib_sha256_base_do_update() local
|      46  if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {   in lib_sha256_base_do_update()
|      49  if (partial) {   in lib_sha256_base_do_update()
|      50  int p = SHA256_BLOCK_SIZE - partial;   in lib_sha256_base_do_update()
|      52  memcpy(sctx->buf + partial, data, p);   in lib_sha256_base_do_update()
|      66  partial = 0;   in lib_sha256_base_do_update()
|      69  memcpy(sctx->buf + partial, data, len);   in lib_sha256_base_do_update()
|      89  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;   in lib_sha256_base_do_finalize() local
|      91  sctx->buf[partial++] = 0x80;   in lib_sha256_base_do_finalize()
|      92  if (partial > bit_offset) {   in lib_sha256_base_do_finalize()
|     [all …]
|
| D | sha512_base.h |
|      62  unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;   in sha512_base_do_update() local
|      68  if (unlikely((partial + len) >= SHA512_BLOCK_SIZE)) {   in sha512_base_do_update()
|      71  if (partial) {   in sha512_base_do_update()
|      72  int p = SHA512_BLOCK_SIZE - partial;   in sha512_base_do_update()
|      74  memcpy(sctx->buf + partial, data, p);   in sha512_base_do_update()
|      88  partial = 0;   in sha512_base_do_update()
|      91  memcpy(sctx->buf + partial, data, len);   in sha512_base_do_update()
|     102  unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;   in sha512_base_do_finalize() local
|     104  sctx->buf[partial++] = 0x80;   in sha512_base_do_finalize()
|     105  if (partial > bit_offset) {   in sha512_base_do_finalize()
|     [all …]
|
| /kernel/linux/linux-5.10/fs/sysv/ |
| D | itree.c |
|     213  Indirect *partial;   in get_block() local
|     222  partial = get_branch(inode, depth, offsets, chain, &err);   in get_block()
|     226  if (!partial) {   in get_block()
|     231  partial = chain+depth-1; /* the whole chain */   in get_block()
|     238  while (partial > chain) {   in get_block()
|     239  brelse(partial->bh);   in get_block()
|     240  partial--;   in get_block()
|     254  left = (chain + depth) - partial;   in get_block()
|     255  err = alloc_branch(inode, left, offsets+(partial-chain), partial);   in get_block()
|     259  if (splice_branch(inode, chain, partial, left) < 0)   in get_block()
|     [all …]
|
| /kernel/linux/linux-6.6/fs/sysv/ |
| D | itree.c |
|     213  Indirect *partial;   in get_block() local
|     221  partial = get_branch(inode, depth, offsets, chain, &err);   in get_block()
|     224  if (!partial) {   in get_block()
|     229  partial = chain+depth-1; /* the whole chain */   in get_block()
|     236  while (partial > chain) {   in get_block()
|     237  brelse(partial->bh);   in get_block()
|     238  partial--;   in get_block()
|     252  left = (chain + depth) - partial;   in get_block()
|     253  err = alloc_branch(inode, left, offsets+(partial-chain), partial);   in get_block()
|     257  if (splice_branch(inode, chain, partial, left) < 0)   in get_block()
|     [all …]
|
| /kernel/linux/linux-6.6/fs/ext4/ |
| D | indirect.c |
|     244  * @partial: pointer to the last triple within a chain
|     252  Indirect *partial)   in ext4_find_goal() argument
|     260  goal = ext4_find_near(inode, partial);   in ext4_find_goal()
|     316  * we had read the existing part of chain and partial points to the last
|     538  Indirect *partial;   in ext4_ind_map_blocks() local
|     554  partial = ext4_get_branch(inode, depth, offsets, chain, &err);   in ext4_ind_map_blocks()
|     557  if (!partial) {   in ext4_ind_map_blocks()
|     580  * Count number blocks in a subtree under 'partial'. At each   in ext4_ind_map_blocks()
|     586  for (i = partial - chain + 1; i < depth; i++)   in ext4_ind_map_blocks()
|     620  ar.goal = ext4_find_goal(inode, map->m_lblk, partial);   in ext4_ind_map_blocks()
|     [all …]
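In the ext4_ind_map_blocks() hits, `partial - chain` again encodes how deep the existing mapping goes, and the loop over `i = partial - chain + 1 .. depth - 1` walks the levels that are still missing. The toy calculation below shows the kind of quantity such a walk can express: how many data blocks fit under a subtree rooted where the chain stopped, assuming 4 KiB blocks with 4-byte block numbers. This illustrates the chain arithmetic only, not ext4's exact accounting.

```c
#include <stdio.h>

int main(void)
{
	int depth = 4;                /* path through a triple-indirect block: 4 triples */
	int reached = 2;              /* levels resolved, i.e. partial - chain           */
	long addr_per_block = 1024;   /* 4096-byte block / 4-byte block number           */
	long subtree = 1;

	/* Same index range as "for (i = partial - chain + 1; i < depth; i++)". */
	for (int i = reached + 1; i < depth; i++)
		subtree *= addr_per_block;

	printf("a missing subtree at level %d spans up to %ld data blocks\n",
	       reached, subtree);
	return 0;
}
```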
|
| /kernel/linux/linux-5.10/fs/ext4/ |
| D | indirect.c |
|     244  * @partial: pointer to the last triple within a chain
|     252  Indirect *partial)   in ext4_find_goal() argument
|     260  goal = ext4_find_near(inode, partial);   in ext4_find_goal()
|     316  * we had read the existing part of chain and partial points to the last
|     536  Indirect *partial;   in ext4_ind_map_blocks() local
|     552  partial = ext4_get_branch(inode, depth, offsets, chain, &err);   in ext4_ind_map_blocks()
|     555  if (!partial) {   in ext4_ind_map_blocks()
|     578  * Count number blocks in a subtree under 'partial'. At each   in ext4_ind_map_blocks()
|     584  for (i = partial - chain + 1; i < depth; i++)   in ext4_ind_map_blocks()
|     618  ar.goal = ext4_find_goal(inode, map->m_lblk, partial);   in ext4_ind_map_blocks()
|     [all …]
|
| /kernel/linux/linux-6.6/include/linux/ |
| D | slub_def.h |
|      21  FREE_ADD_PARTIAL, /* Freeing moves slab to partial list */
|      23  ALLOC_FROM_PARTIAL, /* Cpu slab acquired from node partial list */
|      38  CPU_PARTIAL_ALLOC, /* Used cpu partial on alloc */
|      39  CPU_PARTIAL_FREE, /* Refill cpu partial on free */
|      40  CPU_PARTIAL_NODE, /* Refill cpu partial from node partial */
|      41  CPU_PARTIAL_DRAIN, /* Drain cpu partial to node partial */
|      60  struct slab *partial; /* Partially allocated frozen slabs */   member
|      70  #define slub_percpu_partial(c) ((c)->partial)
|     102  /* Used for retrieving partial slabs, etc. */
|     110  /* Number of per cpu partial objects to keep around */
|     [all …]
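The slub_def.h hits describe SLUB's two tiers of partially used slabs: a per-cpu `partial` list reached through `slub_percpu_partial()`, and a per-node partial list, with the CPU_PARTIAL_* counters tracking movement between them. Below is a toy model of the allocation fallback order those counters describe; every structure here is invented for illustration, the real ones live in mm/slub.c and differ substantially.

```c
#include <stddef.h>

struct toy_slab {
	struct toy_slab *next;   /* link on a partial list */
	int free_objects;        /* objects still available in this slab */
};

struct toy_cpu_cache {
	struct toy_slab *slab;      /* currently active slab */
	struct toy_slab *partial;   /* per-cpu partial list (cheap refill path) */
};

struct toy_node_cache {
	struct toy_slab *partial;   /* shared, lock-protected partial list */
};

struct toy_slab *take_first(struct toy_slab **list)
{
	struct toy_slab *s = *list;

	if (s)
		*list = s->next;
	return s;
}

/* Pick a slab to allocate from, refilling from the partial lists. */
struct toy_slab *pick_slab(struct toy_cpu_cache *c, struct toy_node_cache *n)
{
	if (c->slab && c->slab->free_objects)
		return c->slab;                    /* fast path: active cpu slab */

	c->slab = take_first(&c->partial);         /* cf. CPU_PARTIAL_ALLOC */
	if (c->slab)
		return c->slab;

	c->slab = take_first(&n->partial);         /* cf. ALLOC_FROM_PARTIAL */
	return c->slab;                            /* NULL: a new slab is needed */
}

int main(void)
{
	struct toy_slab s = { .next = NULL, .free_objects = 3 };
	struct toy_cpu_cache cpu = { .slab = NULL, .partial = &s };
	struct toy_node_cache node = { .partial = NULL };

	return pick_slab(&cpu, &node) == &s ? 0 : 1;
}
```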
|
| /kernel/linux/linux-6.6/drivers/crypto/ |
| D | padlock-sha.c |
|     284  unsigned int partial, done;   in padlock_sha1_update_nano() local
|     291  partial = sctx->count & 0x3f;   in padlock_sha1_update_nano()
|     297  if ((partial + len) >= SHA1_BLOCK_SIZE) {   in padlock_sha1_update_nano()
|     300  if (partial) {   in padlock_sha1_update_nano()
|     301  done = -partial;   in padlock_sha1_update_nano()
|     302  memcpy(sctx->buffer + partial, data,   in padlock_sha1_update_nano()
|     321  partial = 0;   in padlock_sha1_update_nano()
|     324  memcpy(sctx->buffer + partial, src, len - done);   in padlock_sha1_update_nano()
|     332  unsigned int partial, padlen;   in padlock_sha1_final_nano() local
|     339  partial = state->count & 0x3f;   in padlock_sha1_final_nano()
|     [all …]
|
| /kernel/linux/linux-5.10/drivers/crypto/ |
| D | padlock-sha.c |
|     283  unsigned int partial, done;   in padlock_sha1_update_nano() local
|     290  partial = sctx->count & 0x3f;   in padlock_sha1_update_nano()
|     296  if ((partial + len) >= SHA1_BLOCK_SIZE) {   in padlock_sha1_update_nano()
|     299  if (partial) {   in padlock_sha1_update_nano()
|     300  done = -partial;   in padlock_sha1_update_nano()
|     301  memcpy(sctx->buffer + partial, data,   in padlock_sha1_update_nano()
|     320  partial = 0;   in padlock_sha1_update_nano()
|     323  memcpy(sctx->buffer + partial, src, len - done);   in padlock_sha1_update_nano()
|     331  unsigned int partial, padlen;   in padlock_sha1_final_nano() local
|     338  partial = state->count & 0x3f;   in padlock_sha1_final_nano()
|     [all …]
|
| /kernel/linux/linux-5.10/include/linux/ |
| D | slub_def.h |
|      19  FREE_ADD_PARTIAL, /* Freeing moves slab to partial list */
|      21  ALLOC_FROM_PARTIAL, /* Cpu slab acquired from node partial list */
|      36  CPU_PARTIAL_ALLOC, /* Used cpu partial on alloc */
|      37  CPU_PARTIAL_FREE, /* Refill cpu partial on free */
|      38  CPU_PARTIAL_NODE, /* Refill cpu partial from node partial */
|      39  CPU_PARTIAL_DRAIN, /* Drain cpu partial to node partial */
|      47  struct page *partial; /* Partially allocated frozen slabs */   member
|      55  #define slub_percpu_partial(c) ((c)->partial)
|      85  /* Used for retrieving partial slabs, etc. */
|      93  /* Number of per cpu partial objects to keep around */
|
| /kernel/linux/linux-6.6/arch/arm64/crypto/ |
| D | sha3-ce-glue.c |
|      43  if ((sctx->partial + len) >= sctx->rsiz) {   in sha3_update()
|      46  if (sctx->partial) {   in sha3_update()
|      47  int p = sctx->rsiz - sctx->partial;   in sha3_update()
|      49  memcpy(sctx->buf + sctx->partial, data, p);   in sha3_update()
|      56  sctx->partial = 0;   in sha3_update()
|      75  memcpy(sctx->buf + sctx->partial, data, len);   in sha3_update()
|      76  sctx->partial += len;   in sha3_update()
|      91  sctx->buf[sctx->partial++] = 0x06;   in sha3_final()
|      92  memset(sctx->buf + sctx->partial, 0, sctx->rsiz - sctx->partial);   in sha3_final()
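The sha3-ce-glue.c hits show the same buffering idea applied to a sponge: `rsiz` is the rate in bytes (200 minus twice the digest size), `sctx->partial` tracks how much of the current rate-sized block is filled, and sha3_final() pads with the SHA-3 domain byte 0x06, zero fill, and the standard final 1 bit on the last rate byte before one more permutation. Below is a sketch of that padding bookkeeping with the Crypto Extensions Keccak transform replaced by a stub; the context layout is simplified from the driver's.

```c
#include <string.h>

#define KECCAK_STATE_BYTES 200    /* 1600-bit Keccak state */

struct sha3_ctx {
	unsigned int rsiz;        /* sponge rate in bytes */
	unsigned int partial;     /* bytes buffered toward the next block */
	unsigned char buf[KECCAK_STATE_BYTES];
};

/* Stand-in for the CE-accelerated Keccak absorb step. */
void keccak_absorb_block(struct sha3_ctx *ctx)
{
	(void)ctx;
}

void sha3_set_rate(struct sha3_ctx *ctx, unsigned int digest_size)
{
	ctx->rsiz = KECCAK_STATE_BYTES - 2 * digest_size;   /* 136 for SHA3-256 */
	ctx->partial = 0;
}

void sha3_pad_and_absorb(struct sha3_ctx *ctx)
{
	/* FIPS 202 domain-separation byte, then zero fill to the rate... */
	ctx->buf[ctx->partial++] = 0x06;
	memset(ctx->buf + ctx->partial, 0, ctx->rsiz - ctx->partial);

	/* ...and the final 1 bit at the top of the last rate byte. */
	ctx->buf[ctx->rsiz - 1] |= 0x80;
	keccak_absorb_block(ctx);
}
```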
|
| /kernel/linux/linux-5.10/arch/arm64/crypto/ |
| D | sha3-ce-glue.c |
|      43  if ((sctx->partial + len) >= sctx->rsiz) {   in sha3_update()
|      46  if (sctx->partial) {   in sha3_update()
|      47  int p = sctx->rsiz - sctx->partial;   in sha3_update()
|      49  memcpy(sctx->buf + sctx->partial, data, p);   in sha3_update()
|      56  sctx->partial = 0;   in sha3_update()
|      75  memcpy(sctx->buf + sctx->partial, data, len);   in sha3_update()
|      76  sctx->partial += len;   in sha3_update()
|      91  sctx->buf[sctx->partial++] = 0x06;   in sha3_final()
|      92  memset(sctx->buf + sctx->partial, 0, sctx->rsiz - sctx->partial);   in sha3_final()
|
| /kernel/linux/linux-6.6/arch/powerpc/crypto/ |
| D | sha1.c |
|      30  unsigned int partial, done;   in powerpc_sha1_update() local
|      33  partial = sctx->count & 0x3f;   in powerpc_sha1_update()
|      38  if ((partial + len) > 63) {   in powerpc_sha1_update()
|      40  if (partial) {   in powerpc_sha1_update()
|      41  done = -partial;   in powerpc_sha1_update()
|      42  memcpy(sctx->buffer + partial, data, done + 64);   in powerpc_sha1_update()
|      52  partial = 0;   in powerpc_sha1_update()
|      54  memcpy(sctx->buffer + partial, src, len - done);   in powerpc_sha1_update()
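powerpc_sha1_update() above and padlock_sha1_update_nano() earlier share one slightly opaque trick: after `done = -partial`, the unsigned expression `done + 64` is exactly the number of bytes needed to top up the buffered block, and once the block loop has advanced `done` in 64-byte steps, `len - done` is what remains to be buffered. Below is a self-contained sketch of that bookkeeping with a stub transform and an invented context; the driver versions additionally deal with unaligned sources and co-processor setup.

```c
#include <string.h>

#define SHA1_BLOCK 64

struct sha1_ctx {
	unsigned long long count;
	unsigned char buffer[SHA1_BLOCK];
};

/* Stand-in for the real SHA-1 block transform. */
void sha1_transform(struct sha1_ctx *ctx, const unsigned char *src)
{
	(void)ctx; (void)src;
}

void sha1_update_neg_offset(struct sha1_ctx *ctx, const unsigned char *data,
			    unsigned int len)
{
	unsigned int partial = ctx->count & 0x3f;   /* bytes already buffered */
	unsigned int done = 0;
	const unsigned char *src = data;

	ctx->count += len;

	if (partial + len > 63) {
		if (partial) {
			/* Unsigned wrap-around: done + 64 == 64 - partial. */
			done = -partial;
			memcpy(ctx->buffer + partial, data, done + SHA1_BLOCK);
			src = ctx->buffer;   /* first block comes from the buffer */
		}

		do {
			sha1_transform(ctx, src);
			done += SHA1_BLOCK;          /* cancels the buffered prefix */
			src = data + done;
		} while (done + 63 < len);

		partial = 0;
	}

	/* len - done bytes are left over; stash them for the next call. */
	memcpy(ctx->buffer + partial, src, len - done);
}
```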
|
| /kernel/linux/linux-5.10/arch/powerpc/crypto/ |
| D | sha1.c |
|      40  unsigned int partial, done;   in powerpc_sha1_update() local
|      43  partial = sctx->count & 0x3f;   in powerpc_sha1_update()
|      48  if ((partial + len) > 63) {   in powerpc_sha1_update()
|      50  if (partial) {   in powerpc_sha1_update()
|      51  done = -partial;   in powerpc_sha1_update()
|      52  memcpy(sctx->buffer + partial, data, done + 64);   in powerpc_sha1_update()
|      62  partial = 0;   in powerpc_sha1_update()
|      64  memcpy(sctx->buffer + partial, src, len - done);   in powerpc_sha1_update()
|
| /kernel/linux/linux-6.6/arch/sparc/crypto/ |
| D | sha1_glue.c |
|      31  unsigned int len, unsigned int partial)   in __sha1_sparc64_update() argument
|      36  if (partial) {   in __sha1_sparc64_update()
|      37  done = SHA1_BLOCK_SIZE - partial;   in __sha1_sparc64_update()
|      38  memcpy(sctx->buffer + partial, data, done);   in __sha1_sparc64_update()
|      55  unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;   in sha1_sparc64_update() local
|      58  if (partial + len < SHA1_BLOCK_SIZE) {   in sha1_sparc64_update()
|      60  memcpy(sctx->buffer + partial, data, len);   in sha1_sparc64_update()
|      62  __sha1_sparc64_update(sctx, data, len, partial);   in sha1_sparc64_update()
|
| /kernel/linux/linux-5.10/arch/sparc/crypto/ |
| D | sha1_glue.c |
|      41  unsigned int len, unsigned int partial)   in __sha1_sparc64_update() argument
|      46  if (partial) {   in __sha1_sparc64_update()
|      47  done = SHA1_BLOCK_SIZE - partial;   in __sha1_sparc64_update()
|      48  memcpy(sctx->buffer + partial, data, done);   in __sha1_sparc64_update()
|      65  unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;   in sha1_sparc64_update() local
|      68  if (partial + len < SHA1_BLOCK_SIZE) {   in sha1_sparc64_update()
|      70  memcpy(sctx->buffer + partial, data, len);   in sha1_sparc64_update()
|      72  __sha1_sparc64_update(sctx, data, len, partial);   in sha1_sparc64_update()
|
| /kernel/linux/linux-6.6/fs/ext2/ |
| D | inode.c |
|     325  * @partial: pointer to the last triple within a chain
|     331  Indirect *partial)   in ext2_find_goal() argument
|     346  return ext2_find_near(inode, partial);   in ext2_find_goal()
|     466  * we had read the existing part of chain and partial points to the last
|     632  Indirect *partial;   in ext2_get_blocks() local
|     648  partial = ext2_get_branch(inode, depth, offsets, chain, &err);   in ext2_get_blocks()
|     650  if (!partial) {   in ext2_get_blocks()
|     666  partial = chain + depth - 1;   in ext2_get_blocks()
|     696  if (err == -EAGAIN || !verify_chain(chain, partial)) {   in ext2_get_blocks()
|     697  while (partial > chain) {   in ext2_get_blocks()
|     [all …]
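ext2_get_blocks() re-validates its cached chain when it sees -EAGAIN or re-acquires the lock: each Indirect triple remembers both the block number it read (`key`) and where it read it from (`p`), and the chain is only still trustworthy if every key still matches what the indirect block now contains; otherwise the partial chain is released with the usual `while (partial > chain)` loop and the lookup is retried. Below is a sketch of that check, using the same simplified triple as the minix example near the top; the kernel's verify_chain() works on buffer-head-backed data and is called as verify_chain(chain, partial).

```c
#include <stdbool.h>
#include <stdint.h>

/* Simplified triple: the value cached at lookup time and its location. */
struct indirect {
	uint32_t key;    /* block number read when the chain was built */
	uint32_t *p;     /* where that number lives in the indirect block */
};

/*
 * True if every cached key in from..to still matches the current contents;
 * passing (chain, partial) rechecks only the levels that were actually read.
 */
bool verify_chain(struct indirect *from, struct indirect *to)
{
	while (from <= to && from->key == *from->p)
		from++;
	return from > to;
}
```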
|