
Searched refs:partial (Results 1 – 25 of 228) sorted by relevance


/kernel/linux/linux-5.10/fs/ext4/
indirect.c
252 Indirect *partial) in ext4_find_goal() argument
260 goal = ext4_find_near(inode, partial); in ext4_find_goal()
536 Indirect *partial; in ext4_ind_map_blocks() local
552 partial = ext4_get_branch(inode, depth, offsets, chain, &err); in ext4_ind_map_blocks()
555 if (!partial) { in ext4_ind_map_blocks()
584 for (i = partial - chain + 1; i < depth; i++) in ext4_ind_map_blocks()
618 ar.goal = ext4_find_goal(inode, map->m_lblk, partial); in ext4_ind_map_blocks()
621 indirect_blks = (chain + depth) - partial - 1; in ext4_ind_map_blocks()
627 ar.len = ext4_blks_to_allocate(partial, indirect_blks, in ext4_ind_map_blocks()
634 offsets + (partial - chain), partial); in ext4_ind_map_blocks()
[all …]
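
The ext4 hits above all revolve around one idea: ext4_get_branch() walks the chain of indirect blocks and returns NULL when every link is already mapped, or a pointer to the first missing link otherwise. That pointer is called `partial`, and pointer arithmetic against `chain` and `depth` then tells how many indirect blocks still need allocating. Below is a minimal standalone sketch of that arithmetic, with hypothetical names and a toy block table standing in for real buffer heads; it is an illustration of the pattern, not the ext4 code.

```c
#include <stdio.h>
#include <stddef.h>

#define MAX_DEPTH 4

/* Toy stand-in for ext4's Indirect: one link of the lookup chain. */
struct indirect {
	unsigned int key;	/* block number found at this level, 0 = missing */
};

/*
 * Toy stand-in for ext4_get_branch(): fill chain[0..depth-1] from a
 * per-level table and return NULL if the whole branch is mapped, or a
 * pointer to the first link whose key is 0 (the "partial" link).
 */
static struct indirect *get_branch(const unsigned int *blocks, int depth,
				   struct indirect chain[MAX_DEPTH])
{
	for (int i = 0; i < depth; i++) {
		chain[i].key = blocks[i];
		if (!chain[i].key)
			return &chain[i];	/* first missing link */
	}
	return NULL;				/* branch fully mapped */
}

int main(void)
{
	/* depth-3 branch where only the first level is already allocated */
	unsigned int blocks[MAX_DEPTH] = { 123, 0, 0 };
	struct indirect chain[MAX_DEPTH];
	int depth = 3;

	struct indirect *partial = get_branch(blocks, depth, chain);
	if (!partial) {
		printf("block already mapped\n");
		return 0;
	}

	/* Same arithmetic as in ext4_ind_map_blocks(): the number of   */
	/* indirect blocks still to allocate, excluding the data block. */
	ptrdiff_t indirect_blks = (chain + depth) - partial - 1;
	printf("missing at level %td, need %td indirect blocks + 1 data block\n",
	       partial - chain, indirect_blks);
	return 0;
}
```
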
/kernel/linux/linux-5.10/fs/minix/
itree_common.c
158 Indirect *partial; in get_block() local
166 partial = get_branch(inode, depth, offsets, chain, &err); in get_block()
169 if (!partial) { in get_block()
173 partial = chain+depth-1; /* the whole chain */ in get_block()
180 while (partial > chain) { in get_block()
181 brelse(partial->bh); in get_block()
182 partial--; in get_block()
196 left = (chain + depth) - partial; in get_block()
197 err = alloc_branch(inode, left, offsets+(partial-chain), partial); in get_block()
201 if (splice_branch(inode, chain, partial, left) < 0) in get_block()
[all …]
/kernel/linux/linux-5.10/include/crypto/
sha1_base.h
40 unsigned int partial = sctx->count % SHA1_BLOCK_SIZE; in sha1_base_do_update() local
44 if (unlikely((partial + len) >= SHA1_BLOCK_SIZE)) { in sha1_base_do_update()
47 if (partial) { in sha1_base_do_update()
48 int p = SHA1_BLOCK_SIZE - partial; in sha1_base_do_update()
50 memcpy(sctx->buffer + partial, data, p); in sha1_base_do_update()
64 partial = 0; in sha1_base_do_update()
67 memcpy(sctx->buffer + partial, data, len); in sha1_base_do_update()
78 unsigned int partial = sctx->count % SHA1_BLOCK_SIZE; in sha1_base_do_finalize() local
80 sctx->buffer[partial++] = 0x80; in sha1_base_do_finalize()
81 if (partial > bit_offset) { in sha1_base_do_finalize()
[all …]
sha256_base.h
43 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; in sha256_base_do_update() local
47 if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) { in sha256_base_do_update()
50 if (partial) { in sha256_base_do_update()
51 int p = SHA256_BLOCK_SIZE - partial; in sha256_base_do_update()
53 memcpy(sctx->buf + partial, data, p); in sha256_base_do_update()
67 partial = 0; in sha256_base_do_update()
70 memcpy(sctx->buf + partial, data, len); in sha256_base_do_update()
81 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; in sha256_base_do_finalize() local
83 sctx->buf[partial++] = 0x80; in sha256_base_do_finalize()
84 if (partial > bit_offset) { in sha256_base_do_finalize()
[all …]
sm3_base.h
43 unsigned int partial = sctx->count % SM3_BLOCK_SIZE; in sm3_base_do_update() local
47 if (unlikely((partial + len) >= SM3_BLOCK_SIZE)) { in sm3_base_do_update()
50 if (partial) { in sm3_base_do_update()
51 int p = SM3_BLOCK_SIZE - partial; in sm3_base_do_update()
53 memcpy(sctx->buffer + partial, data, p); in sm3_base_do_update()
67 partial = 0; in sm3_base_do_update()
70 memcpy(sctx->buffer + partial, data, len); in sm3_base_do_update()
81 unsigned int partial = sctx->count % SM3_BLOCK_SIZE; in sm3_base_do_finalize() local
83 sctx->buffer[partial++] = 0x80; in sm3_base_do_finalize()
84 if (partial > bit_offset) { in sm3_base_do_finalize()
[all …]
sha512_base.h
61 unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE; in sha512_base_do_update() local
67 if (unlikely((partial + len) >= SHA512_BLOCK_SIZE)) { in sha512_base_do_update()
70 if (partial) { in sha512_base_do_update()
71 int p = SHA512_BLOCK_SIZE - partial; in sha512_base_do_update()
73 memcpy(sctx->buf + partial, data, p); in sha512_base_do_update()
87 partial = 0; in sha512_base_do_update()
90 memcpy(sctx->buf + partial, data, len); in sha512_base_do_update()
101 unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE; in sha512_base_do_finalize() local
103 sctx->buf[partial++] = 0x80; in sha512_base_do_finalize()
104 if (partial > bit_offset) { in sha512_base_do_finalize()
[all …]
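
The sha1_base.h, sha256_base.h, sm3_base.h and sha512_base.h hits are near-identical: `partial = count % BLOCK_SIZE` is the number of bytes already sitting in the context buffer, and do_update() first tops that buffer up to a full block, then feeds whole blocks straight from the caller's data, then stashes the leftover tail. Here is a minimal standalone sketch of that buffering logic with a dummy block function in place of the real compression function; the names are illustrative, not the kernel API.

```c
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 64	/* SHA-1/SHA-256/SM3 block size; SHA-512 uses 128 */

struct toy_state {
	unsigned long long count;		/* total bytes fed so far */
	unsigned char buffer[BLOCK_SIZE];	/* partially filled block */
	unsigned int blocks_processed;		/* stands in for the hash state */
};

/* Stand-in for the arch-provided block function: just count blocks. */
static void toy_block_fn(struct toy_state *st, const unsigned char *src,
			 int blocks)
{
	(void)src;
	st->blocks_processed += blocks;
}

/* Mirrors the sha*_base_do_update() buffering pattern. */
static void toy_update(struct toy_state *st, const unsigned char *data,
		       unsigned int len)
{
	unsigned int partial = st->count % BLOCK_SIZE;

	st->count += len;

	if (partial + len >= BLOCK_SIZE) {
		int blocks;

		if (partial) {
			int p = BLOCK_SIZE - partial;

			/* top the buffer up to one full block and process it */
			memcpy(st->buffer + partial, data, p);
			data += p;
			len -= p;
			toy_block_fn(st, st->buffer, 1);
		}

		/* process whole blocks directly from the caller's data */
		blocks = len / BLOCK_SIZE;
		len %= BLOCK_SIZE;
		if (blocks) {
			toy_block_fn(st, data, blocks);
			data += blocks * BLOCK_SIZE;
		}
		partial = 0;
	}
	/* stash the tail for the next call (or for finalization) */
	if (len)
		memcpy(st->buffer + partial, data, len);
}

int main(void)
{
	struct toy_state st = { 0 };
	unsigned char msg[200] = { 0 };

	toy_update(&st, msg, 10);	/* buffered only, no block yet */
	toy_update(&st, msg, 150);	/* 10 + 150 = 160 -> 2 blocks, 32 buffered */
	printf("blocks=%u, buffered=%llu\n",
	       st.blocks_processed, st.count % BLOCK_SIZE);
	return 0;
}
```

The do_finalize() hits follow the same bookkeeping: append the 0x80 padding byte at offset `partial`, zero-fill, and write the bit length at the end of the block.
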
/kernel/linux/linux-5.10/fs/sysv/
itree.c
209 Indirect *partial; in get_block() local
218 partial = get_branch(inode, depth, offsets, chain, &err); in get_block()
222 if (!partial) { in get_block()
227 partial = chain+depth-1; /* the whole chain */ in get_block()
234 while (partial > chain) { in get_block()
235 brelse(partial->bh); in get_block()
236 partial--; in get_block()
250 left = (chain + depth) - partial; in get_block()
251 err = alloc_branch(inode, left, offsets+(partial-chain), partial); in get_block()
255 if (splice_branch(inode, chain, partial, left) < 0) in get_block()
[all …]
/kernel/linux/linux-5.10/drivers/crypto/
padlock-sha.c
283 unsigned int partial, done; in padlock_sha1_update_nano() local
290 partial = sctx->count & 0x3f; in padlock_sha1_update_nano()
296 if ((partial + len) >= SHA1_BLOCK_SIZE) { in padlock_sha1_update_nano()
299 if (partial) { in padlock_sha1_update_nano()
300 done = -partial; in padlock_sha1_update_nano()
301 memcpy(sctx->buffer + partial, data, in padlock_sha1_update_nano()
320 partial = 0; in padlock_sha1_update_nano()
323 memcpy(sctx->buffer + partial, src, len - done); in padlock_sha1_update_nano()
331 unsigned int partial, padlen; in padlock_sha1_final_nano() local
338 partial = state->count & 0x3f; in padlock_sha1_final_nano()
[all …]
/kernel/linux/linux-5.10/arch/arm64/crypto/
sha3-ce-glue.c
43 if ((sctx->partial + len) >= sctx->rsiz) { in sha3_update()
46 if (sctx->partial) { in sha3_update()
47 int p = sctx->rsiz - sctx->partial; in sha3_update()
49 memcpy(sctx->buf + sctx->partial, data, p); in sha3_update()
56 sctx->partial = 0; in sha3_update()
75 memcpy(sctx->buf + sctx->partial, data, len); in sha3_update()
76 sctx->partial += len; in sha3_update()
91 sctx->buf[sctx->partial++] = 0x06; in sha3_final()
92 memset(sctx->buf + sctx->partial, 0, sctx->rsiz - sctx->partial); in sha3_final()
/kernel/linux/linux-5.10/arch/powerpc/crypto/
sha1.c
40 unsigned int partial, done; in powerpc_sha1_update() local
43 partial = sctx->count & 0x3f; in powerpc_sha1_update()
48 if ((partial + len) > 63) { in powerpc_sha1_update()
50 if (partial) { in powerpc_sha1_update()
51 done = -partial; in powerpc_sha1_update()
52 memcpy(sctx->buffer + partial, data, done + 64); in powerpc_sha1_update()
62 partial = 0; in powerpc_sha1_update()
64 memcpy(sctx->buffer + partial, src, len - done); in powerpc_sha1_update()
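The powerpc, padlock and octeon variants keep the same buffered-block idea but track progress with a single signed offset: `done` starts at `-partial`, so `done + 64` is exactly the number of bytes needed to top up the buffer, and after the first transform `data + done` lands on the first byte of the next full block of the caller's input. The sketch below isolates just that index arithmetic, with a no-op transform and illustrative names; it is not the kernel function itself.

```c
#include <stdio.h>
#include <string.h>

#define SHA1_BLOCK_SIZE 64

static unsigned int transforms;		/* counts "block transform" calls */

static void toy_transform(const unsigned char *src)
{
	(void)src;
	transforms++;
}

/* Mirrors the powerpc_sha1_update() style of bookkeeping. */
static void toy_update(unsigned char buffer[SHA1_BLOCK_SIZE],
		       unsigned int partial,	/* bytes already buffered */
		       const unsigned char *data, unsigned int len)
{
	int done = 0;
	const unsigned char *src = data;

	if (partial + len > 63) {
		if (partial) {
			done = -partial;	/* so done + 64 tops up the buffer */
			memcpy(buffer + partial, data, done + 64);
			src = buffer;
		}
		do {
			toy_transform(src);
			done += 64;		/* done = input bytes consumed so far */
			src = data + done;
		} while (done + 63 < (int)len);
		partial = 0;
	}
	memcpy(buffer + partial, src, len - done);	/* stash the tail */
}

int main(void)
{
	unsigned char buffer[SHA1_BLOCK_SIZE] = { 0 };
	unsigned char msg[300] = { 0 };

	/* 20 bytes already buffered, feed 150 more: (20 + 150) / 64 = 2 full blocks */
	toy_update(buffer, 20, msg, 150);
	printf("transforms=%u\n", transforms);	/* prints 2 */
	return 0;
}
```
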
/kernel/linux/linux-5.10/arch/sparc/crypto/
sha1_glue.c
41 unsigned int len, unsigned int partial) in __sha1_sparc64_update() argument
46 if (partial) { in __sha1_sparc64_update()
47 done = SHA1_BLOCK_SIZE - partial; in __sha1_sparc64_update()
48 memcpy(sctx->buffer + partial, data, done); in __sha1_sparc64_update()
65 unsigned int partial = sctx->count % SHA1_BLOCK_SIZE; in sha1_sparc64_update() local
68 if (partial + len < SHA1_BLOCK_SIZE) { in sha1_sparc64_update()
70 memcpy(sctx->buffer + partial, data, len); in sha1_sparc64_update()
72 __sha1_sparc64_update(sctx, data, len, partial); in sha1_sparc64_update()
md5_glue.c
46 unsigned int len, unsigned int partial) in __md5_sparc64_update() argument
51 if (partial) { in __md5_sparc64_update()
52 done = MD5_HMAC_BLOCK_SIZE - partial; in __md5_sparc64_update()
53 memcpy((u8 *)sctx->block + partial, data, done); in __md5_sparc64_update()
70 unsigned int partial = sctx->byte_count % MD5_HMAC_BLOCK_SIZE; in md5_sparc64_update() local
73 if (partial + len < MD5_HMAC_BLOCK_SIZE) { in md5_sparc64_update()
75 memcpy((u8 *)sctx->block + partial, data, len); in md5_sparc64_update()
77 __md5_sparc64_update(sctx, data, len, partial); in md5_sparc64_update()
sha512_glue.c
61 unsigned int len, unsigned int partial) in __sha512_sparc64_update() argument
67 if (partial) { in __sha512_sparc64_update()
68 done = SHA512_BLOCK_SIZE - partial; in __sha512_sparc64_update()
69 memcpy(sctx->buf + partial, data, done); in __sha512_sparc64_update()
86 unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE; in sha512_sparc64_update() local
89 if (partial + len < SHA512_BLOCK_SIZE) { in sha512_sparc64_update()
92 memcpy(sctx->buf + partial, data, len); in sha512_sparc64_update()
94 __sha512_sparc64_update(sctx, data, len, partial); in sha512_sparc64_update()
sha256_glue.c
62 unsigned int len, unsigned int partial) in __sha256_sparc64_update() argument
67 if (partial) { in __sha256_sparc64_update()
68 done = SHA256_BLOCK_SIZE - partial; in __sha256_sparc64_update()
69 memcpy(sctx->buf + partial, data, done); in __sha256_sparc64_update()
86 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; in sha256_sparc64_update() local
89 if (partial + len < SHA256_BLOCK_SIZE) { in sha256_sparc64_update()
91 memcpy(sctx->buf + partial, data, len); in sha256_sparc64_update()
93 __sha256_sparc64_update(sctx, data, len, partial); in sha256_sparc64_update()
/kernel/linux/linux-5.10/fs/ext2/
inode.c
331 Indirect *partial) in ext2_find_goal() argument
346 return ext2_find_near(inode, partial); in ext2_find_goal()
628 Indirect *partial; in ext2_get_blocks() local
644 partial = ext2_get_branch(inode, depth, offsets, chain, &err); in ext2_get_blocks()
646 if (!partial) { in ext2_get_blocks()
662 partial = chain + depth - 1; in ext2_get_blocks()
692 if (err == -EAGAIN || !verify_chain(chain, partial)) { in ext2_get_blocks()
693 while (partial > chain) { in ext2_get_blocks()
694 brelse(partial->bh); in ext2_get_blocks()
695 partial--; in ext2_get_blocks()
[all …]
/kernel/linux/linux-5.10/arch/mips/cavium-octeon/crypto/
octeon-sha1.c
91 unsigned int partial; in __octeon_sha1_update() local
95 partial = sctx->count % SHA1_BLOCK_SIZE; in __octeon_sha1_update()
100 if ((partial + len) >= SHA1_BLOCK_SIZE) { in __octeon_sha1_update()
101 if (partial) { in __octeon_sha1_update()
102 done = -partial; in __octeon_sha1_update()
103 memcpy(sctx->buffer + partial, data, in __octeon_sha1_update()
114 partial = 0; in __octeon_sha1_update()
116 memcpy(sctx->buffer + partial, src, len - done); in __octeon_sha1_update()
octeon-sha256.c
103 unsigned int partial; in __octeon_sha256_update() local
107 partial = sctx->count % SHA256_BLOCK_SIZE; in __octeon_sha256_update()
112 if ((partial + len) >= SHA256_BLOCK_SIZE) { in __octeon_sha256_update()
113 if (partial) { in __octeon_sha256_update()
114 done = -partial; in __octeon_sha256_update()
115 memcpy(sctx->buf + partial, data, in __octeon_sha256_update()
126 partial = 0; in __octeon_sha256_update()
128 memcpy(sctx->buf + partial, src, len - done); in __octeon_sha256_update()
/kernel/linux/linux-5.10/drivers/usb/storage/
freecom.c
227 unsigned int partial; in freecom_transport() local
266 FCM_STATUS_PACKET_LENGTH, &partial); in freecom_transport()
267 usb_stor_dbg(us, "foo Status result %d %u\n", result, partial); in freecom_transport()
271 US_DEBUG(pdump(us, (void *)fst, partial)); in freecom_transport()
308 FCM_STATUS_PACKET_LENGTH, &partial); in freecom_transport()
310 usb_stor_dbg(us, "bar Status result %d %u\n", result, partial); in freecom_transport()
314 US_DEBUG(pdump(us, (void *)fst, partial)); in freecom_transport()
317 if (partial != 4) in freecom_transport()
376 FCM_PACKET_LENGTH, &partial); in freecom_transport()
377 US_DEBUG(pdump(us, (void *)fst, partial)); in freecom_transport()
[all …]
initializers.c
56 unsigned int partial; in usb_stor_ucr61s2b_init() local
70 US_BULK_CB_WRAP_LEN, &partial); in usb_stor_ucr61s2b_init()
76 US_BULK_CS_WRAP_LEN, &partial); in usb_stor_ucr61s2b_init()
/kernel/linux/linux-5.10/arch/x86/include/asm/
unwind.h
68 bool *partial) in unwind_get_entry_regs() argument
73 if (partial) { in unwind_get_entry_regs()
75 *partial = !state->full_regs; in unwind_get_entry_regs()
77 *partial = false; in unwind_get_entry_regs()
85 bool *partial) in unwind_get_entry_regs() argument
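
In the x86 unwinder, `partial` is an optional out-parameter: unwind_get_entry_regs() returns the pt_regs it found and, only if the caller passed a non-NULL bool, reports whether that frame holds just the partial iret register set rather than full saved registers. A tiny sketch of that optional-out-parameter convention, using hypothetical stand-in types rather than the real unwinder structures:

```c
#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-ins for the unwinder's state and register frame. */
struct toy_regs { unsigned long ip; };

struct toy_unwind_state {
	struct toy_regs *regs;
	bool full_regs;		/* false = only the partial iret frame was saved */
};

/*
 * Same convention as unwind_get_entry_regs(): return the regs (or NULL),
 * and only fill *partial when the caller actually asked for it.
 */
static struct toy_regs *toy_get_entry_regs(struct toy_unwind_state *state,
					   bool *partial)
{
	if (!state->regs)
		return NULL;

	if (partial)
		*partial = !state->full_regs;

	return state->regs;
}

int main(void)
{
	struct toy_regs r = { .ip = 0xdeadbeef };
	struct toy_unwind_state st = { .regs = &r, .full_regs = false };
	bool partial = false;

	if (toy_get_entry_regs(&st, &partial))
		printf("regs at ip=%#lx, partial=%d\n", r.ip, partial);
	return 0;
}
```
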
/kernel/linux/linux-5.10/arch/arm/crypto/
ghash-ce-glue.c
98 unsigned int partial = ctx->count % GHASH_BLOCK_SIZE; in ghash_update() local
102 if ((partial + len) >= GHASH_BLOCK_SIZE) { in ghash_update()
106 if (partial) { in ghash_update()
107 int p = GHASH_BLOCK_SIZE - partial; in ghash_update()
109 memcpy(ctx->buf + partial, src, p); in ghash_update()
118 partial ? ctx->buf : NULL); in ghash_update()
120 partial = 0; in ghash_update()
123 memcpy(ctx->buf + partial, src, len); in ghash_update()
130 unsigned int partial = ctx->count % GHASH_BLOCK_SIZE; in ghash_final() local
132 if (partial) { in ghash_final()
[all …]
/kernel/linux/linux-5.10/crypto/
sha3_generic.c
168 sctx->partial = 0; in crypto_sha3_init()
185 if ((sctx->partial + len) > (sctx->rsiz - 1)) { in crypto_sha3_update()
186 if (sctx->partial) { in crypto_sha3_update()
187 done = -sctx->partial; in crypto_sha3_update()
188 memcpy(sctx->buf + sctx->partial, data, in crypto_sha3_update()
204 sctx->partial = 0; in crypto_sha3_update()
206 memcpy(sctx->buf + sctx->partial, src, len - done); in crypto_sha3_update()
207 sctx->partial += (len - done); in crypto_sha3_update()
216 unsigned int i, inlen = sctx->partial; in crypto_sha3_final()
/kernel/linux/linux-5.10/arch/x86/kernel/
dumpstack.c
161 bool partial, const char *log_lvl) in show_regs_if_on_stack() argument
172 if (!partial && on_stack(info, regs, sizeof(*regs))) { in show_regs_if_on_stack()
175 } else if (partial && on_stack(info, (void *)regs + IRET_FRAME_OFFSET, in show_regs_if_on_stack()
193 bool partial = false; in show_trace_log_lvl() local
198 regs = unwind_get_entry_regs(&state, &partial); in show_trace_log_lvl()
240 show_regs_if_on_stack(&stack_info, regs, partial, log_lvl); in show_trace_log_lvl()
299 regs = unwind_get_entry_regs(&state, &partial); in show_trace_log_lvl()
301 show_regs_if_on_stack(&stack_info, regs, partial, log_lvl); in show_trace_log_lvl()
/kernel/linux/linux-5.10/drivers/crypto/stm32/
stm32-crc32.c
69 u32 partial; /* crc32c: partial in first 4 bytes of that struct */ member
137 ctx->partial = readl_relaxed(crc->regs + CRC_DR); in stm32_crc_init()
163 ctx->partial = crc32_le(ctx->partial, d8, length); in burst_update()
165 ctx->partial = __crc32c_le(ctx->partial, d8, length); in burst_update()
176 writel_relaxed(bitrev32(ctx->partial), crc->regs + CRC_INIT); in burst_update()
206 ctx->partial = readl_relaxed(crc->regs + CRC_DR); in burst_update()
249 ~ctx->partial : ctx->partial, out); in stm32_crc_final()
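
In the stm32 driver `partial` is not a partial block but a partial result: the running CRC value read back from the hardware (or computed with the software crc32 helpers when the hardware path cannot be used) and fed back in as the seed for the next chunk. The property that makes this work is that a CRC over concatenated data can be computed chunk by chunk by re-seeding with the previous partial value. A small standalone sketch of that property, using a plain bitwise CRC-32 (reflected, polynomial 0xEDB88320) instead of the kernel's crc32_le():

```c
#include <stdio.h>
#include <stdint.h>
#include <string.h>

/* Plain bitwise CRC-32 (reflected, poly 0xEDB88320), seeded with `crc`. */
static uint32_t crc32_update(uint32_t crc, const uint8_t *data, size_t len)
{
	while (len--) {
		crc ^= *data++;
		for (int i = 0; i < 8; i++)
			crc = (crc >> 1) ^ (0xEDB88320u & -(crc & 1));
	}
	return crc;
}

int main(void)
{
	const char msg[] = "partial results compose";
	size_t len = strlen(msg);

	/* one-shot */
	uint32_t whole = crc32_update(0xFFFFFFFFu, (const uint8_t *)msg, len);

	/* chunked: the intermediate value plays the role of ctx->partial */
	uint32_t partial = 0xFFFFFFFFu;
	partial = crc32_update(partial, (const uint8_t *)msg, 7);
	partial = crc32_update(partial, (const uint8_t *)msg + 7, len - 7);

	printf("one-shot %08x, chunked %08x (%s)\n",
	       whole ^ 0xFFFFFFFFu, partial ^ 0xFFFFFFFFu,
	       whole == partial ? "match" : "mismatch");
	return 0;
}
```
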
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/pm/powerplay/hwmgr/
ppevvmath.h
46 } partial; member
338 X_LessThanOne = (X.partial.real == 0 && X.partial.decimal != 0 && X.full >= 0); in fMultiply()
339 Y_LessThanOne = (Y.partial.real == 0 && Y.partial.decimal != 0 && Y.full >= 0); in fMultiply()
413 if (num.partial.real > 3000) in fSqrt()
415 else if (num.partial.real > 1000) in fSqrt()
417 else if (num.partial.real > 100) in fSqrt()
512 int i, scaledDecimal = 0, tmp = A.partial.decimal; in uGetScaledDecimal()
545 solution.partial.decimal = 0; /*All fractional digits changes to 0 */ in fRoundUpByStepSize()
548 solution.partial.real += 1; /*Error term of 1 added */ in fRoundUpByStepSize()
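
ppevvmath.h uses `partial` for something different again: a fixed-point number stored as one 32-bit word (`full`) that can also be viewed through a bit-field struct as an integer part (`partial.real`) and a fractional part (`partial.decimal`). Below is a hedged sketch of that kind of union, with an illustrative 16.16 split rather than the driver's actual SHIFT_AMOUNT, and assuming the usual little-endian bit-field layout; the names and split are assumptions for illustration only.

```c
#include <stdio.h>

#define FRAC_BITS 16	/* illustrative split; the driver's SHIFT_AMOUNT may differ */

/* A Q16.16 fixed-point value, viewable whole or in two parts. */
typedef union toy_fint {
	int full;				/* whole fixed-point word */
	struct {
		unsigned int decimal : FRAC_BITS;	/* fractional bits (low part) */
		int real : 32 - FRAC_BITS;		/* integer part (high part) */
	} partial;
} toy_fint;

static toy_fint from_parts(int real, unsigned int frac_bits)
{
	toy_fint x;

	x.full = (real << FRAC_BITS) | (int)(frac_bits & ((1u << FRAC_BITS) - 1));
	return x;
}

int main(void)
{
	toy_fint x = from_parts(3, 0x8000);	/* 3.5 in Q16.16 */
	toy_fint y = from_parts(0, 0x4000);	/* 0.25 */

	/* the "less than one" test seen in fMultiply(): no integer part, some fraction */
	int y_less_than_one = (y.partial.real == 0 &&
			       y.partial.decimal != 0 && y.full >= 0);

	printf("x: real=%d decimal=0x%x full=0x%x\n",
	       x.partial.real, (unsigned)x.partial.decimal, (unsigned)x.full);
	printf("y < 1: %d\n", y_less_than_one);
	return 0;
}
```
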
