
Searched refs:bsize (Results 1 – 25 of 147) sorted by relevance


/kernel/linux/linux-5.10/net/netfilter/
nft_set_pipapo_avx2.c
45 #define NFT_PIPAPO_AVX2_BUCKET_LOAD4(reg, lt, group, v, bsize) \ argument
48 (v)) * (bsize)])
49 #define NFT_PIPAPO_AVX2_BUCKET_LOAD8(reg, lt, group, v, bsize) \ argument
52 (v)) * (bsize)])
222 int i, ret = -1, m256_size = f->bsize / NFT_PIPAPO_LONGS_PER_M256, b; in nft_pipapo_avx2_lookup_4b_2()
224 unsigned long *lt = f->lt, bsize = f->bsize; in nft_pipapo_avx2_lookup_4b_2() local
231 NFT_PIPAPO_AVX2_BUCKET_LOAD4(0, lt, 0, pg[0], bsize); in nft_pipapo_avx2_lookup_4b_2()
232 NFT_PIPAPO_AVX2_BUCKET_LOAD4(1, lt, 1, pg[1], bsize); in nft_pipapo_avx2_lookup_4b_2()
235 NFT_PIPAPO_AVX2_BUCKET_LOAD4(0, lt, 0, pg[0], bsize); in nft_pipapo_avx2_lookup_4b_2()
237 NFT_PIPAPO_AVX2_BUCKET_LOAD4(1, lt, 1, pg[1], bsize); in nft_pipapo_avx2_lookup_4b_2()
[all …]
nft_set_pipapo.h
124 size_t bsize; member
201 __bitmap_and(dst, dst, lt + v * f->bsize, in pipapo_and_field_buckets_4bit()
202 f->bsize * BITS_PER_LONG); in pipapo_and_field_buckets_4bit()
203 lt += f->bsize * NFT_PIPAPO_BUCKETS(4); in pipapo_and_field_buckets_4bit()
206 __bitmap_and(dst, dst, lt + v * f->bsize, in pipapo_and_field_buckets_4bit()
207 f->bsize * BITS_PER_LONG); in pipapo_and_field_buckets_4bit()
208 lt += f->bsize * NFT_PIPAPO_BUCKETS(4); in pipapo_and_field_buckets_4bit()
226 __bitmap_and(dst, dst, lt + *data * f->bsize, in pipapo_and_field_buckets_8bit()
227 f->bsize * BITS_PER_LONG); in pipapo_and_field_buckets_8bit()
228 lt += f->bsize * NFT_PIPAPO_BUCKETS(8); in pipapo_and_field_buckets_8bit()
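The nft_set_pipapo.h matches show how a packet field is classified: the lookup table keeps 16 buckets per 4-bit group, each bucket f->bsize unsigned longs wide, and the result bitmap is ANDed with the bucket selected by every nibble of the key. A minimal user-space sketch of that walk, assuming an even number of groups; names such as and_field_buckets_4bit are illustrative, not kernel APIs:

```c
#include <stddef.h>
#include <stdint.h>

#define BUCKETS_4BIT 16   /* NFT_PIPAPO_BUCKETS(4) */

static void and_field_buckets_4bit(unsigned long *dst, const unsigned long *lt,
                                   const uint8_t *data, size_t groups,
                                   size_t bsize)
{
    /* two 4-bit groups per input byte, high nibble first */
    for (size_t g = 0; g < groups; g += 2) {
        uint8_t hi = data[g / 2] >> 4;
        uint8_t lo = data[g / 2] & 0x0f;

        for (size_t i = 0; i < bsize; i++)     /* __bitmap_and(dst, dst, ...) */
            dst[i] &= lt[hi * bsize + i];
        lt += bsize * BUCKETS_4BIT;            /* next group's 16 buckets */

        for (size_t i = 0; i < bsize; i++)
            dst[i] &= lt[lo * bsize + i];
        lt += bsize * BUCKETS_4BIT;
    }
}
```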
nft_set_pipapo.c
460 b = pipapo_refill(res_map, f->bsize, f->rules, fill_map, f->mt, in nft_pipapo_lookup()
563 b = pipapo_refill(res_map, f->bsize, f->rules, fill_map, f->mt, in pipapo_get()
633 if (new_bucket_size == f->bsize) in pipapo_resize()
636 if (new_bucket_size > f->bsize) in pipapo_resize()
637 copy = f->bsize; in pipapo_resize()
657 if (new_bucket_size > f->bsize) in pipapo_resize()
658 new_p += new_bucket_size - f->bsize; in pipapo_resize()
660 old_p += f->bsize - new_bucket_size; in pipapo_resize()
678 f->bsize = new_bucket_size; in pipapo_resize()
702 pos += f->bsize * NFT_PIPAPO_BUCKETS(f->bb) * group; in pipapo_bucket_set()
[all …]
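In nft_set_pipapo.c, f->bsize is the per-bucket size in longs, and pipapo_resize() grows or shrinks every bucket by copying min(old, new) longs and leaving any growth zero-filled. A rough stand-alone sketch of that copy/zero step, with illustrative names and a plain calloc() instead of the kernel allocator:

```c
#include <stdlib.h>
#include <string.h>

static unsigned long *resize_buckets(const unsigned long *old, size_t nbuckets,
                                     size_t old_bsize, size_t new_bsize)
{
    unsigned long *new_lt = calloc(nbuckets * new_bsize, sizeof(*new_lt));
    size_t copy = old_bsize < new_bsize ? old_bsize : new_bsize;

    if (!new_lt)
        return NULL;

    for (size_t i = 0; i < nbuckets; i++)
        memcpy(new_lt + i * new_bsize, old + i * old_bsize,
               copy * sizeof(*new_lt));   /* tail stays zero when growing */

    return new_lt;
}
```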
/kernel/linux/linux-5.10/crypto/
cbc.c
19 unsigned int bsize = crypto_skcipher_blocksize(skcipher); in crypto_cbc_encrypt_segment() local
33 crypto_xor(iv, src, bsize); in crypto_cbc_encrypt_segment()
35 memcpy(iv, dst, bsize); in crypto_cbc_encrypt_segment()
37 src += bsize; in crypto_cbc_encrypt_segment()
38 dst += bsize; in crypto_cbc_encrypt_segment()
39 } while ((nbytes -= bsize) >= bsize); in crypto_cbc_encrypt_segment()
47 unsigned int bsize = crypto_skcipher_blocksize(skcipher); in crypto_cbc_encrypt_inplace() local
60 crypto_xor(src, iv, bsize); in crypto_cbc_encrypt_inplace()
64 src += bsize; in crypto_cbc_encrypt_inplace()
65 } while ((nbytes -= bsize) >= bsize); in crypto_cbc_encrypt_inplace()
[all …]
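The cbc.c matches are the standard bsize-chunked walk shared by the block-cipher modes below: handle one block, advance src/dst by bsize, and stop once fewer than bsize bytes remain. A hedged sketch of the crypto_cbc_encrypt_segment() shape with a placeholder block transform instead of a real cipher; BSIZE and toy_block_encrypt are assumptions for illustration only:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define BSIZE 16   /* assumed block size; the kernel asks the tfm for it */

static void toy_block_encrypt(uint8_t *dst, const uint8_t *src)
{
    for (size_t i = 0; i < BSIZE; i++)   /* placeholder, not a real cipher */
        dst[i] = src[i] ^ 0xA5;
}

/* caller guarantees nbytes >= BSIZE, as the skcipher walk does */
static size_t cbc_encrypt_segment(uint8_t *dst, const uint8_t *src,
                                  size_t nbytes, uint8_t iv[BSIZE])
{
    const size_t bsize = BSIZE;

    do {
        for (size_t i = 0; i < bsize; i++)   /* crypto_xor(iv, src, bsize) */
            iv[i] ^= src[i];
        toy_block_encrypt(dst, iv);
        memcpy(iv, dst, bsize);              /* ciphertext becomes next IV */
        src += bsize;
        dst += bsize;
    } while ((nbytes -= bsize) >= bsize);

    return nbytes;                           /* leftover partial bytes */
}
```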
pcbc.c
23 int bsize = crypto_cipher_blocksize(tfm); in crypto_pcbc_encrypt_segment() local
30 crypto_xor(iv, src, bsize); in crypto_pcbc_encrypt_segment()
32 crypto_xor_cpy(iv, dst, src, bsize); in crypto_pcbc_encrypt_segment()
34 src += bsize; in crypto_pcbc_encrypt_segment()
35 dst += bsize; in crypto_pcbc_encrypt_segment()
36 } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_segment()
45 int bsize = crypto_cipher_blocksize(tfm); in crypto_pcbc_encrypt_inplace() local
52 memcpy(tmpbuf, src, bsize); in crypto_pcbc_encrypt_inplace()
53 crypto_xor(iv, src, bsize); in crypto_pcbc_encrypt_inplace()
55 crypto_xor_cpy(iv, tmpbuf, src, bsize); in crypto_pcbc_encrypt_inplace()
[all …]
cfb.c
60 const unsigned int bsize = crypto_cfb_bsize(tfm); in crypto_cfb_encrypt_segment() local
68 crypto_xor(dst, src, bsize); in crypto_cfb_encrypt_segment()
71 src += bsize; in crypto_cfb_encrypt_segment()
72 dst += bsize; in crypto_cfb_encrypt_segment()
73 } while ((nbytes -= bsize) >= bsize); in crypto_cfb_encrypt_segment()
75 memcpy(walk->iv, iv, bsize); in crypto_cfb_encrypt_segment()
83 const unsigned int bsize = crypto_cfb_bsize(tfm); in crypto_cfb_encrypt_inplace() local
91 crypto_xor(src, tmp, bsize); in crypto_cfb_encrypt_inplace()
94 src += bsize; in crypto_cfb_encrypt_inplace()
95 } while ((nbytes -= bsize) >= bsize); in crypto_cfb_encrypt_inplace()
[all …]
cts.c
103 int bsize = crypto_skcipher_blocksize(tfm); in cts_cbc_encrypt() local
112 sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize); in cts_cbc_encrypt()
113 scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0); in cts_cbc_encrypt()
115 memset(d, 0, bsize); in cts_cbc_encrypt()
118 scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1); in cts_cbc_encrypt()
124 skcipher_request_set_crypt(subreq, sg, sg, bsize, req->iv); in cts_cbc_encrypt()
149 int bsize = crypto_skcipher_blocksize(tfm); in crypto_cts_encrypt() local
155 if (nbytes < bsize) in crypto_cts_encrypt()
158 if (nbytes == bsize) { in crypto_cts_encrypt()
167 offset = rounddown(nbytes - 1, bsize); in crypto_cts_encrypt()
[all …]
ctr.c
30 unsigned int bsize = crypto_cipher_blocksize(tfm); in crypto_ctr_crypt_final() local
42 crypto_inc(ctrblk, bsize); in crypto_ctr_crypt_final()
50 unsigned int bsize = crypto_cipher_blocksize(tfm); in crypto_ctr_crypt_segment() local
59 crypto_xor(dst, src, bsize); in crypto_ctr_crypt_segment()
62 crypto_inc(ctrblk, bsize); in crypto_ctr_crypt_segment()
64 src += bsize; in crypto_ctr_crypt_segment()
65 dst += bsize; in crypto_ctr_crypt_segment()
66 } while ((nbytes -= bsize) >= bsize); in crypto_ctr_crypt_segment()
76 unsigned int bsize = crypto_cipher_blocksize(tfm); in crypto_ctr_crypt_inplace() local
87 crypto_xor(src, keystream, bsize); in crypto_ctr_crypt_inplace()
[all …]
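ctr.c uses the same walk, but the cipher only ever encrypts the counter block: the result is XORed into the data as a keystream, and the counter is incremented big-endian, which is what crypto_inc() does. Illustrative sketch with a placeholder block transform (toy_block_encrypt and BSIZE are assumptions):

```c
#include <stddef.h>
#include <stdint.h>

#define BSIZE 16

static void toy_block_encrypt(uint8_t *dst, const uint8_t *src)
{
    for (size_t i = 0; i < BSIZE; i++)   /* stand-in for the real cipher */
        dst[i] = src[i] ^ 0x5A;
}

static void ctr_inc(uint8_t *ctr, size_t bsize)
{
    /* big-endian increment with carry, like crypto_inc() */
    for (size_t i = bsize; i-- > 0; )
        if (++ctr[i] != 0)
            break;
}

/* caller guarantees nbytes >= BSIZE on entry */
static size_t ctr_crypt_segment(uint8_t *dst, const uint8_t *src,
                                size_t nbytes, uint8_t ctrblk[BSIZE])
{
    const size_t bsize = BSIZE;
    uint8_t keystream[BSIZE];

    do {
        toy_block_encrypt(keystream, ctrblk);
        for (size_t i = 0; i < bsize; i++)   /* crypto_xor(dst, src, bsize) */
            dst[i] = src[i] ^ keystream[i];
        ctr_inc(ctrblk, bsize);
        src += bsize;
        dst += bsize;
    } while ((nbytes -= bsize) >= bsize);

    return nbytes;   /* < bsize bytes left for the "final" partial-block path */
}
```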
ofb.c
21 const unsigned int bsize = crypto_cipher_blocksize(cipher); in crypto_ofb_crypt() local
27 while (walk.nbytes >= bsize) { in crypto_ofb_crypt()
35 crypto_xor_cpy(dst, src, iv, bsize); in crypto_ofb_crypt()
36 dst += bsize; in crypto_ofb_crypt()
37 src += bsize; in crypto_ofb_crypt()
38 } while ((nbytes -= bsize) >= bsize); in crypto_ofb_crypt()
ecb.c
19 const unsigned int bsize = crypto_cipher_blocksize(cipher); in crypto_ecb_crypt() local
33 src += bsize; in crypto_ecb_crypt()
34 dst += bsize; in crypto_ecb_crypt()
35 } while ((nbytes -= bsize) >= bsize); in crypto_ecb_crypt()
/kernel/linux/linux-5.10/arch/x86/crypto/
cast5_avx_glue.c
53 const unsigned int bsize = CAST5_BLOCK_SIZE; in ecb_crypt() local
67 if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { in ecb_crypt()
72 wsrc += bsize * CAST5_PARALLEL_BLOCKS; in ecb_crypt()
73 wdst += bsize * CAST5_PARALLEL_BLOCKS; in ecb_crypt()
74 nbytes -= bsize * CAST5_PARALLEL_BLOCKS; in ecb_crypt()
75 } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); in ecb_crypt()
77 if (nbytes < bsize) in ecb_crypt()
87 wsrc += bsize; in ecb_crypt()
88 wdst += bsize; in ecb_crypt()
89 nbytes -= bsize; in ecb_crypt()
[all …]
blowfish_glue.c
75 unsigned int bsize = BF_BLOCK_SIZE; in ecb_crypt() local
89 if (nbytes >= bsize * 4) { in ecb_crypt()
93 wsrc += bsize * 4; in ecb_crypt()
94 wdst += bsize * 4; in ecb_crypt()
95 nbytes -= bsize * 4; in ecb_crypt()
96 } while (nbytes >= bsize * 4); in ecb_crypt()
98 if (nbytes < bsize) in ecb_crypt()
106 wsrc += bsize; in ecb_crypt()
107 wdst += bsize; in ecb_crypt()
108 nbytes -= bsize; in ecb_crypt()
[all …]
des3_ede_glue.c
78 const unsigned int bsize = DES3_EDE_BLOCK_SIZE; in ecb_crypt() local
90 if (nbytes >= bsize * 3) { in ecb_crypt()
95 wsrc += bsize * 3; in ecb_crypt()
96 wdst += bsize * 3; in ecb_crypt()
97 nbytes -= bsize * 3; in ecb_crypt()
98 } while (nbytes >= bsize * 3); in ecb_crypt()
100 if (nbytes < bsize) in ecb_crypt()
108 wsrc += bsize; in ecb_crypt()
109 wdst += bsize; in ecb_crypt()
110 nbytes -= bsize; in ecb_crypt()
[all …]
glue_helper.c
25 const unsigned int bsize = 128 / 8; in glue_ecb_req_128bit() local
39 fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit, in glue_ecb_req_128bit()
42 func_bytes = bsize * gctx->funcs[i].num_blocks; in glue_ecb_req_128bit()
55 if (nbytes < bsize) in glue_ecb_req_128bit()
70 const unsigned int bsize = 128 / 8; in glue_cbc_encrypt_req_128bit() local
88 nbytes -= bsize; in glue_cbc_encrypt_req_128bit()
89 } while (nbytes >= bsize); in glue_cbc_encrypt_req_128bit()
102 const unsigned int bsize = 128 / 8; in glue_cbc_decrypt_req_128bit() local
117 fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit, in glue_cbc_decrypt_req_128bit()
120 src += nbytes / bsize - 1; in glue_cbc_decrypt_req_128bit()
[all …]
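The x86 glue files all share one shape: run a wide (multi-block, usually SIMD) path while at least bsize * N bytes remain, then finish one block at a time. A scalar sketch of that two-tier loop; PARALLEL_BLOCKS, one_block() and n_blocks() are stand-ins, not the real assembly entry points:

```c
#include <stddef.h>
#include <stdint.h>

#define BSIZE           8   /* assumed single-block size */
#define PARALLEL_BLOCKS 4   /* blocks handled per wide-path call */

static void one_block(uint8_t *dst, const uint8_t *src)
{
    for (size_t i = 0; i < BSIZE; i++)   /* placeholder transform */
        dst[i] = src[i] ^ 0xFF;
}

static void n_blocks(uint8_t *dst, const uint8_t *src)
{
    for (size_t b = 0; b < PARALLEL_BLOCKS; b++)   /* stand-in for SIMD path */
        one_block(dst + b * BSIZE, src + b * BSIZE);
}

static size_t ecb_walk(uint8_t *dst, const uint8_t *src, size_t nbytes)
{
    const size_t bsize = BSIZE;

    /* wide path: whole batches of PARALLEL_BLOCKS blocks */
    while (nbytes >= bsize * PARALLEL_BLOCKS) {
        n_blocks(dst, src);
        src += bsize * PARALLEL_BLOCKS;
        dst += bsize * PARALLEL_BLOCKS;
        nbytes -= bsize * PARALLEL_BLOCKS;
    }

    /* scalar tail: one block at a time */
    while (nbytes >= bsize) {
        one_block(dst, src);
        src += bsize;
        dst += bsize;
        nbytes -= bsize;
    }

    return nbytes;   /* ECB leaves any partial block untouched */
}
```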
/kernel/linux/linux-5.10/arch/s390/crypto/
sha_common.c
19 unsigned int bsize = crypto_shash_blocksize(desc->tfm); in s390_sha_update() local
23 index = ctx->count % bsize; in s390_sha_update()
26 if ((index + len) < bsize) in s390_sha_update()
31 memcpy(ctx->buf + index, data, bsize - index); in s390_sha_update()
32 cpacf_kimd(ctx->func, ctx->state, ctx->buf, bsize); in s390_sha_update()
33 data += bsize - index; in s390_sha_update()
34 len -= bsize - index; in s390_sha_update()
39 if (len >= bsize) { in s390_sha_update()
40 n = (len / bsize) * bsize; in s390_sha_update()
75 unsigned int bsize = crypto_shash_blocksize(desc->tfm); in s390_sha_final() local
[all …]
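s390_sha_update() is the usual partial-block buffering scheme: index = count % bsize bytes are already buffered, so top the buffer up to a full block, hash whole blocks straight from the caller's data, and stash the tail for later. A stand-alone sketch with a placeholder process_blocks() instead of cpacf_kimd(); BSIZE is an assumed block size:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define BSIZE 64   /* assumed hash block size */

struct sha_ctx {
    uint64_t count;        /* total bytes fed in so far */
    uint8_t  buf[BSIZE];   /* partial-block buffer */
};

static void process_blocks(const uint8_t *data, size_t len)
{
    (void)data; (void)len;   /* placeholder: compress len/BSIZE full blocks */
}

static void sha_update(struct sha_ctx *ctx, const uint8_t *data, size_t len)
{
    size_t index = ctx->count % BSIZE;

    ctx->count += len;

    /* not enough for a full block yet: just buffer it */
    if (index + len < BSIZE) {
        memcpy(ctx->buf + index, data, len);
        return;
    }

    /* complete and process the buffered block first */
    if (index) {
        memcpy(ctx->buf + index, data, BSIZE - index);
        process_blocks(ctx->buf, BSIZE);
        data += BSIZE - index;
        len -= BSIZE - index;
    }

    /* process whole blocks straight from the input */
    if (len >= BSIZE) {
        size_t n = (len / BSIZE) * BSIZE;

        process_blocks(data, n);
        data += n;
        len -= n;
    }

    memcpy(ctx->buf, data, len);   /* keep the tail for the next update */
}
```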
/kernel/linux/linux-5.10/arch/m68k/emu/
nfblock.c
56 u32 blocks, bsize; member
100 static int __init nfhd_init_one(int id, u32 blocks, u32 bsize) in nfhd_init_one() argument
106 blocks, bsize); in nfhd_init_one()
108 if (bsize < 512 || (bsize & (bsize - 1))) { in nfhd_init_one()
119 dev->bsize = bsize; in nfhd_init_one()
120 dev->bshift = ffs(bsize) - 10; in nfhd_init_one()
126 blk_queue_logical_block_size(dev->queue, bsize); in nfhd_init_one()
137 set_capacity(dev->disk, (sector_t)blocks * (bsize / 512)); in nfhd_init_one()
156 u32 blocks, bsize; in nfhd_init() local
174 if (nfhd_get_capacity(i, 0, &blocks, &bsize)) in nfhd_init()
[all …]
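nfhd_init_one() only accepts a block size that is at least 512 bytes and a power of two, then derives a shift from it (ffs(bsize) - 10, i.e. log2(bsize / 512), the factor between 512-byte sectors and device blocks) and the capacity in 512-byte sectors. Small sketch of those checks and conversions; nfhd_check is a hypothetical helper, not the kernel function:

```c
#include <stdint.h>
#include <stdio.h>
#include <strings.h>   /* ffs() */

static int nfhd_check(uint32_t blocks, uint32_t bsize)
{
    uint64_t sectors;
    int bshift;

    if (bsize < 512 || (bsize & (bsize - 1))) {
        fprintf(stderr, "invalid block size %u\n", bsize);
        return -1;
    }

    bshift = ffs(bsize) - 10;               /* 512 -> 0, 1024 -> 1, 4096 -> 3 */
    sectors = (uint64_t)blocks * (bsize / 512);

    printf("blocks=%u bsize=%u bshift=%d sectors=%llu\n",
           blocks, bsize, bshift, (unsigned long long)sectors);
    return 0;
}
```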
/kernel/linux/linux-5.10/lib/mpi/
mpi-pow.c
31 mpi_size_t esize, msize, bsize, rsize; in mpi_powm() local
79 bsize = base->nlimbs; in mpi_powm()
81 if (bsize > msize) { /* The base is larger than the module. Reduce it. */ in mpi_powm()
84 bp = bp_marker = mpi_alloc_limb_space(bsize + 1); in mpi_powm()
87 MPN_COPY(bp, base->d, bsize); in mpi_powm()
90 mpihelp_divrem(bp + msize, 0, bp, bsize, mp, msize); in mpi_powm()
91 bsize = msize; in mpi_powm()
94 MPN_NORMALIZE(bp, bsize); in mpi_powm()
98 if (!bsize) { in mpi_powm()
122 bp = bp_marker = mpi_alloc_limb_space(bsize); in mpi_powm()
[all …]
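In mpi_powm(), bsize counts the limbs of the base, and a base with more limbs than the modulus is reduced by a division before the exponentiation proper starts. A scalar analog of that idea using 64-bit values instead of limb arrays (assumes a gcc/clang __int128 and mod > 1):

```c
#include <stdint.h>

static uint64_t modpow(uint64_t base, uint64_t exp, uint64_t mod)
{
    unsigned __int128 result = 1;
    unsigned __int128 b = base % mod;   /* analog of the bsize > msize divrem */

    while (exp) {
        if (exp & 1)
            result = (result * b) % mod;
        b = (b * b) % mod;
        exp >>= 1;
    }
    return (uint64_t)result;
}
```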
/kernel/linux/linux-5.10/include/crypto/
ctr.h
48 int bsize = min(nbytes, blocksize); in crypto_ctr_encrypt_walk() local
52 crypto_xor_cpy(dst, src, buf, bsize); in crypto_ctr_encrypt_walk()
55 dst += bsize; in crypto_ctr_encrypt_walk()
56 src += bsize; in crypto_ctr_encrypt_walk()
57 nbytes -= bsize; in crypto_ctr_encrypt_walk()
/kernel/linux/linux-5.10/fs/squashfs/
file_direct.c
21 static int squashfs_read_cache(struct page *target_page, u64 block, int bsize,
25 int squashfs_readpage_block(struct page *target_page, u64 block, int bsize, in squashfs_readpage_block() argument
84 res = squashfs_read_cache(target_page, block, bsize, pages, in squashfs_readpage_block()
93 res = squashfs_read_data(inode->i_sb, block, bsize, NULL, actor); in squashfs_readpage_block()
144 static int squashfs_read_cache(struct page *target_page, u64 block, int bsize, in squashfs_read_cache() argument
149 block, bsize); in squashfs_read_cache()
154 bsize); in squashfs_read_cache()
file_cache.c
21 int squashfs_readpage_block(struct page *page, u64 block, int bsize, int expected) in squashfs_readpage_block() argument
25 block, bsize); in squashfs_readpage_block()
30 bsize); in squashfs_readpage_block()
/kernel/linux/linux-5.10/fs/jfs/
jfs_mount.c
293 s32 bsize; in chkSuper() local
309 bsize = le32_to_cpu(j_sb->s_bsize); in chkSuper()
311 if (bsize != PSIZE) { in chkSuper()
326 AIM_bytesize = lengthPXD(&(j_sb->s_aim2)) * bsize; in chkSuper()
328 AIT_bytesize = lengthPXD(&(j_sb->s_ait2)) * bsize; in chkSuper()
329 AIM_byte_addr = addressPXD(&(j_sb->s_aim2)) * bsize; in chkSuper()
330 AIT_byte_addr = addressPXD(&(j_sb->s_ait2)) * bsize; in chkSuper()
332 fsckwsp_addr = addressPXD(&(j_sb->s_fsckpxd)) * bsize; in chkSuper()
360 sbi->bsize = bsize; in chkSuper()
364 if (sbi->l2bsize != ilog2((u32)bsize) || in chkSuper()
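chkSuper() treats bsize as the filesystem block size: extent lengths and addresses from the superblock are multiplied by it to get byte values, and the stored l2bsize must equal ilog2(bsize). A small stand-alone version of that consistency check; check_blocksize and ilog2_u32 are illustrative helpers, not JFS code:

```c
#include <stdint.h>

static int ilog2_u32(uint32_t v)
{
    int l = -1;

    while (v) {
        v >>= 1;
        l++;
    }
    return l;   /* floor(log2(v)); -1 for v == 0 */
}

/* returns 0 if (bsize, l2bsize) are consistent, -1 otherwise */
static int check_blocksize(uint32_t bsize, uint32_t l2bsize)
{
    if (bsize == 0 || (bsize & (bsize - 1)))
        return -1;                          /* must be a power of two */
    if ((int)l2bsize != ilog2_u32(bsize))
        return -1;                          /* stored log2 must match */
    return 0;
}

/* extent length or address counted in blocks -> bytes */
static inline uint64_t blocks_to_bytes(uint64_t blocks, uint32_t bsize)
{
    return blocks * bsize;
}
```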
/kernel/linux/linux-5.10/fs/freevxfs/
vxfs_lookup.c
83 u_long bsize = ip->i_sb->s_blocksize; in vxfs_find_entry() local
104 if ((pos & (bsize - 1)) < 4) { in vxfs_find_entry()
116 pos += bsize - 1; in vxfs_find_entry()
117 pos &= ~(bsize - 1); in vxfs_find_entry()
219 u_long bsize = sbp->s_blocksize; in vxfs_readdir() local
255 if ((pos & (bsize - 1)) < 4) { in vxfs_readdir()
267 pos += bsize - 1; in vxfs_readdir()
268 pos &= ~(bsize - 1); in vxfs_readdir()
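The freevxfs matches rely on bsize being a power of two: pos & (bsize - 1) is the offset inside the current block, and "pos += bsize - 1; pos &= ~(bsize - 1);" rounds the position up to the next block boundary. Stand-alone illustration with a couple of sanity asserts:

```c
#include <assert.h>
#include <stdint.h>

static uint64_t offset_in_block(uint64_t pos, uint64_t bsize)
{
    return pos & (bsize - 1);        /* bsize must be a power of two */
}

static uint64_t round_up_to_block(uint64_t pos, uint64_t bsize)
{
    pos += bsize - 1;
    return pos & ~(bsize - 1);
}

int main(void)
{
    assert(offset_in_block(1027, 1024) == 3);
    assert(round_up_to_block(1027, 1024) == 2048);
    assert(round_up_to_block(2048, 1024) == 2048);   /* already aligned */
    return 0;
}
```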
vxfs_olt.c
57 vxfs_oblock(struct super_block *sbp, daddr_t block, u_long bsize) in vxfs_oblock() argument
59 BUG_ON(sbp->s_blocksize % bsize); in vxfs_oblock()
60 return (block * (sbp->s_blocksize / bsize)); in vxfs_oblock()
77 vxfs_read_olt(struct super_block *sbp, u_long bsize) in vxfs_read_olt() argument
84 bp = sb_bread(sbp, vxfs_oblock(sbp, infp->vsi_oltext, bsize)); in vxfs_read_olt()
/kernel/linux/linux-5.10/fs/affs/
file.c
529 u32 bidx, boff, bsize; in affs_do_readpage_ofs() local
535 bsize = AFFS_SB(sb)->s_data_blksize; in affs_do_readpage_ofs()
537 bidx = tmp / bsize; in affs_do_readpage_ofs()
538 boff = tmp % bsize; in affs_do_readpage_ofs()
544 tmp = min(bsize - boff, to - pos); in affs_do_readpage_ofs()
545 BUG_ON(pos + tmp > to || tmp > bsize); in affs_do_readpage_ofs()
564 u32 size, bsize; in affs_extent_file_ofs() local
568 bsize = AFFS_SB(sb)->s_data_blksize; in affs_extent_file_ofs()
571 bidx = size / bsize; in affs_extent_file_ofs()
572 boff = size % bsize; in affs_extent_file_ofs()
[all …]
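affs_do_readpage_ofs() splits a byte offset into a data-block index (tmp / bsize) and an offset inside that block (tmp % bsize), then copies at most bsize - boff bytes before moving to the next block. Hypothetical helpers showing the same arithmetic:

```c
#include <stddef.h>
#include <stdint.h>

struct blk_pos {
    uint32_t bidx;   /* which data block */
    uint32_t boff;   /* offset inside that block */
};

static struct blk_pos split_offset(uint64_t byte_off, uint32_t bsize)
{
    struct blk_pos p = {
        .bidx = (uint32_t)(byte_off / bsize),
        .boff = (uint32_t)(byte_off % bsize),
    };
    return p;
}

/* bytes that can be copied from this block before crossing into the next */
static size_t chunk_len(struct blk_pos p, uint32_t bsize, size_t remaining)
{
    size_t in_block = bsize - p.boff;

    return in_block < remaining ? in_block : remaining;
}
```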
/kernel/linux/linux-5.10/tools/perf/util/
mem2node.c
39 phys_entry__init(struct phys_entry *entry, u64 start, u64 bsize, u64 node) in phys_entry__init() argument
42 entry->end = start + bsize; in phys_entry__init()
51 u64 bsize = env->memory_bsize; in mem2node__init() local
77 start = bit * bsize; in mem2node__init()
88 prev->end += bsize; in mem2node__init()
93 phys_entry__init(&entries[j++], start, bsize, n->node); in mem2node__init()
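mem2node turns a per-node memory bitmap into [start, end) ranges: bit i covers bytes [i * bsize, (i + 1) * bsize), and a bit adjacent to the previous range extends it by bsize instead of opening a new entry. Simplified sketch over a plain bool array rather than a perf bitmap; bits_to_ranges is an illustrative name:

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct phys_entry {
    uint64_t start;
    uint64_t end;     /* exclusive */
};

/* returns the number of entries written to "out" (capacity: nbits) */
static size_t bits_to_ranges(const bool *bits, size_t nbits, uint64_t bsize,
                             struct phys_entry *out)
{
    size_t n = 0;

    for (size_t bit = 0; bit < nbits; bit++) {
        uint64_t start = bit * bsize;

        if (!bits[bit])
            continue;

        /* contiguous with the previous range: just grow it */
        if (n && out[n - 1].end == start) {
            out[n - 1].end += bsize;
            continue;
        }

        out[n].start = start;
        out[n].end = start + bsize;
        n++;
    }
    return n;
}
```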
