Searched refs:blocks (Results 1 – 25 of 92) sorted by relevance

/arch/arm64/crypto/
aes-neonbs-glue.c:29 int rounds, int blocks);
31 int rounds, int blocks);
34 int rounds, int blocks, u8 iv[]);
37 int rounds, int blocks, u8 iv[], u8 final[]);
40 int rounds, int blocks, u8 iv[]);
42 int rounds, int blocks, u8 iv[]);
46 int rounds, int blocks);
48 int rounds, int blocks, u8 iv[]);
99 int rounds, int blocks)) in __ecb_crypt() argument
109 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt() local
[all …]
ghash-ce-glue.c:53 asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
57 asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
61 asmlinkage void pmull_gcm_encrypt(int blocks, u64 dg[], u8 dst[],
66 asmlinkage void pmull_gcm_decrypt(int blocks, u64 dg[], u8 dst[],
81 static void ghash_do_update(int blocks, u64 dg[], const char *src, in ghash_do_update() argument
83 void (*simd_update)(int blocks, u64 dg[], in ghash_do_update() argument
90 simd_update(blocks, dg, src, key, head); in ghash_do_update()
100 blocks++; in ghash_do_update()
108 } while (--blocks); in ghash_do_update()
120 void (*simd_update)(int blocks, u64 dg[], in __ghash_update() argument
[all …]
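
The ghash_do_update() hits above walk the input one block at a time with a do { ... } while (--blocks) loop, bumping the block count by one when a partially filled head buffer has to be folded in first. A rough, self-contained sketch of that control flow follows; the 16-byte block size matches GHASH_BLOCK_SIZE, but fold_block() is a hypothetical stand-in for the GF(2^128) multiply the kernel actually performs.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define GHASH_BLOCK 16

/* Hypothetical stand-in: the real fallback multiplies in GF(2^128). */
static void fold_block(uint64_t dg[2], const uint8_t *block)
{
    uint64_t lo, hi;

    memcpy(&lo, block, 8);
    memcpy(&hi, block + 8, 8);
    dg[0] ^= lo;
    dg[1] ^= hi;
}

/*
 * Process `blocks` whole blocks; if `head` is non-NULL it counts as the
 * first of those blocks and is consumed exactly once.
 */
static void do_update(int blocks, uint64_t dg[2],
                      const uint8_t *src, const uint8_t *head)
{
    do {
        const uint8_t *in = head;

        if (!in) {
            in = src;
            src += GHASH_BLOCK;
        }
        head = NULL;
        fold_block(dg, in);
    } while (--blocks);
}

int main(void)
{
    uint8_t head[GHASH_BLOCK] = { 1 }, body[2 * GHASH_BLOCK] = { 2 };
    uint64_t dg[2] = { 0, 0 };

    do_update(3, dg, body, head);   /* buffered head block + two body blocks */
    printf("dg = %016llx %016llx\n",
           (unsigned long long)dg[0], (unsigned long long)dg[1]);
    return 0;
}
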
aes-glue.c:75 int rounds, int blocks);
77 int rounds, int blocks);
80 int rounds, int blocks, u8 iv[]);
82 int rounds, int blocks, u8 iv[]);
90 int rounds, int blocks, u8 ctr[]);
100 int rounds, int blocks, u8 iv[],
103 int rounds, int blocks, u8 iv[],
107 int blocks, u8 dg[], int enc_before,
197 unsigned int blocks; in ecb_encrypt() local
201 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
[all …]
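
A note on the pattern these aes-glue.c hits expose: each skcipher-walk step exposes walk.nbytes bytes, only the whole AES blocks in that span are handed to the assembly routine, and the leftover bytes carry over to the next step. A minimal user-space sketch of that block-counting arithmetic follows; BLOCK_SIZE and process_blocks() are hypothetical stand-ins, not the kernel API.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 16  /* stand-in for AES_BLOCK_SIZE */

/* Hypothetical stand-in for the asm helper that takes a whole-block count. */
static void process_blocks(uint8_t *dst, const uint8_t *src, unsigned int blocks)
{
    memcpy(dst, src, (size_t)blocks * BLOCK_SIZE);
}

/* Handle only whole blocks; return the byte count left for the next step. */
static size_t crypt_walk_step(uint8_t *dst, const uint8_t *src, size_t nbytes)
{
    unsigned int blocks = nbytes / BLOCK_SIZE;

    if (blocks)
        process_blocks(dst, src, blocks);

    return nbytes - (size_t)blocks * BLOCK_SIZE;
}

int main(void)
{
    uint8_t in[40] = { 0 }, out[40] = { 0 };

    printf("leftover bytes: %zu\n", crypt_walk_step(out, in, sizeof(in)));  /* 8 */
    return 0;
}
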
sha2-ce-glue.c:29 int blocks);
32 int blocks) in __sha2_ce_transform() argument
35 src, blocks); in __sha2_ce_transform()
43 asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);
46 int blocks) in __sha256_block_data_order() argument
48 return sha256_block_data_order(sst->state, src, blocks); in __sha256_block_data_order()
sha3-ce-glue.c:27 asmlinkage void sha3_ce_transform(u64 *st, const u8 *data, int blocks,
40 int blocks; in sha3_update() local
55 blocks = len / sctx->rsiz; in sha3_update()
58 if (blocks) { in sha3_update()
60 sha3_ce_transform(sctx->st, data, blocks, digest_size); in sha3_update()
62 data += blocks * sctx->rsiz; in sha3_update()
sha512-ce-glue.c:28 int blocks);
30 asmlinkage void sha512_block_data_order(u64 *digest, u8 const *src, int blocks);
33 int blocks) in __sha512_block_data_order() argument
35 return sha512_block_data_order(sst->state, src, blocks); in __sha512_block_data_order()
sha1-ce-glue.c:29 int blocks);
32 int blocks) in __sha1_ce_transform() argument
35 src, blocks); in __sha1_ce_transform()
sha256-glue.c:31 int blocks) in __sha256_block_data_order() argument
33 return sha256_block_data_order(sst->state, src, blocks); in __sha256_block_data_order()
40 int blocks) in __sha256_block_neon() argument
42 return sha256_block_neon(sst->state, src, blocks); in __sha256_block_neon()
sha512-glue.c:28 int blocks) in __sha512_block_data_order() argument
30 return sha512_block_data_order(sst->state, src, blocks); in __sha512_block_data_order()
/arch/m68k/emu/
nfblock.c:41 static inline s32 nfhd_get_capacity(u32 major, u32 minor, u32 *blocks, in nfhd_get_capacity() argument
45 virt_to_phys(blocks), virt_to_phys(blocksize)); in nfhd_get_capacity()
56 u32 blocks, bsize; member
87 geo->cylinders = dev->blocks >> (6 - dev->bshift); in nfhd_getgeo()
99 static int __init nfhd_init_one(int id, u32 blocks, u32 bsize) in nfhd_init_one() argument
105 blocks, bsize); in nfhd_init_one()
117 dev->blocks = blocks; in nfhd_init_one()
138 set_capacity(dev->disk, (sector_t)blocks * (bsize / 512)); in nfhd_init_one()
157 u32 blocks, bsize; in nfhd_init() local
175 if (nfhd_get_capacity(i, 0, &blocks, &bsize)) in nfhd_init()
[all …]
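
The nfhd_init_one() hit above sets the gendisk capacity in 512-byte sectors: a device reporting `blocks` native blocks of `bsize` bytes each becomes blocks * (bsize / 512) sectors. A small sketch of the same arithmetic, with made-up values:

#include <stdint.h>
#include <stdio.h>

/* Illustrative values only; the real numbers come from the NatFeat host. */
int main(void)
{
    uint32_t blocks = 1048576;          /* 1 Mi native blocks */
    uint32_t bsize  = 4096;             /* 4 KiB per block    */
    uint64_t sectors = (uint64_t)blocks * (bsize / 512);

    printf("capacity: %llu sectors (%llu bytes)\n",
           (unsigned long long)sectors,
           (unsigned long long)sectors * 512);
    return 0;
}
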
/arch/x86/crypto/
poly1305_glue.c:31 const u32 *r, unsigned int blocks);
33 unsigned int blocks, const u32 *u);
36 unsigned int blocks, const u32 *u);
67 unsigned int blocks, datalen; in poly1305_simd_blocks() local
92 blocks = srclen / (POLY1305_BLOCK_SIZE * 4); in poly1305_simd_blocks()
93 poly1305_4block_avx2(dctx->h.h, src, dctx->r.r, blocks, in poly1305_simd_blocks()
95 src += POLY1305_BLOCK_SIZE * 4 * blocks; in poly1305_simd_blocks()
96 srclen -= POLY1305_BLOCK_SIZE * 4 * blocks; in poly1305_simd_blocks()
105 blocks = srclen / (POLY1305_BLOCK_SIZE * 2); in poly1305_simd_blocks()
106 poly1305_2block_sse2(dctx->h.h, src, dctx->r.r, blocks, in poly1305_simd_blocks()
[all …]
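
The poly1305_simd_blocks() hits show the input being carved into strides: as many 4-block chunks as possible for the AVX2 path, then 2-block chunks for the SSE2 path, with any sub-block tail left to the generic code. A hedged sketch of just that length arithmetic (the state updates and actual chunk processing are omitted):

#include <stddef.h>
#include <stdio.h>

#define BLOCK 16  /* stand-in for POLY1305_BLOCK_SIZE */

/* Sketch: count wide chunks first, then narrower ones, then the tail. */
static void split_into_strides(size_t srclen)
{
    size_t blocks;

    blocks = srclen / (BLOCK * 4);          /* wide (AVX2-style) pass */
    srclen -= BLOCK * 4 * blocks;
    printf("4-block chunks: %zu\n", blocks);

    blocks = srclen / (BLOCK * 2);          /* narrower (SSE2-style) pass */
    srclen -= BLOCK * 2 * blocks;
    printf("2-block chunks: %zu, tail bytes: %zu\n", blocks, srclen);
}

int main(void)
{
    split_into_strides(200);  /* 3 wide chunks, 0 two-block chunks, 8 tail bytes */
    return 0;
}
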
sha1_ssse3_glue.c:68 const u8 *data, int blocks);
119 const u8 *data, int blocks);
188 const u8 *data, int blocks);
201 const u8 *data, int blocks) in sha1_apply_transform_avx2() argument
204 if (blocks >= SHA1_AVX2_BLOCK_OPTSIZE) in sha1_apply_transform_avx2()
205 sha1_transform_avx2(state, data, blocks); in sha1_apply_transform_avx2()
207 sha1_transform_avx(state, data, blocks); in sha1_apply_transform_avx2()
sha512_ssse3_glue.c:43 const u8 *data, int blocks);
147 const u8 *data, int blocks);
228 const u8 *data, int blocks);
sha256_ssse3_glue.c:45 const u8 *data, int blocks);
149 const u8 *data, int blocks);
231 const u8 *data, int blocks);
chacha-avx512vl-x86_64.S:29 # %rsi: up to 2 data blocks output, o
30 # %rdx: up to 2 data blocks input, i
34 # This function encrypts two ChaCha blocks by loading the state
194 # %rsi: up to 4 data blocks output, o
195 # %rdx: up to 4 data blocks input, i
199 # This function encrypts four ChaCha blocks by loading the state
460 # %rsi: up to 8 data blocks output, o
461 # %rdx: up to 8 data blocks input, i
465 # This function encrypts eight consecutive ChaCha blocks by loading
705 # xor/write first four blocks
[all …]
/arch/arm/crypto/
aes-neonbs-glue.c:30 int rounds, int blocks);
32 int rounds, int blocks);
35 int rounds, int blocks, u8 iv[]);
38 int rounds, int blocks, u8 ctr[], u8 final[]);
41 int rounds, int blocks, u8 iv[], int);
43 int rounds, int blocks, u8 iv[], int);
88 int rounds, int blocks)) in __ecb_crypt() argument
98 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt() local
101 blocks = round_down(blocks, in __ecb_crypt()
106 ctx->rounds, blocks); in __ecb_crypt()
[all …]
aes-ce-glue.c:30 int rounds, int blocks);
32 int rounds, int blocks);
35 int rounds, int blocks, u8 iv[]);
37 int rounds, int blocks, u8 iv[]);
44 int rounds, int blocks, u8 ctr[]);
182 unsigned int blocks; in ecb_encrypt() local
187 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
190 ctx->key_enc, num_rounds(ctx), blocks); in ecb_encrypt()
202 unsigned int blocks; in ecb_decrypt() local
207 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
[all …]
ghash-ce-glue.c:48 asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
52 asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
56 static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
68 static void ghash_do_update(int blocks, u64 dg[], const char *src, in ghash_do_update() argument
73 pmull_ghash_update(blocks, dg, src, key, head); in ghash_do_update()
83 blocks++; in ghash_do_update()
91 } while (--blocks); in ghash_do_update()
108 int blocks; in ghash_update() local
118 blocks = len / GHASH_BLOCK_SIZE; in ghash_update()
121 ghash_do_update(blocks, ctx->digest, src, key, in ghash_update()
[all …]
/arch/powerpc/platforms/pseries/
papr_scm.c:29 uint64_t blocks; member
61 p->blocks, BIND_ANY_ADDR, token); in drc_pmem_bind()
127 p->drc_index, p->blocks - 1); in drc_pmem_query_n_bind()
132 if ((end_addr - start_addr) != ((p->blocks - 1) * p->block_size)) in drc_pmem_query_n_bind()
366 mapping.size = p->blocks * p->block_size; // XXX: potential overflow? in papr_scm_nvdimm_init()
406 u64 blocks, block_size; in papr_scm_probe() local
423 if (of_property_read_u64(dn, "ibm,number-of-blocks", &blocks)) { in papr_scm_probe()
444 p->blocks = blocks; in papr_scm_probe()
478 p->res.end = p->bound_addr + p->blocks * p->block_size - 1; in papr_scm_probe()
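
The papr_scm_nvdimm_init() hit at file line 366 flags the p->blocks * p->block_size product with an XXX about overflow. One way to guard such a product is sketched below, using the GCC/Clang __builtin_mul_overflow() builtin in place of the kernel's check_mul_overflow() helper; the values are illustrative only.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Returns true and writes the product only when it fits in a u64. */
static bool region_size(uint64_t blocks, uint64_t block_size, uint64_t *size)
{
    return !__builtin_mul_overflow(blocks, block_size, size);
}

int main(void)
{
    uint64_t size;

    if (region_size(UINT64_C(1) << 40, UINT64_C(1) << 30, &size))
        printf("size = %llu\n", (unsigned long long)size);
    else
        printf("blocks * block_size would overflow u64\n");
    return 0;
}
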
/arch/powerpc/kernel/
rtas_flash.c:110 struct flash_block blocks[FLASH_BLOCKS_PER_NODE]; member
173 if (f->blocks[i].data == NULL) { in flash_list_valid()
176 block_size = f->blocks[i].length; in flash_list_valid()
201 kmem_cache_free(flash_block_cache, f->blocks[i].data); in free_flash_list()
357 fl->blocks[next_free].data = p; in rtas_flash_write()
358 fl->blocks[next_free].length = count; in rtas_flash_write()
610 f->blocks[i].data = (char *)cpu_to_be64(__pa(f->blocks[i].data)); in rtas_flash_firmware()
611 image_size += f->blocks[i].length; in rtas_flash_firmware()
612 f->blocks[i].length = cpu_to_be64(f->blocks[i].length); in rtas_flash_firmware()
/arch/x86/kernel/cpu/mce/
amd.c:1023 first_block = per_cpu(threshold_banks, cpu)[bank]->blocks; in amd_threshold_interrupt()
1244 if (per_cpu(threshold_banks, cpu)[bank]->blocks) { in allocate_threshold_blocks()
1246 &per_cpu(threshold_banks, cpu)[bank]->blocks->miscj); in allocate_threshold_blocks()
1248 per_cpu(threshold_banks, cpu)[bank]->blocks = b; in allocate_threshold_blocks()
1281 struct list_head *head = &b->blocks->miscj; in __threshold_add_blocks()
1286 err = kobject_add(&b->blocks->kobj, b->kobj, b->blocks->kobj.name); in __threshold_add_blocks()
1379 list_for_each_entry_safe(pos, tmp, &head->blocks->miscj, miscj) { in deallocate_threshold_block()
1385 kfree(per_cpu(threshold_banks, cpu)[bank]->blocks); in deallocate_threshold_block()
1386 per_cpu(threshold_banks, cpu)[bank]->blocks = NULL; in deallocate_threshold_block()
1396 list_for_each_entry_safe(pos, tmp, &b->blocks->miscj, miscj) in __threshold_remove_blocks()
[all …]
/arch/arm/boot/dts/
sama5d3_tcb1.dtsi:4 * 2 TC blocks.
/arch/arm64/boot/dts/freescale/
fsl-ls1043-post.dtsi:10 /* include used FMan blocks */
fsl-ls1046-post.dtsi:11 /* include used FMan blocks */
/arch/mips/cavium-octeon/
Kconfig:21 This selects the size of CVMSEG LM, which is in cache blocks. The
22 legally range is from zero to 54 cache blocks (i.e. CVMSEG LM is
