
Searched refs:bytes (Results 1 – 25 of 264) sorted by relevance


/arch/x86/crypto/
chacha_glue.c
49 unsigned int bytes, int nrounds) in chacha_dosimd() argument
53 while (bytes >= CHACHA_BLOCK_SIZE * 8) { in chacha_dosimd()
54 chacha_8block_xor_avx512vl(state, dst, src, bytes, in chacha_dosimd()
56 bytes -= CHACHA_BLOCK_SIZE * 8; in chacha_dosimd()
61 if (bytes > CHACHA_BLOCK_SIZE * 4) { in chacha_dosimd()
62 chacha_8block_xor_avx512vl(state, dst, src, bytes, in chacha_dosimd()
64 state[12] += chacha_advance(bytes, 8); in chacha_dosimd()
67 if (bytes > CHACHA_BLOCK_SIZE * 2) { in chacha_dosimd()
68 chacha_4block_xor_avx512vl(state, dst, src, bytes, in chacha_dosimd()
70 state[12] += chacha_advance(bytes, 4); in chacha_dosimd()
[all …]
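The chacha_glue.c hits above show the width tiering in the x86 SIMD glue: full 8-block passes while enough data remains, then a single call sized to the tail, with chacha_advance() working out how far the block counter in state[12] moves. Below is a minimal user-space sketch of that control flow, assuming the elided tail falls through to a narrower 2-block kernel; chacha_nblock_xor() is a hypothetical stand-in for the chacha_{2,4,8}block_xor_avx512vl() routines and only reports the width it would use.

#include <stdio.h>

#define CHACHA_BLOCK_SIZE 64

/* Stand-in for the chacha_{2,4,8}block_xor_avx512vl() kernels: it only
 * reports which width would have been used for this pass. */
static void chacha_nblock_xor(int width, unsigned int bytes)
{
    printf("%u bytes -> %d-block kernel\n", bytes, width);
}

/* Blocks consumed by a kernel that was handed 'bytes' of data, capped at
 * the kernel's width (mirrors chacha_advance() in the results above). */
static unsigned int chacha_advance(unsigned int bytes, unsigned int maxblocks)
{
    unsigned int blocks = (bytes + CHACHA_BLOCK_SIZE - 1) / CHACHA_BLOCK_SIZE;

    return blocks < maxblocks ? blocks : maxblocks;
}

static void chacha_dosimd_sketch(unsigned int *state, unsigned int bytes)
{
    while (bytes >= CHACHA_BLOCK_SIZE * 8) {        /* full 8-block passes */
        chacha_nblock_xor(8, CHACHA_BLOCK_SIZE * 8);
        bytes -= CHACHA_BLOCK_SIZE * 8;
        state[12] += 8;                             /* assumed: counter moves 8 per pass */
    }
    if (bytes > CHACHA_BLOCK_SIZE * 4) {            /* 5..8 blocks remain */
        chacha_nblock_xor(8, bytes);
        state[12] += chacha_advance(bytes, 8);
    } else if (bytes > CHACHA_BLOCK_SIZE * 2) {     /* 3..4 blocks remain */
        chacha_nblock_xor(4, bytes);
        state[12] += chacha_advance(bytes, 4);
    } else if (bytes) {                             /* 1..2 blocks remain (assumed tail) */
        chacha_nblock_xor(2, bytes);
        state[12] += chacha_advance(bytes, 2);
    }
}

int main(void)
{
    unsigned int state[16] = { 0 };

    chacha_dosimd_sketch(state, 5 * CHACHA_BLOCK_SIZE + 10);   /* lands in the 8-block tail */
    printf("counter advanced by %u\n", state[12]);             /* 6 blocks */
    return 0;
}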
poly1305_glue.c
107 const size_t bytes = min_t(size_t, len, SZ_4K); in poly1305_simd_blocks() local
111 poly1305_blocks_avx512(ctx, inp, bytes, padbit); in poly1305_simd_blocks()
113 poly1305_blocks_avx2(ctx, inp, bytes, padbit); in poly1305_simd_blocks()
115 poly1305_blocks_avx(ctx, inp, bytes, padbit); in poly1305_simd_blocks()
118 len -= bytes; in poly1305_simd_blocks()
119 inp += bytes; in poly1305_simd_blocks()
171 unsigned int bytes, used; in poly1305_update_arch() local
174 bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen); in poly1305_update_arch()
175 memcpy(dctx->buf + dctx->buflen, src, bytes); in poly1305_update_arch()
176 src += bytes; in poly1305_update_arch()
[all …]
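poly1305_glue.c above splits long inputs into passes of at most 4 KiB (keeping the FPU-protected region per pass short) and hands each pass to the widest available kernel. A hedged sketch of that dispatch loop follows; the blocks_*() functions and CPU feature flags are stand-ins for the real AVX/AVX2/AVX-512 routines and static keys.

#include <stddef.h>
#include <stdio.h>

#define SZ_4K 4096

static int have_avx512, have_avx2, have_avx;    /* assumed CPU feature flags */

static void blocks_scalar(const unsigned char *inp, size_t n) { (void)inp; printf("scalar pass: %zu bytes\n", n); }
static void blocks_avx(const unsigned char *inp, size_t n)    { (void)inp; printf("avx pass: %zu bytes\n", n); }
static void blocks_avx2(const unsigned char *inp, size_t n)   { (void)inp; printf("avx2 pass: %zu bytes\n", n); }
static void blocks_avx512(const unsigned char *inp, size_t n) { (void)inp; printf("avx512 pass: %zu bytes\n", n); }

static void poly1305_simd_blocks_sketch(const unsigned char *inp, size_t len)
{
    while (len) {
        size_t bytes = len < SZ_4K ? len : SZ_4K;   /* min_t(size_t, len, SZ_4K) */

        if (have_avx512)
            blocks_avx512(inp, bytes);
        else if (have_avx2)
            blocks_avx2(inp, bytes);
        else if (have_avx)
            blocks_avx(inp, bytes);
        else
            blocks_scalar(inp, bytes);

        len -= bytes;                               /* advance to the next pass */
        inp += bytes;
    }
}

int main(void)
{
    unsigned char data[10000] = { 0 };

    have_avx2 = 1;                                  /* pretend AVX2 is present */
    poly1305_simd_blocks_sketch(data, sizeof(data));
    return 0;
}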
/arch/powerpc/include/asm/
cacheflush.h
67 unsigned long bytes = l1_dcache_bytes(); in flush_dcache_range() local
68 void *addr = (void *)(start & ~(bytes - 1)); in flush_dcache_range()
69 unsigned long size = stop - (unsigned long)addr + (bytes - 1); in flush_dcache_range()
75 for (i = 0; i < size >> shift; i++, addr += bytes) in flush_dcache_range()
89 unsigned long bytes = l1_dcache_bytes(); in clean_dcache_range() local
90 void *addr = (void *)(start & ~(bytes - 1)); in clean_dcache_range()
91 unsigned long size = stop - (unsigned long)addr + (bytes - 1); in clean_dcache_range()
94 for (i = 0; i < size >> shift; i++, addr += bytes) in clean_dcache_range()
108 unsigned long bytes = l1_dcache_bytes(); in invalidate_dcache_range() local
109 void *addr = (void *)(start & ~(bytes - 1)); in invalidate_dcache_range()
[all …]
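The cacheflush.h hits share one idiom: round the start address down to a cache-line boundary, pad the length up by a line, then step through the range one line at a time. A stand-alone sketch of that arithmetic is below; the 128-byte line size is an assumption for the example, and the real code issues dcbf/dcbst/dcbi rather than printing.

#include <stdio.h>

static unsigned long l1_dcache_bytes(void) { return 128; }   /* assumed line size */
static unsigned long l1_dcache_shift(void) { return 7; }     /* log2 of the above */

static void flush_dcache_range_sketch(unsigned long start, unsigned long stop)
{
    unsigned long bytes = l1_dcache_bytes();
    unsigned long shift = l1_dcache_shift();
    char *addr = (char *)(start & ~(bytes - 1));                     /* align start down */
    unsigned long size = stop - (unsigned long)addr + (bytes - 1);   /* round length up */
    unsigned long i;

    for (i = 0; i < size >> shift; i++, addr += bytes)
        printf("flush cache line at %p\n", (void *)addr);            /* real code: dcbf addr */
}

int main(void)
{
    /* covers the lines at 0x1000, 0x1080, 0x1100 and 0x1180 */
    flush_dcache_range_sketch(0x1005, 0x1200);
    return 0;
}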
xor_altivec.h
7 void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
9 void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
11 void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
14 void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
/arch/hexagon/mm/
copy_user_template.S
19 p0 = cmp.gtu(bytes,#0)
25 p1 = cmp.gtu(bytes,#15)
32 loopcount = lsr(bytes,#3)
46 bytes -= asl(loopcount,#3)
58 p1 = cmp.gtu(bytes,#7)
63 loopcount = lsr(bytes,#2)
76 bytes -= asl(loopcount,#2)
84 p1 = cmp.gtu(bytes,#3)
89 loopcount = lsr(bytes,#1)
102 bytes -= asl(loopcount,#1)
[all …]
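copy_user_template.S above steps the copy width down from 8-byte pieces to 4-, 2- and 1-byte pieces, each stage computing loopcount = bytes >> k and then bytes -= loopcount << k. A C rendering of just that loop-count arithmetic; the Hexagon addressing modes and fault handling are not modelled, and memcpy stands in for each unrolled copy loop.

#include <stdio.h>
#include <string.h>

static void copy_by_width_sketch(unsigned char *dst, const unsigned char *src,
                                 unsigned long bytes)
{
    int k;

    for (k = 3; k >= 0; k--) {                 /* 8-, 4-, 2-, then 1-byte stages */
        unsigned long loopcount = bytes >> k;  /* loopcount = lsr(bytes, #k) */

        if (loopcount) {
            memcpy(dst, src, loopcount << k);  /* stands in for the unrolled copy loop */
            dst += loopcount << k;
            src += loopcount << k;
            bytes -= loopcount << k;           /* bytes -= asl(loopcount, #k) */
        }
    }
}

int main(void)
{
    unsigned char src[23] = "progressive-width copy", dst[23];

    copy_by_width_sketch(dst, src, sizeof(src));   /* 16 + 4 + 2 + 1 bytes */
    printf("%s\n", dst);
    return 0;
}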
/arch/arm/include/asm/
xor.h
47 xor_arm4regs_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_arm4regs_2() argument
49 unsigned int lines = bytes / sizeof(unsigned long) / 4; in xor_arm4regs_2()
67 xor_arm4regs_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_3() argument
70 unsigned int lines = bytes / sizeof(unsigned long) / 4; in xor_arm4regs_3()
89 xor_arm4regs_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_4() argument
92 unsigned int lines = bytes / sizeof(unsigned long) / 2; in xor_arm4regs_4()
108 xor_arm4regs_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_5() argument
111 unsigned int lines = bytes / sizeof(unsigned long) / 2; in xor_arm4regs_5()
149 xor_neon_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_neon_2() argument
152 xor_arm4regs_2(bytes, p1, p2); in xor_neon_2()
[all …]
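All of the xor_*_2() helpers in these results follow the same shape: convert 'bytes' into a count of unrolled "lines" and XOR the second source into the first, word by word. A portable sketch, assuming four unsigned longs per line as in xor_arm4regs_2(); the real versions keep each line in registers or vector units.

#include <stdio.h>

static void xor_2_sketch(unsigned long bytes, unsigned long *p1, unsigned long *p2)
{
    unsigned int lines = bytes / sizeof(unsigned long) / 4;   /* 4 words per "line" */

    while (lines--) {
        p1[0] ^= p2[0];
        p1[1] ^= p2[1];
        p1[2] ^= p2[2];
        p1[3] ^= p2[3];
        p1 += 4;
        p2 += 4;
    }
}

int main(void)
{
    unsigned long a[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
    unsigned long b[8] = { 8, 7, 6, 5, 4, 3, 2, 1 };

    xor_2_sketch(sizeof(a), a, b);
    printf("a[0] is now %lu\n", a[0]);    /* 1 ^ 8 = 9 */
    return 0;
}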
/arch/s390/lib/
xor.c
14 static void xor_xc_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_xc_2() argument
31 : : "d" (bytes), "a" (p1), "a" (p2) in xor_xc_2()
35 static void xor_xc_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_xc_3() argument
57 : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3) in xor_xc_3()
61 static void xor_xc_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_xc_4() argument
87 : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4) in xor_xc_4()
91 static void xor_xc_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_xc_5() argument
121 : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4), in xor_xc_5()
/arch/powerpc/lib/
xor_vmx_glue.c
15 void xor_altivec_2(unsigned long bytes, unsigned long *v1_in, in xor_altivec_2() argument
20 __xor_altivec_2(bytes, v1_in, v2_in); in xor_altivec_2()
26 void xor_altivec_3(unsigned long bytes, unsigned long *v1_in, in xor_altivec_3() argument
31 __xor_altivec_3(bytes, v1_in, v2_in, v3_in); in xor_altivec_3()
37 void xor_altivec_4(unsigned long bytes, unsigned long *v1_in, in xor_altivec_4() argument
43 __xor_altivec_4(bytes, v1_in, v2_in, v3_in, v4_in); in xor_altivec_4()
49 void xor_altivec_5(unsigned long bytes, unsigned long *v1_in, in xor_altivec_5() argument
55 __xor_altivec_5(bytes, v1_in, v2_in, v3_in, v4_in, v5_in); in xor_altivec_5()
pmem.c
16 unsigned long bytes = l1_dcache_bytes(); in __clean_pmem_range() local
17 void *addr = (void *)(start & ~(bytes - 1)); in __clean_pmem_range()
18 unsigned long size = stop - (unsigned long)addr + (bytes - 1); in __clean_pmem_range()
21 for (i = 0; i < size >> shift; i++, addr += bytes) in __clean_pmem_range()
28 unsigned long bytes = l1_dcache_bytes(); in __flush_pmem_range() local
29 void *addr = (void *)(start & ~(bytes - 1)); in __flush_pmem_range()
30 unsigned long size = stop - (unsigned long)addr + (bytes - 1); in __flush_pmem_range()
33 for (i = 0; i < size >> shift; i++, addr += bytes) in __flush_pmem_range()
xor_vmx.h
9 void __xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
12 void __xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
15 void __xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
19 void __xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
xor_vmx.c
52 void __xor_altivec_2(unsigned long bytes, unsigned long *v1_in, in __xor_altivec_2() argument
57 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in __xor_altivec_2()
70 void __xor_altivec_3(unsigned long bytes, unsigned long *v1_in, in __xor_altivec_3() argument
76 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in __xor_altivec_3()
92 void __xor_altivec_4(unsigned long bytes, unsigned long *v1_in, in __xor_altivec_4() argument
100 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in __xor_altivec_4()
119 void __xor_altivec_5(unsigned long bytes, unsigned long *v1_in, in __xor_altivec_5() argument
128 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in __xor_altivec_5()
/arch/arm64/include/asm/
xor.h
19 xor_neon_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_neon_2() argument
22 xor_block_inner_neon.do_2(bytes, p1, p2); in xor_neon_2()
27 xor_neon_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_neon_3() argument
31 xor_block_inner_neon.do_3(bytes, p1, p2, p3); in xor_neon_3()
36 xor_neon_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_neon_4() argument
40 xor_block_inner_neon.do_4(bytes, p1, p2, p3, p4); in xor_neon_4()
45 xor_neon_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_neon_5() argument
49 xor_block_inner_neon.do_5(bytes, p1, p2, p3, p4, p5); in xor_neon_5()
/arch/s390/include/asm/
alternative-asm.h
39 .macro alt_pad_fill bytes
40 .fill ( \bytes ) / 6, 6, 0xc0040000
41 .fill ( \bytes ) % 6 / 4, 4, 0x47000000
42 .fill ( \bytes ) % 6 % 4 / 2, 2, 0x0700
50 .macro alt_pad bytes
51 .if ( \bytes > 0 )
52 .if ( \bytes > 6 )
53 jg . + \bytes
54 alt_pad_fill \bytes - 6
56 alt_pad_fill \bytes
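alt_pad_fill above decomposes a pad length into s390 no-op fills: as many 6-byte fills as possible, then at most one 4-byte and one 2-byte fill, while alt_pad jumps over pads longer than 6 bytes before filling the remainder. The small sketch below only reproduces the count arithmetic of the .fill directives.

#include <stdio.h>

static void alt_pad_fill_sketch(unsigned long bytes)
{
    unsigned long n6 = bytes / 6;          /* .fill (\bytes) / 6, 6, 0xc0040000 */
    unsigned long n4 = bytes % 6 / 4;      /* .fill (\bytes) % 6 / 4, 4, 0x47000000 */
    unsigned long n2 = bytes % 6 % 4 / 2;  /* .fill (\bytes) % 6 % 4 / 2, 2, 0x0700 */

    printf("%lu bytes -> %lu x 6-byte, %lu x 4-byte, %lu x 2-byte fills\n",
           bytes, n6, n4, n2);
}

int main(void)
{
    alt_pad_fill_sketch(14);   /* 2 x 6-byte + 1 x 2-byte */
    alt_pad_fill_sketch(10);   /* 1 x 6-byte + 1 x 4-byte */
    return 0;
}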
/arch/sparc/include/asm/
xor_32.h
16 sparc_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in sparc_2() argument
18 int lines = bytes / (sizeof (long)) / 8; in sparc_2()
53 sparc_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_3() argument
56 int lines = bytes / (sizeof (long)) / 8; in sparc_3()
104 sparc_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_4() argument
107 int lines = bytes / (sizeof (long)) / 8; in sparc_4()
168 sparc_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_5() argument
171 int lines = bytes / (sizeof (long)) / 8; in sparc_5()
/arch/alpha/lib/
ev6-memcpy.S
68 ldq $1, 0($17) # L : get 8 bytes
85 wh64 ($7) # L1 : memory subsystem hint: 64 bytes at
87 ldq $6, 0($17) # L0 : bytes 0..7
91 ldq $4, 8($17) # L : bytes 8..15
92 ldq $5, 16($17) # L : bytes 16..23
96 ldq $3, 24($17) # L : bytes 24..31
101 addq $17, 32, $17 # E : src += 32 bytes
102 stq $6, 0($16) # L : bytes 0..7
106 stq $4, 8($16) # L : bytes 8..15
107 stq $5, 16($16) # L : bytes 16..23
[all …]
/arch/s390/crypto/
ghash_s390.c
27 u32 bytes; member
61 if (dctx->bytes) { in ghash_update()
62 u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes); in ghash_update()
64 n = min(srclen, dctx->bytes); in ghash_update()
65 dctx->bytes -= n; in ghash_update()
71 if (!dctx->bytes) { in ghash_update()
85 dctx->bytes = GHASH_BLOCK_SIZE - srclen; in ghash_update()
96 if (dctx->bytes) { in ghash_flush()
97 u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes); in ghash_flush()
99 memset(pos, 0, dctx->bytes); in ghash_flush()
[all …]
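ghash_update() above (like the polyval and poly1305 glue further down) keeps a one-block buffer and a dctx->bytes count of how much of that buffer is still unfilled: top the buffer up first, hash full blocks straight from the source, then stash any tail for the next call. A sketch of that pattern follows; ghash_block() is a hypothetical stand-in for the real block routine (the s390 kimd call).

#include <string.h>

#define GHASH_BLOCK_SIZE 16

struct ghash_desc_ctx_sketch {
    unsigned char buffer[GHASH_BLOCK_SIZE];
    unsigned int bytes;                      /* unfilled space in buffer; 0 = no partial block */
};

static void ghash_block(const unsigned char *data, unsigned int len)
{
    /* real code hashes 'len' bytes here (always a multiple of the block size) */
    (void)data; (void)len;
}

static void ghash_update_sketch(struct ghash_desc_ctx_sketch *dctx,
                                const unsigned char *src, unsigned int srclen)
{
    if (dctx->bytes) {                       /* finish a previously buffered block */
        unsigned char *pos = dctx->buffer + (GHASH_BLOCK_SIZE - dctx->bytes);
        unsigned int n = srclen < dctx->bytes ? srclen : dctx->bytes;

        memcpy(pos, src, n);
        dctx->bytes -= n;
        src += n;
        srclen -= n;

        if (!dctx->bytes)                    /* buffer became full: hash it */
            ghash_block(dctx->buffer, GHASH_BLOCK_SIZE);
    }

    if (srclen >= GHASH_BLOCK_SIZE) {        /* bulk full blocks straight from src */
        unsigned int n = srclen & ~(GHASH_BLOCK_SIZE - 1);

        ghash_block(src, n);
        src += n;
        srclen -= n;
    }

    if (srclen) {                            /* stash the tail for the next call */
        memcpy(dctx->buffer, src, srclen);
        dctx->bytes = GHASH_BLOCK_SIZE - srclen;
    }
}

int main(void)
{
    struct ghash_desc_ctx_sketch ctx = { .bytes = 0 };
    unsigned char msg[40] = { 0 };

    ghash_update_sketch(&ctx, msg, 10);   /* buffers 10 bytes, 6 still unfilled */
    ghash_update_sketch(&ctx, msg, 30);   /* completes that block, hashes one more, stashes 8 */
    return 0;
}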
/arch/x86/include/asm/
xor_32.h
24 xor_pII_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_pII_mmx_2() argument
26 unsigned long lines = bytes >> 7; in xor_pII_mmx_2()
67 xor_pII_mmx_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_3() argument
70 unsigned long lines = bytes >> 7; in xor_pII_mmx_3()
116 xor_pII_mmx_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_4() argument
119 unsigned long lines = bytes >> 7; in xor_pII_mmx_4()
171 xor_pII_mmx_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_5() argument
174 unsigned long lines = bytes >> 7; in xor_pII_mmx_5()
251 xor_p5_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_p5_mmx_2() argument
253 unsigned long lines = bytes >> 6; in xor_p5_mmx_2()
[all …]
insn.h
19 insn_byte_t bytes[4]; member
36 p->bytes[n] = v; in insn_set_byte()
45 insn_byte_t bytes[4]; member
63 p->bytes[n] = v; in insn_set_byte()
188 return X86_VEX3_M(insn->vex_prefix.bytes[1]); in insn_vex_m_bits()
190 return X86_EVEX_M(insn->vex_prefix.bytes[1]); in insn_vex_m_bits()
196 return X86_VEX_P(insn->vex_prefix.bytes[1]); in insn_vex_p_bits()
198 return X86_VEX_P(insn->vex_prefix.bytes[2]); in insn_vex_p_bits()
207 if (insn->prefixes.bytes[3]) in insn_last_prefix_id()
208 return inat_get_last_prefix_id(insn->prefixes.bytes[3]); in insn_last_prefix_id()
[all …]
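The insn.h hits point at a small union that lets a decoded prefix or opcode field be read either as individual bytes (bytes[n]) or as one packed value. A simplified illustration is below; the field names mirror the snippet, but the layout is trimmed for the example and the real struct carries extra bookkeeping.

#include <stdio.h>
#include <stdint.h>

typedef uint8_t  insn_byte_t;
typedef uint32_t insn_value_t;

struct insn_field_sketch {
    union {
        insn_value_t value;     /* all four bytes packed together */
        insn_byte_t bytes[4];   /* the same storage, byte by byte */
    };
    unsigned char nbytes;       /* how many of bytes[] are populated */
};

static void insn_set_byte(struct insn_field_sketch *p, unsigned char n, insn_byte_t v)
{
    p->bytes[n] = v;
}

int main(void)
{
    struct insn_field_sketch vex;

    vex.value = 0;                  /* clear all four bytes at once */
    insn_set_byte(&vex, 0, 0xc4);   /* e.g. a 3-byte VEX escape */
    insn_set_byte(&vex, 1, 0xe2);
    vex.nbytes = 2;

    /* on a little-endian machine the packed value prints as 0xe2c4 */
    printf("byte1 = %#x, packed value = %#lx\n", vex.bytes[1], (unsigned long)vex.value);
    return 0;
}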
xor_avx.h
29 static void xor_avx_2(unsigned long bytes, unsigned long *p0, unsigned long *p1) in xor_avx_2() argument
31 unsigned long lines = bytes >> 9; in xor_avx_2()
55 static void xor_avx_3(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_3() argument
58 unsigned long lines = bytes >> 9; in xor_avx_3()
85 static void xor_avx_4(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_4() argument
88 unsigned long lines = bytes >> 9; in xor_avx_4()
118 static void xor_avx_5(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_5() argument
121 unsigned long lines = bytes >> 9; in xor_avx_5()
xor.h
60 xor_sse_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_sse_2() argument
62 unsigned long lines = bytes >> 8; in xor_sse_2()
111 xor_sse_2_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_sse_2_pf64() argument
113 unsigned long lines = bytes >> 8; in xor_sse_2_pf64()
145 xor_sse_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_3() argument
148 unsigned long lines = bytes >> 8; in xor_sse_3()
204 xor_sse_3_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_3_pf64() argument
207 unsigned long lines = bytes >> 8; in xor_sse_3_pf64()
241 xor_sse_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_4() argument
244 unsigned long lines = bytes >> 8; in xor_sse_4()
[all …]
/arch/x86/kernel/
sev.c
660 exit_info_1 = (ctxt->insn.opcode.bytes[1] == 0x30) ? 1 : 0; in vc_handle_msr()
858 unsigned int bytes, bool read) in vc_do_mmio() argument
882 exit_info_2 = bytes; in vc_do_mmio()
893 unsigned int bytes = 0; in vc_handle_mmio_twobyte_ops() local
898 switch (insn->opcode.bytes[1]) { in vc_handle_mmio_twobyte_ops()
901 bytes = 1; in vc_handle_mmio_twobyte_ops()
904 if (!bytes) in vc_handle_mmio_twobyte_ops()
905 bytes = 2; in vc_handle_mmio_twobyte_ops()
907 ret = vc_do_mmio(ghcb, ctxt, bytes, true); in vc_handle_mmio_twobyte_ops()
918 memcpy(reg_data, ghcb->shared_buffer, bytes); in vc_handle_mmio_twobyte_ops()
[all …]
/arch/arm/crypto/
chacha-glue.c
30 asmlinkage void chacha_doarm(u8 *dst, const u8 *src, unsigned int bytes,
41 unsigned int bytes, int nrounds) in chacha_doneon() argument
45 while (bytes > CHACHA_BLOCK_SIZE) { in chacha_doneon()
46 unsigned int l = min(bytes, CHACHA_BLOCK_SIZE * 4U); in chacha_doneon()
49 bytes -= l; in chacha_doneon()
54 if (bytes) { in chacha_doneon()
58 if (bytes != CHACHA_BLOCK_SIZE) in chacha_doneon()
59 s = d = memcpy(buf, src, bytes); in chacha_doneon()
62 memcpy(dst, buf, bytes); in chacha_doneon()
85 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes, in chacha_crypt_arch() argument
[all …]
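chacha_doneon() above feeds the NEON kernel up to four blocks per pass and bounces a final partial block through a stack buffer so the kernel only ever works on a full block. A sketch of that shape follows: chacha_blocks() is a stand-in stub, and the bulk loop here is rounded down to whole blocks even though the real 4-block routine also accepts partial lengths.

#include <stdio.h>
#include <string.h>

#define CHACHA_BLOCK_SIZE 64

/* Stand-in for the NEON routines: the real code XORs keystream over 'bytes'. */
static void chacha_blocks(unsigned int *state, unsigned char *dst,
                          const unsigned char *src, unsigned int bytes)
{
    memmove(dst, src, bytes);
    state[12] += bytes / CHACHA_BLOCK_SIZE;    /* advance the block counter */
}

static void chacha_doneon_sketch(unsigned int *state, unsigned char *dst,
                                 const unsigned char *src, unsigned int bytes)
{
    unsigned char buf[CHACHA_BLOCK_SIZE];

    while (bytes > CHACHA_BLOCK_SIZE) {                    /* bulk passes */
        unsigned int l = bytes;

        if (l > CHACHA_BLOCK_SIZE * 4U)
            l = CHACHA_BLOCK_SIZE * 4U;                    /* at most 4 blocks per pass */
        l &= ~(CHACHA_BLOCK_SIZE - 1U);                    /* whole blocks only (sketch) */

        chacha_blocks(state, dst, src, l);
        bytes -= l;
        src += l;
        dst += l;
    }

    if (bytes) {                                           /* final, possibly partial block */
        const unsigned char *s = src;
        unsigned char *d = dst;

        if (bytes != CHACHA_BLOCK_SIZE)
            s = d = memcpy(buf, src, bytes);               /* bounce through a full-size buffer */

        chacha_blocks(state, d, s, CHACHA_BLOCK_SIZE);

        if (bytes != CHACHA_BLOCK_SIZE)
            memcpy(dst, buf, bytes);                       /* copy only the real tail back */
    }
}

int main(void)
{
    unsigned int state[16] = { 0 };
    unsigned char in[300] = { 0 }, out[300];

    chacha_doneon_sketch(state, out, in, sizeof(in));
    printf("block counter advanced to %u\n", state[12]);   /* 4 bulk + 1 tail = 5 */
    return 0;
}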
/arch/powerpc/mm/
cacheflush.c
37 unsigned long bytes = l1_icache_bytes(); in invalidate_icache_range() local
38 char *addr = (char *)(start & ~(bytes - 1)); in invalidate_icache_range()
39 unsigned long size = stop - (unsigned long)addr + (bytes - 1); in invalidate_icache_range()
42 for (i = 0; i < size >> shift; i++, addr += bytes) in invalidate_icache_range()
86 unsigned long bytes = l1_dcache_bytes(); in flush_dcache_icache_phys() local
87 unsigned long nb = PAGE_SIZE / bytes; in flush_dcache_icache_phys()
114 : "r" (nb), "r" (msr), "i" (bytes), "r" (msr0) in flush_dcache_icache_phys()
/arch/arm64/crypto/
polyval-ce-glue.c
41 u32 bytes; member
110 if (dctx->bytes) { in polyval_arm64_update()
111 n = min(srclen, dctx->bytes); in polyval_arm64_update()
112 pos = dctx->buffer + POLYVAL_BLOCK_SIZE - dctx->bytes; in polyval_arm64_update()
114 dctx->bytes -= n; in polyval_arm64_update()
120 if (!dctx->bytes) in polyval_arm64_update()
134 dctx->bytes = POLYVAL_BLOCK_SIZE - srclen; in polyval_arm64_update()
148 if (dctx->bytes) { in polyval_arm64_final()
/arch/mips/crypto/
poly1305-glue.c
76 u32 bytes = min(len, POLY1305_BLOCK_SIZE - dctx->buflen); in mips_poly1305_update() local
78 memcpy(dctx->buf + dctx->buflen, src, bytes); in mips_poly1305_update()
79 src += bytes; in mips_poly1305_update()
80 len -= bytes; in mips_poly1305_update()
81 dctx->buflen += bytes; in mips_poly1305_update()
106 u32 bytes = min(nbytes, POLY1305_BLOCK_SIZE - dctx->buflen); in poly1305_update_arch() local
108 memcpy(dctx->buf + dctx->buflen, src, bytes); in poly1305_update_arch()
109 src += bytes; in poly1305_update_arch()
110 nbytes -= bytes; in poly1305_update_arch()
111 dctx->buflen += bytes; in poly1305_update_arch()
