/arch/x86/crypto/ |
D | chacha_glue.c |
    50 unsigned int bytes, int nrounds) in chacha_dosimd() argument
    55 while (bytes >= CHACHA_BLOCK_SIZE * 8) { in chacha_dosimd()
    56 chacha_8block_xor_avx512vl(state, dst, src, bytes, in chacha_dosimd()
    58 bytes -= CHACHA_BLOCK_SIZE * 8; in chacha_dosimd()
    63 if (bytes > CHACHA_BLOCK_SIZE * 4) { in chacha_dosimd()
    64 chacha_8block_xor_avx512vl(state, dst, src, bytes, in chacha_dosimd()
    66 state[12] += chacha_advance(bytes, 8); in chacha_dosimd()
    69 if (bytes > CHACHA_BLOCK_SIZE * 2) { in chacha_dosimd()
    70 chacha_4block_xor_avx512vl(state, dst, src, bytes, in chacha_dosimd()
    72 state[12] += chacha_advance(bytes, 4); in chacha_dosimd()
    [all …]
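
A minimal sketch of the dispatch shape visible above, not the kernel's chacha_dosimd(): the widest SIMD path runs first, then narrower paths mop up the tail. The helpers xor_8_blocks() and xor_1_block() are hypothetical stand-ins for the AVX-512VL routines, and the counter bookkeeping is simplified (state[12] is the 32-bit ChaCha block counter).

    /* Consume the widest multiple first, then fall through to a
     * narrower path for whatever remains. Helper names hypothetical. */
    static void chacha_dispatch_sketch(u32 *state, u8 *dst, const u8 *src,
                                       unsigned int bytes)
    {
            while (bytes >= CHACHA_BLOCK_SIZE * 8) {
                    xor_8_blocks(state, dst, src);        /* hypothetical */
                    bytes -= CHACHA_BLOCK_SIZE * 8;
                    src += CHACHA_BLOCK_SIZE * 8;
                    dst += CHACHA_BLOCK_SIZE * 8;
                    state[12] += 8;             /* one count per block */
            }
            while (bytes) {
                    unsigned int n = min_t(unsigned int, bytes,
                                           CHACHA_BLOCK_SIZE);

                    xor_1_block(state, dst, src, n);      /* hypothetical */
                    bytes -= n;
                    src += n;
                    dst += n;
                    state[12]++;
            }
    }
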
|
D | aegis128-aesni-glue.c |
    48 u8 bytes[AEGIS128_BLOCK_SIZE] __aligned(AEGIS128_BLOCK_ALIGN);
    87 memcpy(buf.bytes + pos, src, fill); in crypto_aegis128_aesni_process_ad()
    90 buf.bytes); in crypto_aegis128_aesni_process_ad()
    102 memcpy(buf.bytes + pos, src, left); in crypto_aegis128_aesni_process_ad()
    112 memset(buf.bytes + pos, 0, AEGIS128_BLOCK_SIZE - pos); in crypto_aegis128_aesni_process_ad()
    113 crypto_aegis128_aesni_ad(state, AEGIS128_BLOCK_SIZE, buf.bytes); in crypto_aegis128_aesni_process_ad()
    152 memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE); in crypto_aegis128_aesni_setkey()
    181 crypto_aegis128_aesni_init(&state, ctx->key.bytes, req->iv); in crypto_aegis128_aesni_crypt()
    204 scatterwalk_map_and_copy(tag.bytes, req->dst, in crypto_aegis128_aesni_encrypt()
    224 scatterwalk_map_and_copy(tag.bytes, req->src, in crypto_aegis128_aesni_decrypt()
    [all …]
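
Line 48 shows the usual trick for handing bytewise data to SIMD code: the byte view of a block carries an explicit alignment attribute so the AES-NI routines can use aligned loads. A generic sketch with illustrative names and sizes (the real union layout in this file may differ); lines 112-113 then show the matching flush step, zero-padding the last partial block so the hardware routine always sees a full block.

    /* Sketch: a block that is filled bytewise (memcpy into .bytes, as
     * in process_ad above) but satisfies 16-byte-aligned loads.
     * BLOCK_SIZE/BLOCK_ALIGN are illustrative values. */
    #define BLOCK_SIZE  16
    #define BLOCK_ALIGN 16

    union aligned_block {
            u8 bytes[BLOCK_SIZE] __aligned(BLOCK_ALIGN);
    };
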
|
D | poly1305_glue.c |
    122 unsigned int bytes; in poly1305_simd_update() local
    131 bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen); in poly1305_simd_update()
    132 memcpy(dctx->buf + dctx->buflen, src, bytes); in poly1305_simd_update()
    133 src += bytes; in poly1305_simd_update()
    134 srclen -= bytes; in poly1305_simd_update()
    135 dctx->buflen += bytes; in poly1305_simd_update()
    145 bytes = poly1305_simd_blocks(dctx, src, srclen); in poly1305_simd_update()
    146 src += srclen - bytes; in poly1305_simd_update()
    147 srclen = bytes; in poly1305_simd_update()
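
The update path has three stages: top up a partial block left over from the previous call, hand as many whole blocks as possible to the SIMD routine, and stash the tail. Note that poly1305_simd_blocks() returns the number of bytes it did not consume (lines 145-147). A structural sketch with hypothetical context and helper names:

    /* blocks() is a hypothetical stand-in: it processes whole blocks
     * and returns the count of unprocessed trailing bytes. */
    static void update_sketch(struct ctx_sketch *dctx, const u8 *src,
                              unsigned int srclen)
    {
            unsigned int bytes;

            if (dctx->buflen) {     /* stage 1: top up the partial block */
                    bytes = min(srclen, BLOCK_SIZE - dctx->buflen);
                    memcpy(dctx->buf + dctx->buflen, src, bytes);
                    src += bytes;
                    srclen -= bytes;
                    dctx->buflen += bytes;
                    if (dctx->buflen == BLOCK_SIZE) {
                            blocks(dctx, dctx->buf, BLOCK_SIZE);
                            dctx->buflen = 0;
                    }
            }
            if (srclen >= BLOCK_SIZE) {     /* stage 2: bulk blocks */
                    bytes = blocks(dctx, src, srclen);
                    src += srclen - bytes;
                    srclen = bytes;
            }
            if (srclen) {                   /* stage 3: keep the tail */
                    memcpy(dctx->buf, src, srclen);
                    dctx->buflen = srclen;
            }
    }
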
|
/arch/hexagon/mm/ |
D | copy_user_template.S |
    19 p0 = cmp.gtu(bytes,#0)
    25 p1 = cmp.gtu(bytes,#15)
    32 loopcount = lsr(bytes,#3)
    46 bytes -= asl(loopcount,#3)
    58 p1 = cmp.gtu(bytes,#7)
    63 loopcount = lsr(bytes,#2)
    76 bytes -= asl(loopcount,#2)
    84 p1 = cmp.gtu(bytes,#3)
    89 loopcount = lsr(bytes,#1)
    102 bytes -= asl(loopcount,#1)
    [all …]
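
Read together, these lines sketch a copy loop that descends through granularities: 8-byte chunks while more than 15 bytes remain, then 4-byte, 2-byte, and finally single bytes; lsr/asl are the shift forms of the divisions and multiplications. The same shape in C (illustrative only, not the Hexagon template):

    /* Sketch: copy in progressively smaller units; after each stage,
     * bytes holds only what the wider stage could not cover. */
    static void copy_sketch(u8 *dst, const u8 *src, unsigned long bytes)
    {
            unsigned long loopcount;

            loopcount = bytes >> 3;               /* 8-byte chunks */
            bytes -= loopcount << 3;
            while (loopcount--) { memcpy(dst, src, 8); dst += 8; src += 8; }

            loopcount = bytes >> 2;               /* 4-byte chunks */
            bytes -= loopcount << 2;
            while (loopcount--) { memcpy(dst, src, 4); dst += 4; src += 4; }

            loopcount = bytes >> 1;               /* 2-byte chunks */
            bytes -= loopcount << 1;
            while (loopcount--) { memcpy(dst, src, 2); dst += 2; src += 2; }

            while (bytes--)                       /* tail bytes */
                    *dst++ = *src++;
    }
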
|
/arch/arm/include/asm/ |
D | xor.h |
    47 xor_arm4regs_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_arm4regs_2() argument
    49 unsigned int lines = bytes / sizeof(unsigned long) / 4; in xor_arm4regs_2()
    67 xor_arm4regs_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_3() argument
    70 unsigned int lines = bytes / sizeof(unsigned long) / 4; in xor_arm4regs_3()
    89 xor_arm4regs_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_4() argument
    92 unsigned int lines = bytes / sizeof(unsigned long) / 2; in xor_arm4regs_4()
    108 xor_arm4regs_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_5() argument
    111 unsigned int lines = bytes / sizeof(unsigned long) / 2; in xor_arm4regs_5()
    149 xor_neon_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_neon_2() argument
    152 xor_arm4regs_2(bytes, p1, p2); in xor_neon_2()
    [all …]
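
All the xor_block templates share one convention: `bytes` is the region length and each loop iteration consumes a fixed number of machine words, so the iteration count is `bytes / sizeof(long) / words_per_line`. Note the 4- and 5-source variants drop to two words per iteration (`/ 2`) to stay within the register budget. A plain-C rendering of the two-source case (a sketch, not the ARM implementation):

    /* Sketch: XOR p2 into p1, four unsigned longs per iteration.
     * Assumes bytes is a multiple of 4 * sizeof(unsigned long), as the
     * xor_block callers guarantee in practice. */
    static void xor_2_sketch(unsigned long bytes, unsigned long *p1,
                             const unsigned long *p2)
    {
            unsigned int lines = bytes / sizeof(unsigned long) / 4;

            while (lines--) {
                    p1[0] ^= p2[0];
                    p1[1] ^= p2[1];
                    p1[2] ^= p2[2];
                    p1[3] ^= p2[3];
                    p1 += 4;
                    p2 += 4;
            }
    }
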
|
/arch/s390/lib/ |
D | xor.c |
    14 static void xor_xc_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_xc_2() argument
    31 : : "d" (bytes), "a" (p1), "a" (p2) in xor_xc_2()
    35 static void xor_xc_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_xc_3() argument
    57 : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3) in xor_xc_3()
    61 static void xor_xc_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_xc_4() argument
    87 : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4) in xor_xc_4()
    91 static void xor_xc_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_xc_5() argument
    124 : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4), in xor_xc_5()
|
/arch/powerpc/include/asm/ |
D | cacheflush.h |
    67 unsigned long bytes = l1_cache_bytes(); in flush_dcache_range() local
    68 void *addr = (void *)(start & ~(bytes - 1)); in flush_dcache_range()
    69 unsigned long size = stop - (unsigned long)addr + (bytes - 1); in flush_dcache_range()
    77 for (i = 0; i < size >> shift; i++, addr += bytes) in flush_dcache_range()
    93 unsigned long bytes = l1_cache_bytes(); in clean_dcache_range() local
    94 void *addr = (void *)(start & ~(bytes - 1)); in clean_dcache_range()
    95 unsigned long size = stop - (unsigned long)addr + (bytes - 1); in clean_dcache_range()
    98 for (i = 0; i < size >> shift; i++, addr += bytes) in clean_dcache_range()
    112 unsigned long bytes = l1_cache_bytes(); in invalidate_dcache_range() local
    113 void *addr = (void *)(start & ~(bytes - 1)); in invalidate_dcache_range()
    [all …]
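
All three routines share the same range arithmetic: round `start` down to a cache-line boundary, then widen the size so the final partial line is still covered; `bytes` is the L1 line size and `shift` its log2. A worked example, assuming a 32-byte line (bytes = 32, shift = 5):

    start = 0x1005, stop = 0x1043
    addr  = 0x1005 & ~31         = 0x1000     (round down to a line)
    size  = 0x1043 - 0x1000 + 31 = 0x62       (round the length up)
    lines = 0x62 >> 5            = 3          (0x1000, 0x1020, 0x1040)

The three flushed lines cover every byte in [start, stop), including the partial lines at both ends.
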
|
D | xor_altivec.h |
    7 void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
    9 void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
    11 void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
    14 void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
|
/arch/powerpc/lib/ |
D | xor_vmx_glue.c |
    15 void xor_altivec_2(unsigned long bytes, unsigned long *v1_in, in xor_altivec_2() argument
    20 __xor_altivec_2(bytes, v1_in, v2_in); in xor_altivec_2()
    26 void xor_altivec_3(unsigned long bytes, unsigned long *v1_in, in xor_altivec_3() argument
    31 __xor_altivec_3(bytes, v1_in, v2_in, v3_in); in xor_altivec_3()
    37 void xor_altivec_4(unsigned long bytes, unsigned long *v1_in, in xor_altivec_4() argument
    43 __xor_altivec_4(bytes, v1_in, v2_in, v3_in, v4_in); in xor_altivec_4()
    49 void xor_altivec_5(unsigned long bytes, unsigned long *v1_in, in xor_altivec_5() argument
    55 __xor_altivec_5(bytes, v1_in, v2_in, v3_in, v4_in, v5_in); in xor_altivec_5()
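
The glue layer exists because the double-underscore workers in xor_vmx.c use VMX registers, which must be explicitly enabled for kernel use. A sketch of the wrapper shape; this is the pattern such glue typically follows, not a quote of the file:

    /* Sketch: guard a vector-unit worker. Preemption must stay off
     * while the vector unit is enabled for kernel use. */
    void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
                       unsigned long *v2_in)
    {
            preempt_disable();
            enable_kernel_altivec();        /* make VMX usable in-kernel */
            __xor_altivec_2(bytes, v1_in, v2_in);
            disable_kernel_altivec();
            preempt_enable();
    }
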
|
D | xor_vmx.h |
    9 void __xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
    12 void __xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
    15 void __xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
    19 void __xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
|
D | xor_vmx.c |
    52 void __xor_altivec_2(unsigned long bytes, unsigned long *v1_in, in __xor_altivec_2() argument
    57 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in __xor_altivec_2()
    70 void __xor_altivec_3(unsigned long bytes, unsigned long *v1_in, in __xor_altivec_3() argument
    76 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in __xor_altivec_3()
    92 void __xor_altivec_4(unsigned long bytes, unsigned long *v1_in, in __xor_altivec_4() argument
    100 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in __xor_altivec_4()
    119 void __xor_altivec_5(unsigned long bytes, unsigned long *v1_in, in __xor_altivec_5() argument
    128 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in __xor_altivec_5()
|
/arch/arm64/include/asm/ |
D | xor.h |
    19 xor_neon_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_neon_2() argument
    22 xor_block_inner_neon.do_2(bytes, p1, p2); in xor_neon_2()
    27 xor_neon_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_neon_3() argument
    31 xor_block_inner_neon.do_3(bytes, p1, p2, p3); in xor_neon_3()
    36 xor_neon_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_neon_4() argument
    40 xor_block_inner_neon.do_4(bytes, p1, p2, p3, p4); in xor_neon_4()
    45 xor_neon_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_neon_5() argument
    49 xor_block_inner_neon.do_5(bytes, p1, p2, p3, p4, p5); in xor_neon_5()
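
Unlike the ARM header, the arm64 wrappers dispatch through a table of function pointers (xor_block_inner_neon), so the NEON workers can live in a separately built unit. The member shape below is reconstructed from the do_2..do_5 call sites above, not quoted from the kernel's own struct:

    /* Sketch of the dispatch table implied by the call sites. */
    struct xor_neon_ops {
            void (*do_2)(unsigned long bytes, unsigned long *p1,
                         unsigned long *p2);
            void (*do_3)(unsigned long bytes, unsigned long *p1,
                         unsigned long *p2, unsigned long *p3);
            void (*do_4)(unsigned long bytes, unsigned long *p1,
                         unsigned long *p2, unsigned long *p3,
                         unsigned long *p4);
            void (*do_5)(unsigned long bytes, unsigned long *p1,
                         unsigned long *p2, unsigned long *p3,
                         unsigned long *p4, unsigned long *p5);
    };
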
|
/arch/s390/include/asm/ |
D | alternative-asm.h |
    39 .macro alt_pad_fill bytes
    40 .fill ( \bytes ) / 6, 6, 0xc0040000
    41 .fill ( \bytes ) % 6 / 4, 4, 0x47000000
    42 .fill ( \bytes ) % 6 % 4 / 2, 2, 0x0700
    50 .macro alt_pad bytes
    51 .if ( \bytes > 0 )
    52 .if ( \bytes > 6 )
    53 jg . + \bytes
    54 alt_pad_fill \bytes - 6
    56 alt_pad_fill \bytes
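
alt_pad_fill decomposes a pad length into the largest even-sized no-op encodings that fit: 6-byte fills first, then a 4-byte fill, then a 2-byte fill (s390 instruction lengths are always even, so any valid pad is expressible). alt_pad additionally branches over pads larger than 6 bytes with `jg . + \bytes`, so the remaining filler is skipped rather than executed. Worked example for a 14-byte pad:

    14 / 6          = 2  ->  two 6-byte fills   (12 bytes)
    14 % 6 / 4      = 0  ->  no 4-byte fill
    14 % 6 % 4 / 2  = 1  ->  one 2-byte fill    ( 2 bytes)
                             total                14 bytes
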
|
/arch/sparc/include/asm/ |
D | xor_32.h |
    16 sparc_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in sparc_2() argument
    18 int lines = bytes / (sizeof (long)) / 8; in sparc_2()
    53 sparc_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_3() argument
    56 int lines = bytes / (sizeof (long)) / 8; in sparc_3()
    104 sparc_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_4() argument
    107 int lines = bytes / (sizeof (long)) / 8; in sparc_4()
    168 sparc_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_5() argument
    171 int lines = bytes / (sizeof (long)) / 8; in sparc_5()
|
/arch/alpha/lib/ |
D | ev6-memcpy.S |
    68 ldq $1, 0($17) # L : get 8 bytes
    85 wh64 ($7) # L1 : memory subsystem hint: 64 bytes at
    87 ldq $6, 0($17) # L0 : bytes 0..7
    91 ldq $4, 8($17) # L : bytes 8..15
    92 ldq $5, 16($17) # L : bytes 16..23
    96 ldq $3, 24($17) # L : bytes 24..31
    101 addq $17, 32, $17 # E : src += 32 bytes
    102 stq $6, 0($16) # L : bytes 0..7
    106 stq $4, 8($16) # L : bytes 8..15
    107 stq $5, 16($16) # L : bytes 16..23
    [all …]
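
The hot loop loads four quadwords before storing any of them, hiding load latency, and the wh64 hint tells the memory subsystem the destination line will be fully overwritten so it need not be fetched. The equivalent structure in C (a sketch; a compiler will not emit wh64 for you):

    /* Sketch: 32 bytes per iteration, all loads issued before the
     * stores. Assumes 8-byte-aligned pointers and len % 32 == 0. */
    static void copy32_sketch(u64 *dst, const u64 *src, unsigned long len)
    {
            while (len >= 32) {
                    u64 a = src[0], b = src[1], c = src[2], d = src[3];

                    dst[0] = a;
                    dst[1] = b;
                    dst[2] = c;
                    dst[3] = d;
                    src += 4;
                    dst += 4;
                    len -= 32;
            }
    }
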
|
/arch/s390/crypto/ |
D | ghash_s390.c |
    27 u32 bytes; member
    63 if (dctx->bytes) { in ghash_update()
    64 u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes); in ghash_update()
    66 n = min(srclen, dctx->bytes); in ghash_update()
    67 dctx->bytes -= n; in ghash_update()
    73 if (!dctx->bytes) { in ghash_update()
    87 dctx->bytes = GHASH_BLOCK_SIZE - srclen; in ghash_update()
    98 if (dctx->bytes) { in ghash_flush()
    99 u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes); in ghash_flush()
    101 memset(pos, 0, dctx->bytes); in ghash_flush()
    [all …]
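
Note the inverted convention here: dctx->bytes counts the space still free in the block buffer, not the data already buffered (line 64 derives the write position from it; line 87 sets it from the leftover length). A sketch of update under that convention; the struct and process_block() are hypothetical stand-ins:

    #define GHASH_BLOCK_SIZE 16

    struct ghash_sketch_ctx {
            u8  buf[GHASH_BLOCK_SIZE];
            u32 bytes;      /* free space left in buf; 0 = none buffered */
    };

    static void ghash_update_sketch(struct ghash_sketch_ctx *dctx,
                                    const u8 *src, unsigned int srclen)
    {
            if (dctx->bytes) {
                    u8 *pos = dctx->buf + (GHASH_BLOCK_SIZE - dctx->bytes);
                    unsigned int n = min(srclen, dctx->bytes);

                    memcpy(pos, src, n);
                    dctx->bytes -= n;
                    src += n;
                    srclen -= n;
                    if (!dctx->bytes)                     /* buffer now full */
                            process_block(dctx, dctx->buf); /* hypothetical */
            }
            /* ... whole blocks are processed straight from src ... */
            if (srclen) {
                    memcpy(dctx->buf, src, srclen);
                    dctx->bytes = GHASH_BLOCK_SIZE - srclen;  /* room left */
            }
    }
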
|
/arch/x86/include/asm/ |
D | xor_32.h |
    24 xor_pII_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_pII_mmx_2() argument
    26 unsigned long lines = bytes >> 7; in xor_pII_mmx_2()
    67 xor_pII_mmx_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_3() argument
    70 unsigned long lines = bytes >> 7; in xor_pII_mmx_3()
    116 xor_pII_mmx_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_4() argument
    119 unsigned long lines = bytes >> 7; in xor_pII_mmx_4()
    171 xor_pII_mmx_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_5() argument
    174 unsigned long lines = bytes >> 7; in xor_pII_mmx_5()
    251 xor_p5_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_p5_mmx_2() argument
    253 unsigned long lines = bytes >> 6; in xor_p5_mmx_2()
    [all …]
|
D | insn.h |
    16 insn_byte_t bytes[4]; member
    143 return X86_VEX3_M(insn->vex_prefix.bytes[1]); in insn_vex_m_bits()
    145 return X86_EVEX_M(insn->vex_prefix.bytes[1]); in insn_vex_m_bits()
    151 return X86_VEX_P(insn->vex_prefix.bytes[1]); in insn_vex_p_bits()
    153 return X86_VEX_P(insn->vex_prefix.bytes[2]); in insn_vex_p_bits()
    162 if (insn->prefixes.bytes[3]) in insn_last_prefix_id()
    163 return inat_get_last_prefix_id(insn->prefixes.bytes[3]); in insn_last_prefix_id()
    211 return insn->opcode.bytes[0] == POP_SS_OPCODE || in insn_masking_exception()
    212 (insn->opcode.bytes[0] == MOV_SREG_OPCODE && in insn_masking_exception()
    213 X86_MODRM_REG(insn->modrm.bytes[0]) == 2); in insn_masking_exception()
|
D | xor_avx.h |
    31 static void xor_avx_2(unsigned long bytes, unsigned long *p0, unsigned long *p1) in xor_avx_2() argument
    33 unsigned long lines = bytes >> 9; in xor_avx_2()
    57 static void xor_avx_3(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_3() argument
    60 unsigned long lines = bytes >> 9; in xor_avx_3()
    87 static void xor_avx_4(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_4() argument
    90 unsigned long lines = bytes >> 9; in xor_avx_4()
    120 static void xor_avx_5(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_5() argument
    123 unsigned long lines = bytes >> 9; in xor_avx_5()
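
The shift encodes how many bytes one unrolled loop body consumes: `bytes >> 9` means 512-byte lines here, versus `>> 8` (256 bytes) in the SSE routines in xor.h and `>> 7`/`>> 6` (128/64 bytes) in the MMX routines in xor_32.h. For one 4 KiB page:

    AVX : 4096 >> 9 = 8   iterations of 512 bytes
    SSE : 4096 >> 8 = 16  iterations of 256 bytes
    MMX : 4096 >> 7 = 32  iterations of 128 bytes
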
|
D | xor.h |
    60 xor_sse_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_sse_2() argument
    62 unsigned long lines = bytes >> 8; in xor_sse_2()
    111 xor_sse_2_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_sse_2_pf64() argument
    113 unsigned long lines = bytes >> 8; in xor_sse_2_pf64()
    145 xor_sse_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_3() argument
    148 unsigned long lines = bytes >> 8; in xor_sse_3()
    204 xor_sse_3_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_3_pf64() argument
    207 unsigned long lines = bytes >> 8; in xor_sse_3_pf64()
    241 xor_sse_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_4() argument
    244 unsigned long lines = bytes >> 8; in xor_sse_4()
    [all …]
|
/arch/csky/mm/ |
D | syscache.c |
    11 unsigned long, bytes, in SYSCALL_DEFINE3() argument
    17 (unsigned long)addr + bytes); in SYSCALL_DEFINE3()
    21 (unsigned long)addr + bytes); in SYSCALL_DEFINE3()
    25 (unsigned long)addr + bytes); in SYSCALL_DEFINE3()
|
/arch/x86/oprofile/ |
D | backtrace.c |
    28 unsigned long bytes; in dump_user_backtrace_32() local
    30 bytes = copy_from_user_nmi(bufhead, head, sizeof(bufhead)); in dump_user_backtrace_32()
    31 if (bytes != 0) in dump_user_backtrace_32()
    74 unsigned long bytes; in dump_user_backtrace() local
    76 bytes = copy_from_user_nmi(bufhead, head, sizeof(bufhead)); in dump_user_backtrace()
    77 if (bytes != 0) in dump_user_backtrace()
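
Here `bytes` follows the copy_from_user() convention: the return value is the number of bytes that could not be copied, so zero means complete success and any nonzero value aborts the frame walk. A sketch of that check wrapped as a helper (the helper itself is hypothetical):

    /* Sketch: succeed only if the whole user frame copied over. */
    static bool read_user_frame(void *dst, const void __user *src,
                                size_t len)
    {
            unsigned long bytes = copy_from_user_nmi(dst, src, len);

            return bytes == 0;      /* bytes = amount NOT copied */
    }
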
|
/arch/sparc/lib/ |
D | M7memset.S |
    129 cmp %o2, 7 ! if small counts, just write bytes
    134 or %o1, %o3, %o1 ! now o1 has 2 bytes of c
    138 or %o1, %o3, %o1 ! now o1 has 4 bytes of c
    141 or %o1, %o3, %o1 ! now o1 has 8 bytes of c
    146 sub %o3, 8, %o3 ! -(bytes till long word aligned)
    149 ! Set -(%o3) bytes till sp1 long word aligned
    157 cmp %o2, 64 ! check if there are 64 bytes to set
    163 sub %o3, 64, %o3 ! o3 is -(bytes till block aligned)
    166 ! Store -(%o3) bytes till dst is block (64 byte) aligned.
    177 andn %o2, 63, %o4 ! calculate size of blocks in bytes
    [all …]
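
Lines 134-141 build an 8-byte store pattern by repeatedly OR-ing the fill byte with a shifted copy of itself, doubling the replicated width at each step. The same idea in C (sketch):

    /* Sketch: replicate one byte c across an 8-byte word by doubling:
     * 1 byte -> 2 bytes -> 4 bytes -> 8 bytes. */
    static u64 replicate_byte(u8 c)
    {
            u64 v = c;

            v |= v << 8;    /* 2 bytes of c */
            v |= v << 16;   /* 4 bytes of c */
            v |= v << 32;   /* 8 bytes of c */
            return v;
    }
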
|
/arch/arm/crypto/ |
D | chacha-neon-glue.c |
    40 unsigned int bytes, int nrounds) in chacha_doneon() argument
    44 while (bytes >= CHACHA_BLOCK_SIZE * 4) { in chacha_doneon()
    46 bytes -= CHACHA_BLOCK_SIZE * 4; in chacha_doneon()
    51 while (bytes >= CHACHA_BLOCK_SIZE) { in chacha_doneon()
    53 bytes -= CHACHA_BLOCK_SIZE; in chacha_doneon()
    58 if (bytes) { in chacha_doneon()
    59 memcpy(buf, src, bytes); in chacha_doneon()
    61 memcpy(dst, buf, bytes); in chacha_doneon()
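
Lines 58-61 show the standard tail trick: when fewer than CHACHA_BLOCK_SIZE bytes remain, the leftover is bounced through a full-size stack buffer so the block routine never reads or writes past the caller's buffers. A sketch, with a hypothetical single-block helper:

    /* Sketch: process a sub-block tail via a bounce buffer so the
     * block routine always sees a full CHACHA_BLOCK_SIZE. */
    static void chacha_tail_sketch(u32 *state, u8 *dst, const u8 *src,
                                   unsigned int bytes, int nrounds)
    {
            u8 buf[CHACHA_BLOCK_SIZE];

            memcpy(buf, src, bytes);        /* copy the short tail in */
            chacha_block_xor(state, buf, buf, nrounds); /* hypothetical */
            memcpy(dst, buf, bytes);        /* copy only the tail out */
    }
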
|
/arch/x86/kernel/ |
D | msr.c |
    53 ssize_t bytes = 0; in msr_read() local
    67 bytes += 8; in msr_read()
    70 return bytes ? bytes : err; in msr_read()
    81 ssize_t bytes = 0; in msr_write() local
    99 bytes += 8; in msr_write()
    102 return bytes ? bytes : err; in msr_write()
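
Each MSR is 8 bytes, so the read/write loops advance `bytes` in steps of 8 and apply the classic partial-success rule: if anything transferred before the error, report the byte count; report the error code only when nothing did. A sketch of that shape, with a hypothetical record reader:

    /* Sketch: copy out 8-byte records until count runs out or an error
     * hits; on error, favor the partial byte count over the errno. */
    static ssize_t copy_records_sketch(u64 __user *ubuf, size_t count)
    {
            ssize_t bytes = 0;
            int err = 0;

            while (count >= 8) {
                    u64 val;

                    err = read_one_record(&val);    /* hypothetical */
                    if (err)
                            break;
                    if (copy_to_user(ubuf++, &val, 8)) {
                            err = -EFAULT;
                            break;
                    }
                    bytes += 8;
                    count -= 8;
            }
            return bytes ? bytes : err;
    }
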
|