| /arch/powerpc/lib/ |
| D | xor_vmx_glue.c |
|     15  void xor_altivec_2(unsigned long bytes, unsigned long * __restrict p1,  in xor_altivec_2()
|     26  void xor_altivec_3(unsigned long bytes, unsigned long * __restrict p1,  in xor_altivec_3()
|     38  void xor_altivec_4(unsigned long bytes, unsigned long * __restrict p1,  in xor_altivec_4()
|     51  void xor_altivec_5(unsigned long bytes, unsigned long * __restrict p1,  in xor_altivec_5()
|
| D | xor_vmx.c |
|     52   void __xor_altivec_2(unsigned long bytes,  in __xor_altivec_2()
|     71   void __xor_altivec_3(unsigned long bytes,  in __xor_altivec_3()
|     95   void __xor_altivec_4(unsigned long bytes,  in __xor_altivec_4()
|     124  void __xor_altivec_5(unsigned long bytes,  in __xor_altivec_5()
|
| D | pmem.c |
|     16  unsigned long bytes = l1_dcache_bytes();  in __clean_pmem_range()  local
|     28  unsigned long bytes = l1_dcache_bytes();  in __flush_pmem_range()  local
|
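Every `xor_*_2()` through `xor_*_5()` entry in this listing implements the same contract used by the RAID and crypto XOR code: fold the source buffers into the first buffer, `bytes` bytes at a time, with `bytes` a multiple of the word size for all in-kernel callers. A minimal portable sketch of the two-source case, assuming word-aligned buffers; the real routines listed here do the same work with Altivec, NEON, SSE, or vector registers:

```c
/*
 * Portable sketch of the xor_*_2() contract (not the kernel's SIMD code):
 * XOR 'bytes' bytes of p2 into p1.  'bytes' is assumed to be a multiple
 * of sizeof(unsigned long), as it is for the kernel callers.
 */
static void xor_generic_2(unsigned long bytes,
			  unsigned long *restrict p1,
			  const unsigned long *restrict p2)
{
	unsigned long words = bytes / sizeof(unsigned long);

	while (words--)
		*p1++ ^= *p2++;
}
```

The 3-, 4- and 5-source variants take additional const source pointers and fold them all into p1 in a single pass, which is what makes them attractive for RAID5 parity: one read per source plus one read-modify-write of the destination.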
| /arch/s390/lib/ |
| D | xor.c |
|     14  static void xor_xc_2(unsigned long bytes, unsigned long * __restrict p1,  in xor_xc_2()
|     36  static void xor_xc_3(unsigned long bytes, unsigned long * __restrict p1,  in xor_xc_3()
|     63  static void xor_xc_4(unsigned long bytes, unsigned long * __restrict p1,  in xor_xc_4()
|     95  static void xor_xc_5(unsigned long bytes, unsigned long * __restrict p1,  in xor_xc_5()
|
| /arch/riscv/include/asm/ |
| D | xor.h |
|     13  static void xor_vector_2(unsigned long bytes, unsigned long *__restrict p1,  in xor_vector_2()
|     21  static void xor_vector_3(unsigned long bytes, unsigned long *__restrict p1,  in xor_vector_3()
|     30  static void xor_vector_4(unsigned long bytes, unsigned long *__restrict p1,  in xor_vector_4()
|     40  static void xor_vector_5(unsigned long bytes, unsigned long *__restrict p1,  in xor_vector_5()
|
| /arch/arm64/include/asm/ |
| D | xor.h |
|     19  xor_neon_2(unsigned long bytes, unsigned long * __restrict p1,  in xor_neon_2()
|     28  xor_neon_3(unsigned long bytes, unsigned long * __restrict p1,  in xor_neon_3()
|     38  xor_neon_4(unsigned long bytes, unsigned long * __restrict p1,  in xor_neon_4()
|     49  xor_neon_5(unsigned long bytes, unsigned long * __restrict p1,  in xor_neon_5()
|
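The arm64 header variants are thin wrappers: the actual NEON loops live in arch/arm64/lib/xor-neon.c (listed further down), and the header functions only bracket them with kernel_neon_begin()/kernel_neon_end() so the FP/SIMD registers may be used in kernel context. A sketch of that wrapper shape, paraphrased rather than copied verbatim:

```c
/* Sketch of the wrapper shape in arch/arm64/include/asm/xor.h (paraphrased):
 * enable kernel-mode NEON, call into the real implementation exported by
 * arch/arm64/lib/xor-neon.c, then disable it again. */
static void
xor_neon_2(unsigned long bytes, unsigned long * __restrict p1,
	   const unsigned long * __restrict p2)
{
	kernel_neon_begin();
	xor_block_inner_neon.do_2(bytes, p1, p2);
	kernel_neon_end();
}
```

powerpc's xor_vmx_glue.c and riscv's xor.h follow the same split, roughly with enable_kernel_altivec()/disable_kernel_altivec() and kernel_vector_begin()/kernel_vector_end() around the raw vector routines.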
| /arch/arm/include/asm/ |
| D | xor.h |
|     47   xor_arm4regs_2(unsigned long bytes, unsigned long * __restrict p1,  in xor_arm4regs_2()
|     68   xor_arm4regs_3(unsigned long bytes, unsigned long * __restrict p1,  in xor_arm4regs_3()
|     91   xor_arm4regs_4(unsigned long bytes, unsigned long * __restrict p1,  in xor_arm4regs_4()
|     112  xor_arm4regs_5(unsigned long bytes, unsigned long * __restrict p1,  in xor_arm4regs_5()
|     156  xor_neon_2(unsigned long bytes, unsigned long * __restrict p1,  in xor_neon_2()
|     169  xor_neon_3(unsigned long bytes, unsigned long * __restrict p1,  in xor_neon_3()
|     183  xor_neon_4(unsigned long bytes, unsigned long * __restrict p1,  in xor_neon_4()
|     198  xor_neon_5(unsigned long bytes, unsigned long * __restrict p1,  in xor_neon_5()
|
| /arch/sparc/include/asm/ |
| D | xor_32.h |
|     16   sparc_2(unsigned long bytes, unsigned long * __restrict p1,  in sparc_2()
|     54   sparc_3(unsigned long bytes, unsigned long * __restrict p1,  in sparc_3()
|     106  sparc_4(unsigned long bytes, unsigned long * __restrict p1,  in sparc_4()
|     172  sparc_5(unsigned long bytes, unsigned long * __restrict p1,  in sparc_5()
|
| /arch/x86/include/asm/ |
| D | xor_32.h |
|     24   xor_pII_mmx_2(unsigned long bytes, unsigned long * __restrict p1,  in xor_pII_mmx_2()
|     68   xor_pII_mmx_3(unsigned long bytes, unsigned long * __restrict p1,  in xor_pII_mmx_3()
|     118  xor_pII_mmx_4(unsigned long bytes, unsigned long * __restrict p1,  in xor_pII_mmx_4()
|     175  xor_pII_mmx_5(unsigned long bytes, unsigned long * __restrict p1,  in xor_pII_mmx_5()
|     258  xor_p5_mmx_2(unsigned long bytes, unsigned long * __restrict p1,  in xor_p5_mmx_2()
|     306  xor_p5_mmx_3(unsigned long bytes, unsigned long * __restrict p1,  in xor_p5_mmx_3()
|     364  xor_p5_mmx_4(unsigned long bytes, unsigned long * __restrict p1,  in xor_p5_mmx_4()
|     432  xor_p5_mmx_5(unsigned long bytes, unsigned long * __restrict p1,  in xor_p5_mmx_5()
|
| D | xor_avx.h |
|     29   static void xor_avx_2(unsigned long bytes, unsigned long * __restrict p0,  in xor_avx_2()
|     56   static void xor_avx_3(unsigned long bytes, unsigned long * __restrict p0,  in xor_avx_3()
|     87   static void xor_avx_4(unsigned long bytes, unsigned long * __restrict p0,  in xor_avx_4()
|     122  static void xor_avx_5(unsigned long bytes, unsigned long * __restrict p0,  in xor_avx_5()
|
| D | xor.h |
|     60   xor_sse_2(unsigned long bytes, unsigned long * __restrict p1,  in xor_sse_2()
|     112  xor_sse_2_pf64(unsigned long bytes, unsigned long * __restrict p1,  in xor_sse_2_pf64()
|     147  xor_sse_3(unsigned long bytes, unsigned long * __restrict p1,  in xor_sse_3()
|     207  xor_sse_3_pf64(unsigned long bytes, unsigned long * __restrict p1,  in xor_sse_3_pf64()
|     245  xor_sse_4(unsigned long bytes, unsigned long * __restrict p1,  in xor_sse_4()
|     313  xor_sse_4_pf64(unsigned long bytes, unsigned long * __restrict p1,  in xor_sse_4_pf64()
|     354  xor_sse_5(unsigned long bytes, unsigned long * __restrict p1,  in xor_sse_5()
|     430  xor_sse_5_pf64(unsigned long bytes, unsigned long * __restrict p1,  in xor_sse_5_pf64()
|
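These per-CPU-feature implementations are exposed to the rest of the kernel through `struct xor_block_template` (declared in include/linux/raid/xor.h), and crypto/xor.c benchmarks the registered templates to pick the fastest. A hedged sketch of how an arch wires its routines into such a template; the field layout is paraphrased and the real initializers live next to the functions in these headers:

```c
/*
 * Sketch (paraphrased): how xor_sse_2()..xor_sse_5() would be exposed
 * through the template consumed by crypto/xor.c and the RAID code.
 */
static struct xor_block_template xor_block_example = {
	.name = "example-sse",
	.do_2 = xor_sse_2,
	.do_3 = xor_sse_3,
	.do_4 = xor_sse_4,
	.do_5 = xor_sse_5,
};
```

Each arch's xor.h also defines XOR_TRY_TEMPLATES, the list of candidate templates the boot-time calibration loops over.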
| /arch/arm64/lib/ |
| D | xor-neon.c |
|     13   static void xor_arm64_neon_2(unsigned long bytes, unsigned long * __restrict p1,  in xor_arm64_neon_2()
|     40   static void xor_arm64_neon_3(unsigned long bytes, unsigned long * __restrict p1,  in xor_arm64_neon_3()
|     76   static void xor_arm64_neon_4(unsigned long bytes, unsigned long * __restrict p1,  in xor_arm64_neon_4()
|     121  static void xor_arm64_neon_5(unsigned long bytes, unsigned long * __restrict p1,  in xor_arm64_neon_5()
|     194  static void xor_arm64_eor3_3(unsigned long bytes,  in xor_arm64_eor3_3()
|     229  static void xor_arm64_eor3_4(unsigned long bytes,  in xor_arm64_eor3_4()
|     273  static void xor_arm64_eor3_5(unsigned long bytes,  in xor_arm64_eor3_5()
|
| /arch/powerpc/include/asm/ |
| D | cacheflush.h |
|     73   unsigned long bytes = l1_dcache_bytes();  in flush_dcache_range()  local
|     95   unsigned long bytes = l1_dcache_bytes();  in clean_dcache_range()  local
|     114  unsigned long bytes = l1_dcache_bytes();  in invalidate_dcache_range()  local
|
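The three `bytes = l1_dcache_bytes()` locals above, like the pmem.c helpers earlier, feed the same idiom: round the start address down to a cache-line boundary and walk the range one line at a time, issuing the appropriate cache-maintenance instruction per line. A sketch of the pattern, with a hypothetical flush_line() standing in for the per-line instruction (dcbf, dcbst, or dcbi depending on the helper):

```c
/* Sketch of the powerpc range-maintenance idiom (paraphrased).
 * flush_line() is a hypothetical stand-in for the per-line cache op. */
static inline void op_dcache_range(unsigned long start, unsigned long stop)
{
	unsigned long bytes = l1_dcache_bytes();	/* L1 line size */
	unsigned long addr  = start & ~(bytes - 1);	/* align down to a line */

	for (; addr < stop; addr += bytes)
		flush_line(addr);
	mb();	/* order the cache ops against later accesses */
}
```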
| /arch/x86/kvm/mmu/ |
| D | page_track.h |
|     42  const u8 *new, int bytes) { }  in __kvm_page_track_write()
|     51  const u8 *new, int bytes)  in kvm_page_track_write()
|
| /arch/csky/mm/ |
| D | syscache.c | 11 unsigned long, bytes, in SYSCALL_DEFINE3() argument
|
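The lone `unsigned long, bytes,` hit above looks odd only because SYSCALL_DEFINE3() takes its arguments as alternating type/name pairs; `bytes` is simply the length argument of csky's cacheflush(2). A sketch of that shape, with the other argument names paraphrased and the body elided:

```c
/* Sketch: the shape of csky's cacheflush syscall definition.  SYSCALL_DEFINE3
 * expands the comma-separated type/name pairs into a normal function taking
 * (void __user *addr, unsigned long bytes, int cache). */
SYSCALL_DEFINE3(cacheflush, void __user *, addr, unsigned long, bytes, int, cache)
{
	/* flush/invalidate [addr, addr + bytes) according to 'cache';
	 * the real implementation is in arch/csky/mm/syscache.c */
	return 0;
}
```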
| /arch/mips/crypto/ |
| D | poly1305-glue.c |
|     76   u32 bytes = min(len, POLY1305_BLOCK_SIZE - dctx->buflen);  in mips_poly1305_update()  local
|     106  u32 bytes = min(nbytes, POLY1305_BLOCK_SIZE - dctx->buflen);  in poly1305_update_arch()  local
|
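Every poly1305-glue.c hit in this listing (mips here, arm64 and arm below) is the same partial-block bookkeeping: `bytes` is how much of the incoming data fits into the 16-byte block buffer carried in the descriptor context. A simplified, self-contained sketch of that buffering step, using a hypothetical process_block() and a paraphrased context struct:

```c
#include <stdint.h>
#include <string.h>

#define POLY1305_BLOCK_SIZE 16

/* Paraphrased stand-in for the relevant fields of the kernel's
 * poly1305 descriptor context (dctx->buf / dctx->buflen). */
struct poly_ctx {
	uint8_t buf[POLY1305_BLOCK_SIZE];
	unsigned int buflen;
};

/* Hypothetical: consume one full 16-byte block. */
static void process_block(struct poly_ctx *ctx, const uint8_t *block)
{
	(void)ctx;
	(void)block;
}

/* Sketch of the buffering step the listed lines implement: "bytes" is how
 * much of the new data is needed to top up a partially filled block. */
static void poly_update(struct poly_ctx *ctx, const uint8_t *src, unsigned int len)
{
	if (ctx->buflen) {
		unsigned int bytes = POLY1305_BLOCK_SIZE - ctx->buflen;

		if (bytes > len)
			bytes = len;	/* i.e. min(len, 16 - buflen) */

		memcpy(ctx->buf + ctx->buflen, src, bytes);
		src += bytes;
		len -= bytes;
		ctx->buflen += bytes;

		if (ctx->buflen == POLY1305_BLOCK_SIZE) {
			process_block(ctx, ctx->buf);
			ctx->buflen = 0;
		}
	}
	/* full blocks of src, and any trailing partial block, follow here */
}
```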
| /arch/hexagon/mm/ |
| D | copy_to_user.S | 30 #define bytes r2 macro
|
| D | copy_from_user.S | 31 #define bytes r2 macro
|
| /arch/arm64/crypto/ |
| D | poly1305-glue.c |
|     85   u32 bytes = min(len, POLY1305_BLOCK_SIZE - dctx->buflen);  in neon_poly1305_do_update()  local
|     129  u32 bytes = min(nbytes, POLY1305_BLOCK_SIZE - dctx->buflen);  in poly1305_update_arch()  local
|
| D | chacha-neon-glue.c |
|     43  int bytes, int nrounds)  in chacha_doneon()
|     83  void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,  in chacha_crypt_arch()
|
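`chacha_crypt_arch()` appears under arm64, x86 and powerpc in this listing because each arch supplies its own SIMD front end behind the same prototype. The common shape: fall back to the generic C implementation when the request is tiny or the SIMD unit cannot be used, otherwise process the data in bounded chunks between kernel_neon_begin()/end() (kernel_fpu_begin()/end() on x86, VSX enable/disable on Power10). A paraphrased sketch of the arm64 glue, with the static-key and helper names taken from memory rather than quoted:

```c
/* Sketch (paraphrased) of the arm64 glue around the NEON core.
 * chacha_doneon() is the SIMD worker listed above; chunking at SZ_4K keeps
 * each kernel-mode NEON section short. */
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
		       int nrounds)
{
	if (!static_branch_likely(&have_neon) || bytes <= CHACHA_BLOCK_SIZE ||
	    !crypto_simd_usable())
		return chacha_crypt_generic(state, dst, src, bytes, nrounds);

	do {
		unsigned int todo = min_t(unsigned int, bytes, SZ_4K);

		kernel_neon_begin();
		chacha_doneon(state, dst, src, todo, nrounds);
		kernel_neon_end();

		bytes -= todo;
		src += todo;
		dst += todo;
	} while (bytes);
}
```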
| /arch/powerpc/mm/ |
| D | cacheflush.c |
|     37  unsigned long bytes = l1_icache_bytes();  in invalidate_icache_range()  local
|     86  unsigned long bytes = l1_dcache_bytes();  in flush_dcache_icache_phys()  local
|
| /arch/x86/crypto/ |
| D | chacha_glue.c |
|     49   unsigned int bytes, int nrounds)  in chacha_dosimd()
|     142  void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,  in chacha_crypt_arch()
|
| /arch/powerpc/crypto/ |
| D | chacha-p10-glue.c |
|     38  unsigned int bytes, int nrounds)  in chacha_p10_do_8x()
|     66  void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,  in chacha_crypt_arch()
|
| /arch/powerpc/sysdev/xics/ |
| D | icp-native.c |
|     30  u8 bytes[4];  member
|     34  u8 bytes[4];  member
|     39  u8 bytes[4];  member
|
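The three `u8 bytes[4]` members above come from the MMIO layout used by the XICS interrupt presentation controller: each 32-bit register is declared as a union of a word view and a byte view, because some fields (the CPPR in the top byte of XIRR, and the per-CPU QIRR) are written with single-byte stores. A paraphrased sketch of that layout, not the verbatim struct from icp-native.c:

```c
/* Paraphrased sketch of the register layout behind the three bytes[4]
 * hits above: each XICS register can be accessed either as one 32-bit
 * word or byte-by-byte. */
struct icp_regs_sketch {
	union { u32 word; u8 bytes[4]; } xirr_poll;	/* poll-only XIRR view */
	union { u32 word; u8 bytes[4]; } xirr;		/* top byte is the CPPR */
	u32 dummy;
	union { u32 word; u8 bytes[4]; } qirr;		/* written as a byte */
};
```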
| /arch/arm/crypto/ |
| D | poly1305-glue.c |
|     89   u32 bytes = min(len, POLY1305_BLOCK_SIZE - dctx->buflen);  in arm_poly1305_do_update()  local
|     146  u32 bytes = min(nbytes, POLY1305_BLOCK_SIZE - dctx->buflen);  in poly1305_update_arch()  local
|