
Searched refs:bytes (Results 1 – 25 of 248) sorted by relevance


/arch/hexagon/mm/
copy_user_template.S
32 p0 = cmp.gtu(bytes,#0)
38 p1 = cmp.gtu(bytes,#15)
45 loopcount = lsr(bytes,#3)
59 bytes -= asl(loopcount,#3)
71 p1 = cmp.gtu(bytes,#7)
76 loopcount = lsr(bytes,#2)
89 bytes -= asl(loopcount,#2)
97 p1 = cmp.gtu(bytes,#3)
102 loopcount = lsr(bytes,#1)
115 bytes -= asl(loopcount,#1)
[all …]
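
The Hexagon matches above show a copy routine stepping down through chunk sizes: a loop count is derived from the remaining byte count with a right shift (lsr), and the bytes consumed are subtracted back with the matching left shift (asl). A minimal C sketch of that pattern, with all names hypothetical:

    #include <stddef.h>
    #include <string.h>

    /* Copy in descending power-of-two chunks (8, 4, 2, 1 bytes),
     * mirroring the lsr/asl pairs in copy_user_template.S. */
    static void copy_by_chunks(unsigned char *dst, const unsigned char *src,
                               size_t bytes)
    {
        for (int k = 3; k >= 0; k--) {
            size_t loopcount = bytes >> k;        /* lsr(bytes, #k) */

            for (size_t i = 0; i < loopcount; i++) {
                memcpy(dst, src, (size_t)1 << k); /* one (1 << k)-byte move */
                dst += (size_t)1 << k;
                src += (size_t)1 << k;
            }
            bytes -= loopcount << k;              /* bytes -= asl(loopcount, #k) */
        }
    }
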
/arch/arm/include/asm/
xor.h
50 xor_arm4regs_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_arm4regs_2() argument
52 unsigned int lines = bytes / sizeof(unsigned long) / 4; in xor_arm4regs_2()
70 xor_arm4regs_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_3() argument
73 unsigned int lines = bytes / sizeof(unsigned long) / 4; in xor_arm4regs_3()
92 xor_arm4regs_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_4() argument
95 unsigned int lines = bytes / sizeof(unsigned long) / 2; in xor_arm4regs_4()
111 xor_arm4regs_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_arm4regs_5() argument
114 unsigned int lines = bytes / sizeof(unsigned long) / 2; in xor_arm4regs_5()
152 xor_neon_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_neon_2() argument
155 xor_arm4regs_2(bytes, p1, p2); in xor_neon_2()
[all …]
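
The ARM variants all follow one shape: divide the byte count into fixed-size "lines" (four unsigned longs here; the other ports below differ only in line width, eight longs on sparc or a shift count on x86) and XOR line by line. A hedged C sketch of the two-source case, not the kernel's code:

    /* Sketch of the xor_*_2() pattern. bytes is assumed to be a
     * multiple of one line, as the RAID xor callers arrange. */
    static void xor_2_sketch(unsigned long bytes, unsigned long *p1,
                             const unsigned long *p2)
    {
        unsigned int lines = bytes / sizeof(unsigned long) / 4;

        while (lines--) {
            p1[0] ^= p2[0];
            p1[1] ^= p2[1];
            p1[2] ^= p2[2];
            p1[3] ^= p2[3];
            p1 += 4;
            p2 += 4;
        }
    }
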
/arch/s390/include/asm/
alternative-asm.h
39 .macro alt_pad_fill bytes
40 .fill ( \bytes ) / 6, 6, 0xc0040000
41 .fill ( \bytes ) % 6 / 4, 4, 0x47000000
42 .fill ( \bytes ) % 6 % 4 / 2, 2, 0x0700
50 .macro alt_pad bytes
51 .if ( \bytes > 0 )
52 .if ( \bytes > 6 )
53 jg . + \bytes
54 alt_pad_fill \bytes - 6
56 alt_pad_fill \bytes
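
alt_pad_fill decomposes a pad size into 6-, 4-, and 2-byte no-op slots, and alt_pad first emits a 6-byte jg over any pad larger than 6 bytes before filling the rest. The arithmetic as a C sketch (illustrative only; pad sizes are even on s390):

    /* Counts of 6-, 4-, and 2-byte no-ops emitted for a given pad. */
    struct pad_counts { unsigned int n6, n4, n2; };

    static struct pad_counts alt_pad_counts(unsigned int bytes)
    {
        struct pad_counts c;

        c.n6 = bytes / 6;          /* .fill (bytes) / 6, 6, 0xc0040000 */
        c.n4 = bytes % 6 / 4;      /* .fill (bytes) % 6 / 4, 4, 0x47000000 */
        c.n2 = bytes % 6 % 4 / 2;  /* .fill (bytes) % 6 % 4 / 2, 2, 0x0700 */
        return c;
    }
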
/arch/sparc/include/asm/
xor_32.h
24 sparc_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in sparc_2() argument
26 int lines = bytes / (sizeof (long)) / 8; in sparc_2()
61 sparc_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_3() argument
64 int lines = bytes / (sizeof (long)) / 8; in sparc_3()
112 sparc_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_4() argument
115 int lines = bytes / (sizeof (long)) / 8; in sparc_4()
176 sparc_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in sparc_5() argument
179 int lines = bytes / (sizeof (long)) / 8; in sparc_5()
/arch/x86/include/asm/
pmem.h
99 static inline size_t arch_copy_from_iter_pmem(void __pmem *addr, size_t bytes, in arch_copy_from_iter_pmem() argument
106 len = copy_from_iter_nocache(vaddr, bytes, i); in arch_copy_from_iter_pmem()
123 if (bytes < 8) { in arch_copy_from_iter_pmem()
124 if (!IS_ALIGNED(dest, 4) || (bytes != 4)) in arch_copy_from_iter_pmem()
125 __arch_wb_cache_pmem(addr, bytes); in arch_copy_from_iter_pmem()
133 if (bytes > flushed && !IS_ALIGNED(bytes - flushed, 8)) in arch_copy_from_iter_pmem()
134 __arch_wb_cache_pmem(addr + bytes - 1, 1); in arch_copy_from_iter_pmem()
137 __arch_wb_cache_pmem(addr, bytes); in arch_copy_from_iter_pmem()
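
The pmem matches rely on non-temporal stores covering aligned 8-byte (or exactly aligned 4-byte) chunks; anything smaller or misaligned gets an explicit cache writeback. A self-contained sketch of that decision, reconstructed only from the matched lines, with the kernel helpers replaced by stand-ins:

    #include <stddef.h>
    #include <stdint.h>

    #define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)

    /* Stand-in for __arch_wb_cache_pmem(): write back [addr, addr+len). */
    static void wb_cache_range(const void *addr, size_t len)
    { (void)addr; (void)len; }

    /* flushed = bytes already known to have bypassed the cache. */
    static void wb_edges(const void *addr, uintptr_t dest, size_t bytes,
                         size_t flushed)
    {
        if (bytes < 8) {
            /* sub-word copy: only an aligned 4-byte store avoids the cache */
            if (!IS_ALIGNED(dest, 4) || bytes != 4)
                wb_cache_range(addr, bytes);
        } else if (bytes > flushed && !IS_ALIGNED(bytes - flushed, 8)) {
            /* ragged tail: write back the line holding the last byte */
            wb_cache_range((const char *)addr + bytes - 1, 1);
        }
    }
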
xor_32.h
32 xor_pII_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_pII_mmx_2() argument
34 unsigned long lines = bytes >> 7; in xor_pII_mmx_2()
75 xor_pII_mmx_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_3() argument
78 unsigned long lines = bytes >> 7; in xor_pII_mmx_3()
124 xor_pII_mmx_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_4() argument
127 unsigned long lines = bytes >> 7; in xor_pII_mmx_4()
179 xor_pII_mmx_5(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_pII_mmx_5() argument
182 unsigned long lines = bytes >> 7; in xor_pII_mmx_5()
259 xor_p5_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_p5_mmx_2() argument
261 unsigned long lines = bytes >> 6; in xor_p5_mmx_2()
[all …]
insn.h
29 insn_byte_t bytes[4]; member
148 return X86_VEX3_M(insn->vex_prefix.bytes[1]); in insn_vex_m_bits()
154 return X86_VEX_P(insn->vex_prefix.bytes[1]); in insn_vex_p_bits()
156 return X86_VEX_P(insn->vex_prefix.bytes[2]); in insn_vex_p_bits()
165 if (insn->prefixes.bytes[3]) in insn_last_prefix_id()
166 return inat_get_last_prefix_id(insn->prefixes.bytes[3]); in insn_last_prefix_id()
214 …for (idx = 0; idx < ARRAY_SIZE(insn->prefixes.bytes) && (prefix = insn->prefixes.bytes[idx]) != 0;…
229 return insn->opcode.bytes[0] == POP_SS_OPCODE || in insn_masking_exception()
230 (insn->opcode.bytes[0] == MOV_SREG_OPCODE && in insn_masking_exception()
231 X86_MODRM_REG(insn->modrm.bytes[0]) == 2); in insn_masking_exception()
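
insn.h stores each decoded field's raw bytes in a small fixed array; the prefix scan at line 214 walks that array until it hits a zero byte. A trimmed, self-contained sketch of the same scan:

    #include <stdint.h>

    typedef uint8_t insn_byte_t;
    struct insn_field { insn_byte_t bytes[4]; };  /* trimmed stand-in */

    /* Return 1 if `want` occurs among the decoded prefix bytes. */
    static int has_prefix(const struct insn_field *prefixes, insn_byte_t want)
    {
        for (int idx = 0; idx < 4; idx++) {
            insn_byte_t prefix = prefixes->bytes[idx];

            if (prefix == 0)      /* a zero byte terminates the list */
                break;
            if (prefix == want)
                return 1;
        }
        return 0;
    }
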
xor_avx.h
35 static void xor_avx_2(unsigned long bytes, unsigned long *p0, unsigned long *p1) in xor_avx_2() argument
37 unsigned long lines = bytes >> 9; in xor_avx_2()
61 static void xor_avx_3(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_3() argument
64 unsigned long lines = bytes >> 9; in xor_avx_3()
91 static void xor_avx_4(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_4() argument
94 unsigned long lines = bytes >> 9; in xor_avx_4()
124 static void xor_avx_5(unsigned long bytes, unsigned long *p0, unsigned long *p1, in xor_avx_5() argument
127 unsigned long lines = bytes >> 9; in xor_avx_5()
xor.h
71 xor_sse_2(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_sse_2() argument
73 unsigned long lines = bytes >> 8; in xor_sse_2()
122 xor_sse_2_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2) in xor_sse_2_pf64() argument
124 unsigned long lines = bytes >> 8; in xor_sse_2_pf64()
156 xor_sse_3(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_3() argument
159 unsigned long lines = bytes >> 8; in xor_sse_3()
215 xor_sse_3_pf64(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_3_pf64() argument
218 unsigned long lines = bytes >> 8; in xor_sse_3_pf64()
252 xor_sse_4(unsigned long bytes, unsigned long *p1, unsigned long *p2, in xor_sse_4() argument
255 unsigned long lines = bytes >> 8; in xor_sse_4()
[all …]
/arch/alpha/lib/
ev6-memcpy.S
67 ldq $1, 0($17) # L : get 8 bytes
84 wh64 ($7) # L1 : memory subsystem hint: 64 bytes at
86 ldq $6, 0($17) # L0 : bytes 0..7
90 ldq $4, 8($17) # L : bytes 8..15
91 ldq $5, 16($17) # L : bytes 16..23
95 ldq $3, 24($17) # L : bytes 24..31
100 addq $17, 32, $17 # E : src += 32 bytes
101 stq $6, 0($16) # L : bytes 0..7
105 stq $4, 8($16) # L : bytes 8..15
106 stq $5, 16($16) # L : bytes 16..23
[all …]
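
The Alpha loop moves 32 bytes per iteration: four quadword loads, a pointer bump, four stores, with a wh64 hint telling the memory system the destination line will be entirely overwritten. A plain C approximation of the unrolled body, 8-byte alignment assumed:

    #include <stddef.h>
    #include <stdint.h>

    static void copy32_sketch(uint64_t *dst, const uint64_t *src, size_t bytes)
    {
        while (bytes >= 32) {
            /* load bytes 0..31 before storing, as the scheduling does */
            uint64_t a = src[0], b = src[1], c = src[2], d = src[3];

            src += 4;                 /* src += 32 bytes */
            dst[0] = a; dst[1] = b; dst[2] = c; dst[3] = d;
            dst += 4;
            bytes -= 32;
        }
        /* the trailing < 32 bytes fall through to byte loops, omitted */
    }
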
/arch/s390/crypto/
ghash_s390.c
27 u32 bytes; member
64 if (dctx->bytes) { in ghash_update()
65 u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes); in ghash_update()
67 n = min(srclen, dctx->bytes); in ghash_update()
68 dctx->bytes -= n; in ghash_update()
74 if (!dctx->bytes) { in ghash_update()
92 dctx->bytes = GHASH_BLOCK_SIZE - srclen; in ghash_update()
104 if (dctx->bytes) { in ghash_flush()
105 u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes); in ghash_flush()
107 memset(pos, 0, dctx->bytes); in ghash_flush()
[all …]
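
Both ghash implementations keep a partial-block buffer: dctx->bytes counts how much of the current 16-byte block is still unfilled, incoming data tops it up, and the block function runs once the buffer completes. A self-contained sketch of that bookkeeping (block function stubbed):

    #include <string.h>

    #define GHASH_BLOCK_SIZE 16

    struct ghash_ctx_sketch {
        unsigned char buf[GHASH_BLOCK_SIZE];
        unsigned int bytes;        /* unfilled bytes in the current block */
    };

    static void ghash_block(const unsigned char *block) { (void)block; }

    static void top_up_partial(struct ghash_ctx_sketch *dctx,
                               const unsigned char **src, unsigned int *srclen)
    {
        if (dctx->bytes) {
            unsigned char *pos = dctx->buf + (GHASH_BLOCK_SIZE - dctx->bytes);
            unsigned int n = *srclen < dctx->bytes ? *srclen : dctx->bytes;

            memcpy(pos, *src, n);        /* fill the hole from new data */
            dctx->bytes -= n;
            *src += n;
            *srclen -= n;
            if (!dctx->bytes)
                ghash_block(dctx->buf);  /* block complete: consume it */
        }
    }
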
/arch/x86/crypto/
chacha20_glue.c
30 unsigned int bytes) in chacha20_dosimd() argument
36 while (bytes >= CHACHA20_BLOCK_SIZE * 8) { in chacha20_dosimd()
38 bytes -= CHACHA20_BLOCK_SIZE * 8; in chacha20_dosimd()
45 while (bytes >= CHACHA20_BLOCK_SIZE * 4) { in chacha20_dosimd()
47 bytes -= CHACHA20_BLOCK_SIZE * 4; in chacha20_dosimd()
52 while (bytes >= CHACHA20_BLOCK_SIZE) { in chacha20_dosimd()
54 bytes -= CHACHA20_BLOCK_SIZE; in chacha20_dosimd()
59 if (bytes) { in chacha20_dosimd()
60 memcpy(buf, src, bytes); in chacha20_dosimd()
62 memcpy(dst, buf, bytes); in chacha20_dosimd()
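
chacha20_dosimd() batches work from widest to narrowest: 8 blocks per call while possible, then 4, then single blocks, and finally a sub-block tail through a bounce buffer so the block function can always produce a full block. A sketch with the SIMD kernels stubbed (the real ones are the AVX2/SSSE3 routines):

    #include <string.h>

    #define CHACHA20_BLOCK_SIZE 64

    static void chacha20_8block_xor(unsigned int *state, unsigned char *dst,
                                    const unsigned char *src)
    { (void)state; (void)dst; (void)src; }
    static void chacha20_block_xor(unsigned int *state, unsigned char *dst,
                                   const unsigned char *src)
    { (void)state; (void)dst; (void)src; }

    static void dosimd_sketch(unsigned int *state, unsigned char *dst,
                              const unsigned char *src, unsigned int bytes)
    {
        unsigned char buf[CHACHA20_BLOCK_SIZE];

        while (bytes >= CHACHA20_BLOCK_SIZE * 8) {   /* widest batch first */
            chacha20_8block_xor(state, dst, src);
            bytes -= CHACHA20_BLOCK_SIZE * 8;
            src += CHACHA20_BLOCK_SIZE * 8;
            dst += CHACHA20_BLOCK_SIZE * 8;
        }
        while (bytes >= CHACHA20_BLOCK_SIZE) {       /* single blocks */
            chacha20_block_xor(state, dst, src);
            bytes -= CHACHA20_BLOCK_SIZE;
            src += CHACHA20_BLOCK_SIZE;
            dst += CHACHA20_BLOCK_SIZE;
        }
        if (bytes) {                                 /* sub-block tail */
            memcpy(buf, src, bytes);
            chacha20_block_xor(state, buf, buf);
            memcpy(dst, buf, bytes);
        }
    }
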
poly1305_glue.c
124 unsigned int bytes; in poly1305_simd_update() local
133 bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen); in poly1305_simd_update()
134 memcpy(dctx->buf + dctx->buflen, src, bytes); in poly1305_simd_update()
135 src += bytes; in poly1305_simd_update()
136 srclen -= bytes; in poly1305_simd_update()
137 dctx->buflen += bytes; in poly1305_simd_update()
147 bytes = poly1305_simd_blocks(dctx, src, srclen); in poly1305_simd_update()
148 src += srclen - bytes; in poly1305_simd_update()
149 srclen = bytes; in poly1305_simd_update()
ghash-clmulni-intel_glue.c
43 u32 bytes; member
88 if (dctx->bytes) { in ghash_update()
89 int n = min(srclen, dctx->bytes); in ghash_update()
90 u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes); in ghash_update()
92 dctx->bytes -= n; in ghash_update()
98 if (!dctx->bytes) in ghash_update()
108 dctx->bytes = GHASH_BLOCK_SIZE - srclen; in ghash_update()
120 if (dctx->bytes) { in ghash_flush()
121 u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes); in ghash_flush()
123 while (dctx->bytes--) in ghash_flush()
[all …]
/arch/x86/oprofile/
backtrace.c
47 unsigned long bytes; in dump_user_backtrace_32() local
49 bytes = copy_from_user_nmi(bufhead, head, sizeof(bufhead)); in dump_user_backtrace_32()
50 if (bytes != 0) in dump_user_backtrace_32()
93 unsigned long bytes; in dump_user_backtrace() local
95 bytes = copy_from_user_nmi(bufhead, head, sizeof(bufhead)); in dump_user_backtrace()
96 if (bytes != 0) in dump_user_backtrace()
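
The backtrace matches depend on copy_from_user_nmi()'s convention: like copy_from_user(), it returns the number of bytes it could NOT copy, so any nonzero result means the user frame read failed and the unwind stops. A sketch with the copier stubbed:

    /* Stand-in with the kernel convention: returns bytes NOT copied. */
    static unsigned long copy_nmi_stub(void *dst, const void *src,
                                       unsigned long n)
    { (void)dst; (void)src; (void)n; return 0; }

    /* Returns 0 if the whole frame was read, -1 to stop unwinding. */
    static int read_user_frame(void *buf, const void *head, unsigned long size)
    {
        unsigned long bytes = copy_nmi_stub(buf, head, size);

        return bytes != 0 ? -1 : 0;
    }
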
/arch/powerpc/include/asm/
xor.h
27 void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
29 void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
31 void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
34 void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
/arch/powerpc/lib/
xor_vmx.c
57 void xor_altivec_2(unsigned long bytes, unsigned long *v1_in, in xor_altivec_2() argument
62 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in xor_altivec_2()
81 void xor_altivec_3(unsigned long bytes, unsigned long *v1_in, in xor_altivec_3() argument
87 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in xor_altivec_3()
109 void xor_altivec_4(unsigned long bytes, unsigned long *v1_in, in xor_altivec_4() argument
117 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in xor_altivec_4()
142 void xor_altivec_5(unsigned long bytes, unsigned long *v1_in, in xor_altivec_5() argument
151 unsigned long lines = bytes / (sizeof(unative_t)) / 4; in xor_altivec_5()
/arch/ia64/sn/kernel/sn2/
cache.c
24 sn_flush_all_caches(long flush_addr, long bytes) in sn_flush_all_caches() argument
32 flush_icache_range(addr, addr + bytes); in sn_flush_all_caches()
38 flush_icache_range(addr, addr + bytes); in sn_flush_all_caches()
/arch/metag/lib/
memcpy.S
15 ! If there are less than 16 bytes to copy use the byte copy loop
36 ! The destination address is not 8 byte aligned. We will copy bytes from
39 ! bytes here).
43 SUB D1Ar3, D1Ar3, #1 ! decrement count of remaining bytes
48 ! We have at least (16 - 7) = 9 bytes to copy - calculate the number of 8 byte
74 ! If there are any remaining bytes use the byte copy loop, otherwise we are done
80 ! or more bytes to be copied.
88 ! Save the number of bytes of mis-alignment in D0Ar4 for use later
94 ! prefetch 8 bytes
99 ! There are 3 mis-alignment cases to be considered. Less than 4 bytes, exactly
[all …]
/arch/metag/mm/
cache.c
293 void metag_data_cache_flush(const void *start, int bytes) in metag_data_cache_flush() argument
302 if (bytes >= 4096) { in metag_data_cache_flush()
309 loops = ((int)start & (DCACHE_LINE_BYTES - 1)) + bytes + in metag_data_cache_flush()
344 static void metag_phys_code_cache_flush(const void *start, int bytes) in metag_phys_code_cache_flush() argument
384 if ((bytes < 4096) && (bytes < loops)) { in metag_phys_code_cache_flush()
388 loops = (((int) start) & (step-1)) + bytes + step - 1; in metag_phys_code_cache_flush()
465 void metag_code_cache_flush(const void *start, int bytes) in metag_code_cache_flush() argument
478 metag_phys_code_cache_flush(start, bytes); in metag_code_cache_flush()
482 if (bytes >= 4096) { in metag_code_cache_flush()
483 metag_phys_code_cache_flush(start, bytes); in metag_code_cache_flush()
[all …]
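
The metag flush routines convert a byte range into a count of cache-line-sized loop iterations: add the start address's misalignment within a line, then round up to whole lines. As a small sketch (line size assumed to be a power of two):

    /* Number of cache lines touched by [start, start + bytes). */
    static int flush_loops(const void *start, int bytes, int line)
    {
        int misalign = (int)((long)start & (line - 1));

        return (misalign + bytes + line - 1) / line;
    }
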
/arch/powerpc/crypto/
sha1-spe-glue.c
83 unsigned int bytes; in ppc_spe_sha1_update() local
106 bytes = (len > MAX_BYTES) ? MAX_BYTES : len; in ppc_spe_sha1_update()
107 bytes = bytes & ~0x3f; in ppc_spe_sha1_update()
110 ppc_spe_sha1_transform(sctx->state, src, bytes >> 6); in ppc_spe_sha1_update()
113 src += bytes; in ppc_spe_sha1_update()
114 len -= bytes; in ppc_spe_sha1_update()
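
The SPE SHA-1 glue caps each pass at MAX_BYTES (bounding how long the SPE unit is held) and rounds down to whole 64-byte blocks, so bytes >> 6 is the block count handed to the transform. A sketch with the transform stubbed and the cap value assumed:

    #define MAX_BYTES 2048   /* per-pass cap; value assumed for the sketch */

    static void sha1_blocks(unsigned int *state, const unsigned char *src,
                            unsigned int nblocks)
    { (void)state; (void)src; (void)nblocks; }

    static void spe_update_sketch(unsigned int *state,
                                  const unsigned char *src, unsigned int len)
    {
        while (len >= 64) {
            unsigned int bytes = len > MAX_BYTES ? MAX_BYTES : len;

            bytes &= ~0x3fU;             /* whole 64-byte blocks only */
            sha1_blocks(state, src, bytes >> 6);
            src += bytes;
            len -= bytes;
        }
        /* a remainder of < 64 bytes is buffered for the next update */
    }
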
/arch/arm/lib/
memzero.S
24 blt 5f @ 1 bytes to align with?
43 blt 4f @ 1 have < 16 bytes
58 3: subs r1, r1, #64 @ 1 write 32 bytes out per loop
71 tst r1, #16 @ 1 16 bytes or more?
122 4: tst r1, #8 @ 1 8 bytes or more?
124 tst r1, #4 @ 1 4 bytes or more?
130 5: tst r1, #2 @ 1 2 bytes or more?
/arch/x86/kernel/
ioport.c
27 unsigned int i, max_long, bytes, bytes_updated; in sys_ioperm() local
73 bytes = (max_long + 1) * sizeof(unsigned long); in sys_ioperm()
74 bytes_updated = max(bytes, t->io_bitmap_max); in sys_ioperm()
76 t->io_bitmap_max = bytes; in sys_ioperm()
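
sys_ioperm() sizes its copy into the TSS from max_long, the index of the highest changed bitmap word: (max_long + 1) words in bytes, widened to cover the bitmap's previous extent so stale bits get overwritten too. The arithmetic as a sketch:

    /* bytes: new extent of the I/O bitmap, in bytes.
     * return: how much of the TSS copy must be refreshed. */
    static unsigned int bitmap_update_bytes(unsigned int max_long,
                                            unsigned int old_max)
    {
        unsigned int bytes = (max_long + 1) * sizeof(unsigned long);

        return bytes > old_max ? bytes : old_max;   /* max(bytes, old_max) */
    }
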
msr.c
78 ssize_t bytes = 0; in msr_read() local
92 bytes += 8; in msr_read()
95 return bytes ? bytes : err; in msr_read()
106 ssize_t bytes = 0; in msr_write() local
120 bytes += 8; in msr_write()
123 return bytes ? bytes : err; in msr_write()
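
msr_read() and msr_write() move exactly 8 bytes per MSR access and accumulate the total, returning the byte count if anything transferred and otherwise the first error, in the usual read(2)/write(2) style. A sketch of the read side with the MSR access stubbed:

    #include <stddef.h>
    #include <string.h>

    /* Stand-in for rdmsr_safe_on_cpu(): 0 on success, negative on fault. */
    static int rdmsr_stub(unsigned int reg, unsigned int *lo, unsigned int *hi)
    { (void)reg; *lo = *hi = 0; return 0; }

    static long msr_read_sketch(unsigned int reg, unsigned char *buf,
                                size_t count)
    {
        long bytes = 0;
        int err = 0;
        unsigned int data[2];

        for (; count >= 8; count -= 8, buf += 8) {
            err = rdmsr_stub(reg, &data[0], &data[1]);
            if (err)
                break;
            memcpy(buf, data, 8);     /* one MSR = 8 bytes */
            bytes += 8;
        }
        return bytes ? bytes : err;   /* count read, or the first error */
    }
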
/arch/powerpc/platforms/powermac/
pfunc_core.c
80 static void print_blob(const char *title, const void *blob, int bytes)
83 while(bytes--) {
270 u32 bytes = pmf_next32(cmd); in pmf_parser_read_i2c() local
272 LOG_PARSE("pmf: read_i2c(bytes: %ud)\n", bytes); in pmf_parser_read_i2c()
274 PMF_PARSE_CALL(read_i2c, cmd, h, bytes); in pmf_parser_read_i2c()
279 u32 bytes = pmf_next32(cmd); in pmf_parser_write_i2c() local
280 const void *blob = pmf_next_blob(cmd, bytes); in pmf_parser_write_i2c()
282 LOG_PARSE("pmf: write_i2c(bytes: %ud) ...\n", bytes); in pmf_parser_write_i2c()
283 LOG_BLOB("pmf: data: \n", blob, bytes); in pmf_parser_write_i2c()
285 PMF_PARSE_CALL(write_i2c, cmd, h, bytes, blob); in pmf_parser_write_i2c()
[all …]
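
From the while(bytes--) match, print_blob() evidently walks the blob a byte at a time and prints hex. A guess at its shape, with all formatting details assumed:

    #include <stdio.h>

    static void print_blob_sketch(const char *title, const void *blob,
                                  int bytes)
    {
        const unsigned char *p = blob;

        printf("%s", title);
        while (bytes--)
            printf(" %02x", *p++);
        printf("\n");
    }
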
