/arch/x86/lib/
D | csum-partial_64.c | matches in do_csum():
      35  static unsigned do_csum(const unsigned char *buff, unsigned len)
      42  odd = 1 & (unsigned long) buff;
      44  result = *buff << 8;
      46  buff++;
      50  if (2 & (unsigned long) buff) {
      51  result += *(unsigned short *)buff;
      54  buff += 2;
      60  if (4 & (unsigned long) buff) {
      61  result += *(unsigned int *) buff;
      64  buff += 4;
      [all …]
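The excerpt above shows the alignment-peeling pattern shared by most generic do_csum() implementations: consume a leading odd byte, then a stray 16-bit word, then a stray 32-bit word, so the bulk of the buffer can be summed with aligned wide loads, and undo the byte rotation at the end if the start address was odd. A minimal portable sketch of that idea follows; it is illustrative only (little-endian, no unrolling, hypothetical name sketch_do_csum), not the kernel's code.

    #include <stdint.h>
    #include <stddef.h>

    /* Sketch of an alignment-peeling 16-bit one's-complement sum
     * (little-endian only, no unrolling; not the kernel implementation). */
    static unsigned int sketch_do_csum(const unsigned char *buff, size_t len)
    {
        uint64_t result = 0;
        int odd = 1 & (uintptr_t)buff;

        if (odd && len > 0) {                        /* leading odd byte */
            result = *buff << 8;                     /* lands in the high half on LE */
            len--;
            buff++;
        }
        if (len >= 2 && (2 & (uintptr_t)buff)) {     /* align to 4 bytes */
            result += *(const uint16_t *)buff;
            len -= 2;
            buff += 2;
        }
        while (len >= 4) {                           /* bulk: aligned 32-bit loads */
            result += *(const uint32_t *)buff;
            buff += 4;
            len -= 4;
        }
        if (len >= 2) {                              /* trailing 16-bit word */
            result += *(const uint16_t *)buff;
            buff += 2;
            len -= 2;
        }
        if (len)                                     /* trailing byte, low half on LE */
            result += *buff;

        while (result >> 16)                         /* fold carries down to 16 bits */
            result = (result & 0xffff) + (result >> 16);
        if (odd)                                     /* undo the rotation from the odd start */
            result = ((result >> 8) & 0xff) | ((result & 0xff) << 8);
        return (unsigned int)result;
    }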
/arch/parisc/lib/
D | checksum.c | matches in do_csum():
      37  static inline unsigned int do_csum(const unsigned char * buff, int len)
      44  odd = 1 & (unsigned long) buff;
      46  result = be16_to_cpu(*buff);
      48  buff++;
      52  if (2 & (unsigned long) buff) {
      53  result += *(unsigned short *) buff;
      56  buff += 2;
      62  r1 = *(unsigned int *)(buff + 0);
      63  r2 = *(unsigned int *)(buff + 4);
      64  r3 = *(unsigned int *)(buff + 8);
      [all …]
/arch/ia64/lib/
D | csum_partial_copy.c | matches in do_csum_c():
      35  unsigned long do_csum_c(const unsigned char * buff, int len, unsigned int psum)
      42  odd = 1 & (unsigned long) buff;
      44  result = *buff << 8;
      46  buff++;
      50  if (2 & (unsigned long) buff) {
      51  result += *(unsigned short *) buff;
      54  buff += 2;
      58  if (4 & (unsigned long) buff) {
      59  result += *(unsigned int *) buff;
      62  buff += 4;
      [all …]
D | checksum.c | matches in csum_partial() and ip_compute_csum():
      80  __wsum csum_partial(const void *buff, int len, __wsum sum)
      82  u64 result = do_csum(buff, len);
      97  __sum16 ip_compute_csum (const void *buff, int len)
      99  return (__force __sum16)~do_csum(buff,len);
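The two wrappers above show the usual layering: do_csum() produces a 16-bit one's-complement sum, csum_partial() feeds it into a running __wsum, and ip_compute_csum() returns the complement of the final folded value. A hedged sketch of the fold-and-complement step (commonly named csum_fold(); the helper below is illustrative, not the ia64 code):

    #include <stdint.h>

    /* Fold a 32-bit partial sum to 16 bits and complement it, i.e. the
     * final step behind ip_compute_csum()-style helpers (sketch only). */
    static uint16_t sketch_csum_fold(uint32_t sum)
    {
        sum = (sum & 0xffff) + (sum >> 16);     /* add the carries back in */
        sum = (sum & 0xffff) + (sum >> 16);     /* at most one more carry */
        return (uint16_t)~sum;
    }

With that helper, ip_compute_csum(buff, len) behaves like sketch_csum_fold(csum_partial(buff, len, 0)), which is essentially how the architecture headers further down define it.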
/arch/alpha/lib/
D | checksum.c | matches in do_csum():
      79  static inline unsigned long do_csum(const unsigned char * buff, int len)
      86  odd = 1 & (unsigned long) buff;
      88  result = *buff << 8;
      90  buff++;
      94  if (2 & (unsigned long) buff) {
      95  result += *(unsigned short *) buff;
      98  buff += 2;
     102  if (4 & (unsigned long) buff) {
     103  result += *(unsigned int *) buff;
     106  buff += 4;
      [all …]
/arch/powerpc/kernel/
D | optprobes.c | matches in arch_prepare_optimized_kprobe():
     158  kprobe_opcode_t *buff;
     168  buff = get_optinsn_slot();
     169  if (!buff)
     181  b_offset = (unsigned long)buff - (unsigned long)p->addr;
     186  b_offset = (unsigned long)(buff + TMPL_RET_IDX) - nip;
     193  pr_devel("Copying template to %p, size %lu\n", buff, size);
     195  rc = patch_instruction(buff + i, ppc_inst(*(optprobe_template_entry + i)));
     204  patch_imm_load_insns((unsigned long)op, 3, buff + TMPL_OP_IDX);
     216  rc = create_branch(&branch_op_callback, buff + TMPL_CALL_HDLR_IDX,
     219  rc |= create_branch(&branch_emulate_step, buff + TMPL_EMULATE_IDX,
      [all …]
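In the excerpt, buff is an instruction slot returned by get_optinsn_slot(), and b_offset is the PC-relative displacement from the probed address to that slot; the optimization only works if such displacements fit in the immediate field of a single branch. A generic range check of that kind is sketched below; the helper name and signature are illustrative (the kernel has its own helpers for this, e.g. is_offset_in_branch_range()), and the bits width is a parameter rather than a powerpc-specific constant.

    #include <stdbool.h>

    /* Illustrative check: does a PC-relative branch from 'from' to 'to'
     * fit in a signed immediate of 'bits' bits?  Assumes instruction
     * addresses are 4-byte aligned, as on powerpc. */
    static bool branch_offset_fits(unsigned long from, unsigned long to, unsigned int bits)
    {
        long offset = (long)to - (long)from;
        long limit = 1L << (bits - 1);

        return (offset & 3) == 0 && offset >= -limit && offset < limit;
    }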
D | nvram_64.c | matches in nvram_write_os_partition(), nvram_read_partition() and nvram_pstore_read():
     181  char *buff, int length,
     206  rc = ppc_md.nvram_write(buff, length, &tmp_index);
     219  int nvram_read_partition(struct nvram_os_partition *part, char *buff,
     243  rc = ppc_md.nvram_read(buff, length, &tmp_index);
     426  char *buff = NULL;
     486  buff = kmalloc(part->size, GFP_KERNEL);
     488  if (!buff)
     491  if (nvram_read_partition(part, buff, part->size, &err_type, &id_no)) {
     492  kfree(buff);
     504  oops_hdr = (struct oops_log_info *)buff;
      [all …]
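The nvram_pstore_read() lines above follow a common read-and-validate pattern: allocate a buffer the size of the partition, read the whole partition into it through the platform callback, then interpret the start of the buffer as a small header (struct oops_log_info) before trusting its length field. A userspace-flavoured sketch of that pattern, with hypothetical stand-in types (oops_hdr_t, backend_read_t), is shown below.

    #include <stdint.h>
    #include <stdlib.h>

    /* Hypothetical stand-in for the partition header; the real code uses
     * struct oops_log_info. */
    typedef struct {
        uint16_t version;
        uint16_t report_length;
        uint64_t timestamp;
    } oops_hdr_t;

    /* Hypothetical stand-in for a ppc_md.nvram_read()-style callback. */
    typedef int (*backend_read_t)(char *dst, size_t len, size_t *index);

    static char *read_partition(backend_read_t backend_read, size_t part_index, size_t part_size)
    {
        char *buff = malloc(part_size);
        size_t index = part_index;

        if (!buff)
            return NULL;
        if (backend_read(buff, part_size, &index) != 0 ||
            part_size < sizeof(oops_hdr_t) ||
            ((oops_hdr_t *)buff)->report_length > part_size - sizeof(oops_hdr_t)) {
            free(buff);
            return NULL;
        }
        return buff;    /* caller owns the buffer and the header at its start */
    }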
/arch/powerpc/include/asm/
D | checksum.h | __csum_partial() declaration and matches in csum_partial():
     164  __wsum __csum_partial(const void *buff, int len, __wsum sum);
     166  static __always_inline __wsum csum_partial(const void *buff, int len, __wsum sum)
     170  sum = csum_add(sum, (__force __wsum)*(const u16 *)buff);
     172  sum = csum_add(sum, (__force __wsum)*(const u32 *)buff);
     175  *(const u16 *)(buff + 4));
     178  *(const u32 *)(buff + 4));
     181  *(const u16 *)(buff + 8));
     184  *(const u32 *)(buff + 8));
     187  *(const u16 *)(buff + 12));
     190  *(const u32 *)(buff + 12));
      [all …]
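The inline above specializes small, compile-time-constant lengths by summing buff in 16- and 32-bit pieces with csum_add() instead of calling out to __csum_partial(). The essential property is an add with end-around carry, sketched here in plain C (the generic idea, not the powerpc implementation):

    #include <stdint.h>

    /* One's-complement add: wrap any carry out of bit 31 back into bit 0,
     * so 16- and 32-bit chunks can be accumulated in any mix. */
    static inline uint32_t sketch_csum_add(uint32_t csum, uint32_t addend)
    {
        uint32_t res = csum + addend;

        return res + (res < addend);    /* +1 if the addition wrapped */
    }

For a constant len of 8, for example, the header's inline appears to reduce to two such adds over the 32-bit words at buff and buff + 4, leaving the final fold to csum_fold() at the call site.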
D | nvram.h
      45  extern int nvram_write_error_log(char * buff, int length,
      47  extern int nvram_read_error_log(char * buff, int length,
      84  extern int nvram_read_partition(struct nvram_os_partition *part, char *buff,
      90  char *buff, int length,
/arch/m68k/lib/
D | checksum.c | matches in csum_partial():
      38  __wsum csum_partial(const void *buff, int len, __wsum sum)
     117  : "=d" (sum), "=d" (len), "=a" (buff),
     119  : "0" (sum), "1" (len), "2" (buff)
/arch/s390/include/asm/
D | checksum.h | matches in csum_partial() and ip_compute_csum():
      30  static inline __wsum csum_partial(const void *buff, int len, __wsum sum)
      33  .even = (unsigned long) buff,
     106  static inline __sum16 ip_compute_csum(const void *buff, int len)
     108  return csum_fold(csum_partial(buff, len, 0));
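Most of the headers in this listing define ip_compute_csum() as csum_fold(csum_partial(buff, len, 0)). A useful consequence on the receive side is that recomputing the checksum over a header that still contains its (correct) checksum field gives 0, because the folded sum of data plus checksum is all-ones. A self-contained, userspace-style sketch of that verification property (not a kernel helper):

    #include <stdint.h>
    #include <stddef.h>

    /* Sum big-endian 16-bit words and fold the carries; len is assumed even
     * (an IPv4 header length is always a multiple of 4). */
    static uint32_t sum16(const uint8_t *p, size_t len)
    {
        uint32_t s = 0;

        for (size_t i = 0; i < len; i += 2)
            s += (uint32_t)(p[i] << 8 | p[i + 1]);
        while (s >> 16)
            s = (s & 0xffff) + (s >> 16);
        return s;
    }

    /* A header with a correct checksum field sums to all-ones once folded,
     * i.e. an ip_compute_csum()-style recomputation over it would be 0. */
    static int ipv4_header_ok(const uint8_t *hdr, size_t hlen)
    {
        return sum16(hdr, hlen) == 0xffff;
    }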
/arch/x86/um/asm/
D | checksum_32.h | matches in ip_compute_csum():
       8  static inline __sum16 ip_compute_csum(const void *buff, int len)
      10  return csum_fold (csum_partial(buff, len, 0));
/arch/m68k/include/asm/
D | checksum.h | csum_partial() declaration and matches in ip_compute_csum():
      23  __wsum csum_partial(const void *buff, int len, __wsum sum);
     113  static inline __sum16 ip_compute_csum(const void *buff, int len)
     115  return csum_fold (csum_partial(buff, len, 0));
/arch/sparc/include/asm/
D | checksum_64.h | csum_partial() declaration and matches in ip_compute_csum():
      33  __wsum csum_partial(const void * buff, int len, __wsum sum);
     127  static inline __sum16 ip_compute_csum(const void *buff, int len)
     129  return csum_fold(csum_partial(buff, len, 0));
D | checksum_32.h | csum_partial() declaration and matches in ip_compute_csum():
      33  __wsum csum_partial(const void *buff, int len, __wsum sum);
     193  static inline __sum16 ip_compute_csum(const void *buff, int len)
     195  return csum_fold(csum_partial(buff, len, 0));
/arch/arm64/lib/
D | csum.c | matches in do_csum():
      21  unsigned int __no_sanitize_address do_csum(const unsigned char *buff, int len)
      30  offset = (unsigned long)buff & 7;
      39  kasan_check_read(buff, len);
      40  ptr = (u64 *)(buff - offset);
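The arm64 do_csum() rounds the pointer down to an 8-byte boundary (ptr = buff - offset) so every load in its main loop is naturally aligned, and the bytes before buff are masked out of the first word; reading below buff within the same granule is also why the function is marked __no_sanitize_address and performs an explicit kasan_check_read(). A little-endian sketch of just the first-word masking (the odd-offset byte-rotation fix-up done at the end of the real code is omitted):

    #include <stdint.h>

    /* Load the first naturally aligned 64-bit word covering buff and clear
     * the byte lanes that precede buff (little-endian; sketch only). */
    static uint64_t first_aligned_word(const unsigned char *buff)
    {
        unsigned long offset = (uintptr_t)buff & 7;
        const uint64_t *ptr = (const uint64_t *)(buff - offset);
        uint64_t word = *ptr;

        /* On LE, the bytes before buff sit in the low-order lanes. */
        return offset ? (word & (~0ULL << (8 * offset))) : word;
    }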
/arch/sh/include/asm/
D | checksum_32.h | csum_partial() declaration and matches in ip_compute_csum():
      23  asmlinkage __wsum csum_partial(const void *buff, int len, __wsum sum);
     150  static inline __sum16 ip_compute_csum(const void *buff, int len)
     152  return csum_fold(csum_partial(buff, len, 0));
/arch/x86/include/asm/
D | checksum_32.h | csum_partial() declaration and matches in ip_compute_csum():
      20  asmlinkage __wsum csum_partial(const void *buff, int len, __wsum sum);
     139  static inline __sum16 ip_compute_csum(const void *buff, int len)
     141  return csum_fold(csum_partial(buff, len, 0));
/arch/arm/include/asm/
D | checksum.h | csum_partial() declaration and matches in ip_compute_csum():
      27  __wsum csum_partial(const void *buff, int len, __wsum sum);
     148  ip_compute_csum(const void *buff, int len)
     150  return csum_fold(csum_partial(buff, len, 0));
/arch/nios2/include/asm/
D | checksum.h
      14  extern __wsum csum_partial(const void *buff, int len, __wsum sum);
      16  extern __sum16 ip_compute_csum(const void *buff, int len);
/arch/powerpc/platforms/pseries/
D | nvram.c | matches in nvram_write_error_log() and nvram_read_error_log():
     133  int nvram_write_error_log(char * buff, int length,
     136  int rc = nvram_write_os_partition(&rtas_log_partition, buff, length,
     152  int nvram_read_error_log(char *buff, int length,
     155  return nvram_read_partition(&rtas_log_partition, buff, length,
/arch/xtensa/include/asm/
D | checksum.h | csum_partial() declaration and matches in ip_compute_csum():
      30  asmlinkage __wsum csum_partial(const void *buff, int len, __wsum sum);
     168  static __inline__ __sum16 ip_compute_csum(const void *buff, int len)
     170  return csum_fold (csum_partial(buff, len, 0));
/arch/ia64/include/asm/
D | checksum.h
      38  extern __wsum csum_partial(const void *buff, int len, __wsum sum);
      44  extern __sum16 ip_compute_csum(const void *buff, int len);
/arch/alpha/include/asm/
D | checksum.h
      35  extern __wsum csum_partial(const void *buff, int len, __wsum sum);
      56  extern __sum16 ip_compute_csum(const void *buff, int len);
/arch/mips/include/asm/
D | checksum.h | csum_partial() declaration and matches in ip_compute_csum():
      35  __wsum csum_partial(const void *buff, int len, __wsum sum);
     180  static inline __sum16 ip_compute_csum(const void *buff, int len)
     182  return csum_fold(csum_partial(buff, len, 0));
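Across all of these headers the signature is the same: csum_partial() takes a __wsum seed precisely so a checksum can be accumulated over discontiguous chunks and folded once at the end. A short sketch of that call pattern (kernel context assumed; every chunk except the last must have an even length so the 16-bit word positions stay aligned):

    /* Sketch: checksum a message whose header and payload live in separate
     * buffers, chaining the two csum_partial() calls through the seed. */
    static __sum16 sketch_msg_csum(const void *hdr, int hdr_len,
                                   const void *payload, int payload_len)
    {
        __wsum sum;

        sum = csum_partial(hdr, hdr_len, 0);            /* seed the running sum */
        sum = csum_partial(payload, payload_len, sum);  /* continue the sum */
        return csum_fold(sum);                          /* fold to 16 bits and complement */
    }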