/arch/x86/lib/ |
D | mmx_32.c |
      39  void *_mmx_memcpy(void *to, const void *from, size_t len)    in _mmx_memcpy() argument
      45  return __memcpy(to, from, len);    in _mmx_memcpy()
      64  : : "r" (from));    in _mmx_memcpy()
      90  : : "r" (from), "r" (to) : "memory");    in _mmx_memcpy()
      92  from += 64;    in _mmx_memcpy()
     114  : : "r" (from), "r" (to) : "memory");    in _mmx_memcpy()
     116  from += 64;    in _mmx_memcpy()
     122  __memcpy(to, from, len & 63);    in _mmx_memcpy()
     169  static void fast_copy_page(void *to, void *from)    in fast_copy_page() argument
     190  _ASM_EXTABLE(1b, 3b) : : "r" (from));    in fast_copy_page()
     [all …]
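Only the lines that mention `from` survive in the listing above, but they sketch the overall shape of `_mmx_memcpy()`: small copies fall back to `__memcpy()` (lines 39/45), the bulk advances in 64-byte blocks (`from += 64`), and the sub-64-byte tail is finished with `__memcpy(to, from, len & 63)` (line 122). Below is a hedged, plain-C outline of that structure only; the MMX loads/stores, prefetches, and exception-table fixups of the real routine are replaced by ordinary `memcpy()` calls, and `mmx_memcpy_outline` is an illustrative name, not a kernel symbol.

	#include <stddef.h>
	#include <string.h>

	/* Illustrative outline only: stands in for the prefetch + MMX block
	 * copy of _mmx_memcpy(); not the actual kernel implementation. */
	static void *mmx_memcpy_outline(void *to, const void *from, size_t len)
	{
		void *ret = to;

		if (len < 64)                   /* small copy: MMX setup not worth it */
			return memcpy(to, from, len);

		while (len >= 64) {             /* bulk: one 64-byte block per pass */
			memcpy(to, from, 64);   /* stands in for the movq pairs */
			from = (const char *)from + 64;
			to = (char *)to + 64;
			len -= 64;
		}

		memcpy(to, from, len & 63);     /* tail, as on line 122 above */
		return ret;
	}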
|
D | iomem.c |
       5  #define movs(type,to,from) \    argument
       6  asm volatile("movs" type:"=&D" (to), "=&S" (from):"0" (to), "1" (from):"memory")
       9  static __always_inline void rep_movs(void *to, const void *from, size_t n)    in rep_movs() argument
      21  : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)    in rep_movs()
      25  void memcpy_fromio(void *to, const volatile void __iomem *from, size_t n)    in memcpy_fromio() argument
      31  if (unlikely(1 & (unsigned long)from)) {    in memcpy_fromio()
      32  movs("b", to, from);    in memcpy_fromio()
      35  if (n > 1 && unlikely(2 & (unsigned long)from)) {    in memcpy_fromio()
      36  movs("w", to, from);    in memcpy_fromio()
      39  rep_movs(to, (const void *)from, n);    in memcpy_fromio()
     [all …]
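The `memcpy_fromio()` lines above show a head-alignment dance: an odd source address gets a single byte `movs` (lines 31-32), a remaining 2-byte misalignment gets a 16-bit `movs` (lines 35-36), and the rest goes through `rep_movs()` in 4-byte units. A hedged sketch of that logic with plain pointers (no `volatile __iomem`, no inline asm; `memcpy_fromio_outline` is an invented name):

	#include <stddef.h>
	#include <stdint.h>
	#include <string.h>

	static void memcpy_fromio_outline(void *to, const void *from, size_t n)
	{
		uint8_t *d = to;
		const uint8_t *s = from;

		if (!n)
			return;

		if ((uintptr_t)s & 1) {                 /* odd source: peel one byte */
			*d++ = *s++;
			n--;
		}
		if (n > 1 && ((uintptr_t)s & 2)) {      /* 2-byte aligned only: peel a word */
			memcpy(d, s, 2);
			d += 2;
			s += 2;
			n -= 2;
		}
		memcpy(d, s, n);                        /* bulk copy, source now 4-byte aligned */
	}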
|
D | usercopy_32.c |
      97  __copy_user_intel(void __user *to, const void *from, unsigned long size)    in __copy_user_intel() argument
     195  : "1"(to), "2"(from), "0"(size)    in __copy_user_intel()
     201  const void __user *from, unsigned long size)    in __copy_user_intel_nocache() argument
     283  : "1"(to), "2"(from), "0"(size)    in __copy_user_intel_nocache()
     294  unsigned long __copy_user_intel(void __user *to, const void *from,
     299  #define __copy_user(to, from, size) \    argument
     328  : "3"(size), "0"(size), "1"(to), "2"(from) \
     332  unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)    in __copy_user_ll() argument
     335  if (movsl_is_ok(to, from, n))    in __copy_user_ll()
     336  __copy_user(to, from, n);    in __copy_user_ll()
     [all …]
|
/arch/sh/kernel/ |
D | io.c |
      16  void memcpy_fromio(void *to, const volatile void __iomem *from, unsigned long count)    in memcpy_fromio() argument
      24  (((u32)to & 0x1f) == 0) && (((u32)from & 0x3) == 0)) {    in memcpy_fromio()
      52  "=&r" (tmp5), "=&r" (tmp6), "=&r" (from)    in memcpy_fromio()
      53  : "7"(from), "0" (to), "1" (count)    in memcpy_fromio()
      58  if ((((u32)to | (u32)from) & 0x3) == 0) {    in memcpy_fromio()
      60  *(u32 *)to = *(volatile u32 *)from;    in memcpy_fromio()
      62  from += 4;    in memcpy_fromio()
      67  *(u8 *)to = *(volatile u8 *)from;    in memcpy_fromio()
      69  from++;    in memcpy_fromio()
      79  void memcpy_toio(volatile void __iomem *to, const void *from, unsigned long count)    in memcpy_toio() argument
     [all …]
|
D | machvec.c |
      40  static int __init early_parse_mv(char *from)    in early_parse_mv() argument
      48  mv_end = strchr(from, ' ');    in early_parse_mv()
      50  mv_end = from + strlen(from);    in early_parse_mv()
      52  mv_comma = strchr(from, ',');    in early_parse_mv()
      53  mv_len = mv_end - from;    in early_parse_mv()
      56  memcpy(mv_name, from, mv_len);    in early_parse_mv()
      58  from = mv_end;    in early_parse_mv()
|
/arch/arm64/kernel/ |
D | io.c |
      15  void __memcpy_fromio(void *to, const volatile void __iomem *from, size_t count)    in __memcpy_fromio() argument
      17  while (count && !IS_ALIGNED((unsigned long)from, 8)) {    in __memcpy_fromio()
      18  *(u8 *)to = __raw_readb(from);    in __memcpy_fromio()
      19  from++;    in __memcpy_fromio()
      25  *(u64 *)to = __raw_readq(from);    in __memcpy_fromio()
      26  from += 8;    in __memcpy_fromio()
      32  *(u8 *)to = __raw_readb(from);    in __memcpy_fromio()
      33  from++;    in __memcpy_fromio()
      43  void __memcpy_toio(volatile void __iomem *to, const void *from, size_t count)    in __memcpy_toio() argument
      46  __raw_writeb(*(u8 *)from, to);    in __memcpy_toio()
     [all …]
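The arm64 `__memcpy_fromio()` lines show the classic three-phase MMIO copy: byte reads until `from` is 8-byte aligned (lines 17-19), 64-bit `__raw_readq()` reads for the bulk (lines 25-26), then byte reads for the tail (lines 32-33). A hedged plain-C sketch of the same phasing, with ordinary loads standing in for `__raw_readb()`/`__raw_readq()` and an invented function name:

	#include <stddef.h>
	#include <stdint.h>
	#include <string.h>

	static void memcpy_fromio_sketch(void *to, const void *from, size_t count)
	{
		uint8_t *d = to;
		const uint8_t *s = from;

		while (count && ((uintptr_t)s & 7)) {   /* head: align the source to 8 bytes */
			*d++ = *s++;
			count--;
		}
		while (count >= 8) {                    /* bulk: one 64-bit read per iteration */
			uint64_t v = *(const uint64_t *)s;  /* __raw_readq() in the original */
			memcpy(d, &v, 8);                   /* destination may stay unaligned */
			d += 8;
			s += 8;
			count -= 8;
		}
		while (count) {                         /* tail: leftover bytes */
			*d++ = *s++;
			count--;
		}
	}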
|
/arch/m68k/include/asm/ |
D | uaccess.h |
     195  unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
     196  unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
     203  #define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\    argument
     238  : "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp) \
     241  #define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\    argument
     242  ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
     243  #define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3) \    argument
     244  ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, \
     248  __constant_copy_from_user(void *to, const void __user *from, unsigned long n)    in __constant_copy_from_user() argument
     254  __constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);    in __constant_copy_from_user()
     [all …]
|
/arch/m68k/lib/ |
D | memcpy.c |
      10  void *memcpy(void *to, const void *from, size_t n)    in memcpy() argument
      19  const char *cfrom = from;    in memcpy()
      22  from = cfrom;    in memcpy()
      26  if ((long)from & 1) {    in memcpy()
      28  const char *cfrom = from;    in memcpy()
      36  const short *sfrom = from;    in memcpy()
      39  from = sfrom;    in memcpy()
      45  const long *lfrom = from;    in memcpy()
      73  from = lfrom;    in memcpy()
      77  const short *sfrom = from;    in memcpy()
     [all …]
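The m68k `memcpy()` steps through copy widths: a leading byte and a leading 16-bit word bring the pointers to 4-byte alignment, the bulk is moved as 32-bit longs (the asm loop is elided from the listing), and the tail is finished with one optional word and one optional byte. Below is a hedged outline of that width-stepping; it aligns on the destination for simplicity and assumes, as 68020+ hardware does, that unaligned 16/32-bit accesses are tolerated. `memcpy_outline` is an illustrative name.

	#include <stddef.h>
	#include <stdint.h>

	static void *memcpy_outline(void *to, const void *from, size_t n)
	{
		uint8_t *d = to;
		const uint8_t *s = from;
		size_t longs;

		if (!n)
			return to;

		if ((uintptr_t)d & 1) {                 /* odd start: one byte */
			*d++ = *s++;
			n--;
		}
		if (n > 2 && ((uintptr_t)d & 2)) {      /* 2-byte aligned only: one word */
			*(uint16_t *)d = *(const uint16_t *)s;
			d += 2; s += 2; n -= 2;
		}
		for (longs = n >> 2; longs; longs--) {  /* bulk: 32-bit long copies */
			*(uint32_t *)d = *(const uint32_t *)s;
			d += 4; s += 4;
		}
		if (n & 2) {                            /* tail word */
			*(uint16_t *)d = *(const uint16_t *)s;
			d += 2; s += 2;
		}
		if (n & 1)                              /* tail byte */
			*d = *s;

		return to;
	}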
|
/arch/x86/include/asm/ |
D | string_32.h |
      33  static __always_inline void *__memcpy(void *to, const void *from, size_t n)    in __memcpy() argument
      43  : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)    in __memcpy()
      52  static __always_inline void *__constant_memcpy(void *to, const void *from,    in __constant_memcpy() argument
      61  *(char *)to = *(char *)from;    in __constant_memcpy()
      64  *(short *)to = *(short *)from;    in __constant_memcpy()
      67  *(int *)to = *(int *)from;    in __constant_memcpy()
      70  *(short *)to = *(short *)from;    in __constant_memcpy()
      71  *((char *)to + 2) = *((char *)from + 2);    in __constant_memcpy()
      74  *(int *)to = *(int *)from;    in __constant_memcpy()
      75  *((char *)to + 4) = *((char *)from + 4);    in __constant_memcpy()
     [all …]
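The `__constant_memcpy()` lines illustrate the other half of the x86-32 strategy: when `n` is a compile-time constant, a switch of direct loads and stores replaces the generic `rep movs` path of `__memcpy()`. A hedged sketch of those small cases (the real header continues well past `n == 5` and relies on x86 tolerating unaligned 2/4-byte accesses; the function name is invented):

	#include <stddef.h>
	#include <stdint.h>
	#include <string.h>

	static inline void *constant_memcpy_sketch(void *to, const void *from, size_t n)
	{
		switch (n) {
		case 0:
			return to;
		case 1:
			*(uint8_t *)to = *(const uint8_t *)from;
			return to;
		case 2:
			*(uint16_t *)to = *(const uint16_t *)from;
			return to;
		case 3:
			*(uint16_t *)to = *(const uint16_t *)from;
			*((uint8_t *)to + 2) = *((const uint8_t *)from + 2);
			return to;
		case 4:
			*(uint32_t *)to = *(const uint32_t *)from;
			return to;
		case 5:
			*(uint32_t *)to = *(const uint32_t *)from;
			*((uint8_t *)to + 4) = *((const uint8_t *)from + 4);
			return to;
		default:
			return memcpy(to, from, n);     /* larger sizes: generic path */
		}
	}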
|
D | uaccess_32.h |
      13  (void *to, const void *from, unsigned long n);
      15  (void *to, const void __user *from, unsigned long n);
      18  raw_copy_to_user(void __user *to, const void *from, unsigned long n)    in raw_copy_to_user() argument
      20  return __copy_user_ll((__force void *)to, from, n);    in raw_copy_to_user()
      24  raw_copy_from_user(void *to, const void __user *from, unsigned long n)    in raw_copy_from_user() argument
      26  return __copy_user_ll(to, (__force const void *)from, n);    in raw_copy_from_user()
      30  __copy_from_user_inatomic_nocache(void *to, const void __user *from,    in __copy_from_user_inatomic_nocache() argument
      33  return __copy_from_user_ll_nocache_nozero(to, from, n);    in __copy_from_user_inatomic_nocache()
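`raw_copy_from_user()` and `raw_copy_to_user()` here are the x86-32 backends behind the generic `copy_from_user()`/`copy_to_user()` helpers; like those helpers they return the number of bytes that could not be copied, so zero means success. A hedged kernel-side usage sketch (the struct and function names are made up for illustration):

	#include <linux/uaccess.h>
	#include <linux/errno.h>

	struct demo_args {                      /* hypothetical ioctl payload */
		int x;
		int y;
	};

	static long demo_handle_ioctl(void __user *uptr)
	{
		struct demo_args args;

		if (copy_from_user(&args, uptr, sizeof(args)))
			return -EFAULT;         /* some bytes were left uncopied */

		args.x += args.y;

		if (copy_to_user(uptr, &args, sizeof(args)))
			return -EFAULT;

		return 0;
	}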
|
D | asm.h |
     130  # define _ASM_EXTABLE_TYPE(from, to, type) \
     133  .long (from) - . ; \
     177  # define _ASM_EXTABLE_TYPE(from, to, type) \    argument
     180  " .long (" #from ") - .\n" \
     185  # define _ASM_EXTABLE_TYPE_REG(from, to, type, reg) \    argument
     188  " .long (" #from ") - .\n" \
     207  #define _ASM_EXTABLE(from, to) \    argument
     208  _ASM_EXTABLE_TYPE(from, to, EX_TYPE_DEFAULT)
     210  #define _ASM_EXTABLE_UA(from, to) \    argument
     211  _ASM_EXTABLE_TYPE(from, to, EX_TYPE_UACCESS)
     [all …]
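Each `_ASM_EXTABLE*()` use above emits one exception-table record built from PC-relative offsets (the `.long (from) - .` lines), which keeps the table small and position-independent. As a hedged sketch, the record amounts to the three-int layout below, and the helper shows how a relative offset is turned back into an absolute address when the fault handler walks the table; the field names follow the kernel's `struct exception_table_entry`, but treat this as illustrative rather than the exact x86 definition.

	struct exception_table_entry {
		int insn;       /* (from) - &insn: offset of the faulting instruction */
		int fixup;      /* (to) - &fixup: offset of the fixup target */
		int data;       /* EX_TYPE_* plus optional register/immediate encoding */
	};

	/* Recover the absolute fixup address from the self-relative offset. */
	static inline unsigned long ex_fixup_addr_sketch(const struct exception_table_entry *e)
	{
		return (unsigned long)&e->fixup + e->fixup;
	}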
|
/arch/arm/mach-omap2/ |
D | pm44xx.c |
      43  const char *from;    member
     173  {.from = "mpuss_clkdm", .to = "l3_emif_clkdm"},
     174  {.from = "mpuss_clkdm", .to = "l3_1_clkdm"},
     175  {.from = "mpuss_clkdm", .to = "l3_2_clkdm"},
     176  {.from = "ducati_clkdm", .to = "l3_1_clkdm"},
     177  {.from = "ducati_clkdm", .to = "l3_2_clkdm"},
     178  {.from = NULL} /* TERMINATION */
     182  {.from = "mpu_clkdm", .to = "emif_clkdm"},
     183  {.from = NULL} /* TERMINATION */
     193  struct clockdomain *from, *to;    in omap4plus_init_static_deps() local
     [all …]
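The `{.from, .to}` initializers above are static clock-domain dependency tables terminated by a `NULL` `.from`; `omap4plus_init_static_deps()` walks such a table, resolves each name pair, and wires the wake-up dependency. A hedged user-space sketch of that walk, with `printf()` standing in for the `clkdm_lookup()`/`clkdm_add_wkdep()` calls and made-up type and table names:

	#include <stdio.h>

	struct static_dep_sketch {
		const char *from;
		const char *to;
	};

	static const struct static_dep_sketch demo_deps[] = {
		{ .from = "mpu_clkdm", .to = "emif_clkdm" },
		{ .from = NULL }                        /* TERMINATION */
	};

	static void init_static_deps_sketch(const struct static_dep_sketch *dep)
	{
		for (; dep->from; dep++)                /* stop at the NULL terminator */
			printf("add wakeup dependency: %s -> %s\n", dep->from, dep->to);
	}

	int main(void)
	{
		init_static_deps_sketch(demo_deps);
		return 0;
	}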
|
/arch/arc/include/asm/ |
D | uaccess.h |
     168  raw_copy_from_user(void *to, const void __user *from, unsigned long n)    in raw_copy_from_user() argument
     179  if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {    in raw_copy_from_user()
     205  "=&r" (tmp), "+r" (to), "+r" (from)    in raw_copy_from_user()
     246  : "+r" (res), "+r"(to), "+r"(from),    in raw_copy_from_user()
     270  : "+r" (res), "+r"(to), "+r"(from),    in raw_copy_from_user()
     291  : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)    in raw_copy_from_user()
     311  : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)    in raw_copy_from_user()
     329  : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)    in raw_copy_from_user()
     385  : "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),    in raw_copy_from_user()
     395  raw_copy_to_user(void __user *to, const void *from, unsigned long n)    in raw_copy_to_user() argument
     [all …]
|
/arch/x86/entry/vdso/ |
D | extable.h |
      11  #define _ASM_VDSO_EXTABLE_HANDLE(from, to) \
      12  ASM_VDSO_EXTABLE_HANDLE from to
      14  .macro ASM_VDSO_EXTABLE_HANDLE from:req to:req
      16  .long (\from) - __ex_table
      21  #define _ASM_VDSO_EXTABLE_HANDLE(from, to) \    argument
      23  ".long (" #from ") - __ex_table\n" \
|
/arch/arm/mm/ |
D | copypage-xscale.c |
      36  static void mc_copy_user_page(void *from, void *to)    in mc_copy_user_page() argument
      79  : "+&r" (from), "+&r" (to), "=&r" (tmp)    in mc_copy_user_page()
      84  void xscale_mc_copy_user_highpage(struct page *to, struct page *from,    in xscale_mc_copy_user_highpage() argument
      89  if (!test_and_set_bit(PG_dcache_clean, &from->flags))    in xscale_mc_copy_user_highpage()
      90  __flush_dcache_page(page_mapping_file(from), from);    in xscale_mc_copy_user_highpage()
      94  set_top_pte(COPYPAGE_MINICACHE, mk_pte(from, minicache_pgprot));    in xscale_mc_copy_user_highpage()
|
D | copypage-v4mc.c |
      40  static void mc_copy_user_page(void *from, void *to)    in mc_copy_user_page() argument
      59  : "+&r" (from), "+&r" (to), "=&r" (tmp)    in mc_copy_user_page()
      64  void v4_mc_copy_user_highpage(struct page *to, struct page *from,    in v4_mc_copy_user_highpage() argument
      69  if (!test_and_set_bit(PG_dcache_clean, &from->flags))    in v4_mc_copy_user_highpage()
      70  __flush_dcache_page(page_mapping_file(from), from);    in v4_mc_copy_user_highpage()
      74  set_top_pte(COPYPAGE_MINICACHE, mk_pte(from, minicache_pgprot));    in v4_mc_copy_user_highpage()
|
D | copypage-v6.c |
      31  struct page *from, unsigned long vaddr, struct vm_area_struct *vma)    in v6_copy_user_highpage_nonaliasing() argument
      35  kfrom = kmap_atomic(from);    in v6_copy_user_highpage_nonaliasing()
      70  struct page *from, unsigned long vaddr, struct vm_area_struct *vma)    in v6_copy_user_highpage_aliasing() argument
      75  if (!test_and_set_bit(PG_dcache_clean, &from->flags))    in v6_copy_user_highpage_aliasing()
      76  __flush_dcache_page(page_mapping_file(from), from);    in v6_copy_user_highpage_aliasing()
      90  set_top_pte(kfrom, mk_pte(from, PAGE_KERNEL));    in v6_copy_user_highpage_aliasing()
|
/arch/powerpc/kernel/ |
D | signal.h |
      43  void __user *from);
      45  void __user *from);
      48  unsigned long copy_fpr_from_user(struct task_struct *task, void __user *from);
      49  unsigned long copy_ckfpr_from_user(struct task_struct *task, void __user *from);
      71  #define unsafe_copy_fpr_from_user(task, from, label) do { \    argument
      73  u64 __user *buf = (u64 __user *)from; \
      81  #define unsafe_copy_vsx_from_user(task, from, label) do { \    argument
      83  u64 __user *buf = (u64 __user *)from; \
     112  #define unsafe_copy_ckfpr_from_user(task, from, label) do { \    argument
     114  u64 __user *buf = (u64 __user *)from; \
     [all …]
|
/arch/sh/include/asm/ |
D | dma.h |
     108  extern int dma_xfer(unsigned int chan, unsigned long from,
     111  #define dma_write(chan, from, to, size) \    argument
     112  dma_xfer(chan, from, to, size, DMA_MODE_WRITE)
     113  #define dma_write_page(chan, from, to) \    argument
     114  dma_write(chan, from, to, PAGE_SIZE)
     116  #define dma_read(chan, from, to, size) \    argument
     117  dma_xfer(chan, from, to, size, DMA_MODE_READ)
     118  #define dma_read_page(chan, from, to) \    argument
     119  dma_read(chan, from, to, PAGE_SIZE)
|
/arch/csky/lib/ |
D | usercopy.c |
       7  unsigned long raw_copy_from_user(void *to, const void *from,    in raw_copy_from_user() argument
      69  : "=r"(n), "=r"(to), "=r"(from), "=r"(nsave),    in raw_copy_from_user()
      71  : "0"(n), "1"(to), "2"(from)    in raw_copy_from_user()
      78  unsigned long raw_copy_to_user(void *to, const void *from,    in raw_copy_to_user() argument
     136  : "=r"(n), "=r"(to), "=r"(from), "=r"(w0),    in raw_copy_to_user()
     138  : "0"(n), "1"(to), "2"(from)    in raw_copy_to_user()
|
/arch/x86/kernel/ |
D | ioport.c |
      65  long ksys_ioperm(unsigned long from, unsigned long num, int turn_on)    in ksys_ioperm() argument
      71  if ((from + num <= from) || (from + num > IO_BITMAP_BITS))    in ksys_ioperm()
     122  bitmap_clear(iobm->bitmap, from, num);    in ksys_ioperm()
     124  bitmap_set(iobm->bitmap, from, num);    in ksys_ioperm()
     152  SYSCALL_DEFINE3(ioperm, unsigned long, from, unsigned long, num, int, turn_on)    in SYSCALL_DEFINE3() argument
     154  return ksys_ioperm(from, num, turn_on);    in SYSCALL_DEFINE3()
     202  long ksys_ioperm(unsigned long from, unsigned long num, int turn_on)    in ksys_ioperm() argument
     206  SYSCALL_DEFINE3(ioperm, unsigned long, from, unsigned long, num, int, turn_on)    in SYSCALL_DEFINE3() argument
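`ksys_ioperm()` backs the `ioperm(2)` system call: line 71 rejects ranges that wrap around (`from + num <= from`) or run past `IO_BITMAP_BITS`, and lines 122/124 clear or set the requested bits in the per-task I/O bitmap. A hedged user-space usage sketch (x86 only, needs CAP_SYS_RAWIO; port 0x378 is just the traditional parallel-port base, used here as an example):

	#include <stdio.h>
	#include <sys/io.h>     /* glibc wrappers for ioperm(2), inb()/outb() */

	int main(void)
	{
		if (ioperm(0x378, 3, 1)) {              /* from = 0x378, num = 3, turn_on = 1 */
			perror("ioperm");
			return 1;
		}

		unsigned char status = inb(0x379);      /* port access now allowed from user space */
		printf("status register: 0x%02x\n", status);

		ioperm(0x378, 3, 0);                    /* drop the permission again */
		return 0;
	}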
|
/arch/parisc/kernel/ |
D | alternative.c |
      47  u32 *from, cond, replacement;    in apply_alternatives() local
      50  from = (u32 *)((ulong)&entry->orig_offset + entry->orig_offset);    in apply_alternatives()
      61  index, cond, len, from, replacement);    in apply_alternatives()
      69  replacement = *from;    in apply_alternatives()
      82  index, cond, len, replacement, from, from);    in apply_alternatives()
      89  memcpy(from, source, 4 * len);    in apply_alternatives()
      92  *from = replacement;    in apply_alternatives()
|
/arch/arm64/mm/ |
D | copypage.c |
      17  void copy_highpage(struct page *to, struct page *from)    in copy_highpage() argument
      20  void *kfrom = page_address(from);    in copy_highpage()
      27  if (system_supports_mte() && page_mte_tagged(from)) {    in copy_highpage()
      34  void copy_user_highpage(struct page *to, struct page *from,    in copy_user_highpage() argument
      37  copy_highpage(to, from);    in copy_user_highpage()
|
/arch/powerpc/include/asm/ |
D | kup.h |
      68  static inline void allow_user_access(void __user *to, const void __user *from,    in allow_user_access() argument
      82  static __always_inline void allow_read_from_user(const void __user *from, unsigned long size)    in allow_read_from_user() argument
      85  allow_user_access(NULL, from, size, KUAP_READ);    in allow_read_from_user()
      93  static __always_inline void allow_read_write_user(void __user *to, const void __user *from,    in allow_read_write_user() argument
      97  allow_user_access(to, from, size, KUAP_READ_WRITE);    in allow_read_write_user()
     100  static __always_inline void prevent_read_from_user(const void __user *from, unsigned long size)    in prevent_read_from_user() argument
     110  static __always_inline void prevent_read_write_user(void __user *to, const void __user *from,    in prevent_read_write_user() argument
|
/arch/arm64/include/asm/ |
D | page.h |
      22  extern void copy_page(void *to, const void *from);
      25  void copy_user_highpage(struct page *to, struct page *from,
      29  void copy_highpage(struct page *to, struct page *from);
      40  #define copy_user_page(to, from, vaddr, pg) copy_page(to, from)    argument
|