Searched refs:size (Results 1 – 25 of 2416) sorted by relevance


/arch/s390/lib/
find.c 18 unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size) in find_first_bit_inv() argument
24 while (size & ~(BITS_PER_LONG - 1)) { in find_first_bit_inv()
28 size -= BITS_PER_LONG; in find_first_bit_inv()
30 if (!size) in find_first_bit_inv()
32 tmp = (*p) & (~0UL << (BITS_PER_LONG - size)); in find_first_bit_inv()
34 return result + size; /* Nope. */ in find_first_bit_inv()
40 unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size, in find_next_bit_inv() argument
47 if (offset >= size) in find_next_bit_inv()
48 return size; in find_next_bit_inv()
49 size -= result; in find_next_bit_inv()
[all …]
uaccess.c 35 unsigned long size) in copy_from_user_mvcos() argument
69 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2) in copy_from_user_mvcos()
71 return size; in copy_from_user_mvcos()
75 unsigned long size) in copy_from_user_mvcp() argument
114 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2) in copy_from_user_mvcp()
116 return size; in copy_from_user_mvcp()
128 unsigned long size) in copy_to_user_mvcos() argument
152 : "+a" (size), "+a" (ptr), "+a" (x), "+a" (tmp1), "=a" (tmp2) in copy_to_user_mvcos()
154 return size; in copy_to_user_mvcos()
158 unsigned long size) in copy_to_user_mvcs() argument
[all …]
/arch/sparc/lib/
user_fixup.c 21 static unsigned long compute_size(unsigned long start, unsigned long size, unsigned long *offset) in compute_size() argument
24 unsigned long end = start + size; in compute_size()
30 size = end - fault_addr; in compute_size()
32 return size; in compute_size()
35 unsigned long copy_from_user_fixup(void *to, const void __user *from, unsigned long size) in copy_from_user_fixup() argument
39 size = compute_size((unsigned long) from, size, &offset); in copy_from_user_fixup()
40 if (likely(size)) in copy_from_user_fixup()
41 memset(to + offset, 0, size); in copy_from_user_fixup()
43 return size; in copy_from_user_fixup()
47 unsigned long copy_to_user_fixup(void __user *to, const void *from, unsigned long size) in copy_to_user_fixup() argument
[all …]
/arch/blackfin/mm/
maccess.c 12 static int validate_memory_access_address(unsigned long addr, int size) in validate_memory_access_address() argument
14 if (size < 0 || addr == 0) in validate_memory_access_address()
16 return bfin_mem_access_type(addr, size); in validate_memory_access_address()
19 long probe_kernel_read(void *dst, const void *src, size_t size) in probe_kernel_read() argument
24 mem_type = validate_memory_access_address(lsrc, size); in probe_kernel_read()
29 if (size == 2 && lsrc % 2 == 0) { in probe_kernel_read()
33 } else if (size == 4 && lsrc % 4 == 0) { in probe_kernel_read()
42 return __probe_kernel_read(dst, src, size); in probe_kernel_read()
45 if (dma_memcpy(dst, src, size)) in probe_kernel_read()
49 if (isram_memcpy(dst, src, size)) in probe_kernel_read()
[all …]
/arch/avr32/mm/
dma-coherent.c 16 void dma_cache_sync(struct device *dev, void *vaddr, size_t size, int direction) in dma_cache_sync() argument
26 invalidate_dcache_region(vaddr, size); in dma_cache_sync()
29 clean_dcache_region(vaddr, size); in dma_cache_sync()
32 flush_dcache_region(vaddr, size); in dma_cache_sync()
40 static struct page *__dma_alloc(struct device *dev, size_t size, in __dma_alloc() argument
53 size = PAGE_ALIGN(size); in __dma_alloc()
54 order = get_order(size); in __dma_alloc()
70 invalidate_dcache_region(phys_to_virt(page_to_phys(page)), size); in __dma_alloc()
73 free = page + (size >> PAGE_SHIFT); in __dma_alloc()
87 static void __dma_free(struct device *dev, size_t size, in __dma_free() argument
[all …]
/arch/blackfin/kernel/
process.c 231 int in_mem(unsigned long addr, unsigned long size, in in_mem() argument
234 return addr >= start && addr + size <= end; in in_mem()
237 int in_mem_const_off(unsigned long addr, unsigned long size, unsigned long off, in in_mem_const_off() argument
241 in_mem(addr, size, const_addr + off, const_addr + const_size); in in_mem_const_off()
244 int in_mem_const(unsigned long addr, unsigned long size, in in_mem_const() argument
247 return in_mem_const_off(addr, size, 0, const_addr, const_size); in in_mem_const()
268 int in_async(unsigned long addr, unsigned long size) in in_async() argument
273 if (addr + size <= ASYNC_BANK0_BASE + ASYNC_BANK0_SIZE) in in_async()
275 size -= ASYNC_BANK0_BASE + ASYNC_BANK0_SIZE - addr; in in_async()
281 if (addr + size <= ASYNC_BANK1_BASE + ASYNC_BANK1_SIZE) in in_async()
[all …]
/arch/powerpc/boot/
simple_alloc.c 24 unsigned long size; member
37 static void *simple_malloc(unsigned long size) in simple_malloc() argument
42 if (size == 0) in simple_malloc()
45 size = _ALIGN_UP(size, alloc_min); in simple_malloc()
49 if (size <= space_left) { in simple_malloc()
51 p->size = size; in simple_malloc()
53 next_base += size; in simple_malloc()
54 space_left -= size; in simple_malloc()
60 else if (!(p->flags & ENTRY_IN_USE) && (size <= p->size)) { in simple_malloc()
97 static void *simple_realloc(void *ptr, unsigned long size) in simple_realloc() argument
[all …]
/arch/parisc/include/asm/
floppy.h 54 #define fd_dma_mem_alloc(size) SW._dma_mem_alloc(size) argument
55 #define fd_dma_setup(addr, size, mode, io) SW._dma_setup(addr, size, mode, io) argument
166 static unsigned long dma_mem_alloc(unsigned long size) in dma_mem_alloc() argument
168 return __get_dma_pages(GFP_KERNEL, get_order(size)); in dma_mem_alloc()
172 static unsigned long vdma_mem_alloc(unsigned long size) in vdma_mem_alloc() argument
174 return (unsigned long) vmalloc(size); in vdma_mem_alloc()
178 #define nodma_mem_alloc(size) vdma_mem_alloc(size) argument
180 static void _fd_dma_mem_free(unsigned long addr, unsigned long size) in _fd_dma_mem_free() argument
185 free_pages(addr, get_order(size)); in _fd_dma_mem_free()
188 #define fd_dma_mem_free(addr, size) _fd_dma_mem_free(addr, size) argument
[all …]
dma-mapping.h 11 void *(*alloc_consistent)(struct device *dev, size_t size, dma_addr_t *iova, gfp_t flag);
12 void *(*alloc_noncoherent)(struct device *dev, size_t size, dma_addr_t *iova, gfp_t flag);
13 void (*free_consistent)(struct device *dev, size_t size, void *vaddr, dma_addr_t iova);
14 …dma_addr_t (*map_single)(struct device *dev, void *addr, size_t size, enum dma_data_direction dire…
15 …void (*unmap_single)(struct device *dev, dma_addr_t iova, size_t size, enum dma_data_direction dir…
18 …or_cpu)(struct device *dev, dma_addr_t iova, unsigned long offset, size_t size, enum dma_data_dire…
19 …device)(struct device *dev, dma_addr_t iova, unsigned long offset, size_t size, enum dma_data_dire…
55 dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle, in dma_alloc_coherent() argument
58 return hppa_dma_ops->alloc_consistent(dev, size, dma_handle, flag); in dma_alloc_coherent()
62 dma_alloc_noncoherent(struct device *dev, size_t size, dma_addr_t *dma_handle, in dma_alloc_noncoherent() argument
[all …]
/arch/openrisc/include/asm/
uaccess.h 60 #define __range_ok(addr, size) (size <= get_fs() && addr <= (get_fs()-size)) argument
65 #define access_ok(type, addr, size) \ argument
66 __range_ok((unsigned long)addr, (unsigned long)size)
119 #define __put_user_nocheck(x, ptr, size) \ argument
122 __put_user_size((x), (ptr), (size), __pu_err); \
126 #define __put_user_check(x, ptr, size) \ argument
130 if (access_ok(VERIFY_WRITE, __pu_addr, size)) \
131 __put_user_size((x), __pu_addr, (size), __pu_err); \
135 #define __put_user_size(x, ptr, size, retval) \ argument
138 switch (size) { \
[all …]
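
An aside on the __range_ok() hit above (uaccess.h line 60): the macro deliberately never computes addr + size, so an oversized size cannot wrap the sum back into a valid-looking address; it checks size against the limit first and then addr against limit - size. Below is a minimal stand-alone sketch of that idea, assuming a fixed USER_SPACE_LIMIT constant in place of get_fs(); the constant and the range_ok() name are hypothetical, for illustration only.

#include <stdio.h>

/* Hypothetical upper bound of the user address space; in the kernel
 * this would come from get_fs(). */
#define USER_SPACE_LIMIT 0xc0000000UL

/* Overflow-safe range check: addr + size is never computed directly,
 * so a huge size cannot wrap past the limit. */
static int range_ok(unsigned long addr, unsigned long size)
{
	return size <= USER_SPACE_LIMIT && addr <= USER_SPACE_LIMIT - size;
}

int main(void)
{
	printf("%d\n", range_ok(0x1000, 0x100));     /* 1: fully below the limit */
	printf("%d\n", range_ok(0xbfffffff, 0x10));  /* 0: end would cross the limit */
	printf("%d\n", range_ok(0x1000, ~0UL));      /* 0: size alone exceeds the limit */
	return 0;
}

Compiled as an ordinary user-space program, the three calls print 1, 0, 0.
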
/arch/sparc/include/asm/
uaccess_64.h 52 static inline int __access_ok(const void __user * addr, unsigned long size) in __access_ok() argument
57 static inline int access_ok(int type, const void __user * addr, unsigned long size) in access_ok() argument
107 #define __put_user_nocheck(data,addr,size) ({ \ argument
109 switch (size) { \
117 #define __put_user_asm(x,size,addr,ret) \ argument
120 "1:\t" "st"#size "a %1, [%2] %%asi\n\t" \
139 #define __get_user_nocheck(data,addr,size,type) ({ \ argument
142 switch (size) { \
150 #define __get_user_nocheck_ret(data,addr,size,type,retval) ({ \ argument
152 switch (size) { \
[all …]
/arch/powerpc/lib/
rheap.c 139 blk->size = 0; in get_slot()
157 int size; in attach_free_block() local
162 size = blkn->size; in attach_free_block()
164 e = s + size; in attach_free_block()
176 be = bs + blk->size; in attach_free_block()
193 if (before && s != (before->start + before->size)) in attach_free_block()
215 before->size += size; in attach_free_block()
221 after->start -= size; in attach_free_block()
222 after->size += size; in attach_free_block()
227 before->size += size + after->size; in attach_free_block()
[all …]
/arch/unicore32/mm/
ioremap.c 66 static void unmap_area_sections(unsigned long virt, unsigned long size) in unmap_area_sections() argument
68 unsigned long addr = virt, end = virt + (size & ~(SZ_4M - 1)); in unmap_area_sections()
103 size_t size, const struct mem_type *type) in remap_area_sections() argument
105 unsigned long addr = virt, end = virt + size; in remap_area_sections()
112 unmap_area_sections(virt, size); in remap_area_sections()
130 unsigned long offset, size_t size, unsigned int mtype, void *caller) in __uc32_ioremap_pfn_caller() argument
161 size = PAGE_ALIGN(offset + size); in __uc32_ioremap_pfn_caller()
163 area = get_vm_area_caller(size, VM_IOREMAP, caller); in __uc32_ioremap_pfn_caller()
168 if (!((__pfn_to_phys(pfn) | size | addr) & ~PMD_MASK)) { in __uc32_ioremap_pfn_caller()
170 err = remap_area_sections(addr, pfn, size, type); in __uc32_ioremap_pfn_caller()
[all …]
/arch/x86/kernel/cpu/mtrr/
if.c 38 mtrr_file_add(unsigned long base, unsigned long size, in mtrr_file_add() argument
52 if ((base & (PAGE_SIZE - 1)) || (size & (PAGE_SIZE - 1))) in mtrr_file_add()
55 size >>= PAGE_SHIFT; in mtrr_file_add()
57 reg = mtrr_add_page(base, size, type, true); in mtrr_file_add()
64 mtrr_file_del(unsigned long base, unsigned long size, in mtrr_file_del() argument
71 if ((base & (PAGE_SIZE - 1)) || (size & (PAGE_SIZE - 1))) in mtrr_file_del()
74 size >>= PAGE_SHIFT; in mtrr_file_del()
76 reg = mtrr_del_page(-1, base, size); in mtrr_file_del()
98 unsigned long long base, size; in mtrr_write() local
143 size = simple_strtoull(ptr + 5, &ptr, 0); in mtrr_write()
[all …]
amd.c 10 unsigned long *size, mtrr_type *type) in amd_get_mtrr() argument
26 *size = 0; in amd_get_mtrr()
45 *size = (low + 4) << (15 - PAGE_SHIFT); in amd_get_mtrr()
59 amd_set_mtrr(unsigned int reg, unsigned long base, unsigned long size, mtrr_type type) in amd_set_mtrr() argument
70 if (size == 0) { in amd_set_mtrr()
82 regs[reg] = (-size >> (15 - PAGE_SHIFT) & 0x0001FFFC) in amd_set_mtrr()
95 amd_validate_add_page(unsigned long base, unsigned long size, unsigned int type) in amd_validate_add_page() argument
105 if (type > MTRR_TYPE_WRCOMB || size < (1 << (17 - PAGE_SHIFT)) in amd_validate_add_page()
106 || (size & ~(size - 1)) - size || (base & (size - 1))) in amd_validate_add_page()
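
The amd_validate_add_page() hit above rejects a range unless its size is a power of two and its base is aligned to that size: (size & ~(size - 1)) - size is zero exactly when size is a power of two, and (base & (size - 1)) is zero exactly when base is size-aligned. The following stand-alone sketch performs an equivalent check using the more common size & (size - 1) form; region_ok() and the sample values are hypothetical, for illustration only.

#include <stdio.h>

/* Returns 1 if [base, base + size) has a power-of-two size and a base
 * aligned to that size; equivalent to the tests in amd_validate_add_page(). */
static int region_ok(unsigned long base, unsigned long size)
{
	if (size == 0)
		return 0;
	if (size & (size - 1))   /* not a power of two */
		return 0;
	if (base & (size - 1))   /* base not aligned to size */
		return 0;
	return 1;
}

int main(void)
{
	printf("%d\n", region_ok(0x100000, 0x80000)); /* 1: 512 KiB, aligned */
	printf("%d\n", region_ok(0x100000, 0x30000)); /* 0: size not a power of two */
	printf("%d\n", region_ok(0x110000, 0x80000)); /* 0: base misaligned */
	return 0;
}
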
/arch/mips/mm/
c-r3k.c 30 unsigned long flags, status, dummy, size; in r3k_cache_size() local
45 size = 0; in r3k_cache_size()
47 for (size = 128; size <= 0x40000; size <<= 1) in r3k_cache_size()
48 *(p + size) = 0; in r3k_cache_size()
50 for (size = 128; in r3k_cache_size()
51 (size <= 0x40000) && (*(p + size) == 0); in r3k_cache_size()
52 size <<= 1) in r3k_cache_size()
54 if (size > 0x40000) in r3k_cache_size()
55 size = 0; in r3k_cache_size()
60 return size * sizeof(*p); in r3k_cache_size()
[all …]
/arch/blackfin/include/asm/
dma-mapping.h 13 void *dma_alloc_coherent(struct device *dev, size_t size,
15 void dma_free_coherent(struct device *dev, size_t size, void *vaddr,
43 __dma_sync(dma_addr_t addr, size_t size, enum dma_data_direction dir);
45 __dma_sync_inline(dma_addr_t addr, size_t size, enum dma_data_direction dir) in __dma_sync_inline() argument
51 flush_dcache_range(addr, addr + size); in __dma_sync_inline()
56 invalidate_dcache_range(addr, addr + size); in __dma_sync_inline()
61 _dma_sync(dma_addr_t addr, size_t size, enum dma_data_direction dir) in _dma_sync() argument
64 __dma_sync_inline(addr, size, dir); in _dma_sync()
66 __dma_sync(addr, size, dir); in _dma_sync()
70 dma_map_single(struct device *dev, void *ptr, size_t size, in dma_map_single() argument
[all …]
/arch/s390/mm/
mem_detect.c 22 static inline void memblock_physmem_add(phys_addr_t start, phys_addr_t size) in memblock_physmem_add() argument
24 memblock_add_range(&memblock.memory, start, size, 0, 0); in memblock_physmem_add()
25 memblock_add_range(&memblock.physmem, start, size, 0, 0); in memblock_physmem_add()
31 unsigned long addr, size; in detect_memory_memblock() local
48 size = 0; in detect_memory_memblock()
51 size += rzm; in detect_memory_memblock()
52 if (max_physmem_end && addr + size >= max_physmem_end) in detect_memory_memblock()
54 } while (type == tprot(addr + size)); in detect_memory_memblock()
56 if (max_physmem_end && (addr + size > max_physmem_end)) in detect_memory_memblock()
57 size = max_physmem_end - addr; in detect_memory_memblock()
[all …]
/arch/x86/include/asm/
floppy.h 43 #define fd_dma_mem_alloc(size) SW._dma_mem_alloc(size) argument
44 #define fd_dma_setup(addr, size, mode, io) SW._dma_setup(addr, size, mode, io) argument
154 static unsigned long dma_mem_alloc(unsigned long size) in dma_mem_alloc() argument
156 return __get_dma_pages(GFP_KERNEL|__GFP_NORETRY, get_order(size)); in dma_mem_alloc()
160 static unsigned long vdma_mem_alloc(unsigned long size) in vdma_mem_alloc() argument
162 return (unsigned long)vmalloc(size); in vdma_mem_alloc()
166 #define nodma_mem_alloc(size) vdma_mem_alloc(size) argument
168 static void _fd_dma_mem_free(unsigned long addr, unsigned long size) in _fd_dma_mem_free() argument
173 free_pages(addr, get_order(size)); in _fd_dma_mem_free()
176 #define fd_dma_mem_free(addr, size) _fd_dma_mem_free(addr, size) argument
[all …]
uaccess_64.h 52 int __copy_from_user_nocheck(void *dst, const void __user *src, unsigned size) in __copy_from_user_nocheck() argument
56 check_object_size(dst, size, false); in __copy_from_user_nocheck()
57 if (!__builtin_constant_p(size)) in __copy_from_user_nocheck()
58 return copy_user_generic(dst, (__force void *)src, size); in __copy_from_user_nocheck()
59 switch (size) { in __copy_from_user_nocheck()
105 return copy_user_generic(dst, (__force void *)src, size); in __copy_from_user_nocheck()
110 int __copy_from_user(void *dst, const void __user *src, unsigned size) in __copy_from_user() argument
113 return __copy_from_user_nocheck(dst, src, size); in __copy_from_user()
117 int __copy_to_user_nocheck(void __user *dst, const void *src, unsigned size) in __copy_to_user_nocheck() argument
121 check_object_size(src, size, true); in __copy_to_user_nocheck()
[all …]
/arch/arm/mm/
dma-mapping.c 76 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_dma_map_page() argument
80 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_dma_map_page()
85 unsigned long offset, size_t size, enum dma_data_direction dir, in arm_coherent_dma_map_page() argument
106 size_t size, enum dma_data_direction dir, in arm_dma_unmap_page() argument
111 handle & ~PAGE_MASK, size, dir); in arm_dma_unmap_page()
115 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_dma_sync_single_for_cpu() argument
119 __dma_page_dev_to_cpu(page, offset, size, dir); in arm_dma_sync_single_for_cpu()
123 dma_addr_t handle, size_t size, enum dma_data_direction dir) in arm_dma_sync_single_for_device() argument
127 __dma_page_cpu_to_dev(page, offset, size, dir); in arm_dma_sync_single_for_device()
147 static void *arm_coherent_dma_alloc(struct device *dev, size_t size,
[all …]
/arch/um/kernel/
initrd.c 15 static int load_initrd(char *filename, void *buf, int size);
20 long long size; in read_initrd() local
26 err = os_file_size(initrd, &size); in read_initrd()
34 if (size == 0) { in read_initrd()
39 area = alloc_bootmem(size); in read_initrd()
43 if (load_initrd(initrd, area, size) == -1) in read_initrd()
47 initrd_end = initrd_start + size; in read_initrd()
65 static int load_initrd(char *filename, void *buf, int size) in load_initrd() argument
75 n = os_read_file(fd, buf, size); in load_initrd()
76 if (n != size) { in load_initrd()
[all …]
/arch/parisc/kernel/
pci-dma.c 84 unsigned long size, unsigned long *paddr_ptr) in map_pte_uncached() argument
90 end = vaddr + size; in map_pte_uncached()
111 unsigned long size, unsigned long *paddr_ptr) in map_pmd_uncached() argument
117 end = vaddr + size; in map_pmd_uncached()
133 static inline int map_uncached_pages(unsigned long vaddr, unsigned long size, in map_uncached_pages() argument
137 unsigned long end = vaddr + size; in map_uncached_pages()
155 unsigned long size) in unmap_uncached_pte() argument
170 end = vaddr + size; in unmap_uncached_pte()
191 unsigned long size) in unmap_uncached_pmd() argument
206 end = vaddr + size; in unmap_uncached_pmd()
[all …]
/arch/arc/mm/
dma.c 29 void *dma_alloc_noncoherent(struct device *dev, size_t size, in dma_alloc_noncoherent() argument
35 paddr = alloc_pages_exact(size, gfp); in dma_alloc_noncoherent()
46 void dma_free_noncoherent(struct device *dev, size_t size, void *vaddr, in dma_free_noncoherent() argument
50 size); in dma_free_noncoherent()
54 void *dma_alloc_coherent(struct device *dev, size_t size, in dma_alloc_coherent() argument
60 paddr = alloc_pages_exact(size, gfp); in dma_alloc_coherent()
65 kvaddr = ioremap_nocache((unsigned long)paddr, size); in dma_alloc_coherent()
67 memset(kvaddr, 0, size); in dma_alloc_coherent()
76 void dma_free_coherent(struct device *dev, size_t size, void *kvaddr, in dma_free_coherent() argument
82 size); in dma_free_coherent()
[all …]
/arch/sh/mm/
pmb.c 40 unsigned long size; member
55 unsigned long size; member
58 { .size = SZ_512M, .flag = PMB_SZ_512M, },
59 { .size = SZ_128M, .flag = PMB_SZ_128M, },
60 { .size = SZ_64M, .flag = PMB_SZ_64M, },
61 { .size = SZ_16M, .flag = PMB_SZ_16M, },
132 return (b->vpn == (a->vpn + a->size)) && in pmb_can_merge()
133 (b->ppn == (a->ppn + a->size)) && in pmb_can_merge()
138 unsigned long size) in pmb_mapping_exists() argument
156 if ((vaddr < pmbe->vpn) || (vaddr >= (pmbe->vpn + pmbe->size))) in pmb_mapping_exists()
[all …]
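
The pmb_can_merge() hit above treats two PMB entries as mergeable when the second begins exactly where the first ends, in both virtual and physical address space (the search hit truncates a further flags comparison). A cut-down stand-alone sketch under that assumption; struct mapping and can_merge() are hypothetical names used for illustration only.

#include <stdio.h>

/* Cut-down stand-in for a PMB entry: a virtual start (vpn), a physical
 * start (ppn) and a size. */
struct mapping {
	unsigned long vpn;
	unsigned long ppn;
	unsigned long size;
};

/* Two mappings can be merged when b starts exactly where a ends, both
 * virtually and physically (the flag comparison elided from the hit is
 * ignored here). */
static int can_merge(const struct mapping *a, const struct mapping *b)
{
	return b->vpn == a->vpn + a->size &&
	       b->ppn == a->ppn + a->size;
}

int main(void)
{
	struct mapping a = { 0x80000000UL, 0x40000000UL, 0x04000000UL }; /* 64 MiB */
	struct mapping b = { 0x84000000UL, 0x44000000UL, 0x01000000UL }; /* 16 MiB */
	struct mapping c = { 0x90000000UL, 0x50000000UL, 0x01000000UL };

	printf("%d\n", can_merge(&a, &b)); /* 1: contiguous in both spaces */
	printf("%d\n", can_merge(&a, &c)); /* 0: gap between the two */
	return 0;
}
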
