Searched refs:mm (Results 1 – 25 of 52) sorted by relevance

/drivers/gpu/drm/
drm_mm.c
47 unsigned long drm_mm_tail_space(struct drm_mm *mm) in drm_mm_tail_space() argument
52 tail_node = mm->ml_entry.prev; in drm_mm_tail_space()
60 int drm_mm_remove_space_from_tail(struct drm_mm *mm, unsigned long size) in drm_mm_remove_space_from_tail() argument
65 tail_node = mm->ml_entry.prev; in drm_mm_remove_space_from_tail()
78 static int drm_mm_create_tail_node(struct drm_mm *mm, in drm_mm_create_tail_node() argument
92 child->mm = mm; in drm_mm_create_tail_node()
94 list_add_tail(&child->ml_entry, &mm->ml_entry); in drm_mm_create_tail_node()
95 list_add_tail(&child->fl_entry, &mm->fl_entry); in drm_mm_create_tail_node()
101 int drm_mm_add_space_to_tail(struct drm_mm *mm, unsigned long size) in drm_mm_add_space_to_tail() argument
106 tail_node = mm->ml_entry.prev; in drm_mm_add_space_to_tail()
[all …]
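
drm_mm.c above is the old DRM range allocator. As a rough sketch of how a driver of this generation drives it, assuming the 2.6.2x API (drm_mm_init(), drm_mm_search_free(), drm_mm_get_block(), and drm_mm_put_block() are the real calls; the size and alignment values here are made up):

    #include "drmP.h"   /* drm_mm is declared in drmP.h in trees of this vintage */

    /* Carve one page-aligned 4 KiB block out of a managed aperture. */
    static struct drm_mm_node *example_alloc(struct drm_mm *mm)
    {
        struct drm_mm_node *hole, *node;

        /* find a free hole big enough; the last arg requests best fit */
        hole = drm_mm_search_free(mm, 4096, PAGE_SIZE, 1);
        if (!hole)
            return NULL;

        /* split the hole and take our block; may return NULL if the
         * split node cannot be allocated */
        node = drm_mm_get_block(hole, 4096, PAGE_SIZE);
        return node;    /* release later with drm_mm_put_block(node) */
    }
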
drm_sman.c
51 if (sman->mm) in drm_sman_takedown()
52 drm_free(sman->mm, sman->num_managers * sizeof(*sman->mm), in drm_sman_takedown()
64 sman->mm = (struct drm_sman_mm *) drm_calloc(num_managers, sizeof(*sman->mm), in drm_sman_init()
66 if (!sman->mm) { in drm_sman_init()
81 drm_free(sman->mm, num_managers * sizeof(*sman->mm), DRM_MEM_MM); in drm_sman_init()
91 struct drm_mm *mm = (struct drm_mm *) private; in drm_sman_mm_allocate() local
94 tmp = drm_mm_search_free(mm, size, alignment, 1); in drm_sman_mm_allocate()
111 struct drm_mm *mm = (struct drm_mm *) private; in drm_sman_mm_destroy() local
112 drm_mm_takedown(mm); in drm_sman_mm_destroy()
113 drm_free(mm, sizeof(*mm), DRM_MEM_MM); in drm_sman_mm_destroy()
[all …]
drm_gem.c
81 struct drm_gem_mm *mm; in drm_gem_init() local
92 mm = drm_calloc(1, sizeof(struct drm_gem_mm), DRM_MEM_MM); in drm_gem_init()
93 if (!mm) { in drm_gem_init()
98 dev->mm_private = mm; in drm_gem_init()
100 if (drm_ht_create(&mm->offset_hash, 19)) { in drm_gem_init()
101 drm_free(mm, sizeof(struct drm_gem_mm), DRM_MEM_MM); in drm_gem_init()
105 if (drm_mm_init(&mm->offset_manager, DRM_FILE_PAGE_OFFSET_START, in drm_gem_init()
107 drm_ht_remove(&mm->offset_hash); in drm_gem_init()
108 drm_free(mm, sizeof(struct drm_gem_mm), DRM_MEM_MM); in drm_gem_init()
118 struct drm_gem_mm *mm = dev->mm_private; in drm_gem_destroy() local
[all …]
/drivers/oprofile/
buffer_sync.c
87 struct mm_struct *mm = current->mm; in munmap_notify() local
90 down_read(&mm->mmap_sem); in munmap_notify()
92 mpnt = find_vma(mm, addr); in munmap_notify()
94 up_read(&mm->mmap_sem); in munmap_notify()
102 up_read(&mm->mmap_sem); in munmap_notify()
216 static unsigned long get_exec_dcookie(struct mm_struct *mm) in get_exec_dcookie() argument
221 if (!mm) in get_exec_dcookie()
224 for (vma = mm->mmap; vma; vma = vma->vm_next) { in get_exec_dcookie()
244 lookup_dcookie(struct mm_struct *mm, unsigned long addr, off_t *offset) in lookup_dcookie() argument
249 for (vma = find_vma(mm, addr); vma; vma = vma->vm_next) { in lookup_dcookie()
[all …]
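
buffer_sync.c resolves sample addresses against the task's VMA list. The locking pattern visible in these hits, taking mmap_sem for read around find_vma() and the mm->mmap walk, looks like this in isolation (a minimal sketch; the dcookie lookup itself is omitted):

    #include <linux/mm.h>
    #include <linux/sched.h>

    /* Return 1 if addr falls inside any mapping of the current task. */
    static int addr_is_mapped(unsigned long addr)
    {
        struct mm_struct *mm = current->mm;
        struct vm_area_struct *vma;
        int found = 0;

        down_read(&mm->mmap_sem);       /* the VMA list is rwsem-protected */
        vma = find_vma(mm, addr);       /* first vma with vm_end > addr */
        if (vma && vma->vm_start <= addr)
            found = 1;
        up_read(&mm->mmap_sem);
        return found;
    }
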
/drivers/infiniband/hw/ipath/
ipath_user_pages.c
73 ret = get_user_pages(current, current->mm, in __get_user_pages()
81 current->mm->locked_vm += num_pages; in __get_user_pages()
165 down_write(&current->mm->mmap_sem); in ipath_get_user_pages()
169 up_write(&current->mm->mmap_sem); in ipath_get_user_pages()
176 down_write(&current->mm->mmap_sem); in ipath_release_user_pages()
180 current->mm->locked_vm -= num_pages; in ipath_release_user_pages()
182 up_write(&current->mm->mmap_sem); in ipath_release_user_pages()
187 struct mm_struct *mm; member
196 down_write(&work->mm->mmap_sem); in user_pages_account()
197 work->mm->locked_vm -= work->num_pages; in user_pages_account()
[all …]
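
ipath pins user buffers with the eight-argument get_user_pages() of this kernel generation and charges them to mm->locked_vm by hand, under mmap_sem held for write because locked_vm is being modified. A stripped-down sketch of that pairing (pin_user_buffer() is an illustrative name, not an ipath function):

    #include <linux/mm.h>
    #include <linux/sched.h>

    static int pin_user_buffer(unsigned long start, int num_pages,
                               struct page **pages)
    {
        int got;

        down_write(&current->mm->mmap_sem);
        /* 2.6.2x signature: (tsk, mm, start, len, write, force, pages, vmas) */
        got = get_user_pages(current, current->mm, start, num_pages,
                             1, 0, pages, NULL);
        if (got == num_pages)
            current->mm->locked_vm += num_pages;    /* manual accounting */
        up_write(&current->mm->mmap_sem);

        if (got == num_pages)
            return 0;
        while (got > 0)                 /* undo a partial pin */
            put_page(pages[--got]);
        return got < 0 ? got : -ENOMEM;
    }
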
/drivers/infiniband/core/
umem.c
136 down_write(&current->mm->mmap_sem); in ib_umem_get()
138 locked = npages + current->mm->locked_vm; in ib_umem_get()
150 ret = get_user_pages(current, current->mm, cur_base, in ib_umem_get()
208 current->mm->locked_vm = locked; in ib_umem_get()
210 up_write(&current->mm->mmap_sem); in ib_umem_get()
223 down_write(&umem->mm->mmap_sem); in ib_umem_account()
224 umem->mm->locked_vm -= umem->diff; in ib_umem_account()
225 up_write(&umem->mm->mmap_sem); in ib_umem_account()
226 mmput(umem->mm); in ib_umem_account()
237 struct mm_struct *mm; in ib_umem_release() local
[all …]
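
ib_umem_get() additionally refuses to pin past RLIMIT_MEMLOCK: it computes the prospective locked-page total under mmap_sem and compares it against the rlimit. The check in isolation, as a sketch using the 2.6.2x signal->rlim layout (memlock_ok() is an illustrative name):

    #include <linux/mm.h>
    #include <linux/sched.h>
    #include <linux/capability.h>

    /* Call with current->mm->mmap_sem held, as ib_umem_get() does. */
    static int memlock_ok(unsigned long npages)
    {
        unsigned long locked, lock_limit;

        locked = npages + current->mm->locked_vm;
        lock_limit = current->signal->rlim[RLIMIT_MEMLOCK].rlim_cur
                        >> PAGE_SHIFT;

        /* CAP_IPC_LOCK bypasses the limit, as it does for mlock(2) */
        return locked <= lock_limit || capable(CAP_IPC_LOCK);
    }
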
/drivers/gpu/drm/i915/
i915_gem.c
69 drm_mm_init(&dev_priv->mm.gtt_space, start, in i915_gem_do_init()
291 ret = fast_user_write (dev_priv->mm.gtt_mapping, page_base, in i915_gem_gtt_pwrite()
299 ret = slow_user_write (dev_priv->mm.gtt_mapping, in i915_gem_gtt_pwrite()
525 down_write(&current->mm->mmap_sem); in i915_gem_mmap_ioctl()
529 up_write(&current->mm->mmap_sem); in i915_gem_mmap_ioctl()
580 list_add(&obj_priv->list, &dev_priv->mm.inactive_list); in i915_gem_fault()
627 struct drm_gem_mm *mm = dev->mm_private; in i915_gem_create_mmap_offset() local
646 list->file_offset_node = drm_mm_search_free(&mm->offset_manager, in i915_gem_create_mmap_offset()
662 if (drm_ht_insert_item(&mm->offset_hash, &list->hash)) { in i915_gem_create_mmap_offset()
686 struct drm_gem_mm *mm = dev->mm_private; in i915_gem_free_mmap_offset() local
[all …]
i915_gem_tiling.c
172 dev_priv->mm.bit_6_swizzle_x = swizzle_x; in i915_gem_detect_bit_6_swizzle()
173 dev_priv->mm.bit_6_swizzle_y = swizzle_y; in i915_gem_detect_bit_6_swizzle()
273 args->swizzle_mode = dev_priv->mm.bit_6_swizzle_x; in i915_gem_set_tiling()
275 args->swizzle_mode = dev_priv->mm.bit_6_swizzle_y; in i915_gem_set_tiling()
330 args->swizzle_mode = dev_priv->mm.bit_6_swizzle_x; in i915_gem_get_tiling()
333 args->swizzle_mode = dev_priv->mm.bit_6_swizzle_y; in i915_gem_get_tiling()
i915_gem_proc.c
51 list_for_each_entry(obj_priv, &dev_priv->mm.active_list, in i915_gem_active_info()
90 list_for_each_entry(obj_priv, &dev_priv->mm.flushing_list, in i915_gem_flushing_info()
128 list_for_each_entry(obj_priv, &dev_priv->mm.inactive_list, in i915_gem_inactive_info()
166 list_for_each_entry(gem_request, &dev_priv->mm.request_list, in i915_gem_request_info()
201 dev_priv->mm.waiting_gem_seqno); in i915_gem_seqno_info()
202 DRM_PROC_PRINT("IRQ sequence: %d\n", dev_priv->mm.irq_gem_seqno); in i915_gem_seqno_info()
244 dev_priv->mm.waiting_gem_seqno); in i915_interrupt_info()
246 dev_priv->mm.irq_gem_seqno); in i915_interrupt_info()
i915_gem_debug.c
41 list_for_each_entry(obj_priv, &dev_priv->mm.inactive_list, list) { in i915_verify_inactive()
108 list_for_each_entry(obj_priv, &dev_priv->mm.active_list, in i915_dump_lru()
116 list_for_each_entry(obj_priv, &dev_priv->mm.flushing_list, in i915_dump_lru()
124 list_for_each_entry(obj_priv, &dev_priv->mm.inactive_list, list) { in i915_dump_lru()
i915_dma.c
1090 dev_priv->mm.gtt_mapping = in i915_driver_load()
1093 if (dev_priv->mm.gtt_mapping == NULL) { in i915_driver_load()
1103 dev_priv->mm.gtt_mtrr = mtrr_add(dev->agp->base, in i915_driver_load()
1107 if (dev_priv->mm.gtt_mtrr < 0) { in i915_driver_load()
1170 io_mapping_free(dev_priv->mm.gtt_mapping); in i915_driver_load()
1182 io_mapping_free(dev_priv->mm.gtt_mapping); in i915_driver_unload()
1183 if (dev_priv->mm.gtt_mtrr >= 0) { in i915_driver_unload()
1184 mtrr_del(dev_priv->mm.gtt_mtrr, dev->agp->base, in i915_driver_unload()
1186 dev_priv->mm.gtt_mtrr = -1; in i915_driver_unload()
1232 i915_file_priv->mm.last_gem_seqno = 0; in i915_driver_open()
[all …]
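
i915_driver_load() wraps the GTT aperture in an io_mapping for CPU writes and, as a best-effort speedup, covers it with a write-combining MTRR; teardown reverses both. The skeleton of that pairing (a sketch only; map_aperture()/unmap_aperture() are illustrative names, and mtrr_add() assumes CONFIG_MTRR):

    #include <linux/io-mapping.h>
    #include <asm/mtrr.h>

    static struct io_mapping *gtt_mapping;
    static int gtt_mtrr = -1;

    static int map_aperture(unsigned long base, unsigned long size)
    {
        gtt_mapping = io_mapping_create_wc(base, size);
        if (gtt_mapping == NULL)
            return -EIO;

        /* best effort: a failed mtrr_add() only costs performance */
        gtt_mtrr = mtrr_add(base, size, MTRR_TYPE_WRCOMB, 1);
        return 0;
    }

    static void unmap_aperture(unsigned long base, unsigned long size)
    {
        if (gtt_mtrr >= 0)
            mtrr_del(gtt_mtrr, base, size);
        io_mapping_free(gtt_mapping);
    }
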
/drivers/infiniband/hw/cxgb3/
iwch_provider.h
208 struct iwch_mm_entry *mm; in remove_mmap() local
213 mm = list_entry(pos, struct iwch_mm_entry, entry); in remove_mmap()
214 if (mm->key == key && mm->len == len) { in remove_mmap()
215 list_del_init(&mm->entry); in remove_mmap()
218 key, (unsigned long long) mm->addr, mm->len); in remove_mmap()
219 return mm; in remove_mmap()
227 struct iwch_mm_entry *mm) in insert_mmap() argument
231 mm->key, (unsigned long long) mm->addr, mm->len); in insert_mmap()
232 list_add_tail(&mm->entry, &ucontext->mmaps); in insert_mmap()
iwch_provider.c
103 struct iwch_mm_entry *mm, *tmp; in iwch_dealloc_ucontext() local
106 list_for_each_entry_safe(mm, tmp, &ucontext->mmaps, entry) in iwch_dealloc_ucontext()
107 kfree(mm); in iwch_dealloc_ucontext()
200 struct iwch_mm_entry *mm; in iwch_create_cq() local
202 mm = kmalloc(sizeof *mm, GFP_KERNEL); in iwch_create_cq()
203 if (!mm) { in iwch_create_cq()
214 kfree(mm); in iwch_create_cq()
218 mm->key = uresp.key; in iwch_create_cq()
219 mm->addr = virt_to_phys(chp->cq.queue); in iwch_create_cq()
220 mm->len = PAGE_ALIGN((1UL << uresp.size_log2) * in iwch_create_cq()
[all …]
/drivers/misc/sgi-gru/
grutlbpurge.c
224 struct mm_struct *mm, in gru_invalidate_range_start() argument
238 struct mm_struct *mm, unsigned long start, in gru_invalidate_range_end() argument
251 static void gru_invalidate_page(struct mmu_notifier *mn, struct mm_struct *mm, in gru_invalidate_page() argument
262 static void gru_release(struct mmu_notifier *mn, struct mm_struct *mm) in gru_release() argument
280 static struct mmu_notifier *mmu_find_ops(struct mm_struct *mm, in mmu_find_ops() argument
286 if (mm->mmu_notifier_mm) { in mmu_find_ops()
288 hlist_for_each_entry_rcu(mn, n, &mm->mmu_notifier_mm->list, in mmu_find_ops()
304 mn = mmu_find_ops(current->mm, &gru_mmuops); in gru_register_mmu_notifier()
315 __mmu_notifier_register(&gms->ms_notifier, current->mm); in gru_register_mmu_notifier()
329 mmu_notifier_unregister(&gms->ms_notifier, current->mm); in gru_drop_mmu_notifier()
grufault.c
57 vma = find_vma(current->mm, vaddr); in gru_find_vma()
73 struct mm_struct *mm = current->mm; in gru_find_lock_gts() local
77 down_read(&mm->mmap_sem); in gru_find_lock_gts()
84 up_read(&mm->mmap_sem); in gru_find_lock_gts()
90 struct mm_struct *mm = current->mm; in gru_alloc_locked_gts() local
94 down_write(&mm->mmap_sem); in gru_alloc_locked_gts()
100 downgrade_write(&mm->mmap_sem); in gru_alloc_locked_gts()
102 up_write(&mm->mmap_sem); in gru_alloc_locked_gts()
114 up_read(&current->mm->mmap_sem); in gru_unlock_gts()
209 (current, current->mm, vaddr, 1, write, 0, &page, NULL) <= 0) in non_atomic_pte_lookup()
[all …]
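
grutlbpurge.c keeps the GRU's TLB coherent with the host page tables by hanging an mmu_notifier off the task's mm and flushing on the invalidate callbacks. The registration shape those hits reflect, sketched with placeholder callback bodies (mmu_notifier_ops and mmu_notifier_register() are the real 2.6.27-era API; the my_* names are illustrative):

    #include <linux/mm.h>
    #include <linux/mmu_notifier.h>
    #include <linux/sched.h>

    static void my_inval_start(struct mmu_notifier *mn, struct mm_struct *mm,
                               unsigned long start, unsigned long end)
    {
        /* flush device TLB entries covering [start, end) here */
    }

    static void my_release(struct mmu_notifier *mn, struct mm_struct *mm)
    {
        /* address space is going away; drop all device mappings */
    }

    static const struct mmu_notifier_ops my_mmuops = {
        .invalidate_range_start = my_inval_start,
        .release                = my_release,
    };

    static struct mmu_notifier my_notifier = { .ops = &my_mmuops };

    static int watch_current_mm(void)
    {
        /* takes mm->mmap_sem internally; register against current->mm */
        return mmu_notifier_register(&my_notifier, current->mm);
    }
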
/drivers/misc/
pmem.c
867 struct mm_struct *mm) in pmem_unlock_data_and_mm() argument
870 if (mm != NULL) { in pmem_unlock_data_and_mm()
871 up_write(&mm->mmap_sem); in pmem_unlock_data_and_mm()
872 mmput(mm); in pmem_unlock_data_and_mm()
880 struct mm_struct *mm = NULL; in pmem_lock_data_and_mm() local
885 mm = get_task_mm(data->task); in pmem_lock_data_and_mm()
886 if (!mm) { in pmem_lock_data_and_mm()
896 if (mm) in pmem_lock_data_and_mm()
897 down_write(&mm->mmap_sem); in pmem_lock_data_and_mm()
903 if (PMEM_IS_SUBMAP(data) && !mm) { in pmem_lock_data_and_mm()
[all …]
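
pmem (and qemu_trace and lguest further down) borrows another process's address space with get_task_mm(), which can return NULL for kernel threads or exiting tasks and takes a reference that must be dropped with mmput(). The borrow/lock/release pattern, reduced to a sketch (count_task_vmas() is an illustrative name; pinning the task reference is the caller's problem):

    #include <linux/mm.h>
    #include <linux/sched.h>

    static int count_task_vmas(struct task_struct *tsk)
    {
        struct mm_struct *mm;
        struct vm_area_struct *vma;
        int n = 0;

        mm = get_task_mm(tsk);      /* NULL if tsk has no user mm */
        if (!mm)
            return -EINVAL;

        down_read(&mm->mmap_sem);
        for (vma = mm->mmap; vma; vma = vma->vm_next)
            n++;
        up_read(&mm->mmap_sem);

        mmput(mm);                  /* balance get_task_mm() */
        return n;
    }
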
/drivers/pcmcia/
rsrc_nonstatic.c
432 struct resource_map *m, mm; in validate_mem() local
446 for (m = s_data->mem_db.next; m != &s_data->mem_db; m = mm.next) { in validate_mem()
447 mm = *m; in validate_mem()
449 if (mm.base >= 0x100000) in validate_mem()
451 if ((mm.base | mm.num) & 0xffff) { in validate_mem()
452 ok += do_mem_probe(mm.base, mm.num, s); in validate_mem()
458 if ((b >= mm.base) && (b+0x10000 <= mm.base+mm.num)) { in validate_mem()
477 struct resource_map *m, mm; in validate_mem() local
481 for (m = s_data->mem_db.next; m != &s_data->mem_db; m = mm.next) { in validate_mem()
482 mm = *m; in validate_mem()
[all …]
/drivers/misc/qemutrace/
qemu_trace.c
284 struct mm_struct *mm = get_task_mm(tsk); in qemu_trace_pid_exec() local
285 if (mm == NULL) in qemu_trace_pid_exec()
287 down_read(&mm->mmap_sem); in qemu_trace_pid_exec()
289 struct vm_area_struct *vma = mm->mmap; in qemu_trace_pid_exec()
306 up_read(&mm->mmap_sem); in qemu_trace_pid_exec()
307 mmput(mm); in qemu_trace_pid_exec()
/drivers/dma/
iovlock.c
97 down_read(&current->mm->mmap_sem); in dma_pin_iovec_pages()
100 current->mm, in dma_pin_iovec_pages()
107 up_read(&current->mm->mmap_sem); in dma_pin_iovec_pages()
/drivers/media/video/ivtv/
ivtv-udma.c
127 down_read(&current->mm->mmap_sem); in ivtv_udma_setup()
128 err = get_user_pages(current, current->mm, in ivtv_udma_setup()
130 up_read(&current->mm->mmap_sem); in ivtv_udma_setup()
/drivers/staging/android/
lowmemorykiller.c
87 if (p->oomkilladj < min_adj || !p->mm) in lowmem_shrink()
89 tasksize = get_mm_rss(p->mm); in lowmem_shrink()
/drivers/gpu/drm/via/
via_mm.c
146 (item->mm-> in via_mem_alloc()
147 offset(item->mm, item->mm_info) << VIA_MM_ALIGN_SHIFT); in via_mem_alloc()
/drivers/media/video/
v4l1-compat.c
978 struct video_mmap *mm, in v4l1_compat_capture_frame() argument
1000 if (mm->width != fmt->fmt.pix.width || in v4l1_compat_capture_frame()
1001 mm->height != fmt->fmt.pix.height || in v4l1_compat_capture_frame()
1002 palette_to_pixelformat(mm->format) != in v4l1_compat_capture_frame()
1005 fmt->fmt.pix.width = mm->width; in v4l1_compat_capture_frame()
1006 fmt->fmt.pix.height = mm->height; in v4l1_compat_capture_frame()
1008 palette_to_pixelformat(mm->format); in v4l1_compat_capture_frame()
1017 buf.index = mm->frame; in v4l1_compat_capture_frame()
/drivers/lguest/
lguest_user.c
139 cpu->mm = get_task_mm(cpu->tsk); in lg_cpu_start()
304 mmput(lg->cpus[i].mm); in close()
/drivers/gpu/drm/sis/
sis_mm.c
149 (item->mm-> in sis_drm_alloc()
150 offset(item->mm, item->mm_info) << SIS_MM_ALIGN_SHIFT); in sis_drm_alloc()
