Searched refs:mm (Results 1 – 16 of 16) sorted by relevance

/fs/proc/
task_nommu.c
18 void task_mem(struct seq_file *m, struct mm_struct *mm) in task_mem() argument
25 down_read(&mm->mmap_sem); in task_mem()
26 for (p = rb_first(&mm->mm_rb); p; p = rb_next(p)) { in task_mem()
39 if (atomic_read(&mm->mm_count) > 1 || in task_mem()
49 if (atomic_read(&mm->mm_count) > 1) in task_mem()
50 sbytes += kobjsize(mm); in task_mem()
52 bytes += kobjsize(mm); in task_mem()
77 up_read(&mm->mmap_sem); in task_mem()
80 unsigned long task_vsize(struct mm_struct *mm) in task_vsize() argument
86 down_read(&mm->mmap_sem); in task_vsize()
[all …]
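
The task_nommu.c hits come from the !CONFIG_MMU build, where task_mem() and task_vsize() walk the mm's VMA tree under mmap_sem. A hedged sketch of that walk, consistent with the pre-5.8 kernels these results are from (mmap_sem, mm->mm_rb); the helper name is invented for illustration:

#include <linux/mm.h>
#include <linux/mm_types.h>
#include <linux/rbtree.h>

/* Illustrative sketch only: roughly how task_vsize() above totals the
 * VMAs hanging off mm->mm_rb.  The rbtree may only be walked while
 * mmap_sem is held for reading. */
static unsigned long nommu_vsize(struct mm_struct *mm)
{
	struct rb_node *p;
	unsigned long vsize = 0;

	down_read(&mm->mmap_sem);
	for (p = rb_first(&mm->mm_rb); p; p = rb_next(p)) {
		struct vm_area_struct *vma =
			rb_entry(p, struct vm_area_struct, vm_rb);

		vsize += vma->vm_end - vma->vm_start;
	}
	up_read(&mm->mmap_sem);
	return vsize;
}
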
task_mmu.c
24 void task_mem(struct seq_file *m, struct mm_struct *mm) in task_mem() argument
29 anon = get_mm_counter(mm, MM_ANONPAGES); in task_mem()
30 file = get_mm_counter(mm, MM_FILEPAGES); in task_mem()
31 shmem = get_mm_counter(mm, MM_SHMEMPAGES); in task_mem()
40 hiwater_vm = total_vm = mm->total_vm; in task_mem()
41 if (hiwater_vm < mm->hiwater_vm) in task_mem()
42 hiwater_vm = mm->hiwater_vm; in task_mem()
44 if (hiwater_rss < mm->hiwater_rss) in task_mem()
45 hiwater_rss = mm->hiwater_rss; in task_mem()
47 text = (PAGE_ALIGN(mm->end_code) - (mm->start_code & PAGE_MASK)) >> 10; in task_mem()
[all …]
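
In the MMU build, the task_mmu.c hits show task_mem() building its figures from the per-mm page counters and high-water marks instead of walking VMAs. A hedged sketch of the RSS part of that arithmetic; the helper name is invented:

#include <linux/mm.h>
#include <linux/mm_types.h>

/* Illustrative sketch only: RSS the way task_mem() above derives it,
 * i.e. the sum of the anonymous, file-backed and shmem page counters,
 * converted from pages to KiB as /proc/<pid>/status reports it. */
static unsigned long mm_rss_kib(struct mm_struct *mm)
{
	unsigned long anon  = get_mm_counter(mm, MM_ANONPAGES);
	unsigned long file  = get_mm_counter(mm, MM_FILEPAGES);
	unsigned long shmem = get_mm_counter(mm, MM_SHMEMPAGES);

	return (anon + file + shmem) << (PAGE_SHIFT - 10);
}
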
array.c
371 struct mm_struct *mm = get_task_mm(task); in proc_pid_status() local
376 if (mm) { in proc_pid_status()
377 task_mem(m, mm); in proc_pid_status()
378 mmput(mm); in proc_pid_status()
400 struct mm_struct *mm; in do_task_stat() local
413 mm = get_task_mm(task); in do_task_stat()
414 if (mm) { in do_task_stat()
415 vsize = task_vsize(mm); in do_task_stat()
521 seq_put_decimal_ull(m, " ", mm ? get_mm_rss(mm) : 0); in do_task_stat()
523 seq_put_decimal_ull(m, " ", mm ? (permitted ? mm->start_code : 1) : 0); in do_task_stat()
[all …]
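
The array.c hits show the standard way /proc code pins another task's address space: get_task_mm() takes a reference on the task's mm (returning NULL for kernel threads and exiting tasks) and mmput() drops it. A minimal sketch of that pairing; the helper name is invented:

#include <linux/sched.h>
#include <linux/mm.h>
#include <linux/seq_file.h>

/* Illustrative sketch only: the get_task_mm()/mmput() pattern used by
 * proc_pid_status() and do_task_stat() above, so the mm cannot be
 * freed while its counters are being reported. */
static void report_task_rss(struct seq_file *m, struct task_struct *task)
{
	struct mm_struct *mm = get_task_mm(task);

	seq_put_decimal_ull(m, " ", mm ? get_mm_rss(mm) : 0);
	if (mm)
		mmput(mm);
}
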
base.c
204 struct mm_struct *mm; in proc_pid_cmdline_read() local
218 mm = get_task_mm(tsk); in proc_pid_cmdline_read()
220 if (!mm) in proc_pid_cmdline_read()
223 if (!mm->env_end) { in proc_pid_cmdline_read()
234 down_read(&mm->mmap_sem); in proc_pid_cmdline_read()
235 arg_start = mm->arg_start; in proc_pid_cmdline_read()
236 arg_end = mm->arg_end; in proc_pid_cmdline_read()
237 env_start = mm->env_start; in proc_pid_cmdline_read()
238 env_end = mm->env_end; in proc_pid_cmdline_read()
239 up_read(&mm->mmap_sem); in proc_pid_cmdline_read()
[all …]
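
The base.c hits are from proc_pid_cmdline_read(), which snapshots the argv/envp boundaries under mmap_sem and drops the lock before copying any bytes out of the remote mm. A small sketch of that snapshot step; the helper name is invented:

#include <linux/mm.h>
#include <linux/mm_types.h>

/* Illustrative sketch only: copy the argument boundaries to locals
 * under the read-held mmap_sem, as proc_pid_cmdline_read() above does,
 * so the lock is not held across the later user-memory accesses. */
static void snapshot_cmdline_range(struct mm_struct *mm,
				   unsigned long *arg_start,
				   unsigned long *arg_end)
{
	down_read(&mm->mmap_sem);
	*arg_start = mm->arg_start;
	*arg_end   = mm->arg_end;
	up_read(&mm->mmap_sem);
}
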
internal.h
103 struct mm_struct *mm; in task_dumpable() local
106 mm = task->mm; in task_dumpable()
107 if (mm) in task_dumpable()
108 dumpable = get_dumpable(mm); in task_dumpable()
291 struct mm_struct *mm; member
inode.c
294 get_area = current->mm->get_unmapped_area; in proc_reg_get_unmapped_area()
/fs/
userfaultfd.c
59 struct mm_struct *mm; member
147 mmdrop(ctx->mm); in userfaultfd_ctx_put()
203 struct mm_struct *mm = ctx->mm; in userfaultfd_must_wait() local
210 VM_BUG_ON(!rwsem_is_locked(&mm->mmap_sem)); in userfaultfd_must_wait()
212 pgd = pgd_offset(mm, address); in userfaultfd_must_wait()
269 struct mm_struct *mm = fe->vma->vm_mm; in handle_userfault() local
276 BUG_ON(!rwsem_is_locked(&mm->mmap_sem)); in handle_userfault()
283 BUG_ON(ctx->mm != mm); in handle_userfault()
367 up_read(&mm->mmap_sem); in handle_userfault()
421 down_read(&mm->mmap_sem); in handle_userfault()
[all …]
binfmt_elf_fdpic.c
359 current->mm->start_code = 0; in load_elf_fdpic_binary()
360 current->mm->end_code = 0; in load_elf_fdpic_binary()
361 current->mm->start_stack = 0; in load_elf_fdpic_binary()
362 current->mm->start_data = 0; in load_elf_fdpic_binary()
363 current->mm->end_data = 0; in load_elf_fdpic_binary()
364 current->mm->context.exec_fdpic_loadmap = 0; in load_elf_fdpic_binary()
365 current->mm->context.interp_fdpic_loadmap = 0; in load_elf_fdpic_binary()
370 &current->mm->start_stack, in load_elf_fdpic_binary()
371 &current->mm->start_brk); in load_elf_fdpic_binary()
373 retval = setup_arg_pages(bprm, current->mm->start_stack, in load_elf_fdpic_binary()
[all …]
exec.c
179 struct mm_struct *mm = current->mm; in acct_arg_size() local
182 if (!mm || !diff) in acct_arg_size()
186 add_mm_counter(mm, MM_ANONPAGES, diff); in acct_arg_size()
211 ret = get_user_pages_remote(current, bprm->mm, pos, 1, gup_flags, in get_arg_page()
286 struct mm_struct *mm = bprm->mm; in __bprm_mm_init() local
292 if (down_write_killable(&mm->mmap_sem)) { in __bprm_mm_init()
296 vma->vm_mm = mm; in __bprm_mm_init()
311 err = insert_vm_struct(mm, vma); in __bprm_mm_init()
315 mm->stack_vm = mm->total_vm = 1; in __bprm_mm_init()
316 arch_bprm_mm_init(mm, vma); in __bprm_mm_init()
[all …]
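
The exec.c hits trace the new image's mm before it becomes current->mm: acct_arg_size() charges copied argv/envp pages to its anonymous-page counter, and __bprm_mm_init() inserts the temporary stack VMA under a write-held mmap_sem. A hedged sketch of the accounting half; the helper name is invented:

#include <linux/mm.h>
#include <linux/mm_types.h>

/* Illustrative sketch only: account argument pages against the
 * not-yet-live mm, as acct_arg_size() above does.  "pages" may be
 * negative when previously charged pages are released. */
static void charge_arg_pages(struct mm_struct *mm, long pages)
{
	if (!mm || !pages)
		return;

	add_mm_counter(mm, MM_ANONPAGES, pages);
}
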
binfmt_flat.c
117 sp = (unsigned long __user *)current->mm->start_stack; in create_flat_tables()
124 current->mm->start_stack = (unsigned long)sp & -FLAT_STACK_ALIGN; in create_flat_tables()
125 sp = (unsigned long __user *)current->mm->start_stack; in create_flat_tables()
136 current->mm->arg_start = (unsigned long)p; in create_flat_tables()
145 current->mm->arg_end = (unsigned long)p; in create_flat_tables()
147 current->mm->env_start = (unsigned long) p; in create_flat_tables()
156 current->mm->env_end = (unsigned long)p; in create_flat_tables()
388 ptr = (unsigned long __user *)(current->mm->start_code + r.reloc.offset); in old_reloc()
390 ptr = (unsigned long __user *)(current->mm->start_data + r.reloc.offset); in old_reloc()
400 val += current->mm->start_code; in old_reloc()
[all …]
coredump.c
158 exe_file = get_mm_exe_file(current->mm); in cn_print_exe_file()
321 if (t != current && t->mm) { in zap_process()
331 static int zap_threads(struct task_struct *tsk, struct mm_struct *mm, in zap_threads() argument
340 mm->core_state = core_state; in zap_threads()
350 if (atomic_read(&mm->mm_users) == nr + 1) in zap_threads()
390 if (unlikely(!p->mm)) in zap_threads()
392 if (unlikely(p->mm == mm)) { in zap_threads()
410 struct mm_struct *mm = tsk->mm; in coredump_wait() local
417 if (down_write_killable(&mm->mmap_sem)) in coredump_wait()
420 if (!mm->core_state) in coredump_wait()
[all …]
binfmt_aout.c
171 current->mm->arg_start = (unsigned long) p; in create_aout_tables()
180 current->mm->arg_end = current->mm->env_start = (unsigned long) p; in create_aout_tables()
189 current->mm->env_end = (unsigned long) p; in create_aout_tables()
247 current->mm->end_code = ex.a_text + in load_aout_binary()
248 (current->mm->start_code = N_TXTADDR(ex)); in load_aout_binary()
249 current->mm->end_data = ex.a_data + in load_aout_binary()
250 (current->mm->start_data = N_DATADDR(ex)); in load_aout_binary()
251 current->mm->brk = ex.a_bss + in load_aout_binary()
252 (current->mm->start_brk = N_BSSADDR(ex)); in load_aout_binary()
323 retval = set_brk(current->mm->start_brk, current->mm->brk); in load_aout_binary()
[all …]
aio.c
323 struct mm_struct *mm = vma->vm_mm; in aio_ring_mremap() local
327 spin_lock(&mm->ioctx_lock); in aio_ring_mremap()
329 table = rcu_dereference(mm->ioctx_table); in aio_ring_mremap()
344 spin_unlock(&mm->ioctx_lock); in aio_ring_mremap()
449 struct mm_struct *mm = current->mm; in aio_setup_ring() local
508 if (down_write_killable(&mm->mmap_sem)) { in aio_setup_ring()
517 up_write(&mm->mmap_sem); in aio_setup_ring()
650 static int ioctx_add_table(struct kioctx *ctx, struct mm_struct *mm) in ioctx_add_table() argument
656 spin_lock(&mm->ioctx_lock); in ioctx_add_table()
657 table = rcu_dereference_raw(mm->ioctx_table); in ioctx_add_table()
[all …]
binfmt_elf.c
103 current->mm->start_brk = current->mm->brk = end; in set_brk()
217 elf_info = (elf_addr_t *)current->mm->saved_auxv; in create_elf_tables()
267 sizeof current->mm->saved_auxv - ei_index * sizeof elf_info[0]); in create_elf_tables()
290 vma = find_extend_vma(current->mm, bprm->p); in create_elf_tables()
301 p = current->mm->arg_end = current->mm->arg_start; in create_elf_tables()
313 current->mm->arg_end = current->mm->env_start = p; in create_elf_tables()
325 current->mm->env_end = p; in create_elf_tables()
865 current->mm->start_stack = bprm->p; in load_elf_binary()
1096 current->mm->end_code = end_code; in load_elf_binary()
1097 current->mm->start_code = start_code; in load_elf_binary()
[all …]
/fs/hugetlbfs/
inode.c
174 struct mm_struct *mm = current->mm; in hugetlb_get_unmapped_area() local
192 vma = find_vma(mm, addr); in hugetlb_get_unmapped_area()
430 hash = hugetlb_fault_mutex_hash(h, current->mm, in remove_inode_hugepages()
552 struct mm_struct *mm = current->mm; in hugetlbfs_fallocate() local
616 hash = hugetlb_fault_mutex_hash(h, mm, &pseudo_vma, mapping, in hugetlbfs_fallocate()
/fs/ramfs/
file-mmu.c
38 return current->mm->get_unmapped_area(file, addr, len, pgoff, flags); in ramfs_mmu_get_unmapped_area()