Search query: full:mm (results 1 – 25 of 5225, sorted by relevance)


/kernel/linux/linux-6.6/include/linux/mmap_lock.h
  20  void __mmap_lock_do_trace_start_locking(struct mm_struct *mm, bool write);
  21  void __mmap_lock_do_trace_acquire_returned(struct mm_struct *mm, bool write,
  23  void __mmap_lock_do_trace_released(struct mm_struct *mm, bool write);
  25  static inline void __mmap_lock_trace_start_locking(struct mm_struct *mm,
  29  __mmap_lock_do_trace_start_locking(mm, write);
  32  static inline void __mmap_lock_trace_acquire_returned(struct mm_struct *mm,
  36  __mmap_lock_do_trace_acquire_returned(mm, write, success);
  39  static inline void __mmap_lock_trace_released(struct mm_struct *mm, bool write)
  42  __mmap_lock_do_trace_released(mm, write);
  47  static inline void __mmap_lock_trace_start_locking(struct mm_struct *mm,
  [all …]

/kernel/linux/linux-6.6/include/linux/page_table_check.h
  17  void __page_table_check_pte_clear(struct mm_struct *mm, pte_t pte);
  18  void __page_table_check_pmd_clear(struct mm_struct *mm, pmd_t pmd);
  19  void __page_table_check_pud_clear(struct mm_struct *mm, pud_t pud);
  20  void __page_table_check_ptes_set(struct mm_struct *mm, pte_t *ptep, pte_t pte,
  22  void __page_table_check_pmd_set(struct mm_struct *mm, pmd_t *pmdp, pmd_t pmd);
  23  void __page_table_check_pud_set(struct mm_struct *mm, pud_t *pudp, pud_t pud);
  24  void __page_table_check_pte_clear_range(struct mm_struct *mm,
  44  static inline void page_table_check_pte_clear(struct mm_struct *mm, pte_t pte)
  49  __page_table_check_pte_clear(mm, pte);
  52  static inline void page_table_check_pmd_clear(struct mm_struct *mm, pmd_t pmd)
  [all …]
/kernel/linux/linux-5.10/include/linux/mmap_lock.h
  9  static inline void mmap_init_lock(struct mm_struct *mm)
  11  init_rwsem(&mm->mmap_lock);
  14  static inline void mmap_write_lock(struct mm_struct *mm)
  16  down_write(&mm->mmap_lock);
  19  static inline void mmap_write_lock_nested(struct mm_struct *mm, int subclass)
  21  down_write_nested(&mm->mmap_lock, subclass);
  24  static inline int mmap_write_lock_killable(struct mm_struct *mm)
  26  return down_write_killable(&mm->mmap_lock);
  29  static inline bool mmap_write_trylock(struct mm_struct *mm)
  31  return down_write_trylock(&mm->mmap_lock) != 0;
  [all …]
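The hit above shows the 5.10 wrappers that route mmap_write_lock() and friends through mm->mmap_lock. As an illustrative sketch only (not taken from the search results), the usual caller-side pattern looks like the following; walk_one_vma() is a made-up helper, and the killable read lock is used just to show the non-write side of the same API.

/* Sketch, assuming the standard mmap_lock API from <linux/mmap_lock.h>. */
#include <linux/mm.h>
#include <linux/mmap_lock.h>
#include <linux/sched/mm.h>

static int walk_one_vma(struct mm_struct *mm, unsigned long addr)
{
    struct vm_area_struct *vma;
    int ret = 0;

    if (mmap_read_lock_killable(mm))    /* interruptible; fails with -EINTR */
        return -EINTR;

    vma = find_vma(mm, addr);           /* only valid while mmap_lock is held */
    if (!vma || vma->vm_start > addr)
        ret = -EFAULT;

    mmap_read_unlock(mm);
    return ret;
}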
/kernel/linux/linux-6.6/arch/x86/include/asm/mmu_context.h
  55  static inline void init_new_context_ldt(struct mm_struct *mm)
  57  mm->context.ldt = NULL;
  58  init_rwsem(&mm->context.ldt_usr_sem);
  60  int ldt_dup_context(struct mm_struct *oldmm, struct mm_struct *mm);
  61  void destroy_context_ldt(struct mm_struct *mm);
  62  void ldt_arch_exit_mmap(struct mm_struct *mm);
  64  static inline void init_new_context_ldt(struct mm_struct *mm) { }
  66  struct mm_struct *mm) /* in ldt_dup_context() */
  70  static inline void destroy_context_ldt(struct mm_struct *mm) { }
  71  static inline void ldt_arch_exit_mmap(struct mm_struct *mm) { }
  [all …]
/kernel/linux/linux-6.6/drivers/gpu/drm/drm_buddy.c
  14  static struct drm_buddy_block *drm_block_alloc(struct drm_buddy *mm,
  35  static void drm_block_free(struct drm_buddy *mm,
  41  static void list_insert_sorted(struct drm_buddy *mm,
  47  head = &mm->free_list[drm_buddy_block_order(block)];
  68  static void mark_free(struct drm_buddy *mm,
  74  list_insert_sorted(mm, block);
  88  * @mm: DRM buddy manager to initialize
  97  int drm_buddy_init(struct drm_buddy *mm, u64 size, u64 chunk_size)
  113  mm->size = size;
  114  mm->avail = size;
  [all …]
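As a rough illustration (not part of the hit above): drm_buddy_init() from the excerpt sets up the manager for a span of a given size and minimum chunk size, and is paired with a teardown call. The sizes, the demo_buddy_setup() helper, and the use of drm_buddy_fini() as the counterpart are assumptions made for this sketch.

/* Sketch: lifecycle of the drm_buddy manager shown above (assumed API). */
#include <drm/drm_buddy.h>

static int demo_buddy_setup(struct drm_buddy *mm)
{
    int err;

    /* Manage a 64 MiB range with a 4 KiB minimum chunk size. */
    err = drm_buddy_init(mm, 64ULL << 20, 4ULL << 10);
    if (err)
        return err;

    /* ... allocate and free blocks against mm here ... */

    drm_buddy_fini(mm);    /* all blocks must have been freed beforehand */
    return 0;
}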
/kernel/linux/linux-5.10/arch/powerpc/include/asm/mmu_context.h
  7  #include <linux/mm.h>
  17  extern int init_new_context(struct task_struct *tsk, struct mm_struct *mm);
  18  extern void destroy_context(struct mm_struct *mm);
  23  extern bool mm_iommu_preregistered(struct mm_struct *mm);
  24  extern long mm_iommu_new(struct mm_struct *mm,
  27  extern long mm_iommu_newdev(struct mm_struct *mm, unsigned long ua,
  30  extern long mm_iommu_put(struct mm_struct *mm,
  32  extern void mm_iommu_init(struct mm_struct *mm);
  33  extern void mm_iommu_cleanup(struct mm_struct *mm);
  34  extern struct mm_iommu_table_group_mem_t *mm_iommu_lookup(struct mm_struct *mm,
  [all …]
/kernel/linux/linux-6.6/drivers/gpu/drm/tests/drm_buddy_test.c
  46  static void __dump_block(struct kunit *test, struct drm_buddy *mm,
  52  drm_buddy_block_size(mm, block), !block->parent, buddy);
  55  static void dump_block(struct kunit *test, struct drm_buddy *mm,
  60  __dump_block(test, mm, block, false);
  64  __dump_block(test, mm, buddy, true);
  67  static int check_block(struct kunit *test, struct drm_buddy *mm,
  84  block_size = drm_buddy_block_size(mm, block);
  87  if (block_size < mm->chunk_size) {
  98  if (!IS_ALIGNED(block_size, mm->chunk_size)) {
  103  if (!IS_ALIGNED(offset, mm->chunk_size)) {
  [all …]
/kernel/linux/linux-6.6/arch/s390/include/asm/pgalloc.h
  18  #include <linux/mm.h>
  26  struct page *page_table_alloc_pgste(struct mm_struct *mm);
  37  int crst_table_upgrade(struct mm_struct *mm, unsigned long limit);
  39  static inline unsigned long check_asce_limit(struct mm_struct *mm, unsigned long addr,
  44  if (addr + len > mm->context.asce_limit &&
  46  rc = crst_table_upgrade(mm, addr + len);
  53  static inline p4d_t *p4d_alloc_one(struct mm_struct *mm, unsigned long address)
  55  unsigned long *table = crst_table_alloc(mm);
  62  static inline void p4d_free(struct mm_struct *mm, p4d_t *p4d)
  64  if (!mm_p4d_folded(mm))
  [all …]

/kernel/linux/linux-6.6/arch/s390/include/asm/mmu_context.h
  20  struct mm_struct *mm) /* in init_new_context() */
  24  spin_lock_init(&mm->context.lock);
  25  INIT_LIST_HEAD(&mm->context.pgtable_list);
  26  INIT_LIST_HEAD(&mm->context.gmap_list);
  27  cpumask_clear(&mm->context.cpu_attach_mask);
  28  atomic_set(&mm->context.flush_count, 0);
  29  atomic_set(&mm->context.protected_count, 0);
  30  mm->context.gmap_asce = 0;
  31  mm->context.flush_mm = 0;
  33  mm->context.alloc_pgste = page_table_allocate_pgste ||
  [all …]
/kernel/linux/linux-5.10/arch/s390/include/asm/pgalloc.h
  18  #include <linux/mm.h>
  26  struct page *page_table_alloc_pgste(struct mm_struct *mm);
  37  int crst_table_upgrade(struct mm_struct *mm, unsigned long limit);
  39  static inline unsigned long check_asce_limit(struct mm_struct *mm, unsigned long addr,
  44  if (addr + len > mm->context.asce_limit &&
  46  rc = crst_table_upgrade(mm, addr + len);
  53  static inline p4d_t *p4d_alloc_one(struct mm_struct *mm, unsigned long address)
  55  unsigned long *table = crst_table_alloc(mm);
  62  static inline void p4d_free(struct mm_struct *mm, p4d_t *p4d)
  64  if (!mm_p4d_folded(mm))
  [all …]

/kernel/linux/linux-5.10/arch/s390/include/asm/mmu_context.h
  19  struct mm_struct *mm) /* in init_new_context() */
  23  spin_lock_init(&mm->context.lock);
  24  INIT_LIST_HEAD(&mm->context.pgtable_list);
  25  INIT_LIST_HEAD(&mm->context.gmap_list);
  26  cpumask_clear(&mm->context.cpu_attach_mask);
  27  atomic_set(&mm->context.flush_count, 0);
  28  atomic_set(&mm->context.is_protected, 0);
  29  mm->context.gmap_asce = 0;
  30  mm->context.flush_mm = 0;
  32  mm->context.alloc_pgste = page_table_allocate_pgste ||
  [all …]
/kernel/linux/linux-6.6/arch/powerpc/include/asm/mmu_context.h
  7  #include <linux/mm.h>
  18  extern int init_new_context(struct task_struct *tsk, struct mm_struct *mm);
  20  extern void destroy_context(struct mm_struct *mm);
  24  extern bool mm_iommu_preregistered(struct mm_struct *mm);
  25  extern long mm_iommu_new(struct mm_struct *mm,
  28  extern long mm_iommu_newdev(struct mm_struct *mm, unsigned long ua,
  31  extern long mm_iommu_put(struct mm_struct *mm,
  33  extern void mm_iommu_init(struct mm_struct *mm);
  34  extern struct mm_iommu_table_group_mem_t *mm_iommu_lookup(struct mm_struct *mm,
  36  extern struct mm_iommu_table_group_mem_t *mm_iommu_get(struct mm_struct *mm,
  [all …]
/kernel/linux/linux-5.10/arch/m68k/include/asm/mmu_context.h
  8  static inline void enter_lazy_tlb(struct mm_struct *mm, struct task_struct *tsk)
  32  static inline void get_mmu_context(struct mm_struct *mm)
  36  if (mm->context != NO_CONTEXT)
  49  mm->context = ctx;
  50  context_mm[ctx] = mm;
  56  #define init_new_context(tsk, mm) (((mm)->context = NO_CONTEXT), 0)
  61  static inline void destroy_context(struct mm_struct *mm)
  63  if (mm->context != NO_CONTEXT) {
  64  clear_bit(mm->context, context_map);
  65  mm->context = NO_CONTEXT;
  [all …]
/kernel/linux/linux-6.6/include/trace/events/ksm.h
  71  * @mm: address of the mm object of the process
  77  TP_PROTO(void *mm),
  79  TP_ARGS(mm),
  82  __field(void *, mm)
  86  __entry->mm = mm;
  89  TP_printk("mm %p", __entry->mm)
  95  * @mm: address of the mm object of the process
  101  TP_PROTO(void *mm),
  103  TP_ARGS(mm)
  109  * @mm: address of the mm object of the process
  [all …]
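The fragments above come from the KSM trace events, which all record a single mm pointer. As a generic, hedged sketch (the system and event names "demo"/"demo_mm_event" are made up, not the KSM ones), this is the overall shape of such a TRACE_EVENT header:

/* Sketch of a tracepoint header recording one mm pointer (names are hypothetical). */
#undef TRACE_SYSTEM
#define TRACE_SYSTEM demo

#if !defined(_TRACE_DEMO_H) || defined(TRACE_HEADER_MULTI_READ)
#define _TRACE_DEMO_H

#include <linux/tracepoint.h>

TRACE_EVENT(demo_mm_event,

    TP_PROTO(void *mm),

    TP_ARGS(mm),

    TP_STRUCT__entry(
        __field(void *, mm)
    ),

    TP_fast_assign(
        __entry->mm = mm;
    ),

    TP_printk("mm %p", __entry->mm)
);

#endif /* _TRACE_DEMO_H */

/* Must stay outside the include guard; define_trace.h expects the header
 * to be named <TRACE_SYSTEM>.h unless TRACE_INCLUDE_FILE is defined. */
#include <trace/define_trace.h>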
/kernel/linux/linux-6.6/Documentation/core-api/mm-api.rst
  14  .. kernel-doc:: mm/gup.c
  40  .. kernel-doc:: mm/slab.c
  43  .. kernel-doc:: mm/slab_common.c
  46  .. kernel-doc:: mm/util.c
  52  .. kernel-doc:: mm/vmalloc.c
  61  .. kernel-doc:: mm/filemap.c
  67  .. kernel-doc:: mm/readahead.c
  70  .. kernel-doc:: mm/readahead.c
  76  .. kernel-doc:: mm/page-writeback.c
  82  .. kernel-doc:: mm/truncate.c
  [all …]
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/selftests/i915_buddy.c
  11  static void __igt_dump_block(struct i915_buddy_mm *mm,
  20  i915_buddy_block_size(mm, block),
  25  static void igt_dump_block(struct i915_buddy_mm *mm,
  30  __igt_dump_block(mm, block, false);
  34  __igt_dump_block(mm, buddy, true);
  37  static int igt_check_block(struct i915_buddy_mm *mm,
  55  block_size = i915_buddy_block_size(mm, block);
  58  if (block_size < mm->chunk_size) {
  68  if (!IS_ALIGNED(block_size, mm->chunk_size)) {
  73  if (!IS_ALIGNED(offset, mm->chunk_size)) {
  [all …]
/kernel/linux/linux-5.10/arch/x86/include/asm/mmu_context.h
  61  static inline void init_new_context_ldt(struct mm_struct *mm)
  63  mm->context.ldt = NULL;
  64  init_rwsem(&mm->context.ldt_usr_sem);
  66  int ldt_dup_context(struct mm_struct *oldmm, struct mm_struct *mm);
  67  void destroy_context_ldt(struct mm_struct *mm);
  68  void ldt_arch_exit_mmap(struct mm_struct *mm);
  70  static inline void init_new_context_ldt(struct mm_struct *mm) { }
  72  struct mm_struct *mm) /* in ldt_dup_context() */
  76  static inline void destroy_context_ldt(struct mm_struct *mm) { }
  77  static inline void ldt_arch_exit_mmap(struct mm_struct *mm) { }
  [all …]
/kernel/linux/linux-5.10/arch/s390/mm/pgtable.c
  11  #include <linux/mm.h>
  47  static inline void ptep_ipte_local(struct mm_struct *mm, unsigned long addr,
  54  asce = READ_ONCE(mm->context.gmap_asce);
  58  asce = asce ? : mm->context.asce;
  67  static inline void ptep_ipte_global(struct mm_struct *mm, unsigned long addr,
  74  asce = READ_ONCE(mm->context.gmap_asce);
  78  asce = asce ? : mm->context.asce;
  87  static inline pte_t ptep_flush_direct(struct mm_struct *mm,
  96  atomic_inc(&mm->context.flush_count);
  98  cpumask_equal(mm_cpumask(mm), cpumask_of(smp_processor_id())))
  [all …]
/kernel/linux/linux-6.6/arch/s390/mm/pgtable.c
  11  #include <linux/mm.h>
  47  static inline void ptep_ipte_local(struct mm_struct *mm, unsigned long addr,
  54  asce = READ_ONCE(mm->context.gmap_asce);
  58  asce = asce ? : mm->context.asce;
  67  static inline void ptep_ipte_global(struct mm_struct *mm, unsigned long addr,
  74  asce = READ_ONCE(mm->context.gmap_asce);
  78  asce = asce ? : mm->context.asce;
  87  static inline pte_t ptep_flush_direct(struct mm_struct *mm,
  96  atomic_inc(&mm->context.flush_count);
  98  cpumask_equal(mm_cpumask(mm), cpumask_of(smp_processor_id())))
  [all …]
/kernel/linux/linux-5.10/mm/rss_threshold.c
  3  * mm/rss_threshold.c
  8  #include <linux/mm.h>
  9  #include <linux/sched/mm.h>
  15  struct mm_struct *mm = get_task_mm(task); /* in proc_pid_rss() */
  18  if (mm) {
  19  total_rss = get_mm_rss(mm);
  21  mmput(mm);
  26  void listen_rss_threshold(struct mm_struct *mm)
  30  total_rss = get_mm_rss(mm);
  32  if (!(mm->owner) || mm->rss_threshold == 0)
  [all …]
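The rss_threshold.c hit above reads a task's resident set size through get_task_mm(), get_mm_rss() and mmput(). A minimal sketch of that same pattern follows; task_rss_pages() is a made-up helper name, not a function from the excerpt.

/* Sketch: read a task's RSS using the reference-counted mm access pattern above. */
#include <linux/mm.h>
#include <linux/sched.h>
#include <linux/sched/mm.h>

static unsigned long task_rss_pages(struct task_struct *task)
{
    struct mm_struct *mm = get_task_mm(task);  /* takes a reference; may be NULL */
    unsigned long rss = 0;

    if (mm) {
        rss = get_mm_rss(mm);   /* resident pages (file + anon + shmem) */
        mmput(mm);              /* drop the reference */
    }
    return rss;
}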
/kernel/linux/linux-6.6/arch/m68k/include/asm/mmu_context.h
  28  static inline void get_mmu_context(struct mm_struct *mm)
  32  if (mm->context != NO_CONTEXT)
  45  mm->context = ctx;
  46  context_mm[ctx] = mm;
  52  #define init_new_context(tsk, mm) (((mm)->context = NO_CONTEXT), 0)
  58  static inline void destroy_context(struct mm_struct *mm)
  60  if (mm->context != NO_CONTEXT) {
  61  clear_bit(mm->context, context_map);
  62  mm->context = NO_CONTEXT;
  75  get_mmu_context(tsk->mm); /* in switch_mm() */
  [all …]
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/i915_buddy.c
  75  static void mark_free(struct i915_buddy_mm *mm,
  82  &mm->free_list[i915_buddy_block_order(block)]);
  93  int i915_buddy_init(struct i915_buddy_mm *mm, u64 size, u64 chunk_size)
  109  mm->size = size;
  110  mm->chunk_size = chunk_size;
  111  mm->max_order = ilog2(size) - ilog2(chunk_size);
  113  GEM_BUG_ON(mm->max_order > I915_BUDDY_MAX_ORDER);
  115  mm->free_list = kmalloc_array(mm->max_order + 1,
  118  if (!mm->free_list)
  121  for (i = 0; i <= mm->max_order; ++i)
  [all …]
/kernel/linux/linux-5.10/arch/arm/include/asm/mmu_context.h
  24  void __check_vmalloc_seq(struct mm_struct *mm);
  28  void check_and_switch_context(struct mm_struct *mm, struct task_struct *tsk);
  30  init_new_context(struct task_struct *tsk, struct mm_struct *mm)
  32  atomic64_set(&mm->context.id, 0);
  37  void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,
  40  static inline void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,
  50  static inline void check_and_switch_context(struct mm_struct *mm,
  53  if (unlikely(mm->context.vmalloc_seq != init_mm.context.vmalloc_seq))
  54  __check_vmalloc_seq(mm);
  60  * running with the old mm. Since we only support UP systems
  [all …]
/kernel/linux/linux-6.6/mm/debug.c
  3  * mm/debug.c
  5  * mm/ specific debug routines.
  10  #include <linux/mm.h>
  150  pr_emerg("vma %px start %px end %px mm %px\n"
  162  void dump_mm(const struct mm_struct *mm)
  164  pr_emerg("mm %px task_size %lu\n"
  192  mm, mm->task_size,
  194  mm->get_unmapped_area,
  196  mm->mmap_base, mm->mmap_legacy_base,
  197  mm->pgd, atomic_read(&mm->mm_users),
  [all …]
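dump_mm() in the excerpt above prints the mm_struct fields through pr_emerg(). A small illustration of calling it from a debug path is sketched below; demo_dump_current_mm() is a made-up name, and the declaration is assumed to come from <linux/mmdebug.h>.

/* Sketch: invoking the dump_mm() debug helper shown above. */
#include <linux/mm.h>
#include <linux/mmdebug.h>
#include <linux/sched.h>

static void demo_dump_current_mm(void)
{
    if (current->mm)
        dump_mm(current->mm);  /* emits task_size, mmap_base, pgd, refcounts, ... */
}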
/kernel/linux/linux-6.6/arch/arm/include/asm/mmu_context.h
  24  void __check_vmalloc_seq(struct mm_struct *mm);
  27  static inline void check_vmalloc_seq(struct mm_struct *mm)
  30  unlikely(atomic_read(&mm->context.vmalloc_seq) !=
  32  __check_vmalloc_seq(mm);
  38  void check_and_switch_context(struct mm_struct *mm, struct task_struct *tsk);
  42  init_new_context(struct task_struct *tsk, struct mm_struct *mm)
  44  atomic64_set(&mm->context.id, 0);
  49  void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,
  52  static inline void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,
  62  static inline void check_and_switch_context(struct mm_struct *mm,
  [all …]
