
Searched refs:val (Results 1 – 25 of 31) sorted by relevance


/mm/
ptdump.c
32 pgd_t val = READ_ONCE(*pgd); in ptdump_pgd_entry() local
36 if (pgd_page(val) == virt_to_page(lm_alias(kasan_early_shadow_p4d))) in ptdump_pgd_entry()
41 st->effective_prot(st, 0, pgd_val(val)); in ptdump_pgd_entry()
43 if (pgd_leaf(val)) in ptdump_pgd_entry()
44 st->note_page(st, addr, 0, pgd_val(val)); in ptdump_pgd_entry()
53 p4d_t val = READ_ONCE(*p4d); in ptdump_p4d_entry() local
57 if (p4d_page(val) == virt_to_page(lm_alias(kasan_early_shadow_pud))) in ptdump_p4d_entry()
62 st->effective_prot(st, 1, p4d_val(val)); in ptdump_p4d_entry()
64 if (p4d_leaf(val)) in ptdump_p4d_entry()
65 st->note_page(st, addr, 1, p4d_val(val)); in ptdump_p4d_entry()
[all …]
cma_debug.c
24 static int cma_debugfs_get(void *data, u64 *val) in cma_debugfs_get() argument
28 *val = *p; in cma_debugfs_get()
34 static int cma_used_get(void *data, u64 *val) in cma_used_get() argument
43 *val = (u64)used << cma->order_per_bit; in cma_used_get()
49 static int cma_maxchunk_get(void *data, u64 *val) in cma_maxchunk_get() argument
65 *val = (u64)maxchunk << cma->order_per_bit; in cma_maxchunk_get()
122 static int cma_free_write(void *data, u64 val) in cma_free_write() argument
124 int pages = val; in cma_free_write()
154 static int cma_alloc_write(void *data, u64 val) in cma_alloc_write() argument
156 int pages = val; in cma_alloc_write()
page_pinner.c
360 static int failure_tracking_set(void *data, u64 val) in failure_tracking_set() argument
364 on = (bool)val; in failure_tracking_set()
372 static int failure_tracking_get(void *data, u64 *val) in failure_tracking_get() argument
374 *val = static_branch_unlikely(&failure_tracking); in failure_tracking_get()
381 static int buffer_size_set(void *data, u64 val) in buffer_size_set() argument
386 new = kvmalloc_array(val, sizeof(*new), GFP_KERNEL); in buffer_size_set()
394 pp_buf_size = val; in buffer_size_set()
401 static int buffer_size_get(void *data, u64 *val) in buffer_size_get() argument
403 *val = pp_buf_size; in buffer_size_get()
hugetlb_cgroup.c
26 #define MEMFILE_PRIVATE(x, val) (((x) << 16) | (val)) argument
27 #define MEMFILE_IDX(val) (((val) >> 16) & 0xffff) argument
28 #define MEMFILE_ATTR(val) ((val) & 0xffff) argument
459 u64 val; in hugetlb_cgroup_read_u64_max() local
476 val = (u64)page_counter_read(counter); in hugetlb_cgroup_read_u64_max()
477 seq_printf(seq, "%llu\n", val * PAGE_SIZE); in hugetlb_cgroup_read_u64_max()
483 val = (u64)counter->max; in hugetlb_cgroup_read_u64_max()
484 if (val == limit) in hugetlb_cgroup_read_u64_max()
487 seq_printf(seq, "%llu\n", val * PAGE_SIZE); in hugetlb_cgroup_read_u64_max()
hwpoison-inject.c
14 static int hwpoison_inject(void *data, u64 val) in hwpoison_inject() argument
16 unsigned long pfn = val; in hwpoison_inject()
55 static int hwpoison_unpoison(void *data, u64 val) in hwpoison_unpoison() argument
60 return unpoison_memory(val); in hwpoison_unpoison()
pgsize_migration.c
48 unsigned long val; in store_pgsize_migration_enabled() local
54 if (kstrtoul(buf, 10, &val)) in store_pgsize_migration_enabled()
57 if (val > 1) in store_pgsize_migration_enabled()
60 if (val == 1) in store_pgsize_migration_enabled()
62 else if (val == 0) in store_pgsize_migration_enabled()
memcontrol.c
219 #define MEMFILE_PRIVATE(x, val) ((x) << 16 | (val)) argument
220 #define MEMFILE_TYPE(val) ((val) >> 16 & 0xffff) argument
221 #define MEMFILE_ATTR(val) ((val) & 0xffff) argument
670 static inline void memcg_rstat_updated(struct mem_cgroup *memcg, int val) in memcg_rstat_updated() argument
676 x = __this_cpu_add_return(stats_updates, abs(val)); in memcg_rstat_updated()
720 void __mod_memcg_state(struct mem_cgroup *memcg, int idx, int val) in __mod_memcg_state() argument
725 __this_cpu_add(memcg->vmstats_percpu->state[idx], val); in __mod_memcg_state()
726 memcg_rstat_updated(memcg, val); in __mod_memcg_state()
745 int val) in __mod_memcg_lruvec_state() argument
754 __this_cpu_add(memcg->vmstats_percpu->state[idx], val); in __mod_memcg_lruvec_state()
[all …]
swap_slots.c
308 entry.val = 0; in get_swap_page()
333 cache->slots[cache->cur++].val = 0; in get_swap_page()
340 if (entry.val) in get_swap_page()
348 entry.val = 0; in get_swap_page()
swapfile.c
1131 if (!entry.val) in __swap_info_get()
1144 pr_err("%s: %s%08lx\n", __func__, Bad_offset, entry.val); in __swap_info_get()
1147 pr_err("%s: %s%08lx\n", __func__, Unused_file, entry.val); in __swap_info_get()
1150 pr_err("%s: %s%08lx\n", __func__, Bad_file, entry.val); in __swap_info_get()
1167 pr_err("%s: %s%08lx\n", __func__, Unused_offset, entry.val); in _swap_info_get()
1272 if (!entry.val) in get_swap_device()
1293 pr_err("%s: %s%08lx\n", __func__, Bad_file, entry.val); in get_swap_device()
1358 unsigned char val; in put_swap_page() local
1370 val = map[i]; in put_swap_page()
1371 VM_BUG_ON(!(val & SWAP_HAS_CACHE)); in put_swap_page()
[all …]
zswap.c
753 static int __zswap_param_set(const char *val, const struct kernel_param *kp, in __zswap_param_set() argument
757 char *s = strstrip((char *)val); in __zswap_param_set()
848 static int zswap_compressor_param_set(const char *val, in zswap_compressor_param_set() argument
851 return __zswap_param_set(val, kp, zswap_zpool_type, NULL); in zswap_compressor_param_set()
854 static int zswap_zpool_param_set(const char *val, in zswap_zpool_param_set() argument
857 return __zswap_param_set(val, kp, NULL, zswap_compressor); in zswap_zpool_param_set()
860 static int zswap_enabled_param_set(const char *val, in zswap_enabled_param_set() argument
872 return param_set_bool(val, kp); in zswap_enabled_param_set()
page_ext.c
374 void *val; in __invalidate_page_ext() local
379 val = (void *)ms->page_ext + PAGE_EXT_INVALID; in __invalidate_page_ext()
380 WRITE_ONCE(ms->page_ext, val); in __invalidate_page_ext()
shuffle.c
20 static __meminit int shuffle_store(const char *val, in shuffle_store() argument
23 int rc = param_set_bool(val, kp); in shuffle_store()
vmstat.c
1877 long val; in vmstat_refresh() local
1905 val = atomic_long_read(&vm_zone_stat[i]); in vmstat_refresh()
1906 if (val < 0) { in vmstat_refresh()
1908 __func__, zone_stat_name(i), val); in vmstat_refresh()
1919 val = atomic_long_read(&vm_node_stat[i]); in vmstat_refresh()
1920 if (val < 0) { in vmstat_refresh()
1922 __func__, node_stat_name(i), val); in vmstat_refresh()
page_alloc.c
6103 void si_meminfo(struct sysinfo *val) in si_meminfo() argument
6105 val->totalram = totalram_pages(); in si_meminfo()
6106 val->sharedram = global_node_page_state(NR_SHMEM); in si_meminfo()
6107 val->freeram = global_zone_page_state(NR_FREE_PAGES); in si_meminfo()
6108 val->bufferram = nr_blockdev_pages(); in si_meminfo()
6109 val->totalhigh = totalhigh_pages(); in si_meminfo()
6110 val->freehigh = nr_free_highpages(); in si_meminfo()
6111 val->mem_unit = PAGE_SIZE; in si_meminfo()
6112 trace_android_vh_si_meminfo(val); in si_meminfo()
6118 void si_meminfo_node(struct sysinfo *val, int nid) in si_meminfo_node() argument
[all …]
debug_vm_pgtable.c
98 unsigned long val = idx, *ptr = &val; in pte_basic_tests() local
194 unsigned long val = idx, *ptr = &val; in pmd_basic_tests() local
329 unsigned long val = idx, *ptr = &val; in pud_basic_tests() local
frontswap.c
252 swp_entry_t entry = { .val = page_private(page), }; in __frontswap_store()
301 swp_entry_t entry = { .val = page_private(page), }; in __frontswap_load()
swap_state.c
128 set_page_private(page + i, entry.val + i); in add_to_swap_cache()
193 if (!entry.val) in add_to_swap()
242 swp_entry_t entry = { .val = page_private(page) }; in delete_from_swap_cache()
memory.c
182 static void add_mm_counter_fast(struct mm_struct *mm, int member, int val) in add_mm_counter_fast() argument
187 task->rss_stat.count[member] += val; in add_mm_counter_fast()
189 add_mm_counter(mm, member, val); in add_mm_counter_fast()
1160 VM_WARN_ON_ONCE(!entry.val); in copy_pte_range()
1165 entry.val = 0; in copy_pte_range()
3802 set_page_private(page, entry.val); in do_swap_page()
3866 page_private(page) != entry.val)) && swapcache) in do_swap_page()
4387 static int fault_around_bytes_get(void *data, u64 *val) in fault_around_bytes_get() argument
4389 *val = fault_around_bytes; in fault_around_bytes_get()
4397 static int fault_around_bytes_set(void *data, u64 val) in fault_around_bytes_set() argument
[all …]
slub.c
905 static void init_object(struct kmem_cache *s, void *object, u8 val) in init_object() argument
910 memset(p - s->red_left_pad, val, s->red_left_pad); in init_object()
918 memset(p + s->object_size, val, s->inuse - s->object_size); in init_object()
1054 void *object, u8 val) in check_object() argument
1061 object - s->red_left_pad, val, s->red_left_pad)) in check_object()
1065 endobject, val, s->inuse - s->object_size)) in check_object()
1076 if (val != SLUB_RED_ACTIVE && (s->flags & __OBJECT_POISON) && in check_object()
1088 if (!freeptr_outside_object(s) && val == SLUB_RED_ACTIVE) in check_object()
1640 void *object, u8 val) { return 1; } in check_object() argument
5014 u8 val = test_bit(__obj_to_index(s, addr, p), obj_map) ? in validate_slab() local
[all …]
vmscan.c
1180 swp_entry_t swap = { .val = page_private(page) }; in __remove_mapping()
3865 pmd_t val = pmd_read_atomic(pmd + i); in walk_pmd_range() local
3872 if (!pmd_present(val) || is_huge_zero_pmd(val)) { in walk_pmd_range()
3878 if (pmd_trans_huge(val)) { in walk_pmd_range()
3879 unsigned long pfn = pmd_pfn(val); in walk_pmd_range()
3884 if (!pmd_young(val)) { in walk_pmd_range()
3901 if (!pmd_young(val)) in walk_pmd_range()
3912 if (!walk_pte_range(&val, addr, next, args)) in walk_pmd_range()
3941 pud_t val = READ_ONCE(pud[i]); in walk_pud_range() local
3945 if (!pud_present(val) || WARN_ON_ONCE(pud_leaf(val))) in walk_pud_range()
[all …]
memory_hotplug.c
65 static int set_online_policy(const char *val, const struct kernel_param *kp) in set_online_policy() argument
67 int ret = sysfs_match_string(online_policy_to_str, val); in set_online_policy()
slab.c
1448 static void poison_obj(struct kmem_cache *cachep, void *addr, unsigned char val) in poison_obj() argument
1453 memset(addr, val, size); in poison_obj()
2324 unsigned int idx, freelist_idx_t val) in set_free_obj() argument
2326 ((freelist_idx_t *)(page->freelist))[idx] = val; in set_free_obj()
oom_kill.c
755 trace_mark_victim(tsk, cred->uid.val);
/mm/damon/
reclaim.c
366 static int enabled_store(const char *val, in enabled_store() argument
369 int rc = param_set_bool(val, kp); in enabled_store()
/mm/kfence/
core.c
57 static int param_set_sample_interval(const char *val, const struct kernel_param *kp) in param_set_sample_interval() argument
60 int ret = kstrtoul(val, 0, &num); in param_set_sample_interval()
187 static void alloc_covered_add(u32 alloc_stack_hash, int val) in alloc_covered_add() argument
192 atomic_add(val, &alloc_covered[alloc_stack_hash & ALLOC_COVERED_MASK]); in alloc_covered_add()
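
A recurring pattern in the hits above where val is a u64 argument (cma_debug.c, page_pinner.c, hwpoison-inject.c, and the fault_around_bytes handlers in memory.c) is the debugfs attribute interface: a get() callback fills in *val, a set() callback receives val by value, and a macro wires the pair into file_operations. The following is a minimal sketch of that interface only; every name beginning with example_ is invented for illustration and does not appear in the files above.

/*
 * Sketch only: hypothetical example_* names, not code from the files above.
 * DEFINE_DEBUGFS_ATTRIBUTE() builds file_operations around callbacks with
 * the signatures seen in the search hits:
 *   int get(void *data, u64 *val)  and  int set(void *data, u64 val)
 */
#include <linux/debugfs.h>
#include <linux/module.h>

static u64 example_counter;		/* hypothetical backing state */

static int example_get(void *data, u64 *val)
{
	*val = example_counter;		/* report the current value */
	return 0;
}

static int example_set(void *data, u64 val)
{
	example_counter = val;		/* accept a new value from userspace */
	return 0;
}

DEFINE_DEBUGFS_ATTRIBUTE(example_fops, example_get, example_set, "%llu\n");

static int __init example_debugfs_init(void)
{
	/* expose a read/write u64 file under /sys/kernel/debug */
	debugfs_create_file("example_val", 0644, NULL, NULL, &example_fops);
	return 0;
}
module_init(example_debugfs_init);
MODULE_LICENSE("GPL");

Reads and writes of the hypothetical /sys/kernel/debug/example_val land in example_get() and example_set(), which is why so many of the mm debugfs handlers listed here share the (void *data, u64 *val) / (void *data, u64 val) signatures.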
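The const char *val arguments in zswap.c, shuffle.c, memory_hotplug.c, damon/reclaim.c and kfence/core.c come from a different interface: struct kernel_param_ops, whose .set() hook receives the raw parameter string and typically delegates parsing to a param_set_*() helper (several of the hits above call param_set_bool() or kstrtoul()). A hedged sketch of that shape, again with invented example_ names:

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/moduleparam.h>

static bool example_enabled;	/* hypothetical parameter backing store */

/*
 * .set receives the raw string written on the kernel command line or to
 * /sys/module/<module>/parameters/enabled; parse it, then react if needed.
 */
static int example_enabled_set(const char *val, const struct kernel_param *kp)
{
	int err = param_set_bool(val, kp);	/* stores into *(bool *)kp->arg */

	if (err)
		return err;

	pr_info("example: enabled=%d\n", example_enabled);
	return 0;
}

static const struct kernel_param_ops example_enabled_ops = {
	.set = example_enabled_set,
	.get = param_get_bool,
};

module_param_cb(enabled, &example_enabled_ops, &example_enabled, 0644);
MODULE_LICENSE("GPL");

Delegating to param_set_bool() keeps the string parsing and error handling in one place; the custom .set() only adds whatever side effect the subsystem needs when the value changes.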
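Finally, the swap-path hits (swap_slots.c, swapfile.c, frontswap.c, swap_state.c, plus do_swap_page() in memory.c and __remove_mapping() in vmscan.c) are not parameters named val at all: swp_entry_t is a one-member struct whose unsigned long val packs the swap type and offset, so entry.val == 0 serves as the "no entry" sentinel and the value round-trips through page_private(). A small illustrative helper; the function name is invented:

#include <linux/mm.h>
#include <linux/swap.h>

/*
 * Illustrative only: mirrors the pattern in the swap hits above. A
 * swap-cache page keeps its swap entry in page->private; rebuilding the
 * swp_entry_t is just wrapping that word, and val == 0 means "no entry".
 */
static bool __maybe_unused example_page_has_swap_entry(struct page *page)
{
	swp_entry_t entry = { .val = page_private(page) };

	return entry.val != 0;
}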
