Searched refs:regions (Results 1 – 5 of 5) sorted by relevance

/mm/damon/
vaddr-test.h
75 struct damon_addr_range regions[3] = {0,}; in damon_test_three_regions_in_vmas() local
88 __damon_va_three_regions(&vmas[0], regions); in damon_test_three_regions_in_vmas()
90 KUNIT_EXPECT_EQ(test, 10ul, regions[0].start); in damon_test_three_regions_in_vmas()
91 KUNIT_EXPECT_EQ(test, 25ul, regions[0].end); in damon_test_three_regions_in_vmas()
92 KUNIT_EXPECT_EQ(test, 200ul, regions[1].start); in damon_test_three_regions_in_vmas()
93 KUNIT_EXPECT_EQ(test, 220ul, regions[1].end); in damon_test_three_regions_in_vmas()
94 KUNIT_EXPECT_EQ(test, 300ul, regions[2].start); in damon_test_three_regions_in_vmas()
95 KUNIT_EXPECT_EQ(test, 330ul, regions[2].end); in damon_test_three_regions_in_vmas()
134 unsigned long *regions, int nr_regions, in damon_do_test_apply_three_regions() argument
144 r = damon_new_region(regions[i * 2], regions[i * 2 + 1]); in damon_do_test_apply_three_regions()
[all …]
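
The expected values above follow from the fake VMA layout that the test builds earlier in vaddr-test.h, which is not visible in this excerpt. Assuming mappings spanning 10-330 whose two largest unmapped gaps are [25, 200) and [220, 300), __damon_va_three_regions() should report exactly the three regions the KUNIT_EXPECT_EQ lines check. A minimal userspace sketch of that gap-finding idea (hypothetical helper and type names, not the kernel's):

#include <assert.h>

/* Hypothetical stand-in for the kernel's address-range type. */
struct range { unsigned long start, end; };

/*
 * Find the two largest gaps between consecutive (sorted) mappings and
 * return the three covered spans that remain between them.
 */
static void three_regions(const struct range *vmas, int nr, struct range out[3])
{
	struct range gap1 = { 0, 0 }, gap2 = { 0, 0 };

	for (int i = 0; i < nr - 1; i++) {
		unsigned long sz = vmas[i + 1].start - vmas[i].end;

		if (sz > gap1.end - gap1.start) {
			gap2 = gap1;
			gap1 = (struct range){ vmas[i].end, vmas[i + 1].start };
		} else if (sz > gap2.end - gap2.start) {
			gap2 = (struct range){ vmas[i].end, vmas[i + 1].start };
		}
	}
	/* Keep the two gaps in address order before carving out the regions. */
	if (gap1.start > gap2.start) {
		struct range tmp = gap1; gap1 = gap2; gap2 = tmp;
	}
	out[0] = (struct range){ vmas[0].start, gap1.start };
	out[1] = (struct range){ gap1.end, gap2.start };
	out[2] = (struct range){ gap2.end, vmas[nr - 1].end };
}

int main(void)
{
	/* Assumed layout: the biggest holes sit at [25, 200) and [220, 300). */
	struct range vmas[] = {
		{ 10, 20 }, { 20, 25 }, { 200, 210 }, { 210, 220 },
		{ 300, 305 }, { 307, 330 },
	};
	struct range r[3];

	three_regions(vmas, 6, r);
	assert(r[0].start == 10  && r[0].end == 25);
	assert(r[1].start == 200 && r[1].end == 220);
	assert(r[2].start == 300 && r[2].end == 330);
	return 0;
}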
vaddr.c
117 struct damon_addr_range regions[3]) in __damon_va_three_regions()
157 regions[0].start = ALIGN(start, DAMON_MIN_REGION); in __damon_va_three_regions()
158 regions[0].end = ALIGN(first_gap.start, DAMON_MIN_REGION); in __damon_va_three_regions()
159 regions[1].start = ALIGN(first_gap.end, DAMON_MIN_REGION); in __damon_va_three_regions()
160 regions[1].end = ALIGN(second_gap.start, DAMON_MIN_REGION); in __damon_va_three_regions()
161 regions[2].start = ALIGN(second_gap.end, DAMON_MIN_REGION); in __damon_va_three_regions()
162 regions[2].end = ALIGN(last_vma->vm_end, DAMON_MIN_REGION); in __damon_va_three_regions()
173 struct damon_addr_range regions[3]) in damon_va_three_regions()
183 rc = __damon_va_three_regions(mm->mmap, regions); in damon_va_three_regions()
237 struct damon_addr_range regions[3]; in __damon_va_init_regions() local
[all …]
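
The vaddr.c hits show the construction step itself: once the two largest gaps (first_gap, second_gap) are known, three regions are carved out of the span from the first VMA's start to the last VMA's end, and every boundary is rounded with ALIGN(..., DAMON_MIN_REGION) so region edges stay on DAMON's minimum granularity. A reduced sketch of that rounding, with the kernel macro replaced by a local helper and DAMON_MIN_REGION assumed to be a page-sized power of two:

/* Assumed stand-in for DAMON_MIN_REGION (a page-sized kernel constant). */
#define MIN_REGION 4096UL

struct addr_range { unsigned long start, end; };

/* Round up to the next multiple of MIN_REGION, like the kernel's ALIGN(). */
static unsigned long align_up(unsigned long addr)
{
	return (addr + MIN_REGION - 1) & ~(MIN_REGION - 1);
}

/*
 * Build the three monitoring target regions from the overall span and the
 * two largest unmapped gaps, mirroring the assignments in the excerpt.
 */
static void build_three_regions(unsigned long start, unsigned long last_end,
				struct addr_range first_gap,
				struct addr_range second_gap,
				struct addr_range regions[3])
{
	regions[0].start = align_up(start);
	regions[0].end   = align_up(first_gap.start);
	regions[1].start = align_up(first_gap.end);
	regions[1].end   = align_up(second_gap.start);
	regions[2].start = align_up(second_gap.end);
	regions[2].end   = align_up(last_end);
}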
/mm/
memblock.c
112 .memory.regions = memblock_memory_init_regions,
117 .reserved.regions = memblock_reserved_init_regions,
128 .regions = memblock_physmem_init_regions,
144 for (i = 0, rgn = &memblock_type->regions[0]; \
146 i++, rgn = &memblock_type->regions[i])
188 if (memblock_addrs_overlap(base, size, type->regions[i].base, in memblock_overlaps_region()
189 type->regions[i].size)) in memblock_overlaps_region()
341 type->total_size -= type->regions[r].size; in memblock_remove_region()
342 memmove(&type->regions[r], &type->regions[r + 1], in memblock_remove_region()
343 (type->cnt - (r + 1)) * sizeof(type->regions[r])); in memblock_remove_region()
[all …]
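
memblock stores each type's regions in a flat array (type->regions) together with a count and a running total_size. The memblock_remove_region() hit (lines 341-343) shows the removal pattern: subtract the entry's size, then memmove() the tail of the array down one slot to close the hole. A self-contained sketch of that bookkeeping, using simplified stand-ins rather than the kernel structures:

#include <string.h>

struct mb_region {
	unsigned long base;
	unsigned long size;
};

struct mb_type {
	unsigned long cnt;          /* number of populated entries   */
	unsigned long total_size;   /* sum of all region sizes       */
	struct mb_region *regions;  /* flat, sorted array of regions */
};

/* Remove entry r by shifting the tail of the array down one slot. */
static void remove_region(struct mb_type *type, unsigned long r)
{
	type->total_size -= type->regions[r].size;
	memmove(&type->regions[r], &type->regions[r + 1],
		(type->cnt - (r + 1)) * sizeof(type->regions[r]));
	type->cnt--;
}

/* Walk every region, in the spirit of the for_each_* helper in the hits. */
#define for_each_region(i, rgn, type)				\
	for (i = 0, rgn = &(type)->regions[0];			\
	     i < (type)->cnt;					\
	     i++, rgn = &(type)->regions[i])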
hugetlb.c
340 if (&prg->link != &resv->regions && prg->to == rg->from && in coalesce_file_region()
352 if (&nrg->link != &resv->regions && nrg->from == rg->to && in coalesce_file_region()
393 struct list_head *head = &resv->regions; in add_reservation_in_range()
647 struct list_head *head = &resv->regions; in region_del()
774 struct list_head *head = &resv->regions; in region_count()
911 INIT_LIST_HEAD(&resv_map->regions); in resv_map_alloc()
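
In hugetlb.c, resv->regions heads a list of file_region entries, each describing a reserved [from, to) span of a mapping; coalesce_file_region() (lines 340 and 352) merges a freshly added entry with a neighbour whose boundary touches it, and region_del()/region_count() walk the same list. A simplified userspace analogue follows, using a singly linked, address-ordered list in place of the kernel's list_head and merging only forward (the kernel variant also merges with the previous entry):

#include <stdlib.h>

/* Simplified stand-in for struct file_region: a reserved [from, to) span. */
struct file_region {
	long from, to;
	struct file_region *next;
};

struct resv_map {
	struct file_region *regions;  /* address-ordered list of reservations */
};

/* Merge rg with each successor whose range starts where rg ends. */
static void coalesce_file_region(struct file_region *rg)
{
	struct file_region *nrg = rg->next;

	while (nrg && rg->to == nrg->from) {
		rg->to = nrg->to;
		rg->next = nrg->next;
		free(nrg);
		nrg = rg->next;
	}
}

/* Count how many pages of [f, t) are covered by reservations. */
static long region_count(struct resv_map *resv, long f, long t)
{
	long chg = 0;

	for (struct file_region *rg = resv->regions; rg; rg = rg->next) {
		if (rg->to <= f)
			continue;
		if (rg->from >= t)
			break;
		chg += (rg->to < t ? rg->to : t) - (rg->from > f ? rg->from : f);
	}
	return chg;
}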
Kconfig
141 determines what happens to newly added memory regions. Policy setting
798 or other device driver discovered memory regions, in the