Lines matching references to mm in the drm_mm range-manager selftests
54 static bool assert_no_holes(const struct drm_mm *mm) in assert_no_holes() argument
61 drm_mm_for_each_hole(hole, mm, hole_start, hole_end) in assert_no_holes()
68 drm_mm_for_each_node(hole, mm) { in assert_no_holes()
78 static bool assert_one_hole(const struct drm_mm *mm, u64 start, u64 end) in assert_one_hole() argument
89 drm_mm_for_each_hole(hole, mm, hole_start, hole_end) { in assert_one_hole()
107 static bool assert_continuous(const struct drm_mm *mm, u64 size) in assert_continuous() argument
113 if (!assert_no_holes(mm)) in assert_continuous()
118 drm_mm_for_each_node(node, mm) { in assert_continuous()
137 drm_mm_for_each_node_in_range(check, mm, addr, addr + size) { in assert_continuous()
169 static bool assert_node(struct drm_mm_node *node, struct drm_mm *mm, in assert_node() argument
174 if (!drm_mm_node_allocated(node) || node->mm != mm) { in assert_node()
200 #define show_mm(mm) do { \ argument
202 drm_mm_print((mm), &__p); } while (0)
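The assert_* helpers and show_mm() above inspect allocator state by walking holes and nodes and dumping the manager through a drm_printer. A minimal sketch of that inspection pattern, assuming only the public drm_mm/drm_print API (the helper name and messages are illustrative, not taken from the selftests):

```c
#include <drm/drm_mm.h>
#include <drm/drm_print.h>

/* Return true if the manager currently has no free holes; on failure,
 * dump the whole allocator state for debugging. */
static bool sketch_check_no_holes(const struct drm_mm *mm)
{
	struct drm_mm_node *hole;
	u64 hole_start, hole_end;

	drm_mm_for_each_hole(hole, mm, hole_start, hole_end) {
		struct drm_printer p = drm_debug_printer(__func__);

		pr_err("unexpected hole [%llx, %llx]\n", hole_start, hole_end);
		drm_mm_print(mm, &p);
		return false;
	}

	return true;
}
```
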
207 struct drm_mm mm; in igt_init() local
212 memset(&mm, 0, sizeof(mm)); in igt_init()
213 if (drm_mm_initialized(&mm)) { in igt_init()
218 memset(&mm, 0xff, sizeof(mm)); in igt_init()
219 drm_mm_init(&mm, 0, size); in igt_init()
220 if (!drm_mm_initialized(&mm)) { in igt_init()
225 if (!drm_mm_clean(&mm)) { in igt_init()
231 if (!assert_one_hole(&mm, 0, size)) { in igt_init()
239 ret = drm_mm_reserve_node(&mm, &tmp); in igt_init()
246 if (!assert_no_holes(&mm)) { in igt_init()
253 if (!assert_one_hole(&mm, 0, size)) { in igt_init()
260 show_mm(&mm); in igt_init()
261 drm_mm_takedown(&mm); in igt_init()
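igt_init() covers the basic lifecycle seen in the lines above: drm_mm_init() leaves a single hole spanning the whole range, reserving a node over that range consumes it, and drm_mm_takedown() expects the manager to be empty again. A condensed, hedged sketch of that lifecycle (the 4096-byte size is arbitrary):

```c
#include <drm/drm_mm.h>

static int sketch_init_lifecycle(void)
{
	struct drm_mm mm;
	struct drm_mm_node node = {};
	int err;

	drm_mm_init(&mm, 0, 4096);		/* single hole: [0, 4096) */

	node.start = 0;
	node.size = 4096;
	err = drm_mm_reserve_node(&mm, &node);	/* claim the whole range */
	if (!err) {
		/* ... use the reservation ... */
		drm_mm_remove_node(&node);	/* hole [0, 4096) reappears */
	}

	drm_mm_takedown(&mm);			/* manager must be empty here */
	return err;
}
```
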
267 struct drm_mm mm; in igt_debug() local
275 drm_mm_init(&mm, 0, 4096); in igt_debug()
280 ret = drm_mm_reserve_node(&mm, &nodes[0]); in igt_debug()
289 ret = drm_mm_reserve_node(&mm, &nodes[1]); in igt_debug()
296 show_mm(&mm); in igt_debug()
308 static bool expect_reserve_fail(struct drm_mm *mm, struct drm_mm_node *node) in expect_reserve_fail() argument
312 err = drm_mm_reserve_node(mm, node); in expect_reserve_fail()
327 static bool check_reserve_boundaries(struct drm_mm *mm, in check_reserve_boundaries() argument
359 if (!expect_reserve_fail(mm, in check_reserve_boundaries()
375 struct drm_mm mm; in __igt_reserve() local
399 drm_mm_init(&mm, 0, count * size); in __igt_reserve()
401 if (!check_reserve_boundaries(&mm, count, size)) in __igt_reserve()
408 err = drm_mm_reserve_node(&mm, &nodes[n]); in __igt_reserve()
422 if (!expect_reserve_fail(&mm, &nodes[n])) in __igt_reserve()
427 if (!assert_continuous(&mm, size)) in __igt_reserve()
433 if (!expect_reserve_fail(&mm, in __igt_reserve()
439 err = drm_mm_reserve_node(&mm, &nodes[order[n]]); in __igt_reserve()
448 if (!assert_continuous(&mm, size)) in __igt_reserve()
453 if (!expect_reserve_fail(&mm, set_node(&tmp, 0, size*count))) in __igt_reserve()
457 if (!expect_reserve_fail(&mm, in __igt_reserve()
473 err = drm_mm_reserve_node(&mm, node); in __igt_reserve()
484 if (!assert_continuous(&mm, size)) in __igt_reserve()
490 drm_mm_for_each_node_safe(node, next, &mm) in __igt_reserve()
492 drm_mm_takedown(&mm); in __igt_reserve()
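__igt_reserve() and expect_reserve_fail() rely on drm_mm_reserve_node() taking node->start/node->size as given and returning -ENOSPC when that exact range is already occupied. A small illustrative sketch of the overlap case (the two nodes below are hypothetical and assume the manager covers at least [0, 6144)):

```c
#include <drm/drm_mm.h>

/* Assumes @mm was initialised to cover at least [0, 6144). */
static void sketch_reserve_overlap(struct drm_mm *mm)
{
	struct drm_mm_node a = { .start = 0,    .size = 4096 };
	struct drm_mm_node b = { .start = 2048, .size = 4096 };

	/* Reserving a free range succeeds... */
	WARN_ON(drm_mm_reserve_node(mm, &a));

	/* ...but b overlaps a, so this is expected to return -ENOSPC. */
	WARN_ON(drm_mm_reserve_node(mm, &b) != -ENOSPC);

	drm_mm_remove_node(&a);
}
```
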
526 static bool expect_insert(struct drm_mm *mm, struct drm_mm_node *node, in expect_insert() argument
532 err = drm_mm_insert_node_generic(mm, node, in expect_insert()
541 if (!assert_node(node, mm, size, alignment, color)) { in expect_insert()
549 static bool expect_insert_fail(struct drm_mm *mm, u64 size) in expect_insert_fail() argument
554 err = drm_mm_insert_node(mm, &tmp, size); in expect_insert_fail()
573 struct drm_mm mm; in __igt_insert() local
593 drm_mm_init(&mm, 0, count * size); in __igt_insert()
601 if (!expect_insert(&mm, node, size, 0, n, mode)) { in __igt_insert()
615 if (!assert_node(&nodes[n], &mm, size, 0, n)) { in __igt_insert()
631 if (!assert_continuous(&mm, size)) in __igt_insert()
635 if (!expect_insert_fail(&mm, size)) in __igt_insert()
643 if (!expect_insert(&mm, &nodes[n], size, 0, n, mode)) { in __igt_insert()
655 if (!assert_continuous(&mm, size)) in __igt_insert()
668 if (!expect_insert(&mm, node, size, 0, n, mode)) { in __igt_insert()
677 if (!assert_continuous(&mm, size)) in __igt_insert()
680 if (!expect_insert_fail(&mm, size)) in __igt_insert()
684 drm_mm_for_each_node_safe(node, next, &mm) in __igt_insert()
686 DRM_MM_BUG_ON(!drm_mm_clean(&mm)); in __igt_insert()
693 drm_mm_for_each_node_safe(node, next, &mm) in __igt_insert()
695 drm_mm_takedown(&mm); in __igt_insert()
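__igt_insert() drives drm_mm_insert_node_generic(), where the allocator chooses the offset from size, alignment, color and an insert mode. A minimal sketch of one such insertion against an already-initialised manager (the size/alignment values are illustrative):

```c
#include <drm/drm_mm.h>

static int sketch_insert(struct drm_mm *mm, struct drm_mm_node *node)
{
	int err;

	/* Let the allocator pick the offset: 8 KiB, 4 KiB aligned, color 0. */
	err = drm_mm_insert_node_generic(mm, node, 8192, 4096, 0,
					 DRM_MM_INSERT_BEST);
	if (err)
		return err;	/* -ENOSPC when no suitable hole remains */

	pr_debug("placed at [%llx + %llx]\n", node->start, node->size);
	return 0;
}
```
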
763 static bool expect_insert_in_range(struct drm_mm *mm, struct drm_mm_node *node, in expect_insert_in_range() argument
770 err = drm_mm_insert_node_in_range(mm, node, in expect_insert_in_range()
781 if (!assert_node(node, mm, size, alignment, color)) { in expect_insert_in_range()
789 static bool expect_insert_in_range_fail(struct drm_mm *mm, in expect_insert_in_range_fail() argument
797 err = drm_mm_insert_node_in_range(mm, &tmp, in expect_insert_in_range_fail()
816 static bool assert_contiguous_in_range(struct drm_mm *mm, in assert_contiguous_in_range() argument
824 if (!expect_insert_in_range_fail(mm, size, start, end)) in assert_contiguous_in_range()
828 drm_mm_for_each_node(node, mm) { in assert_contiguous_in_range()
857 node = __drm_mm_interval_first(mm, 0, start - 1); in assert_contiguous_in_range()
866 node = __drm_mm_interval_first(mm, end, U64_MAX); in assert_contiguous_in_range()
880 struct drm_mm mm; in __igt_insert_range() local
899 drm_mm_init(&mm, 0, count * size); in __igt_insert_range()
906 if (!expect_insert_in_range(&mm, &nodes[n], in __igt_insert_range()
917 if (!assert_contiguous_in_range(&mm, size, start, end)) { in __igt_insert_range()
928 if (!expect_insert_in_range(&mm, &nodes[n], in __igt_insert_range()
942 if (!assert_contiguous_in_range(&mm, size, start, end)) { in __igt_insert_range()
948 drm_mm_for_each_node_safe(node, next, &mm) in __igt_insert_range()
950 DRM_MM_BUG_ON(!drm_mm_clean(&mm)); in __igt_insert_range()
957 drm_mm_for_each_node_safe(node, next, &mm) in __igt_insert_range()
959 drm_mm_takedown(&mm); in __igt_insert_range()
967 struct drm_mm mm; in insert_outside_range() local
972 drm_mm_init(&mm, start, size); in insert_outside_range()
974 if (!expect_insert_in_range_fail(&mm, 1, 0, start)) in insert_outside_range()
977 if (!expect_insert_in_range_fail(&mm, size, in insert_outside_range()
981 if (!expect_insert_in_range_fail(&mm, size, in insert_outside_range()
985 if (!expect_insert_in_range_fail(&mm, 1, end, end + size)) in insert_outside_range()
988 drm_mm_takedown(&mm); in insert_outside_range()
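__igt_insert_range() and insert_outside_range() exercise drm_mm_insert_node_in_range(), which limits the search to a [start, end) window and fails with -ENOSPC when the request cannot be placed inside it, even if free space exists elsewhere. A hedged sketch (the window bounds are illustrative):

```c
#include <linux/sizes.h>
#include <drm/drm_mm.h>

static int sketch_insert_in_window(struct drm_mm *mm, struct drm_mm_node *node)
{
	/* Only holes inside [1 MiB, 2 MiB) are considered. */
	return drm_mm_insert_node_in_range(mm, node, 4096, 0, 0,
					   SZ_1M, 2 * SZ_1M,
					   DRM_MM_INSERT_BEST);
}
```
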
1037 static int prepare_igt_frag(struct drm_mm *mm, in prepare_igt_frag() argument
1046 if (!expect_insert(mm, &nodes[i], size, 0, i, in prepare_igt_frag()
1063 static u64 get_insert_time(struct drm_mm *mm, in get_insert_time() argument
1074 if (!expect_insert(mm, &nodes[i], size, 0, i, mode) != 0) { in get_insert_time()
1085 struct drm_mm mm; in igt_frag() local
1106 drm_mm_init(&mm, 1, U64_MAX - 2); in igt_frag()
1114 ret = prepare_igt_frag(&mm, nodes, insert_size, mode); in igt_frag()
1118 insert_time1 = get_insert_time(&mm, insert_size, in igt_frag()
1123 insert_time2 = get_insert_time(&mm, (insert_size * 2), in igt_frag()
1139 drm_mm_for_each_node_safe(node, next, &mm) in igt_frag()
1145 drm_mm_for_each_node_safe(node, next, &mm) in igt_frag()
1147 drm_mm_takedown(&mm); in igt_frag()
1157 struct drm_mm mm; in igt_align() local
1171 drm_mm_init(&mm, 1, U64_MAX - 2); in igt_align()
1179 if (!expect_insert(&mm, &nodes[i], in igt_align()
1190 drm_mm_for_each_node_safe(node, next, &mm) in igt_align()
1192 DRM_MM_BUG_ON(!drm_mm_clean(&mm)); in igt_align()
1199 drm_mm_for_each_node_safe(node, next, &mm) in igt_align()
1201 drm_mm_takedown(&mm); in igt_align()
1209 struct drm_mm mm; in igt_align_pot() local
1216 drm_mm_init(&mm, 1, U64_MAX - 2); in igt_align_pot()
1229 if (!expect_insert(&mm, node, in igt_align_pot()
1242 drm_mm_for_each_node_safe(node, next, &mm) { in igt_align_pot()
1246 drm_mm_takedown(&mm); in igt_align_pot()
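igt_align() and igt_align_pot() stress the alignment argument of the insert calls on a manager deliberately initialised as drm_mm_init(&mm, 1, U64_MAX - 2), so that conveniently aligned offsets are not free by accident. A tiny hedged sketch of an aligned insertion (the alignment value is illustrative):

```c
#include <drm/drm_mm.h>

static int sketch_aligned_insert(struct drm_mm *mm, struct drm_mm_node *node)
{
	/* 1-byte allocation whose offset must be a multiple of 1 << 20. */
	return drm_mm_insert_node_generic(mm, node, 1, 1ull << 20, 0,
					  DRM_MM_INSERT_BEST);
}
```
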
1267 static void show_holes(const struct drm_mm *mm, int count) in show_holes() argument
1272 drm_mm_for_each_hole(hole, mm, hole_start, hole_end) { in show_holes()
1351 static bool evict_nothing(struct drm_mm *mm, in evict_nothing() argument
1361 drm_mm_scan_init(&scan, mm, 1, 0, 0, 0); in evict_nothing()
1381 drm_mm_for_each_node(node, mm) { in evict_nothing()
1395 return assert_continuous(mm, nodes[0].node.size); in evict_nothing()
1398 static bool evict_everything(struct drm_mm *mm, in evict_everything() argument
1408 drm_mm_scan_init(&scan, mm, total_size, 0, 0, 0); in evict_everything()
1432 if (!assert_one_hole(mm, 0, total_size)) in evict_everything()
1436 err = drm_mm_reserve_node(mm, &e->node); in evict_everything()
1444 return assert_continuous(mm, nodes[0].node.size); in evict_everything()
1447 static int evict_something(struct drm_mm *mm, in evict_something() argument
1462 drm_mm_scan_init_with_range(&scan, mm, in evict_something()
1472 err = drm_mm_insert_node_generic(mm, &tmp, size, alignment, 0, in evict_something()
1478 show_holes(mm, 3); in evict_something()
1488 if (!assert_node(&tmp, mm, size, alignment, 0) || in evict_something()
1502 err = drm_mm_reserve_node(mm, &e->node); in evict_something()
1510 if (!assert_continuous(mm, nodes[0].node.size)) { in evict_something()
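evict_nothing(), evict_everything() and evict_something() drive the eviction scanner: seed a drm_mm_scan, feed candidate nodes with drm_mm_scan_add_block() until it reports a hole, hand every candidate back through drm_mm_scan_remove_block() in reverse order of addition, and evict only those for which it returns true before inserting into the freed hole. A condensed sketch of that protocol; struct sketch_vma and its lists are hypothetical driver-side bookkeeping, not part of drm_mm:

```c
#include <linux/list.h>
#include <drm/drm_mm.h>

struct sketch_vma {			/* hypothetical driver object */
	struct drm_mm_node node;
	struct list_head scan_link;
};

static int sketch_evict_for(struct drm_mm *mm, struct list_head *active,
			    struct drm_mm_node *out, u64 size)
{
	struct drm_mm_scan scan;
	struct sketch_vma *vma, *vn;
	LIST_HEAD(scanned);
	bool found = false;

	drm_mm_scan_init(&scan, mm, size, 0, 0, DRM_MM_INSERT_EVICT);

	/* Feed allocated nodes to the scanner until it reports a hole. */
	list_for_each_entry_safe(vma, vn, active, scan_link) {
		list_move(&vma->scan_link, &scanned);	/* LIFO bookkeeping */
		if (drm_mm_scan_add_block(&scan, &vma->node)) {
			found = true;
			break;
		}
	}

	/* Every scanned node must be handed back, newest first (the LIFO
	 * list gives reverse order of addition); only nodes for which
	 * drm_mm_scan_remove_block() returns true sit inside the hole. */
	list_for_each_entry_safe(vma, vn, &scanned, scan_link) {
		bool evict = drm_mm_scan_remove_block(&scan, &vma->node);

		list_move(&vma->scan_link, active);
		if (found && evict)
			drm_mm_remove_node(&vma->node);	/* a real driver would also unbind */
	}

	if (!found)
		return -ENOSPC;

	return drm_mm_insert_node_generic(mm, out, size, 0, 0,
					  DRM_MM_INSERT_EVICT);
}
```
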
1523 struct drm_mm mm; in igt_evict() local
1546 drm_mm_init(&mm, 0, size); in igt_evict()
1548 err = drm_mm_insert_node(&mm, &nodes[n].node, 1); in igt_evict()
1557 if (!evict_nothing(&mm, size, nodes)) { in igt_evict()
1561 if (!evict_everything(&mm, size, nodes)) { in igt_evict()
1569 err = evict_something(&mm, 0, U64_MAX, in igt_evict()
1583 err = evict_something(&mm, 0, U64_MAX, in igt_evict()
1601 err = evict_something(&mm, 0, U64_MAX, in igt_evict()
1618 drm_mm_for_each_node_safe(node, next, &mm) in igt_evict()
1620 drm_mm_takedown(&mm); in igt_evict()
1636 struct drm_mm mm; in igt_evict_range() local
1656 drm_mm_init(&mm, 0, size); in igt_evict_range()
1658 err = drm_mm_insert_node(&mm, &nodes[n].node, 1); in igt_evict_range()
1669 err = evict_something(&mm, range_start, range_end, in igt_evict_range()
1682 err = evict_something(&mm, range_start, range_end, in igt_evict_range()
1699 err = evict_something(&mm, range_start, range_end, in igt_evict_range()
1715 drm_mm_for_each_node_safe(node, next, &mm) in igt_evict_range()
1717 drm_mm_takedown(&mm); in igt_evict_range()
1737 struct drm_mm mm; in igt_topdown() local
1762 drm_mm_init(&mm, 0, size*count); in igt_topdown()
1764 if (!expect_insert(&mm, &nodes[n], in igt_topdown()
1777 if (!assert_one_hole(&mm, 0, size*(count - n - 1))) in igt_topdown()
1781 if (!assert_continuous(&mm, size)) in igt_topdown()
1796 if (!expect_insert(&mm, node, in igt_topdown()
1824 drm_mm_for_each_node_safe(node, next, &mm) in igt_topdown()
1826 DRM_MM_BUG_ON(!drm_mm_clean(&mm)); in igt_topdown()
1832 drm_mm_for_each_node_safe(node, next, &mm) in igt_topdown()
1834 drm_mm_takedown(&mm); in igt_topdown()
1851 struct drm_mm mm; in igt_bottomup() local
1875 drm_mm_init(&mm, 0, size*count); in igt_bottomup()
1877 if (!expect_insert(&mm, &nodes[n], in igt_bottomup()
1884 if (!assert_one_hole(&mm, size*(n + 1), size*count)) in igt_bottomup()
1888 if (!assert_continuous(&mm, size)) in igt_bottomup()
1903 if (!expect_insert(&mm, node, in igt_bottomup()
1924 drm_mm_for_each_node_safe(node, next, &mm) in igt_bottomup()
1926 DRM_MM_BUG_ON(!drm_mm_clean(&mm)); in igt_bottomup()
1932 drm_mm_for_each_node_safe(node, next, &mm) in igt_bottomup()
1934 drm_mm_takedown(&mm); in igt_bottomup()
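igt_topdown() and igt_bottomup() differ only in the insert mode: DRM_MM_INSERT_HIGH packs allocations toward the top of the managed range, DRM_MM_INSERT_LOW toward the bottom. A tiny hedged sketch of the two calls (sizes are illustrative):

```c
#include <drm/drm_mm.h>

static int sketch_directional_insert(struct drm_mm *mm,
				     struct drm_mm_node *hi,
				     struct drm_mm_node *lo)
{
	int err;

	/* Allocate from the highest suitable hole downwards. */
	err = drm_mm_insert_node_generic(mm, hi, 4096, 0, 0,
					 DRM_MM_INSERT_HIGH);
	if (err)
		return err;

	/* Allocate from the lowest suitable hole upwards. */
	return drm_mm_insert_node_generic(mm, lo, 4096, 0, 0,
					  DRM_MM_INSERT_LOW);
}
```
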
1946 struct drm_mm mm; in __igt_once() local
1950 drm_mm_init(&mm, 0, 7); in __igt_once()
1955 err = drm_mm_reserve_node(&mm, &rsvd_lo); in __igt_once()
1964 err = drm_mm_reserve_node(&mm, &rsvd_hi); in __igt_once()
1977 err = drm_mm_insert_node_generic(&mm, &node, 2, 0, 0, mode); in __igt_once()
1990 drm_mm_takedown(&mm); in __igt_once()
2036 struct drm_mm mm; in igt_color() local
2048 drm_mm_init(&mm, 0, U64_MAX); in igt_color()
2057 if (!expect_insert(&mm, node, in igt_color()
2066 drm_mm_for_each_node_safe(node, nn, &mm) { in igt_color()
2079 mm.color_adjust = separate_adjacent_colors; in igt_color()
2092 err = drm_mm_reserve_node(&mm, node); in igt_color()
2114 err = drm_mm_reserve_node(&mm, node); in igt_color()
2125 err = drm_mm_reserve_node(&mm, node); in igt_color()
2142 if (!expect_insert(&mm, node, in igt_color()
2152 drm_mm_for_each_node_safe(node, nn, &mm) { in igt_color()
2181 drm_mm_for_each_node_safe(node, nn, &mm) { in igt_color()
2185 drm_mm_takedown(&mm); in igt_color()
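igt_color() installs mm.color_adjust, a callback the allocator invokes on the nodes bordering a candidate hole so that the usable range can be shrunk when neighbouring colors differ. The sketch below follows the color_adjust signature and mirrors the guard-unit idea behind the selftest's separate_adjacent_colors(); treat it as illustrative rather than a verbatim copy:

```c
#include <linux/list.h>
#include <drm/drm_mm.h>

/* Shrink the candidate hole by one unit next to any neighbour with a
 * different color, so differently colored allocations never touch. */
static void sketch_color_adjust(const struct drm_mm_node *node,
				unsigned long color,
				u64 *start, u64 *end)
{
	const struct drm_mm_node *next = list_next_entry(node, node_list);

	if (drm_mm_node_allocated(node) && node->color != color)
		++*start;

	if (drm_mm_node_allocated(next) && next->color != color)
		--*end;
}

/* Installed once, right after drm_mm_init():
 *	mm.color_adjust = sketch_color_adjust;
 */
```
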
2189 static int evict_color(struct drm_mm *mm, in evict_color() argument
2205 drm_mm_scan_init_with_range(&scan, mm, in evict_color()
2215 err = drm_mm_insert_node_generic(mm, &tmp, size, alignment, color, in evict_color()
2221 show_holes(mm, 3); in evict_color()
2234 if (!assert_node(&tmp, mm, size, alignment, color)) { in evict_color()
2246 err = drm_mm_reserve_node(mm, &e->node); in evict_color()
2264 struct drm_mm mm; in igt_color_evict() local
2286 drm_mm_init(&mm, 0, 2*total_size - 1); in igt_color_evict()
2287 mm.color_adjust = separate_adjacent_colors; in igt_color_evict()
2289 if (!expect_insert(&mm, &nodes[n].node, in igt_color_evict()
2300 err = evict_color(&mm, 0, U64_MAX, in igt_color_evict()
2313 err = evict_color(&mm, 0, U64_MAX, in igt_color_evict()
2330 err = evict_color(&mm, 0, U64_MAX, in igt_color_evict()
2347 show_mm(&mm); in igt_color_evict()
2348 drm_mm_for_each_node_safe(node, next, &mm) in igt_color_evict()
2350 drm_mm_takedown(&mm); in igt_color_evict()
2367 struct drm_mm mm; in igt_color_evict_range() local
2387 drm_mm_init(&mm, 0, 2*total_size - 1); in igt_color_evict_range()
2388 mm.color_adjust = separate_adjacent_colors; in igt_color_evict_range()
2390 if (!expect_insert(&mm, &nodes[n].node, in igt_color_evict_range()
2401 err = evict_color(&mm, range_start, range_end, in igt_color_evict_range()
2414 err = evict_color(&mm, range_start, range_end, in igt_color_evict_range()
2431 err = evict_color(&mm, range_start, range_end, in igt_color_evict_range()
2448 show_mm(&mm); in igt_color_evict_range()
2449 drm_mm_for_each_node_safe(node, next, &mm) in igt_color_evict_range()
2451 drm_mm_takedown(&mm); in igt_color_evict_range()
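igt_color_evict() and igt_color_evict_range() combine coloring with a range-restricted eviction scan set up by drm_mm_scan_init_with_range(); once the scan has completed, drm_mm_scan_color_evict() can name one extra neighbour that must go purely to satisfy color_adjust. A hedged sketch of that setup (window, size and color values are illustrative; the add/remove loop is elided, see the eviction sketch above):

```c
#include <linux/sizes.h>
#include <drm/drm_mm.h>

static void sketch_color_scan_setup(struct drm_mm *mm)
{
	struct drm_mm_scan scan;
	struct drm_mm_node *extra;

	/* Look for 8 KiB of color 1 inside [0, 1 MiB), evicting if needed. */
	drm_mm_scan_init_with_range(&scan, mm, 8192, 0, 1,
				    0, SZ_1M, DRM_MM_INSERT_EVICT);

	/* ... feed and drain candidates exactly as in the eviction sketch ... */

	/* One extra neighbour may have to go purely because of color_adjust. */
	extra = drm_mm_scan_color_evict(&scan);
	if (extra)
		drm_mm_remove_node(extra);
}
```
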