
Searched refs:drm_mm_node (Results 1 – 25 of 51) sorted by relevance


/drivers/gpu/drm/
drm_mm.c
107 static noinline void save_stack(struct drm_mm_node *node) in save_stack()
120 struct drm_mm_node *node; in show_leaks()
145 static void save_stack(struct drm_mm_node *node) { } in save_stack()
152 INTERVAL_TREE_DEFINE(struct drm_mm_node, rb, in INTERVAL_TREE_DEFINE() argument
156 struct drm_mm_node * in INTERVAL_TREE_DEFINE()
160 start, last) ?: (struct drm_mm_node *)&mm->head_node; in INTERVAL_TREE_DEFINE()
164 static void drm_mm_interval_tree_add_node(struct drm_mm_node *hole_node, in drm_mm_interval_tree_add_node()
165 struct drm_mm_node *node) in drm_mm_interval_tree_add_node()
169 struct drm_mm_node *parent; in drm_mm_interval_tree_add_node()
177 parent = rb_entry(rb, struct drm_mm_node, rb); in drm_mm_interval_tree_add_node()
[all …]
drm_vma_manager.c
144 struct drm_mm_node *node, *best; in drm_vma_offset_lookup_locked()
152 node = rb_entry(iter, struct drm_mm_node, rb); in drm_vma_offset_lookup_locked()
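The drm_mm.c hits above are the core range allocator that every other file in this listing builds on. For orientation, here is a minimal sketch of how a driver typically embeds a struct drm_mm_node and drives it through drm_mm_init(), drm_mm_insert_node_in_range(), drm_mm_remove_node() and drm_mm_takedown(); the example_* names, the 256 MiB range and the bounds are placeholders, not taken from any file listed here, and locking (drm_mm has none of its own) is left to the caller.

#include <linux/sizes.h>
#include <drm/drm_mm.h>

/* Hypothetical driver object embedding a node in the managed range. */
struct example_buffer {
	struct drm_mm_node node;
	/* ... driver-specific state ... */
};

static struct drm_mm example_mm;

static void example_init(void)
{
	/* Manage a 256 MiB address space starting at offset 0. */
	drm_mm_init(&example_mm, 0, SZ_256M);
}

static int example_alloc(struct example_buffer *buf, u64 size, u64 align)
{
	/* Best-fit search of the whole range for a hole of @size bytes. */
	return drm_mm_insert_node_in_range(&example_mm, &buf->node,
					   size, align, 0,
					   0, SZ_256M, DRM_MM_INSERT_BEST);
}

static void example_free(struct example_buffer *buf)
{
	if (drm_mm_node_allocated(&buf->node))
		drm_mm_remove_node(&buf->node);
}

static void example_fini(void)
{
	/* All nodes must be removed before tearing the manager down. */
	drm_mm_takedown(&example_mm);
}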
/drivers/gpu/drm/i915/gem/
i915_gem_stolen.h
12 struct drm_mm_node;
16 struct drm_mm_node *node, u64 size,
19 struct drm_mm_node *node, u64 size,
23 struct drm_mm_node *node);
i915_gem_stolen.c
41 struct drm_mm_node *node, u64 size, in i915_gem_stolen_insert_node_in_range()
63 struct drm_mm_node *node, u64 size, in i915_gem_stolen_insert_node()
73 struct drm_mm_node *node) in i915_gem_stolen_remove_node()
616 struct drm_mm_node *stolen = fetch_and_zero(&obj->stolen); in i915_gem_object_release_stolen()
634 struct drm_mm_node *stolen) in __i915_gem_object_create_stolen()
676 struct drm_mm_node *stolen; in _i915_gem_object_stolen_init()
i915_gem_object_types.h
658 struct drm_mm_node *stolen;
/drivers/gpu/drm/tests/
drm_mm_test.c
48 struct drm_mm_node *hole; in assert_no_holes()
73 struct drm_mm_node *hole; in assert_one_hole()
102 struct drm_mm_node *node, *check, *found; in assert_continuous()
151 static u64 misalignment(struct drm_mm_node *node, u64 alignment) in misalignment()
162 static bool assert_node(struct kunit *test, struct drm_mm_node *node, struct drm_mm *mm, in assert_node()
198 struct drm_mm_node tmp; in drm_test_mm_init()
251 struct drm_mm_node nodes[2]; in drm_test_mm_debug()
273 static struct drm_mm_node *set_node(struct drm_mm_node *node, in set_node()
281 static bool expect_reserve_fail(struct kunit *test, struct drm_mm *mm, struct drm_mm_node *node) in expect_reserve_fail()
329 struct drm_mm_node tmp = {}; in check_reserve_boundaries()
[all …]
/drivers/gpu/drm/i915/
i915_gem_evict.h
11 struct drm_mm_node;
24 struct drm_mm_node *node,
i915_gem_gtt.h
28 struct drm_mm_node *node,
34 struct drm_mm_node *node,
i915_vgpu.c
143 struct drm_mm_node space[4];
149 struct drm_mm_node *node) in vgt_deballoon_space()
187 struct drm_mm_node *node, in vgt_balloon_space()
i915_gem_evict.c
149 struct drm_mm_node *node; in i915_gem_evict_something()
313 struct drm_mm_node *target, in i915_gem_evict_for_node()
317 struct drm_mm_node *node; in i915_gem_evict_for_node()
i915_scatterlist.h
17 struct drm_mm_node;
223 struct i915_refct_sgt *i915_rsgt_from_mm_node(const struct drm_mm_node *node,
i915_gem_gtt.c
99 struct drm_mm_node *node, in i915_gem_gtt_reserve()
192 struct drm_mm_node *node, in i915_gem_gtt_insert()
i915_gem.c
64 insert_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node, u32 size) in insert_mappable_node()
84 remove_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node) in remove_mappable_node()
304 struct drm_mm_node *node, in i915_gem_gtt_prepare()
365 struct drm_mm_node *node, in i915_gem_gtt_cleanup()
387 struct drm_mm_node node; in i915_gem_gtt_pread()
544 struct drm_mm_node node; in i915_gem_gtt_pwrite_fast()
i915_vma_types.h
186 struct drm_mm_node node;
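Several of the i915 hits above (the vgpu balloon nodes in i915_vgpu.c, i915_gem_gtt_reserve(), the stolen-memory nodes) place a node at a caller-chosen offset rather than searching for a hole. A hedged sketch of that path via drm_mm_reserve_node(); the example_* helper and its parameters are illustrative, not i915's actual code.

#include <linux/string.h>
#include <drm/drm_mm.h>

/*
 * Reserve a node at a fixed offset, e.g. for a ballooned or firmware-
 * reserved range. The caller fills in start/size before calling
 * drm_mm_reserve_node(), which fails with -ENOSPC if that range is
 * already occupied.
 */
static int example_reserve_fixed(struct drm_mm *mm, struct drm_mm_node *node,
				 u64 start, u64 size)
{
	memset(node, 0, sizeof(*node));
	node->start = start;
	node->size = size;
	return drm_mm_reserve_node(mm, node);
}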
/drivers/gpu/drm/panfrost/
panfrost_gem.h
47 struct drm_mm_node mmnode;
59 drm_mm_node_to_panfrost_mapping(struct drm_mm_node *node) in drm_mm_node_to_panfrost_mapping()
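panfrost's drm_mm_node_to_panfrost_mapping() above is the usual container_of pattern for getting from an embedded node back to the driver object that owns it. A minimal sketch of the same idea with hypothetical names:

#include <linux/container_of.h>
#include <drm/drm_mm.h>

/* Hypothetical mapping object that embeds its drm_mm_node. */
struct example_mapping {
	struct drm_mm_node mmnode;
	/* ... */
};

static inline struct example_mapping *
example_node_to_mapping(struct drm_mm_node *node)
{
	/* Recover the containing object from a pointer to the embedded node. */
	return container_of(node, struct example_mapping, mmnode);
}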
/drivers/gpu/drm/etnaviv/
etnaviv_mmu.c
152 struct drm_mm_node *node, size_t size) in etnaviv_iommu_find_iova()
234 struct drm_mm_node *node, size_t size, u64 va) in etnaviv_iommu_insert_exact()
237 struct drm_mm_node *scan_node; in etnaviv_iommu_insert_exact()
283 struct drm_mm_node *node; in etnaviv_iommu_map_gem()
433 struct drm_mm_node *node = &mapping->vram_node; in etnaviv_iommu_get_suballoc_va()
465 struct drm_mm_node *node = &mapping->vram_node; in etnaviv_iommu_put_suballoc_va()
etnaviv_gem.h
29 struct drm_mm_node vram_node;
/drivers/gpu/drm/i915/selftests/
i915_gem_evict.c
193 struct drm_mm_node target = { in igt_evict_for_vma()
233 static void mock_color_adjust(const struct drm_mm_node *node, in mock_color_adjust()
245 struct drm_mm_node target = { in igt_evict_for_cache_color()
383 struct drm_mm_node node; in igt_evict_contexts()
387 struct drm_mm_node hole; in igt_evict_contexts()
/drivers/gpu/drm/amd/amdgpu/
amdgpu_res_cursor.h
60 struct drm_mm_node *node; in amdgpu_res_first()
128 struct drm_mm_node *node; in amdgpu_res_next()
/drivers/gpu/drm/ttm/
ttm_range_manager.c
121 struct drm_mm_node *node = &to_ttm_range_mgr_node(res)->mm_nodes[0]; in ttm_range_man_intersects()
137 struct drm_mm_node *node = &to_ttm_range_mgr_node(res)->mm_nodes[0]; in ttm_range_man_compatible()
/drivers/gpu/drm/rockchip/
rockchip_drm_gem.h
22 struct drm_mm_node mm;
/drivers/gpu/drm/armada/
armada_gem.h
17 struct drm_mm_node *linear; /* for linear backed */
/drivers/gpu/drm/msm/
msm_gem.h
61 struct drm_mm_node node;
116 struct drm_mm_node *vram_node;
/drivers/gpu/drm/tegra/
gem.h
43 struct drm_mm_node *mm;
/drivers/gpu/drm/vc4/
vc4_drv.h
337 struct drm_mm_node mitchell_netravali_filter;
398 struct drm_mm_node lbm;
551 struct drm_mm_node mm;
