Searched refs:mm_node (Results 1 – 15 of 15) sorted by relevance
  45  struct drm_mm_node mm_node;  member
 111  &item->mm_node,  in sis_drm_alloc()
 114  offset = item->mm_node.start;  in sis_drm_alloc()
 124  &item->mm_node,  in sis_drm_alloc()
 127  offset = item->mm_node.start;  in sis_drm_alloc()
 150  drm_mm_remove_node(&item->mm_node);  in sis_drm_alloc()
 180  if (drm_mm_node_allocated(&obj->mm_node))  in sis_drm_free()
 181  drm_mm_remove_node(&obj->mm_node);  in sis_drm_free()
 338  if (drm_mm_node_allocated(&entry->mm_node))  in sis_reclaim_buffers_locked()
 339  drm_mm_remove_node(&entry->mm_node);  in sis_reclaim_buffers_locked()
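The SiS hits above all follow one pattern: the driver embeds a struct drm_mm_node in its own allocation record, lets drm_mm place it, reads the chosen offset back from .start, and removes the node on free. Below is a minimal sketch of that pattern, not the actual sis_mm.c code: the sis_item_sketch name is made up, and the three-argument drm_mm_insert_node() shown is the current-kernel form (older trees also take alignment/search-flag arguments).

    #include <drm/drm_mm.h>

    /* hypothetical allocation record, matching the "member" hit above */
    struct sis_item_sketch {
            struct drm_mm_node mm_node;
    };

    static int sketch_alloc(struct drm_mm *mm, struct sis_item_sketch *item,
                            u64 size, u64 *offset)
    {
            /* ask drm_mm for a free range of 'size' units, tracked by mm_node */
            int ret = drm_mm_insert_node(mm, &item->mm_node, size);
            if (ret)
                    return ret;

            /* the start of the range is the offset handed back to the caller */
            *offset = item->mm_node.start;
            return 0;
    }

    static void sketch_free(struct sis_item_sketch *item)
    {
            /* as in the sis_drm_free() hits: only remove nodes that were inserted */
            if (drm_mm_node_allocated(&item->mm_node))
                    drm_mm_remove_node(&item->mm_node);
    }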
  71  nvkm_mem_node_cleanup(mem->mm_node);  in nouveau_vram_manager_del()
  72  ram->func->put(ram, (struct nvkm_mem **)&mem->mm_node);  in nouveau_vram_manager_del()
  98  mem->mm_node = NULL;  in nouveau_vram_manager_new()
 104  mem->mm_node = node;  in nouveau_vram_manager_new()
 132  nvkm_mem_node_cleanup(mem->mm_node);  in nouveau_gart_manager_del()
 133  kfree(mem->mm_node);  in nouveau_gart_manager_del()
 134  mem->mm_node = NULL;  in nouveau_gart_manager_del()
 176  mem->mm_node = node;  in nouveau_gart_manager_new()
 220  struct nvkm_mem *node = mem->mm_node;  in nv04_gart_manager_del()
 223  kfree(mem->mm_node);  in nv04_gart_manager_del()
[all …]
 323  struct nvkm_mem *mem = bo->mem.mm_node;  in nouveau_bo_pin()
 659  struct nvkm_mem *node = old_mem->mm_node;  in nve0_bo_move_copy()
 691  struct nvkm_mem *node = old_mem->mm_node;  in nvc0_bo_move_copy()
 729  struct nvkm_mem *node = old_mem->mm_node;  in nvc0_bo_move_m2mf()
 768  struct nvkm_mem *node = old_mem->mm_node;  in nva3_bo_move_copy()
 806  struct nvkm_mem *node = old_mem->mm_node;  in nv98_bo_move_exec()
 824  struct nvkm_mem *node = old_mem->mm_node;  in nv84_bo_move_exec()
 858  struct nvkm_mem *node = old_mem->mm_node;  in nv50_bo_move_m2mf()
 863  int dst_tiled = !!((struct nvkm_mem *)new_mem->mm_node)->memtype;  in nv50_bo_move_m2mf()
1000  struct nvkm_mem *old_node = bo->mem.mm_node;  in nouveau_bo_move_prep()
[all …]
  30  struct nvkm_mem *node = mem->mm_node;  in nv04_sgdma_bind()
  64  struct nvkm_mem *node = mem->mm_node;  in nv50_sgdma_bind()
  36  struct drm_mm_node mm_node;  member
 142  &item->mm_node,  in via_mem_alloc()
 146  &item->mm_node,  in via_mem_alloc()
 161  ((item->mm_node.start) << VIA_MM_ALIGN_SHIFT);  in via_mem_alloc()
 167  drm_mm_remove_node(&item->mm_node);  in via_mem_alloc()
 195  drm_mm_remove_node(&obj->mm_node);  in via_mem_free()
 229  drm_mm_remove_node(&entry->mm_node);  in via_reclaim_buffers_locked()
  86  mem->mm_node = node;  in ttm_bo_man_get_node()
  98  if (mem->mm_node) {  in ttm_bo_man_put_node()
 100  drm_mm_remove_node(mem->mm_node);  in ttm_bo_man_put_node()
 103  kfree(mem->mm_node);  in ttm_bo_man_put_node()
 104  mem->mm_node = NULL;  in ttm_bo_man_put_node()
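These ttm_bo_man_* hits are TTM's generic range manager, where mem->mm_node is an owning pointer: get_node kzallocs a drm_mm_node, inserts it into the manager's drm_mm, and parks it in mem->mm_node; put_node removes, frees, and clears it. A simplified sketch under the ttm_mem_reg layout these results use (locking, placement ranges, and alignment handling omitted, and not a drop-in replacement for the real functions):

    #include <drm/drm_mm.h>
    #include <drm/ttm/ttm_bo_api.h>
    #include <linux/slab.h>

    static int range_get_node_sketch(struct drm_mm *mm, struct ttm_mem_reg *mem)
    {
            struct drm_mm_node *node;
            int ret;

            node = kzalloc(sizeof(*node), GFP_KERNEL);
            if (!node)
                    return -ENOMEM;

            ret = drm_mm_insert_node(mm, node, mem->num_pages);
            if (ret) {
                    kfree(node);
                    return ret;
            }

            /* TTM now owns the range through mem->mm_node */
            mem->mm_node = node;
            mem->start = node->start;
            return 0;
    }

    static void range_put_node_sketch(struct ttm_mem_reg *mem)
    {
            if (mem->mm_node) {
                    drm_mm_remove_node(mem->mm_node);
                    kfree(mem->mm_node);
                    mem->mm_node = NULL;
            }
    }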
 146  BUG_ON(bo->mem.mm_node != NULL);  in ttm_bo_release_list()
 347  mem->mm_node = NULL;  in ttm_bo_handle_move_mem()
 387  if (bo->mem.mm_node) {  in ttm_bo_handle_move_mem()
 689  evict_mem.mm_node = NULL;  in ttm_bo_evict()
 783  if (mem->mm_node)  in ttm_bo_mem_put()
 835  if (mem->mm_node)  in ttm_bo_mem_force_space()
 917  mem->mm_node = NULL;  in ttm_bo_mem_space()
 951  if (mem->mm_node) {  in ttm_bo_mem_space()
 961  if ((type_ok && (mem_type == TTM_PL_SYSTEM)) || mem->mm_node) {  in ttm_bo_mem_space()
 992  mem->mm_node = NULL;  in ttm_bo_mem_space()
[all …]
  82  new_mem->mm_node = NULL;  in ttm_bo_move_ttm()
 414  new_mem->mm_node = NULL;  in ttm_bo_move_memcpy()
 703  new_mem->mm_node = NULL;  in ttm_bo_move_accel_cleanup()
 795  new_mem->mm_node = NULL;  in ttm_bo_pipeline_move()
  53  struct drm_mm_node *node = bo_mem->mm_node;  in ttm_agp_bind()
  99  struct drm_mm_node *node = mem->mm_node;  in amdgpu_gtt_mgr_alloc()
 171  mem->mm_node = node;  in amdgpu_gtt_mgr_new()
 177  mem->mm_node = NULL;  in amdgpu_gtt_mgr_new()
 200  struct drm_mm_node *node = mem->mm_node;  in amdgpu_gtt_mgr_del()
 212  mem->mm_node = NULL;  in amdgpu_gtt_mgr_del()
 258  BUG_ON(old_mem->mm_node != NULL);  in amdgpu_move_null()
 260  new_mem->mm_node = NULL;  in amdgpu_move_null()
 337  tmp_mem.mm_node = NULL;  in amdgpu_move_vram_ram()
 384  tmp_mem.mm_node = NULL;  in amdgpu_move_ram_vram()
  57  mem->mm_node = NULL;  in vmw_gmrid_man_get_node()
  84  mem->mm_node = gman;  in vmw_gmrid_man_get_node()
 107  if (mem->mm_node) {  in vmw_gmrid_man_put_node()
 112  mem->mm_node = NULL;  in vmw_gmrid_man_put_node()
 173  mem->mm_node = (void *)1;  in ttm_bo_man_get_node()
 180  mem->mm_node = (void *)NULL;  in ttm_bo_man_put_node()
 368  BUG_ON(old_mem->mm_node != NULL);  in virtio_gpu_move_null()
 370  new_mem->mm_node = NULL;  in virtio_gpu_move_null()
 343  BUG_ON(old_mem->mm_node != NULL);  in qxl_move_null()
 345  new_mem->mm_node = NULL;  in qxl_move_null()
 249  BUG_ON(old_mem->mm_node != NULL);  in radeon_move_null()
 251  new_mem->mm_node = NULL;  in radeon_move_null()
 323  tmp_mem.mm_node = NULL;  in radeon_move_vram_ram()
 370  tmp_mem.mm_node = NULL;  in radeon_move_ram_vram()
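The *_move_null() hits (amdgpu, virtio-gpu, qxl, radeon) share one idiom: when a move needs no data copy, the driver asserts the old placement owns no allocator node, copies the new placement over it, and clears new_mem->mm_node so the node ends up with exactly one owner. Roughly, following the shape of these helpers:

    #include <drm/ttm/ttm_bo_api.h>
    #include <linux/bug.h>

    static void move_null_sketch(struct ttm_buffer_object *bo,
                                 struct ttm_mem_reg *new_mem)
    {
            struct ttm_mem_reg *old_mem = &bo->mem;

            /* the old placement must not still own an mm_node */
            BUG_ON(old_mem->mm_node != NULL);

            /* adopt the new placement wholesale ... */
            *old_mem = *new_mem;
            /* ... then drop new_mem's reference so nothing is freed twice */
            new_mem->mm_node = NULL;
    }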