Cross-reference matches for the identifier "robj" in the DRM drivers:

/drivers/gpu/drm/amd/amdgpu/
amdgpu_gem.c
     35  struct amdgpu_bo *robj = gem_to_amdgpu_bo(gobj);   in amdgpu_gem_object_free() local
     37  if (robj) {   in amdgpu_gem_object_free()
     38  if (robj->gem_base.import_attach)   in amdgpu_gem_object_free()
     39  drm_prime_gem_destroy(&robj->gem_base, robj->tbo.sg);   in amdgpu_gem_object_free()
     40  amdgpu_mn_unregister(robj);   in amdgpu_gem_object_free()
     41  amdgpu_bo_unref(&robj);   in amdgpu_gem_object_free()
     50  struct amdgpu_bo *robj;   in amdgpu_gem_object_create() local
     73  flags, NULL, NULL, &robj);   in amdgpu_gem_object_create()
     85  *obj = &robj->gem_base;   in amdgpu_gem_object_create()
     86  robj->pid = task_pid_nr(current);   in amdgpu_gem_object_create()
    [all …]
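Read together, the amdgpu_gem_object_free() fragments describe the teardown order for a GEM object: release any PRIME import state, drop the MMU-notifier registration, then drop the BO reference. A sketch reconstructed from those fragments (amdgpu of this vintage, where amdgpu_bo still embeds gem_base), not a verbatim copy of the file:

void amdgpu_gem_object_free(struct drm_gem_object *gobj)
{
    struct amdgpu_bo *robj = gem_to_amdgpu_bo(gobj);

    if (robj) {
        /* Imported dma-bufs: detach and free the sg table first. */
        if (robj->gem_base.import_attach)
            drm_prime_gem_destroy(&robj->gem_base, robj->tbo.sg);
        /* Userptr BOs register an MMU notifier; drop it. */
        amdgpu_mn_unregister(robj);
        /* Drop the reference; TTM frees the BO once it hits zero. */
        amdgpu_bo_unref(&robj);
    }
}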
amdgpu_gart.c
    126  if (adev->gart.robj == NULL) {   in amdgpu_gart_table_vram_alloc()
    130  NULL, NULL, &adev->gart.robj);   in amdgpu_gart_table_vram_alloc()
    153  r = amdgpu_bo_reserve(adev->gart.robj, false);   in amdgpu_gart_table_vram_pin()
    156  r = amdgpu_bo_pin(adev->gart.robj,   in amdgpu_gart_table_vram_pin()
    159  amdgpu_bo_unreserve(adev->gart.robj);   in amdgpu_gart_table_vram_pin()
    162  r = amdgpu_bo_kmap(adev->gart.robj, &adev->gart.ptr);   in amdgpu_gart_table_vram_pin()
    164  amdgpu_bo_unpin(adev->gart.robj);   in amdgpu_gart_table_vram_pin()
    165  amdgpu_bo_unreserve(adev->gart.robj);   in amdgpu_gart_table_vram_pin()
    182  if (adev->gart.robj == NULL) {   in amdgpu_gart_table_vram_unpin()
    185  r = amdgpu_bo_reserve(adev->gart.robj, false);   in amdgpu_gart_table_vram_unpin()
    [all …]
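The amdgpu_gart_table_vram_pin() matches are the usual TTM reserve → pin → kmap ladder with unwind on failure. A minimal sketch of that ladder reconstructed from the fragments; the AMDGPU_GEM_DOMAIN_VRAM argument and the gart.table_addr field fall on lines the listing omits, so treat them as assumptions:

int amdgpu_gart_table_vram_pin(struct amdgpu_device *adev)
{
    uint64_t gpu_addr;
    int r;

    /* Lock the BO's reservation before changing its placement. */
    r = amdgpu_bo_reserve(adev->gart.robj, false);
    if (unlikely(r != 0))
        return r;

    /* Pin the GART table in VRAM and get its GPU address. */
    r = amdgpu_bo_pin(adev->gart.robj, AMDGPU_GEM_DOMAIN_VRAM, &gpu_addr);
    if (r) {
        amdgpu_bo_unreserve(adev->gart.robj);
        return r;
    }

    /* Map it for the CPU so the driver can write GART entries. */
    r = amdgpu_bo_kmap(adev->gart.robj, &adev->gart.ptr);
    if (r)
        amdgpu_bo_unpin(adev->gart.robj);   /* unwind the pin on kmap failure */
    amdgpu_bo_unreserve(adev->gart.robj);

    adev->gart.table_addr = gpu_addr;   /* assumed field for the pinned address */
    return r;
}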
amdgpu_bo_list.c
    107  entry->robj = amdgpu_bo_ref(gem_to_amdgpu_bo(gobj));   in amdgpu_bo_list_set()
    110  entry->prefered_domains = entry->robj->initial_domain;   in amdgpu_bo_list_set()
    114  if (amdgpu_ttm_tt_has_userptr(entry->robj->tbo.ttm)) {   in amdgpu_bo_list_set()
    119  entry->tv.bo = &entry->robj->tbo;   in amdgpu_bo_list_set()
    120  entry->tv.shared = !entry->robj->prime_shared_count;   in amdgpu_bo_list_set()
    123  gds_obj = entry->robj;   in amdgpu_bo_list_set()
    125  gws_obj = entry->robj;   in amdgpu_bo_list_set()
    127  oa_obj = entry->robj;   in amdgpu_bo_list_set()
    129  trace_amdgpu_bo_list_set(list, entry->robj);   in amdgpu_bo_list_set()
    133  amdgpu_bo_unref(&list->array[i].robj);   in amdgpu_bo_list_set()
    [all …]
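Each amdgpu_bo_list entry pairs the referenced BO with a TTM validation-list slot; the listed assignments are the core of that setup. A fragment-level sketch, with a hypothetical helper name and parameters wrapped around the assignments shown above (userptr and GDS/GWS/OA handling omitted):

/* Hypothetical helper; the body restates the amdgpu_bo_list_set() fragments. */
static void bo_list_entry_init(struct amdgpu_bo_list_entry *entry,
                               struct drm_gem_object *gobj)
{
    /* Take an extra reference on the BO behind the GEM handle. */
    entry->robj = amdgpu_bo_ref(gem_to_amdgpu_bo(gobj));

    /* Prefer whatever domain the BO was created in
     * ("prefered" is the field's historical spelling). */
    entry->prefered_domains = entry->robj->initial_domain;

    /* Hook the BO into the TTM validation list used at submission time:
     * a shared fence slot unless the BO has been shared via PRIME. */
    entry->tv.bo = &entry->robj->tbo;
    entry->tv.shared = !entry->robj->prime_shared_count;
}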
amdgpu_device.c
    246  if (adev->vram_scratch.robj == NULL) {   in amdgpu_vram_scratch_init()
    250  NULL, NULL, &adev->vram_scratch.robj);   in amdgpu_vram_scratch_init()
    256  r = amdgpu_bo_reserve(adev->vram_scratch.robj, false);   in amdgpu_vram_scratch_init()
    259  r = amdgpu_bo_pin(adev->vram_scratch.robj,   in amdgpu_vram_scratch_init()
    262  amdgpu_bo_unreserve(adev->vram_scratch.robj);   in amdgpu_vram_scratch_init()
    265  r = amdgpu_bo_kmap(adev->vram_scratch.robj,   in amdgpu_vram_scratch_init()
    268  amdgpu_bo_unpin(adev->vram_scratch.robj);   in amdgpu_vram_scratch_init()
    269  amdgpu_bo_unreserve(adev->vram_scratch.robj);   in amdgpu_vram_scratch_init()
    278  if (adev->vram_scratch.robj == NULL) {   in amdgpu_vram_scratch_fini()
    281  r = amdgpu_bo_reserve(adev->vram_scratch.robj, false);   in amdgpu_vram_scratch_fini()
    [all …]
amdgpu_fb.c
    387  struct amdgpu_bo *robj;   in amdgpu_fbdev_total_size() local
    393  robj = gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.obj);   in amdgpu_fbdev_total_size()
    394  size += amdgpu_bo_size(robj);   in amdgpu_fbdev_total_size()
    398  bool amdgpu_fbdev_robj_is_fb(struct amdgpu_device *adev, struct amdgpu_bo *robj)   in amdgpu_fbdev_robj_is_fb() argument
    402  if (robj == gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.obj))   in amdgpu_fbdev_robj_is_fb()
amdgpu_cs.c
    157  p->uf_entry.robj = amdgpu_bo_ref(p->uf.bo);   in amdgpu_cs_user_fence_chunk()
    161  p->uf_entry.tv.bo = &p->uf_entry.robj->tbo;   in amdgpu_cs_user_fence_chunk()
    357  bo = lobj->robj;   in amdgpu_cs_list_validate()
    453  struct reservation_object *resv = e->robj->tbo.resv;   in amdgpu_cs_sync_rings()
    469  return (int)la->robj->tbo.num_pages - (int)lb->robj->tbo.num_pages;   in cmp_size_smaller_first()
    520  amdgpu_bo_unref(&parser->uf_entry.robj);   in amdgpu_cs_parser_fini()
    548  bo = p->bo_list->array[i].robj;   in amdgpu_bo_vm_update_pte()
    574  bo = p->bo_list->array[i].robj;   in amdgpu_bo_vm_update_pte()
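The cmp_size_smaller_first() hit is the list_sort() comparator the CS code applies to the validated-buffer list before it is handed back to the LRU, so the smallest BOs end up at the front; reconstructed from the listed line:

/* list_sort() comparator over the ttm_validate_buffer list heads;
 * entries with fewer backing pages sort first. */
static int cmp_size_smaller_first(void *priv, struct list_head *a,
                                  struct list_head *b)
{
    struct amdgpu_bo_list_entry *la = list_entry(a, struct amdgpu_bo_list_entry, tv.head);
    struct amdgpu_bo_list_entry *lb = list_entry(b, struct amdgpu_bo_list_entry, tv.head);

    /* Negative when A is smaller, zero when equal, positive otherwise. */
    return (int)la->robj->tbo.num_pages - (int)lb->robj->tbo.num_pages;
}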
amdgpu_vm.c
    100  list[0].robj = vm->page_directory;   in amdgpu_vm_get_bos()
    112  list[idx].robj = vm->page_tables[i].bo;   in amdgpu_vm_get_bos()
    116  list[idx].tv.bo = &list[idx].robj->tbo;   in amdgpu_vm_get_bos()
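amdgpu_vm_get_bos() fills the same kind of entry array for the page directory and every allocated page table, so they get validated along with the user BOs. A sketch of the fill loop built around the three listed assignments; the loop bound (max_pde_used), the tv.shared value and the list_add() calls are assumptions:

/* Entry 0: the page directory itself. */
list[0].robj = vm->page_directory;
list[0].tv.bo = &vm->page_directory->tbo;
list[0].tv.shared = true;               /* assumed: page tables only need shared fences */
list_add(&list[0].tv.head, head);

/* One entry per allocated page table. */
for (i = 0, idx = 1; i <= vm->max_pde_used; i++) {
    if (!vm->page_tables[i].bo)
        continue;

    list[idx].robj = vm->page_tables[i].bo;
    list[idx].tv.bo = &list[idx].robj->tbo;
    list[idx].tv.shared = true;
    list_add(&list[idx].tv.head, head);
    idx++;
}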
/drivers/gpu/drm/radeon/
radeon_gem.c
     34  struct radeon_bo *robj = gem_to_radeon_bo(gobj);   in radeon_gem_object_free() local
     36  if (robj) {   in radeon_gem_object_free()
     37  if (robj->gem_base.import_attach)   in radeon_gem_object_free()
     38  drm_prime_gem_destroy(&robj->gem_base, robj->tbo.sg);   in radeon_gem_object_free()
     39  radeon_mn_unregister(robj);   in radeon_gem_object_free()
     40  radeon_bo_unref(&robj);   in radeon_gem_object_free()
     49  struct radeon_bo *robj;   in radeon_gem_object_create() local
     71  flags, NULL, NULL, &robj);   in radeon_gem_object_create()
     83  *obj = &robj->gem_base;   in radeon_gem_object_create()
     84  robj->pid = task_pid_nr(current);   in radeon_gem_object_create()
    [all …]
radeon_gart.c
    128  if (rdev->gart.robj == NULL) {   in radeon_gart_table_vram_alloc()
    131  0, NULL, NULL, &rdev->gart.robj);   in radeon_gart_table_vram_alloc()
    154  r = radeon_bo_reserve(rdev->gart.robj, false);   in radeon_gart_table_vram_pin()
    157  r = radeon_bo_pin(rdev->gart.robj,   in radeon_gart_table_vram_pin()
    160  radeon_bo_unreserve(rdev->gart.robj);   in radeon_gart_table_vram_pin()
    163  r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr);   in radeon_gart_table_vram_pin()
    165  radeon_bo_unpin(rdev->gart.robj);   in radeon_gart_table_vram_pin()
    166  radeon_bo_unreserve(rdev->gart.robj);   in radeon_gart_table_vram_pin()
    196  if (rdev->gart.robj == NULL) {   in radeon_gart_table_vram_unpin()
    199  r = radeon_bo_reserve(rdev->gart.robj, false);   in radeon_gart_table_vram_unpin()
    [all …]
evergreen_cs.c
   1222  track->db_z_read_bo = reloc->robj;   in evergreen_cs_handle_reg()
   1234  track->db_z_write_bo = reloc->robj;   in evergreen_cs_handle_reg()
   1246  track->db_s_read_bo = reloc->robj;   in evergreen_cs_handle_reg()
   1258  track->db_s_write_bo = reloc->robj;   in evergreen_cs_handle_reg()
   1282  track->vgt_strmout_bo[tmp] = reloc->robj;   in evergreen_cs_handle_reg()
   1506  track->cb_color_fmask_bo[tmp] = reloc->robj;   in evergreen_cs_handle_reg()
   1523  track->cb_color_cmask_bo[tmp] = reloc->robj;   in evergreen_cs_handle_reg()
   1564  track->cb_color_bo[tmp] = reloc->robj;   in evergreen_cs_handle_reg()
   1580  track->cb_color_bo[tmp] = reloc->robj;   in evergreen_cs_handle_reg()
   1592  track->htile_bo = reloc->robj;   in evergreen_cs_handle_reg()
    [all …]
radeon_cursor.c
    285  struct radeon_bo *robj;   in radeon_crtc_cursor_set2() local
    307  robj = gem_to_radeon_bo(obj);   in radeon_crtc_cursor_set2()
    308  ret = radeon_bo_reserve(robj, false);   in radeon_crtc_cursor_set2()
    314  ret = radeon_bo_pin_restricted(robj, RADEON_GEM_DOMAIN_VRAM,   in radeon_crtc_cursor_set2()
    317  radeon_bo_unreserve(robj);   in radeon_crtc_cursor_set2()
    349  struct radeon_bo *robj = gem_to_radeon_bo(radeon_crtc->cursor_bo);   in radeon_crtc_cursor_set2() local
    350  ret = radeon_bo_reserve(robj, false);   in radeon_crtc_cursor_set2()
    352  radeon_bo_unpin(robj);   in radeon_crtc_cursor_set2()
    353  radeon_bo_unreserve(robj);   in radeon_crtc_cursor_set2()
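radeon_crtc_cursor_set2() pins the new cursor BO into VRAM up front and unpins the previously installed one near the end. Both halves, sketched from the listed lines; the max-offset argument, the cursor_addr destination and the error label are filled-in assumptions since those lines are truncated in the listing:

/* Pin the new cursor BO. Pre-AVIVO cursor registers only take a
 * 27-bit offset, hence the max_offset restriction (assumed here). */
robj = gem_to_radeon_bo(obj);
ret = radeon_bo_reserve(robj, false);
if (ret != 0)
    goto fail;  /* hypothetical label: drop the GEM reference and return */
ret = radeon_bo_pin_restricted(robj, RADEON_GEM_DOMAIN_VRAM,
                               ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27,
                               &radeon_crtc->cursor_addr);
radeon_bo_unreserve(robj);

/* ... later, once the new cursor has been programmed ... */

/* Unpin whatever cursor BO this CRTC was using before. */
if (radeon_crtc->cursor_bo) {
    struct radeon_bo *robj = gem_to_radeon_bo(radeon_crtc->cursor_bo);

    ret = radeon_bo_reserve(robj, false);
    if (likely(ret == 0)) {
        radeon_bo_unpin(robj);
        radeon_bo_unreserve(robj);
    }
}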
r100_track.h
     13  struct radeon_bo *robj;   member
     20  struct radeon_bo *robj;   member
     25  struct radeon_bo *robj;   member
     36  struct radeon_bo *robj;   member
r600_cs.c
   1086  track->vgt_strmout_bo[tmp] = reloc->robj;   in r600_cs_check_reg()
   1213  track->cb_color_frag_bo[tmp] = reloc->robj;   in r600_cs_check_reg()
   1244  track->cb_color_tile_bo[tmp] = reloc->robj;   in r600_cs_check_reg()
   1284  track->cb_color_bo[tmp] = reloc->robj;   in r600_cs_check_reg()
   1297  track->db_bo = reloc->robj;   in r600_cs_check_reg()
   1310  track->htile_bo = reloc->robj;   in r600_cs_check_reg()
   1808  if ((tmp + size) > radeon_bo_size(reloc->robj)) {   in r600_packet3_check()
   1810  tmp + size, radeon_bo_size(reloc->robj));   in r600_packet3_check()
   1838  if ((tmp + size) > radeon_bo_size(reloc->robj)) {   in r600_packet3_check()
   1840  tmp + size, radeon_bo_size(reloc->robj));   in r600_packet3_check()
    [all …]
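The r600_packet3_check() hits are command-stream bounds checks: the byte offset a packet programs plus the transfer size must stay inside the relocation's BO, otherwise the whole stream is rejected. A sketch of the check; "tmp" and "size" stand in for operand decoding the listing truncates, and the warning text is illustrative rather than the kernel's exact message:

/* tmp  = unsigned long byte offset into the target BO, decoded from the packet
 * size = unsigned long length of the access in bytes
 * Both are assumed to have been computed a few lines earlier. */
if ((tmp + size) > radeon_bo_size(reloc->robj)) {
    dev_warn(p->dev, "CS access out of bounds: %lu > %lu\n",
             tmp + size, radeon_bo_size(reloc->robj));
    return -EINVAL;     /* reject the whole command stream */
}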
r200.c
    188  track->zb.robj = reloc->robj;   in r200_packet0_check()
    201  track->cb[0].robj = reloc->robj;   in r200_packet0_check()
    231  track->textures[i].robj = reloc->robj;   in r200_packet0_check()
    275  track->textures[i].cube_info[face - 1].robj = reloc->robj;   in r200_packet0_check()
r100.c
   1334  track->arrays[i + 0].robj = reloc->robj;   in r100_packet3_load_vbpntr()
   1344  track->arrays[i + 1].robj = reloc->robj;   in r100_packet3_load_vbpntr()
   1358  track->arrays[i + 0].robj = reloc->robj;   in r100_packet3_load_vbpntr()
   1595  track->zb.robj = reloc->robj;   in r100_packet0_check()
   1608  track->cb[0].robj = reloc->robj;   in r100_packet0_check()
   1635  track->textures[i].robj = reloc->robj;   in r100_packet0_check()
   1653  track->textures[0].cube_info[i].robj = reloc->robj;   in r100_packet0_check()
   1671  track->textures[1].cube_info[i].robj = reloc->robj;   in r100_packet0_check()
   1689  track->textures[2].cube_info[i].robj = reloc->robj;   in r100_packet0_check()
   1895  struct radeon_bo *robj)   in r100_cs_track_check_pkt3_indx_buffer() argument
    [all …]
radeon_device.c
   1613  struct radeon_bo *robj;   in radeon_suspend_kms() local
   1616  struct radeon_bo *robj = gem_to_radeon_bo(radeon_crtc->cursor_bo);   in radeon_suspend_kms() local
   1617  r = radeon_bo_reserve(robj, false);   in radeon_suspend_kms()
   1619  radeon_bo_unpin(robj);   in radeon_suspend_kms()
   1620  radeon_bo_unreserve(robj);   in radeon_suspend_kms()
   1627  robj = gem_to_radeon_bo(rfb->obj);   in radeon_suspend_kms()
   1629  if (!radeon_fbdev_robj_is_fb(rdev, robj)) {   in radeon_suspend_kms()
   1630  r = radeon_bo_reserve(robj, false);   in radeon_suspend_kms()
   1632  radeon_bo_unpin(robj);   in radeon_suspend_kms()
   1633  radeon_bo_unreserve(robj);   in radeon_suspend_kms()
    [all …]
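Before suspend, radeon_suspend_kms() walks the CRTCs and unpins the cursor BOs and any scanout buffers that are not the kernel fbdev framebuffer (that one stays pinned). A reconstruction of that loop from the listed lines; the CRTC iteration and the rfb lookup are assumptions filled in around them:

list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
    struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
    struct radeon_framebuffer *rfb = to_radeon_framebuffer(crtc->primary->fb);
    struct radeon_bo *robj;

    /* Unpin the cursor BO installed on this CRTC, if any. */
    if (radeon_crtc->cursor_bo) {
        struct radeon_bo *robj = gem_to_radeon_bo(radeon_crtc->cursor_bo);

        r = radeon_bo_reserve(robj, false);
        if (r == 0) {
            radeon_bo_unpin(robj);
            radeon_bo_unreserve(robj);
        }
    }

    if (rfb == NULL || rfb->obj == NULL)
        continue;

    robj = gem_to_radeon_bo(rfb->obj);
    /* Don't unpin the kernel fbdev framebuffer. */
    if (!radeon_fbdev_robj_is_fb(rdev, robj)) {
        r = radeon_bo_reserve(robj, false);
        if (r == 0) {
            radeon_bo_unpin(robj);
            radeon_bo_unreserve(robj);
        }
    }
}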
radeon_cs.c
    110  p->relocs[i].robj = gem_to_radeon_bo(gobj);   in radeon_cs_parser_relocs()
    154  if (radeon_ttm_tt_has_userptr(p->relocs[i].robj->tbo.ttm)) {   in radeon_cs_parser_relocs()
    167  p->relocs[i].tv.bo = &p->relocs[i].robj->tbo;   in radeon_cs_parser_relocs()
    241  resv = reloc->robj->tbo.resv;   in radeon_cs_sync_rings()
    385  return (int)la->robj->tbo.num_pages - (int)lb->robj->tbo.num_pages;   in cmp_size_smaller_first()
    423  struct radeon_bo *bo = parser->relocs[i].robj;   in radeon_cs_parser_fini()
    506  bo = p->relocs[i].robj;   in radeon_bo_vm_update_pte()
r300.c
    129  if (rdev->gart.robj) {   in rv370_pcie_gart_init()
    153  if (rdev->gart.robj == NULL) {   in rv370_pcie_gart_enable()
    673  track->cb[i].robj = reloc->robj;   in r300_packet0_check()
    686  track->zb.robj = reloc->robj;   in r300_packet0_check()
    731  track->textures[i].robj = reloc->robj;   in r300_packet0_check()
   1130  track->aa.robj = reloc->robj;   in r300_packet0_check()
   1199  r = r100_cs_track_check_pkt3_indx_buffer(p, pkt, reloc->robj);   in r300_packet3_check()
radeon_fb.c
    383  bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj)   in radeon_fbdev_robj_is_fb() argument
    385  if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->rfb.obj))   in radeon_fbdev_robj_is_fb()
r600.c
   1109  if (rdev->gart.robj) {   in r600_pcie_gart_init()
   1126  if (rdev->gart.robj == NULL) {   in r600_pcie_gart_enable()
   1504  if (rdev->vram_scratch.robj == NULL) {   in r600_vram_scratch_init()
   1507  0, NULL, NULL, &rdev->vram_scratch.robj);   in r600_vram_scratch_init()
   1513  r = radeon_bo_reserve(rdev->vram_scratch.robj, false);   in r600_vram_scratch_init()
   1516  r = radeon_bo_pin(rdev->vram_scratch.robj,   in r600_vram_scratch_init()
   1519  radeon_bo_unreserve(rdev->vram_scratch.robj);   in r600_vram_scratch_init()
   1522  r = radeon_bo_kmap(rdev->vram_scratch.robj,   in r600_vram_scratch_init()
   1525  radeon_bo_unpin(rdev->vram_scratch.robj);   in r600_vram_scratch_init()
   1526  radeon_bo_unreserve(rdev->vram_scratch.robj);   in r600_vram_scratch_init()
    [all …]
radeon_object.c
    543  struct radeon_bo *bo = lobj->robj;   in radeon_bo_list_validate()
    590  lobj->gpu_offset = radeon_bo_gpu_offset(lobj->robj);   in radeon_bo_list_validate()
    591  lobj->tiling_flags = lobj->robj->tiling_flags;   in radeon_bo_list_validate()
radeon_vm.c
    141  list[0].robj = vm->page_directory;   in radeon_vm_get_bos()
    153  list[idx].robj = vm->page_tables[i].bo;   in radeon_vm_get_bos()
    156  list[idx].tv.bo = &list[idx].robj->tbo;   in radeon_vm_get_bos()
radeon_uvd.c
    549  end = start + radeon_bo_size(reloc->robj);   in radeon_uvd_cs_reloc()
    593  r = radeon_uvd_cs_msg(p, reloc->robj, offset, buf_sizes);   in radeon_uvd_cs_reloc()
/drivers/gpu/drm/nouveau/
nouveau_prime.c
     64  struct reservation_object *robj = attach->dmabuf->resv;   in nouveau_gem_prime_import_sg_table() local
     70  ww_mutex_lock(&robj->lock, NULL);   in nouveau_gem_prime_import_sg_table()
     72  sg, robj, &nvbo);   in nouveau_gem_prime_import_sg_table()
     73  ww_mutex_unlock(&robj->lock);   in nouveau_gem_prime_import_sg_table()
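In nouveau the name robj refers to something else entirely: the dma-buf's reservation_object (today's dma_resv), not a buffer object. The import path holds its ww_mutex while creating the nouveau_bo so the imported BO shares the exporter's reservation object; a sketch of that core, reconstructed from the listed lines (the TTM_PL_FLAG_TT placement flag and the error return are assumptions):

struct reservation_object *robj = attach->dmabuf->resv;
struct nouveau_bo *nvbo;
int ret;

/* Hold the exporter's reservation lock while the BO is created on top of it. */
ww_mutex_lock(&robj->lock, NULL);
ret = nouveau_bo_new(dev, attach->dmabuf->size, 0, TTM_PL_FLAG_TT,
                     0, 0,              /* tile_mode, tile_flags: unused for imports */
                     sg, robj, &nvbo);
ww_mutex_unlock(&robj->lock);
if (ret)
    return ERR_PTR(ret);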
nouveau_bo.h
     74  struct reservation_object *robj,