
Searched refs:robj (Results 1 – 25 of 39) sorted by relevance

/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/
radeon_gem.c
40 struct radeon_bo *robj = gem_to_radeon_bo(gobj); in radeon_gem_object_free() local
42 if (robj) { in radeon_gem_object_free()
43 radeon_mn_unregister(robj); in radeon_gem_object_free()
44 radeon_bo_unref(&robj); in radeon_gem_object_free()
53 struct radeon_bo *robj; in radeon_gem_object_create() local
75 flags, NULL, NULL, &robj); in radeon_gem_object_create()
87 *obj = &robj->tbo.base; in radeon_gem_object_create()
88 robj->pid = task_pid_nr(current); in radeon_gem_object_create()
91 list_add_tail(&robj->list, &rdev->gem.objects); in radeon_gem_object_create()
100 struct radeon_bo *robj; in radeon_gem_set_domain() local
[all …]
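
The radeon_gem.c matches above trace the life cycle of a radeon_bo that sits behind a GEM object. As a minimal sketch of the free path they show (lines 40–44), assuming the driver-internal radeon.h helpers named in the matches; the sketch_ function name is invented for illustration and this is not a verbatim copy of the driver:

#include <drm/drm_gem.h>
#include "radeon.h"        /* driver-internal header declaring the radeon_bo helpers */

/* Sketch of the free path seen in radeon_gem_object_free(): recover the
 * radeon_bo from its embedded GEM object, detach any MMU-notifier
 * registration, then drop the reference that ultimately frees the BO.
 */
static void sketch_gem_object_free(struct drm_gem_object *gobj)
{
        struct radeon_bo *robj = gem_to_radeon_bo(gobj);

        if (robj) {
                radeon_mn_unregister(robj);
                radeon_bo_unref(&robj);
        }
}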
radeon_gart.c
132 if (rdev->gart.robj == NULL) { in radeon_gart_table_vram_alloc()
135 0, NULL, NULL, &rdev->gart.robj); in radeon_gart_table_vram_alloc()
158 r = radeon_bo_reserve(rdev->gart.robj, false); in radeon_gart_table_vram_pin()
161 r = radeon_bo_pin(rdev->gart.robj, in radeon_gart_table_vram_pin()
164 radeon_bo_unreserve(rdev->gart.robj); in radeon_gart_table_vram_pin()
167 r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr); in radeon_gart_table_vram_pin()
169 radeon_bo_unpin(rdev->gart.robj); in radeon_gart_table_vram_pin()
170 radeon_bo_unreserve(rdev->gart.robj); in radeon_gart_table_vram_pin()
200 if (rdev->gart.robj == NULL) { in radeon_gart_table_vram_unpin()
203 r = radeon_bo_reserve(rdev->gart.robj, false); in radeon_gart_table_vram_unpin()
[all …]
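
The radeon_gart.c matches show the reserve/pin/kmap sequence used to place the GART table BO into VRAM, including the unwind when the mapping fails (lines 158–170). A condensed sketch of that sequence, assuming the radeon_bo_* helpers visible above; the function name is hypothetical and the real function goes on to flush and rewrite the GART entries afterwards:

#include "radeon.h"        /* driver-internal; declares radeon_bo_reserve/pin/kmap */

/* Sketch of the pin path in radeon_gart_table_vram_pin(): reserve the BO,
 * pin it into VRAM, map it into the kernel, and undo the pin if the
 * mapping fails.
 */
static int sketch_gart_table_pin(struct radeon_device *rdev)
{
        uint64_t gpu_addr;
        int r;

        r = radeon_bo_reserve(rdev->gart.robj, false);
        if (unlikely(r != 0))
                return r;
        r = radeon_bo_pin(rdev->gart.robj, RADEON_GEM_DOMAIN_VRAM, &gpu_addr);
        if (r) {
                radeon_bo_unreserve(rdev->gart.robj);
                return r;
        }
        r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr);
        if (r)
                radeon_bo_unpin(rdev->gart.robj);
        radeon_bo_unreserve(rdev->gart.robj);
        if (!r)
                rdev->gart.table_addr = gpu_addr;
        return r;
}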
r100_track.h
16 struct radeon_bo *robj; member
23 struct radeon_bo *robj; member
28 struct radeon_bo *robj; member
39 struct radeon_bo *robj; member
evergreen_cs.c
1222 track->db_z_read_bo = reloc->robj; in evergreen_cs_handle_reg()
1234 track->db_z_write_bo = reloc->robj; in evergreen_cs_handle_reg()
1246 track->db_s_read_bo = reloc->robj; in evergreen_cs_handle_reg()
1258 track->db_s_write_bo = reloc->robj; in evergreen_cs_handle_reg()
1282 track->vgt_strmout_bo[tmp] = reloc->robj; in evergreen_cs_handle_reg()
1506 track->cb_color_fmask_bo[tmp] = reloc->robj; in evergreen_cs_handle_reg()
1523 track->cb_color_cmask_bo[tmp] = reloc->robj; in evergreen_cs_handle_reg()
1564 track->cb_color_bo[tmp] = reloc->robj; in evergreen_cs_handle_reg()
1580 track->cb_color_bo[tmp] = reloc->robj; in evergreen_cs_handle_reg()
1592 track->htile_bo = reloc->robj; in evergreen_cs_handle_reg()
[all …]
radeon_cursor.c
287 struct radeon_bo *robj; in radeon_crtc_cursor_set2() local
309 robj = gem_to_radeon_bo(obj); in radeon_crtc_cursor_set2()
310 ret = radeon_bo_reserve(robj, false); in radeon_crtc_cursor_set2()
316 ret = radeon_bo_pin_restricted(robj, RADEON_GEM_DOMAIN_VRAM, in radeon_crtc_cursor_set2()
319 radeon_bo_unreserve(robj); in radeon_crtc_cursor_set2()
351 struct radeon_bo *robj = gem_to_radeon_bo(radeon_crtc->cursor_bo); in radeon_crtc_cursor_set2() local
352 ret = radeon_bo_reserve(robj, false); in radeon_crtc_cursor_set2()
354 radeon_bo_unpin(robj); in radeon_crtc_cursor_set2()
355 radeon_bo_unreserve(robj); in radeon_crtc_cursor_set2()
r600_cs.c
1084 track->vgt_strmout_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1211 track->cb_color_frag_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1242 track->cb_color_tile_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1282 track->cb_color_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1295 track->db_bo = reloc->robj; in r600_cs_check_reg()
1308 track->htile_bo = reloc->robj; in r600_cs_check_reg()
1806 if ((tmp + size) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
1808 tmp + size, radeon_bo_size(reloc->robj)); in r600_packet3_check()
1836 if ((tmp + size) > radeon_bo_size(reloc->robj)) { in r600_packet3_check()
1838 tmp + size, radeon_bo_size(reloc->robj)); in r600_packet3_check()
[all …]
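
Alongside the relocation bookkeeping, the r600_cs.c hits at 1806/1836 show the size validation applied to every buffer a packet references: the accessed range must fit inside the relocated BO. A small sketch of that check, with a hypothetical function name and abbreviated parameters:

#include "radeon.h"        /* for struct radeon_bo_list and radeon_bo_size() */

/* Sketch of the bounds check in r600_packet3_check(): reject the command
 * stream if the range it touches runs past the end of the relocation's BO.
 */
static int sketch_check_reloc_bounds(struct radeon_bo_list *reloc,
                                     u64 offset, u64 size)
{
        if ((offset + size) > radeon_bo_size(reloc->robj)) {
                DRM_ERROR("reloc buffer too small %llu > %lu\n",
                          offset + size, radeon_bo_size(reloc->robj));
                return -EINVAL;
        }
        return 0;
}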
r200.c
188 track->zb.robj = reloc->robj; in r200_packet0_check()
201 track->cb[0].robj = reloc->robj; in r200_packet0_check()
231 track->textures[i].robj = reloc->robj; in r200_packet0_check()
275 track->textures[i].cube_info[face - 1].robj = reloc->robj; in r200_packet0_check()
r100.c
1334 track->arrays[i + 0].robj = reloc->robj; in r100_packet3_load_vbpntr()
1344 track->arrays[i + 1].robj = reloc->robj; in r100_packet3_load_vbpntr()
1358 track->arrays[i + 0].robj = reloc->robj; in r100_packet3_load_vbpntr()
1595 track->zb.robj = reloc->robj; in r100_packet0_check()
1608 track->cb[0].robj = reloc->robj; in r100_packet0_check()
1635 track->textures[i].robj = reloc->robj; in r100_packet0_check()
1653 track->textures[0].cube_info[i].robj = reloc->robj; in r100_packet0_check()
1671 track->textures[1].cube_info[i].robj = reloc->robj; in r100_packet0_check()
1689 track->textures[2].cube_info[i].robj = reloc->robj; in r100_packet0_check()
1894 struct radeon_bo *robj) in r100_cs_track_check_pkt3_indx_buffer() argument
[all …]
radeon_device.c
1593 struct radeon_bo *robj; in radeon_suspend_kms() local
1596 struct radeon_bo *robj = gem_to_radeon_bo(radeon_crtc->cursor_bo); in radeon_suspend_kms() local
1597 r = radeon_bo_reserve(robj, false); in radeon_suspend_kms()
1599 radeon_bo_unpin(robj); in radeon_suspend_kms()
1600 radeon_bo_unreserve(robj); in radeon_suspend_kms()
1607 robj = gem_to_radeon_bo(fb->obj[0]); in radeon_suspend_kms()
1609 if (!radeon_fbdev_robj_is_fb(rdev, robj)) { in radeon_suspend_kms()
1610 r = radeon_bo_reserve(robj, false); in radeon_suspend_kms()
1612 radeon_bo_unpin(robj); in radeon_suspend_kms()
1613 radeon_bo_unreserve(robj); in radeon_suspend_kms()
[all …]
radeon_cs.c
116 p->relocs[i].robj = gem_to_radeon_bo(gobj); in radeon_cs_parser_relocs()
163 if (radeon_ttm_tt_has_userptr(p->rdev, p->relocs[i].robj->tbo.ttm)) { in radeon_cs_parser_relocs()
177 if (p->relocs[i].robj->prime_shared_count) { in radeon_cs_parser_relocs()
186 p->relocs[i].tv.bo = &p->relocs[i].robj->tbo; in radeon_cs_parser_relocs()
260 resv = reloc->robj->tbo.base.resv; in radeon_cs_sync_rings()
404 return (int)la->robj->tbo.num_pages - (int)lb->robj->tbo.num_pages; in cmp_size_smaller_first()
442 struct radeon_bo *bo = parser->relocs[i].robj; in radeon_cs_parser_fini()
525 bo = p->relocs[i].robj; in radeon_bo_vm_update_pte()
r300.c
135 if (rdev->gart.robj) { in rv370_pcie_gart_init()
159 if (rdev->gart.robj == NULL) { in rv370_pcie_gart_enable()
676 track->cb[i].robj = reloc->robj; in r300_packet0_check()
689 track->zb.robj = reloc->robj; in r300_packet0_check()
734 track->textures[i].robj = reloc->robj; in r300_packet0_check()
1133 track->aa.robj = reloc->robj; in r300_packet0_check()
1202 r = r100_cs_track_check_pkt3_indx_buffer(p, pkt, reloc->robj); in r300_packet3_check()
radeon_fb.c
393 bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj) in radeon_fbdev_robj_is_fb() argument
398 if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->fb.obj[0])) in radeon_fbdev_robj_is_fb()
r600.c
1117 if (rdev->gart.robj) { in r600_pcie_gart_init()
1134 if (rdev->gart.robj == NULL) { in r600_pcie_gart_enable()
1512 if (rdev->vram_scratch.robj == NULL) { in r600_vram_scratch_init()
1515 0, NULL, NULL, &rdev->vram_scratch.robj); in r600_vram_scratch_init()
1521 r = radeon_bo_reserve(rdev->vram_scratch.robj, false); in r600_vram_scratch_init()
1524 r = radeon_bo_pin(rdev->vram_scratch.robj, in r600_vram_scratch_init()
1527 radeon_bo_unreserve(rdev->vram_scratch.robj); in r600_vram_scratch_init()
1530 r = radeon_bo_kmap(rdev->vram_scratch.robj, in r600_vram_scratch_init()
1533 radeon_bo_unpin(rdev->vram_scratch.robj); in r600_vram_scratch_init()
1534 radeon_bo_unreserve(rdev->vram_scratch.robj); in r600_vram_scratch_init()
[all …]
radeon_object.c
551 struct radeon_bo *bo = lobj->robj; in radeon_bo_list_validate()
598 lobj->gpu_offset = radeon_bo_gpu_offset(lobj->robj); in radeon_bo_list_validate()
599 lobj->tiling_flags = lobj->robj->tiling_flags; in radeon_bo_list_validate()
radeon_vm.c
141 list[0].robj = vm->page_directory; in radeon_vm_get_bos()
153 list[idx].robj = vm->page_tables[i].bo; in radeon_vm_get_bos()
156 list[idx].tv.bo = &list[idx].robj->tbo; in radeon_vm_get_bos()
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_gem.c
43 struct amdgpu_bo *robj = gem_to_amdgpu_bo(gobj); in amdgpu_gem_object_free() local
45 if (robj) { in amdgpu_gem_object_free()
46 amdgpu_mn_unregister(robj); in amdgpu_gem_object_free()
47 amdgpu_bo_unref(&robj); in amdgpu_gem_object_free()
385 struct amdgpu_bo *robj; in amdgpu_mode_dumb_mmap() local
391 robj = gem_to_amdgpu_bo(gobj); in amdgpu_mode_dumb_mmap()
392 if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm) || in amdgpu_mode_dumb_mmap()
393 (robj->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS)) { in amdgpu_mode_dumb_mmap()
397 *offset_p = amdgpu_bo_mmap_offset(robj); in amdgpu_mode_dumb_mmap()
444 struct amdgpu_bo *robj; in amdgpu_gem_wait_idle_ioctl() local
[all …]
amdgpu_fb.c
374 struct amdgpu_bo *robj; in amdgpu_fbdev_total_size() local
380 robj = gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.base.obj[0]); in amdgpu_fbdev_total_size()
381 size += amdgpu_bo_size(robj); in amdgpu_fbdev_total_size()
385 bool amdgpu_fbdev_robj_is_fb(struct amdgpu_device *adev, struct amdgpu_bo *robj) in amdgpu_fbdev_robj_is_fb() argument
389 if (robj == gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.base.obj[0])) in amdgpu_fbdev_robj_is_fb()
amdgpu_device.c
841 &adev->vram_scratch.robj, in amdgpu_device_vram_scratch_init()
855 amdgpu_bo_free_kernel(&adev->vram_scratch.robj, NULL, NULL); in amdgpu_device_vram_scratch_fini()
3673 struct amdgpu_bo *robj; in amdgpu_device_suspend() local
3687 robj = gem_to_amdgpu_bo(fb->obj[0]); in amdgpu_device_suspend()
3689 if (!amdgpu_fbdev_robj_is_fb(adev, robj)) { in amdgpu_device_suspend()
3690 r = amdgpu_bo_reserve(robj, true); in amdgpu_device_suspend()
3692 amdgpu_bo_unpin(robj); in amdgpu_device_suspend()
3693 amdgpu_bo_unreserve(robj); in amdgpu_device_suspend()
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/
nouveau_prime.c
65 struct dma_resv *robj = attach->dmabuf->resv; in nouveau_gem_prime_import_sg_table() local
70 dma_resv_lock(robj, NULL); in nouveau_gem_prime_import_sg_table()
90 sg, robj); in nouveau_gem_prime_import_sg_table()
99 dma_resv_unlock(robj); in nouveau_gem_prime_import_sg_table()
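
The nouveau_prime.c matches show the importer borrowing the exporter's reservation object: it is taken from the dma-buf attachment, held locked while the new BO is initialised against it, and unlocked afterwards. A stripped-down sketch of that locking pattern; the function name and the elided BO-creation step are placeholders, while the dma_resv calls are the 5.10 API seen above:

#include <linux/dma-buf.h>
#include <linux/dma-resv.h>

/* Sketch of the pattern in nouveau_gem_prime_import_sg_table(): share the
 * exporter's dma_resv with the imported BO while holding it locked.
 */
static int sketch_prime_import_locked(struct dma_buf_attachment *attach,
                                      struct sg_table *sg)
{
        struct dma_resv *robj = attach->dmabuf->resv;
        int ret = 0;

        dma_resv_lock(robj, NULL);
        /* ... allocate the BO and pass sg + robj to the driver's init helper,
         * as the matches at lines 70-90 do with nouveau_bo_init() ... */
        dma_resv_unlock(robj);
        return ret;
}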
nouveau_bo.h
82 struct sg_table *sg, struct dma_resv *robj);
85 struct dma_resv *robj,
nouveau_bo.c
302 struct sg_table *sg, struct dma_resv *robj) in nouveau_bo_init() argument
316 acc_size, sg, robj, nouveau_bo_del_ttm); in nouveau_bo_init()
328 struct sg_table *sg, struct dma_resv *robj, in nouveau_bo_new() argument
339 ret = nouveau_bo_init(nvbo, size, align, domain, sg, robj); in nouveau_bo_new()
/kernel/linux/linux-5.10/drivers/gpu/drm/etnaviv/
etnaviv_gem_submit.c
180 struct dma_resv *robj = bo->obj->base.resv; in submit_fence_sync() local
183 ret = dma_resv_reserve_shared(robj, 1); in submit_fence_sync()
192 ret = dma_resv_get_fences_rcu(robj, &bo->excl, in submit_fence_sync()
198 bo->excl = dma_resv_get_excl_rcu(robj); in submit_fence_sync()
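
The etnaviv_gem_submit.c hits show how a submit synchronises against a BO's reservation object: reserve a shared-fence slot, then either snapshot every fence (when the BO will be written) or only the exclusive fence. A sketch of that logic using the 5.10 dma_resv API from the matches; the function name and parameters are illustrative, not the driver's:

#include <linux/dma-fence.h>
#include <linux/dma-resv.h>

/* Sketch of the fence gathering in submit_fence_sync(): make room for one
 * shared fence slot, then collect either all fences or just the exclusive
 * (write) fence, depending on how the BO will be used.
 */
static int sketch_fence_sync(struct dma_resv *robj, bool write,
                             struct dma_fence **excl,
                             unsigned int *nr_shared,
                             struct dma_fence ***shared)
{
        int ret;

        ret = dma_resv_reserve_shared(robj, 1);
        if (ret)
                return ret;

        if (write)
                return dma_resv_get_fences_rcu(robj, excl, nr_shared, shared);

        *excl = dma_resv_get_excl_rcu(robj);
        return 0;
}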
etnaviv_gem.c
454 struct dma_resv *robj = obj->resv; in etnaviv_gem_describe() local
465 fobj = rcu_dereference(robj->fence); in etnaviv_gem_describe()
475 fence = rcu_dereference(robj->fence_excl); in etnaviv_gem_describe()
/kernel/linux/linux-5.10/drivers/dma-buf/
dma-buf.c
1295 struct dma_resv *robj; in dma_buf_debug_show() local
1330 robj = buf_obj->resv; in dma_buf_debug_show()
1332 seq = read_seqcount_begin(&robj->seq); in dma_buf_debug_show()
1334 fobj = rcu_dereference(robj->fence); in dma_buf_debug_show()
1336 fence = rcu_dereference(robj->fence_excl); in dma_buf_debug_show()
1337 if (!read_seqcount_retry(&robj->seq, seq)) in dma_buf_debug_show()
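
The dma-buf.c debugfs hits illustrate the lockless read side of a reservation object: sample the seqcount, dereference the shared-fence list and exclusive fence under RCU, and retry if a writer updated the object in between. A sketch of that snapshot loop under the same 5.10 dma_resv layout; the function name and the pr_info() at the end are placeholders for the seq_printf() output of the real function:

#include <linux/dma-resv.h>
#include <linux/printk.h>
#include <linux/rcupdate.h>
#include <linux/seqlock.h>

/* Sketch of the retry loop in dma_buf_debug_show(): read a consistent
 * snapshot of the exclusive fence and shared fence list without taking
 * the reservation lock.
 */
static void sketch_resv_snapshot(struct dma_resv *robj)
{
        struct dma_resv_list *fobj;
        struct dma_fence *fence;
        unsigned int shared_count, seq;

        while (true) {
                seq = read_seqcount_begin(&robj->seq);
                rcu_read_lock();
                fobj = rcu_dereference(robj->fence);
                shared_count = fobj ? fobj->shared_count : 0;
                fence = rcu_dereference(robj->fence_excl);
                if (!read_seqcount_retry(&robj->seq, seq))
                        break;
                rcu_read_unlock();
        }
        /* fence, fobj and shared_count now form a consistent snapshot */
        pr_info("exclusive fence %p, %u shared fence(s)\n", fence, shared_count);
        rcu_read_unlock();
}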
/kernel/linux/linux-5.10/drivers/gpu/drm/msm/
msm_gem.c
811 struct dma_resv *robj = obj->resv; in msm_gem_describe() local
872 fobj = rcu_dereference(robj->fence); in msm_gem_describe()
882 fence = rcu_dereference(robj->fence_excl); in msm_gem_describe()
