Searched refs:reloc (Results 1 – 17 of 17) sorted by relevance

/drivers/gpu/drm/radeon/
r600_cs.c
970 struct radeon_bo_list *reloc; in r600_cs_check_reg() local
1015 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r600_cs_check_reg()
1021 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1033 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1042 if (reloc->tiling_flags & RADEON_TILING_MACRO) { in r600_cs_check_reg()
1075 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
1083 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in r600_cs_check_reg()
1084 track->vgt_strmout_bo[tmp] = reloc->robj; in r600_cs_check_reg()
1085 track->vgt_strmout_bo_mc[tmp] = reloc->gpu_offset; in r600_cs_check_reg()
1098 r = radeon_cs_packet_next_reloc(p, &reloc, r600_nomm); in r600_cs_check_reg()
[all …]
evergreen_cs.c
1097 struct radeon_bo_list *reloc; in evergreen_cs_handle_reg() local
1143 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in evergreen_cs_handle_reg()
1149 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in evergreen_cs_handle_reg()
1172 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in evergreen_cs_handle_reg()
1180 ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags)); in evergreen_cs_handle_reg()
1181 track->db_z_info |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags)); in evergreen_cs_handle_reg()
1182 if (reloc->tiling_flags & RADEON_TILING_MACRO) { in evergreen_cs_handle_reg()
1185 evergreen_tiling_fields(reloc->tiling_flags, in evergreen_cs_handle_reg()
1214 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in evergreen_cs_handle_reg()
1221 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in evergreen_cs_handle_reg()
[all …]
r200.c
149 struct radeon_bo_list *reloc; in r200_packet0_check() local
181 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r200_packet0_check()
188 track->zb.robj = reloc->robj; in r200_packet0_check()
191 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r200_packet0_check()
194 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r200_packet0_check()
201 track->cb[0].robj = reloc->robj; in r200_packet0_check()
204 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r200_packet0_check()
213 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r200_packet0_check()
221 if (reloc->tiling_flags & RADEON_TILING_MACRO) in r200_packet0_check()
223 if (reloc->tiling_flags & RADEON_TILING_MICRO) in r200_packet0_check()
[all …]
r300.c
635 struct radeon_bo_list *reloc; in r300_packet0_check() local
669 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r300_packet0_check()
676 track->cb[i].robj = reloc->robj; in r300_packet0_check()
679 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r300_packet0_check()
682 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r300_packet0_check()
689 track->zb.robj = reloc->robj; in r300_packet0_check()
692 ib[idx] = idx_value + ((u32)reloc->gpu_offset); in r300_packet0_check()
711 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r300_packet0_check()
721 ((idx_value & ~31) + (u32)reloc->gpu_offset); in r300_packet0_check()
723 if (reloc->tiling_flags & RADEON_TILING_MACRO) in r300_packet0_check()
[all …]
r100.c
1267 struct radeon_bo_list *reloc; in r100_reloc_pitch_offset() local
1270 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r100_reloc_pitch_offset()
1280 tmp += (((u32)reloc->gpu_offset) >> 10); in r100_reloc_pitch_offset()
1283 if (reloc->tiling_flags & RADEON_TILING_MACRO) in r100_reloc_pitch_offset()
1285 if (reloc->tiling_flags & RADEON_TILING_MICRO) { in r100_reloc_pitch_offset()
1306 struct radeon_bo_list *reloc; in r100_packet3_load_vbpntr() local
1323 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r100_packet3_load_vbpntr()
1331 ib[idx+1] = radeon_get_ib_value(p, idx + 1) + ((u32)reloc->gpu_offset); in r100_packet3_load_vbpntr()
1334 track->arrays[i + 0].robj = reloc->robj; in r100_packet3_load_vbpntr()
1336 r = radeon_cs_packet_next_reloc(p, &reloc, 0); in r100_packet3_load_vbpntr()
[all …]
radeon_uvd.c
577 struct radeon_bo_list *reloc; in radeon_uvd_cs_reloc() local
591 reloc = &p->relocs[(idx / 4)]; in radeon_uvd_cs_reloc()
592 start = reloc->gpu_offset; in radeon_uvd_cs_reloc()
593 end = start + radeon_bo_size(reloc->robj); in radeon_uvd_cs_reloc()
637 r = radeon_uvd_cs_msg(p, reloc->robj, offset, buf_sizes); in radeon_uvd_cs_reloc()
radeon_vce.c
474 struct radeon_bo_list *reloc; in radeon_vce_cs_reloc() local
488 reloc = &p->relocs[(idx / 4)]; in radeon_vce_cs_reloc()
489 start = reloc->gpu_offset; in radeon_vce_cs_reloc()
490 end = start + radeon_bo_size(reloc->robj); in radeon_vce_cs_reloc()
radeon_cs.c
254 struct radeon_bo_list *reloc; in radeon_cs_sync_rings() local
257 list_for_each_entry(reloc, &p->validated, tv.head) { in radeon_cs_sync_rings()
260 resv = reloc->robj->tbo.base.resv; in radeon_cs_sync_rings()
262 reloc->tv.num_shared); in radeon_cs_sync_rings()
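
Every radeon hit above follows the same checker pattern: radeon_cs_packet_next_reloc() looks up the buffer object behind the next relocation, and the matching indirect-buffer (ib) word is patched with that buffer's GPU offset. The sketch below models just the patching arithmetic from the r600_cs_check_reg() hit at line 1021 in plain user-space C; struct reloc_entry and patch_ib_word are hypothetical stand-ins, not kernel names.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for struct radeon_bo_list; only the one
     * field the hits above actually read is modeled. */
    struct reloc_entry {
        uint64_t gpu_offset;    /* GPU address of the relocated buffer */
    };

    /* Models the patch seen at r600_cs_check_reg() line 1021:
     *   ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
     * The register holds a 256-byte-aligned base, so the GPU offset
     * is added shifted down by 8 bits. */
    static void patch_ib_word(uint32_t *ib, unsigned idx,
                              const struct reloc_entry *reloc)
    {
        ib[idx] += (uint32_t)((reloc->gpu_offset >> 8) & 0xffffffff);
    }

    int main(void)
    {
        uint32_t ib[1] = { 0x10 };  /* offset encoded in the packet */
        struct reloc_entry reloc = { .gpu_offset = 0x100000 };

        patch_ib_word(ib, 0, &reloc);
        printf("patched ib[0] = 0x%x\n", ib[0]);  /* 0x10 + 0x1000 */
        return 0;
    }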
/drivers/gpu/drm/qxl/
qxl_ioctl.c
208 struct drm_qxl_reloc reloc; in qxl_process_single_command() local
211 if (copy_from_user(&reloc, u + i, sizeof(reloc))) { in qxl_process_single_command()
218 if (reloc.reloc_type != QXL_RELOC_TYPE_BO && reloc.reloc_type != QXL_RELOC_TYPE_SURF) { in qxl_process_single_command()
219 DRM_DEBUG("unknown reloc type %d\n", reloc.reloc_type); in qxl_process_single_command()
224 reloc_info[i].type = reloc.reloc_type; in qxl_process_single_command()
226 if (reloc.dst_handle) { in qxl_process_single_command()
227 ret = qxlhw_handle_to_bo(file_priv, reloc.dst_handle, release, in qxl_process_single_command()
231 reloc_info[i].dst_offset = reloc.dst_offset; in qxl_process_single_command()
234 reloc_info[i].dst_offset = reloc.dst_offset + release->release_offset; in qxl_process_single_command()
239 if (reloc.reloc_type == QXL_RELOC_TYPE_BO || reloc.src_handle) { in qxl_process_single_command()
[all …]
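
The qxl hit is the classic ioctl shape: each drm_qxl_reloc is copied in from userspace, then its reloc_type is checked against the two known values before anything else trusts it. Below is a minimal user-space model of that copy-and-validate loop; the struct layout and the numeric type values are illustrative stand-ins, not the real UAPI.

    #include <errno.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Illustrative stand-ins for QXL_RELOC_TYPE_BO / _SURF; the real
     * UAPI values may differ. */
    enum { RELOC_TYPE_BO = 1, RELOC_TYPE_SURF = 2 };

    struct reloc_model {        /* hypothetical cut of drm_qxl_reloc */
        uint32_t reloc_type;
        uint64_t src_handle, dst_handle;
    };

    /* Copy each entry, then reject unknown types, mirroring the check
     * in qxl_process_single_command(). memcpy() stands in for
     * copy_from_user(). */
    static int validate_relocs(const struct reloc_model *user, size_t n)
    {
        for (size_t i = 0; i < n; i++) {
            struct reloc_model reloc;

            memcpy(&reloc, &user[i], sizeof(reloc));
            if (reloc.reloc_type != RELOC_TYPE_BO &&
                reloc.reloc_type != RELOC_TYPE_SURF)
                return -EINVAL;     /* "unknown reloc type" */
        }
        return 0;
    }

    int main(void)
    {
        struct reloc_model relocs[2] = {
            { .reloc_type = RELOC_TYPE_BO },
            { .reloc_type = RELOC_TYPE_SURF },
        };

        return validate_relocs(relocs, 2) ? 1 : 0;
    }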
/drivers/gpu/host1x/
job.c
117 struct host1x_reloc *reloc = &job->relocs[i]; in pin_job() local
121 reloc->target.bo = host1x_bo_get(reloc->target.bo); in pin_job()
122 if (!reloc->target.bo) { in pin_job()
144 sgt = host1x_bo_pin(dev, reloc->target.bo, phys); in pin_job()
155 switch (reloc->flags & mask) { in pin_job()
183 job->unpins[job->num_unpins].bo = reloc->target.bo; in pin_job()
287 struct host1x_reloc *reloc = &job->relocs[i]; in do_relocs() local
289 reloc->target.offset) >> reloc->shift; in do_relocs()
293 if (cmdbuf != reloc->cmdbuf.bo) in do_relocs()
298 reloc->cmdbuf.offset / sizeof(u32) + in do_relocs()
[all …]
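
The host1x hits split the work in two: pin_job() takes a reference on each reloc's target buffer and pins it, and do_relocs() later writes the resolved address into the command buffer. The fragments at lines 289 and 298 give the formula: the patched word is (target address + target.offset) >> shift, placed at cmdbuf.offset / sizeof(u32). A self-contained model, with struct reloc_model as a hypothetical cut of struct host1x_reloc:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct reloc_model {        /* hypothetical cut of struct host1x_reloc */
        size_t cmdbuf_offset;   /* byte offset of the word to patch */
        uint64_t target_addr;   /* resolved address of the target BO */
        size_t target_offset;   /* offset within the target BO */
        unsigned shift;         /* addresses are stored pre-shifted */
    };

    /* Models the patch in do_relocs(): write the shifted target
     * address into the command buffer word named by the reloc. */
    static void apply_reloc(uint32_t *cmdbuf, const struct reloc_model *r)
    {
        cmdbuf[r->cmdbuf_offset / sizeof(uint32_t)] =
            (uint32_t)((r->target_addr + r->target_offset) >> r->shift);
    }

    int main(void)
    {
        uint32_t cmdbuf[8] = { 0 };
        struct reloc_model r = {
            .cmdbuf_offset = 12,        /* patches cmdbuf[3] */
            .target_addr = 0x40000,
            .target_offset = 0x80,
            .shift = 4,
        };

        apply_reloc(cmdbuf, &r);
        printf("cmdbuf[3] = 0x%x\n", cmdbuf[3]);  /* 0x40080 >> 4 */
        return 0;
    }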
/drivers/gpu/drm/i915/gem/selftests/
i915_gem_execbuffer.c
13 u64 reloc; in read_reloc() local
15 memcpy(&reloc, &map[x], sizeof(reloc)); in read_reloc()
16 return reloc & mask; in read_reloc()
83 u64 reloc = read_reloc(map, offsets[i], mask); in __igt_gpu_reloc() local
85 if (reloc != i) { in __igt_gpu_reloc()
87 eb->engine->name, i, offsets[i], reloc, i); in __igt_gpu_reloc()
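
The selftest's read_reloc() is short enough to reproduce almost verbatim: memcpy() pulls a u64 out of the u32-indexed mapping (avoiding an unaligned load), and a mask trims it to the address bits under test; __igt_gpu_reloc() then checks each value round-trips. A compilable version with a tiny self-check; the 48-bit mask is an assumed example, not taken from the listing:

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    /* Same shape as the selftest's read_reloc(): a u64 read from a
     * u32-indexed map, done via memcpy() to avoid alignment traps. */
    static uint64_t read_reloc(const uint32_t *map, unsigned x, uint64_t mask)
    {
        uint64_t reloc;

        memcpy(&reloc, &map[x], sizeof(reloc));
        return reloc & mask;
    }

    int main(void)
    {
        uint32_t map[4] = { 0 };
        uint64_t value = 0x123456789abcull;  /* fits in 48 bits */

        memcpy(&map[1], &value, sizeof(value));
        assert(read_reloc(map, 1, (1ull << 48) - 1) == value);
        return 0;
    }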
/drivers/gpu/drm/i915/gem/
i915_gem_execbuffer.c
977 relocation_target(const struct drm_i915_gem_relocation_entry *reloc, in relocation_target() argument
980 return gen8_canonical_addr((int)reloc->delta + target->node.start); in relocation_target()
1538 const struct drm_i915_gem_relocation_entry *reloc, in relocate_entry() argument
1542 u64 target_addr = relocation_target(reloc, target); in relocate_entry()
1543 u64 offset = reloc->offset; in relocate_entry()
1578 const struct drm_i915_gem_relocation_entry *reloc) in eb_relocate_entry() argument
1585 target = eb_get_vma(eb, reloc->target_handle); in eb_relocate_entry()
1590 if (unlikely(reloc->write_domain & (reloc->write_domain - 1))) { in eb_relocate_entry()
1594 reloc->target_handle, in eb_relocate_entry()
1595 (int) reloc->offset, in eb_relocate_entry()
[all …]
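
In the execbuffer hits, relocation_target() computes the final address as reloc->delta plus the target VMA's node.start, then runs it through gen8_canonical_addr(); eb_relocate_entry() also rejects relocations with more than one write_domain bit set (the reloc->write_domain & (reloc->write_domain - 1) check). Below is a sketch of the canonical-address step, assuming the usual 48-bit sign-extension, with a hand-rolled helper in place of the kernel's sign_extend64():

    #include <stdint.h>
    #include <stdio.h>

    /* gen8+ GPU addresses are 48 bits wide and must be presented in
     * canonical form: bits 63:48 are copies of bit 47. This models
     * gen8_canonical_addr(); the kernel uses sign_extend64(addr, 47).
     * The arithmetic right shift of a signed value is relied on here,
     * as compilers for Linux targets implement it. */
    static uint64_t canonical_addr(uint64_t address)
    {
        return (uint64_t)((int64_t)(address << 16) >> 16);
    }

    int main(void)
    {
        /* delta + node.start, as relocation_target() computes it. */
        uint64_t addr = canonical_addr(0x800000000000ull + 0x40);

        /* Bit 47 is set, so the top 16 bits come back as ones. */
        printf("canonical: 0x%llx\n", (unsigned long long)addr);
        return 0;
    }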
/drivers/gpu/drm/i915/gt/
intel_renderstate.h
35 const u32 *reloc; member
42 .reloc = gen ## _g ## _null_state_relocs, \
intel_renderstate.c
84 if (i * 4 == rodata->reloc[reloc_index]) { in render_state_setup()
102 if (rodata->reloc[reloc_index] != -1) { in render_state_setup()
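
The render-state tables pair a batch with a rodata->reloc array of byte offsets, terminated by -1: render_state_setup() walks the batch, and whenever the current word's offset (i * 4) matches the next table entry it patches in a GPU address. A minimal model of that walk; the table contents are invented for illustration:

    #include <stdint.h>
    #include <stdio.h>

    /* Invented offsets; the real tables are generated per-gen. The -1
     * sentinel matches the check in render_state_setup(). */
    static const uint32_t null_state_relocs[] = { 0x10, 0x2c, (uint32_t)-1 };

    int main(void)
    {
        unsigned reloc_index = 0;

        for (unsigned i = 0; i < 16; i++) {
            if (null_state_relocs[reloc_index] == (uint32_t)-1)
                break;      /* reloc table exhausted */
            if (i * 4 == null_state_relocs[reloc_index]) {
                /* Here the kernel rewrites the word with the batch's
                 * GPU address plus the value already in the word. */
                printf("would patch word %u (offset 0x%x)\n", i, i * 4);
                reloc_index++;
            }
        }
        return 0;
    }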
/drivers/gpu/drm/nouveau/
nouveau_gem.c
595 struct drm_nouveau_gem_pushbuf_reloc *reloc, in nouveau_gem_pushbuf_reloc_apply() argument
602 struct drm_nouveau_gem_pushbuf_reloc *r = &reloc[i]; in nouveau_gem_pushbuf_reloc_apply()
678 struct drm_nouveau_gem_pushbuf_reloc *reloc = NULL; in nouveau_gem_ioctl_pushbuf() local
757 if (!reloc) { in nouveau_gem_ioctl_pushbuf()
759 reloc = u_memcpya(req->relocs, req->nr_relocs, sizeof(*reloc)); in nouveau_gem_ioctl_pushbuf()
760 if (IS_ERR(reloc)) { in nouveau_gem_ioctl_pushbuf()
761 ret = PTR_ERR(reloc); in nouveau_gem_ioctl_pushbuf()
768 ret = nouveau_gem_pushbuf_reloc_apply(cli, req, reloc, bo); in nouveau_gem_ioctl_pushbuf()
876 if (!IS_ERR(reloc)) in nouveau_gem_ioctl_pushbuf()
877 u_free(reloc); in nouveau_gem_ioctl_pushbuf()
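
nouveau defers the reloc copy: the array is only pulled in from userspace (u_memcpya()) if pushbuf patching is actually needed, and the IS_ERR()/u_free() pair brackets its lifetime. Below is a user-space model of that helper with an explicit overflow guard on the element count; the kernel version copies from a user pointer and returns an ERR_PTR instead of using an out-parameter:

    #include <errno.h>
    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    /* Model of u_memcpya(): duplicate an nmemb-element array, failing
     * cleanly if nmemb * size would overflow or allocation fails. */
    static void *memcpya(const void *src, size_t nmemb, size_t size, int *err)
    {
        void *mem;

        if (nmemb && size > SIZE_MAX / nmemb) {
            *err = -EINVAL;     /* nmemb * size would overflow */
            return NULL;
        }
        mem = malloc(nmemb * size);
        if (!mem) {
            *err = -ENOMEM;
            return NULL;
        }
        memcpy(mem, src, nmemb * size);
        *err = 0;
        return mem;
    }

    int main(void)
    {
        uint32_t relocs[3] = { 1, 2, 3 };
        int err;
        uint32_t *copy = memcpya(relocs, 3, sizeof(*relocs), &err);

        free(copy);     /* the ioctl's u_free(reloc) counterpart */
        return err ? 1 : 0;
    }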
/drivers/gpu/drm/vmwgfx/
vmwgfx_execbuf.c
1165 struct vmw_relocation *reloc; in vmw_translate_mob_ptr() local
1180 reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); in vmw_translate_mob_ptr()
1181 if (!reloc) in vmw_translate_mob_ptr()
1184 reloc->mob_loc = id; in vmw_translate_mob_ptr()
1185 reloc->vbo = vmw_bo; in vmw_translate_mob_ptr()
1188 list_add_tail(&reloc->head, &sw_context->bo_relocations); in vmw_translate_mob_ptr()
1220 struct vmw_relocation *reloc; in vmw_translate_guest_ptr() local
1235 reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); in vmw_translate_guest_ptr()
1236 if (!reloc) in vmw_translate_guest_ptr()
1239 reloc->location = ptr; in vmw_translate_guest_ptr()
[all …]
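
Both vmwgfx translators do the same bookkeeping: allocate a small vmw_relocation record, note where in the command stream the pointer lives and which buffer backs it, and queue the record on sw_context->bo_relocations for a later fixup pass. A compact model using a hand-rolled intrusive list in place of the kernel's list_head; struct reloc_record is a hypothetical cut of struct vmw_relocation:

    #include <stdlib.h>

    struct list_node { struct list_node *prev, *next; };

    static void list_init(struct list_node *h) { h->prev = h->next = h; }

    /* Same linkage as the kernel's list_add_tail(). */
    static void list_add_tail_node(struct list_node *n, struct list_node *h)
    {
        n->prev = h->prev;
        n->next = h;
        h->prev->next = n;
        h->prev = n;
    }

    struct reloc_record {       /* hypothetical cut of struct vmw_relocation */
        struct list_node head;
        unsigned mob_loc;       /* command-stream slot to patch later */
    };

    int main(void)
    {
        struct list_node bo_relocations;
        struct reloc_record *reloc = calloc(1, sizeof(*reloc));

        if (!reloc)
            return 1;   /* the kernel path returns -ENOMEM here */
        list_init(&bo_relocations);
        reloc->mob_loc = 42;
        list_add_tail_node(&reloc->head, &bo_relocations);
        free(reloc);
        return 0;
    }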
/drivers/gpu/drm/tegra/
drm.c
263 struct host1x_reloc *reloc; in tegra_drm_submit() local
272 reloc = &job->relocs[num_relocs]; in tegra_drm_submit()
273 obj = host1x_to_tegra_bo(reloc->cmdbuf.bo); in tegra_drm_submit()
281 if (reloc->cmdbuf.offset & 3 || in tegra_drm_submit()
282 reloc->cmdbuf.offset >= obj->gem.size) { in tegra_drm_submit()
287 obj = host1x_to_tegra_bo(reloc->target.bo); in tegra_drm_submit()
290 if (reloc->target.offset >= obj->gem.size) { in tegra_drm_submit()
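
The tegra hits close the set with pure validation: before a job is accepted, each reloc's cmdbuf offset must be 4-byte aligned and inside the command buffer, and its target offset must be inside the target GEM object. The checks condense to a predicate like the following (names are illustrative):

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>

    /* Models the checks in tegra_drm_submit(): the patch site must be
     * aligned and in range, and the target offset must be in range. */
    static bool reloc_is_valid(size_t cmdbuf_offset, size_t cmdbuf_size,
                               size_t target_offset, size_t target_size)
    {
        if (cmdbuf_offset & 3 || cmdbuf_offset >= cmdbuf_size)
            return false;   /* misaligned or out-of-range patch site */
        if (target_offset >= target_size)
            return false;   /* points past the end of the target BO */
        return true;
    }

    int main(void)
    {
        assert(reloc_is_valid(8, 64, 0, 4096));
        assert(!reloc_is_valid(6, 64, 0, 4096));    /* misaligned */
        assert(!reloc_is_valid(8, 64, 4096, 4096)); /* target OOB */
        return 0;
    }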