Searched refs:relocs (Results 1 – 17 of 17) sorted by relevance

/drivers/gpu/drm/etnaviv/
etnaviv_cmd_parser.c 17 const struct drm_etnaviv_gem_submit_reloc *relocs; member
92 if (state->num_relocs && state->relocs->submit_offset < buf_offset) { in etnaviv_warn_if_non_sensitive()
96 state->relocs->submit_offset); in etnaviv_warn_if_non_sensitive()
98 state->relocs->submit_offset < buf_offset) { in etnaviv_warn_if_non_sensitive()
99 state->relocs++; in etnaviv_warn_if_non_sensitive()
117 state->relocs->submit_offset == buf_offset) { in etnaviv_validate_load_state()
118 state->relocs++; in etnaviv_validate_load_state()
132 state->relocs->submit_offset - in etnaviv_validate_load_state()
149 struct drm_etnaviv_gem_submit_reloc *relocs, in etnaviv_cmd_validate_one() argument
157 state.relocs = relocs; in etnaviv_cmd_validate_one()
etnaviv_gem_submit.c 271 u32 size, const struct drm_etnaviv_gem_submit_reloc *relocs, in submit_reloc() argument
283 const struct drm_etnaviv_gem_submit_reloc *r = relocs + i; in submit_reloc()
430 struct drm_etnaviv_gem_submit_reloc *relocs; in etnaviv_ioctl_gem_submit() local
483 relocs = kvmalloc_array(args->nr_relocs, sizeof(*relocs), GFP_KERNEL); in etnaviv_ioctl_gem_submit()
486 if (!bos || !relocs || !pmrs || !stream) { in etnaviv_ioctl_gem_submit()
498 ret = copy_from_user(relocs, u64_to_user_ptr(args->relocs), in etnaviv_ioctl_gem_submit()
499 args->nr_relocs * sizeof(*relocs)); in etnaviv_ioctl_gem_submit()
551 relocs, args->nr_relocs)) { in etnaviv_ioctl_gem_submit()
569 relocs, args->nr_relocs); in etnaviv_ioctl_gem_submit()
622 kvfree(relocs); in etnaviv_ioctl_gem_submit()
etnaviv_drv.h 77 struct drm_etnaviv_gem_submit_reloc *relocs, unsigned int reloc_size);
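
The etnaviv hits above are the usual submit-ioctl ingestion shape: the ioctl args carry a user pointer and a count, and the driver makes a kernel-side copy of the relocation table with kvmalloc_array() plus copy_from_user() before validating it (etnaviv_gem_submit.c lines 483 and 498 above). The C sketch below is a minimal userspace analogue of that step; the struct fields and helper names are illustrative stand-ins, not the driver's actual definitions.

#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Illustrative stand-in for drm_etnaviv_gem_submit_reloc. */
struct demo_submit_reloc {
	uint32_t submit_offset;   /* dword offset in the command stream to patch   */
	uint32_t reloc_idx;       /* index of the BO the patched pointer refers to */
	uint64_t reloc_offset;    /* byte offset inside that BO                    */
};

/*
 * Models the kvmalloc_array() + copy_from_user() pair: make a private copy of
 * the user-supplied relocation table so later validation cannot be raced by
 * userspace rewriting the buffer.
 */
static struct demo_submit_reloc *
copy_reloc_table(const void *user_ptr, uint32_t nr_relocs)
{
	struct demo_submit_reloc *relocs;

	relocs = calloc(nr_relocs, sizeof(*relocs));
	if (!relocs)
		return NULL;
	memcpy(relocs, user_ptr, nr_relocs * sizeof(*relocs));  /* copy_from_user() in the kernel */
	return relocs;
}

int main(void)
{
	struct demo_submit_reloc user[2] = {
		{ .submit_offset = 16, .reloc_idx = 0, .reloc_offset = 0x100 },
		{ .submit_offset = 64, .reloc_idx = 1, .reloc_offset = 0x0   },
	};
	struct demo_submit_reloc *kcopy = copy_reloc_table(user, 2);

	if (!kcopy)
		return ENOMEM;
	printf("first reloc patches command-stream offset %u\n",
	       (unsigned)kcopy[0].submit_offset);
	free(kcopy);
	return 0;
}
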
/drivers/scsi/sym53c8xx_2/
sym_fw.c 352 int relocs; in sym_fw_bind_script() local
395 relocs = 0; in sym_fw_bind_script()
401 relocs = 1; in sym_fw_bind_script()
407 relocs = 2; in sym_fw_bind_script()
429 relocs = 1; in sym_fw_bind_script()
437 relocs = 0; in sym_fw_bind_script()
447 relocs = 1; in sym_fw_bind_script()
456 relocs = 0; in sym_fw_bind_script()
465 relocs = 0; in sym_fw_bind_script()
467 relocs = 2; in sym_fw_bind_script()
[all …]
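
The sym53c8xx_2 hits (and the near-identical ncr53c8xx ones at the end of this list) come from the SCRIPTS firmware binder: for each instruction the opcode decides how many of its 32-bit operands are addresses that must be rebased to bus addresses, so relocs is set to 0, 1 or 2 and a while (relocs--) loop (visible in the ncr53c8xx excerpt below) patches that many operands. Here is a compact, self-contained sketch of that counting pattern, with made-up opcodes and a flat add-a-base rule standing in for the real SCRIPTS relocation rules.

#include <stdint.h>
#include <stdio.h>

/* Made-up opcode classes standing in for the SCRIPTS instruction set. */
enum demo_opcode { OP_NOP, OP_MOVE, OP_JUMP_TWO_ADDR };

struct demo_insn {
	enum demo_opcode op;
	uint32_t operand[2];   /* up to two 32-bit operands per instruction */
};

/* Rebase script-relative operands to "bus" addresses by adding a base. */
static void bind_script(struct demo_insn *script, int len, uint32_t bus_base)
{
	for (int i = 0; i < len; i++) {
		int relocs;                 /* how many operands need patching */

		switch (script[i].op) {
		case OP_JUMP_TWO_ADDR:
			relocs = 2;
			break;
		case OP_MOVE:
			relocs = 1;
			break;
		default:
			relocs = 0;
			break;
		}
		while (relocs--)            /* same shape as the driver's patch loop */
			script[i].operand[relocs] += bus_base;
	}
}

int main(void)
{
	struct demo_insn script[] = {
		{ OP_MOVE,          { 0x10, 0 } },
		{ OP_JUMP_TWO_ADDR, { 0x20, 0x30 } },
		{ OP_NOP,           { 0, 0 } },
	};

	bind_script(script, 3, 0x40000000);
	printf("patched operand: 0x%x\n", (unsigned)script[0].operand[0]);
	return 0;
}
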
/drivers/gpu/drm/radeon/
radeon_cs.c 96 p->relocs = kvcalloc(p->nrelocs, sizeof(struct radeon_bo_list), in radeon_cs_parser_relocs()
98 if (p->relocs == NULL) { in radeon_cs_parser_relocs()
116 p->relocs[i].robj = gem_to_radeon_bo(gobj); in radeon_cs_parser_relocs()
138 p->relocs[i].preferred_domains = in radeon_cs_parser_relocs()
141 p->relocs[i].allowed_domains = in radeon_cs_parser_relocs()
156 p->relocs[i].preferred_domains = domain; in radeon_cs_parser_relocs()
159 p->relocs[i].allowed_domains = domain; in radeon_cs_parser_relocs()
162 if (radeon_ttm_tt_has_userptr(p->rdev, p->relocs[i].robj->tbo.ttm)) { in radeon_cs_parser_relocs()
163 uint32_t domain = p->relocs[i].preferred_domains; in radeon_cs_parser_relocs()
171 p->relocs[i].preferred_domains = domain; in radeon_cs_parser_relocs()
[all …]
radeon_vce.c 487 reloc = &p->relocs[(idx / 4)]; in radeon_vce_cs_reloc()
radeon_uvd.c 593 reloc = &p->relocs[(idx / 4)]; in radeon_uvd_cs_reloc()
r600_cs.c 2360 *cs_reloc = &p->relocs[idx]; in r600_dma_cs_next_reloc()
radeon.h 1068 struct radeon_bo_list *relocs; member
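
In radeon the relocation array doubles as the submission's buffer list: radeon_cs_parser_relocs() kvcalloc()s one struct radeon_bo_list entry per relocation and records preferred and allowed memory domains for each buffer, with userptr-backed buffers forced into a single domain (lines 96 to 171 above). The sketch below only mimics that bookkeeping; the domain flags and types are simplified stand-ins for the RADEON_GEM_DOMAIN_* values.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Simplified placement flags; the real driver uses RADEON_GEM_DOMAIN_*. */
#define DEMO_DOMAIN_VRAM 0x1
#define DEMO_DOMAIN_GTT  0x2

struct demo_bo_list {                  /* stand-in for struct radeon_bo_list   */
	int      bo_handle;            /* stands in for the wrapped BO pointer */
	bool     is_userptr;           /* userptr BOs must stay in one domain  */
	uint32_t preferred_domains;
	uint32_t allowed_domains;
};

static struct demo_bo_list *build_reloc_list(const int *handles,
					     const bool *userptr, int nrelocs)
{
	struct demo_bo_list *relocs = calloc(nrelocs, sizeof(*relocs));

	if (!relocs)
		return NULL;

	for (int i = 0; i < nrelocs; i++) {
		relocs[i].bo_handle = handles[i];
		relocs[i].is_userptr = userptr[i];
		if (userptr[i]) {
			/* like the userptr branch above: a single fixed domain */
			relocs[i].preferred_domains = DEMO_DOMAIN_GTT;
			relocs[i].allowed_domains   = DEMO_DOMAIN_GTT;
		} else {
			relocs[i].preferred_domains = DEMO_DOMAIN_VRAM;
			relocs[i].allowed_domains   = DEMO_DOMAIN_VRAM | DEMO_DOMAIN_GTT;
		}
	}
	return relocs;
}

int main(void)
{
	int handles[] = { 3, 7 };
	bool userptr[] = { false, true };
	struct demo_bo_list *list = build_reloc_list(handles, userptr, 2);

	if (!list)
		return 1;
	printf("BO %d allowed domains: 0x%x\n",
	       list[1].bo_handle, (unsigned)list[1].allowed_domains);
	free(list);
	return 0;
}
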
/drivers/gpu/drm/i915/gem/
i915_gem_execbuffer.c 262 struct list_head relocs; member
556 list_add_tail(&ev->reloc_link, &eb->relocs); in eb_add_vma()
840 INIT_LIST_HEAD(&eb->relocs); in eb_lookup_vmas()
1512 struct drm_i915_gem_relocation_entry *relocs = in eb_relocate_vma_slow() local
1513 u64_to_ptr(typeof(*relocs), entry->relocs_ptr); in eb_relocate_vma_slow()
1518 u64 offset = eb_relocate_entry(eb, ev, &relocs[i]); in eb_relocate_vma_slow()
1560 struct drm_i915_gem_relocation_entry *relocs; in eb_copy_relocations() local
1579 size = nreloc * sizeof(*relocs); in eb_copy_relocations()
1581 relocs = kvmalloc_array(size, 1, GFP_KERNEL); in eb_copy_relocations()
1582 if (!relocs) { in eb_copy_relocations()
[all …]
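
The i915 execbuffer keeps a list head (eb->relocs, line 262) and links execbuffer VMAs that carry relocation entries onto it (line 556), so the relocation pass only walks objects with work to do; a separate slow path re-copies the user relocation array with kvmalloc_array() when the inline fast path faults (lines 1560 to 1582). Below is a deliberately simplified, singly-linked userspace sketch of the "only queue objects that have relocations" idea; the kernel uses its intrusive struct list_head instead.

#include <stdio.h>

/* Simplified stand-in for an execbuffer VMA entry. */
struct demo_ev {
	const char *name;
	unsigned int nr_relocs;       /* how many relocation entries it carries */
	struct demo_ev *reloc_link;   /* next entry that still needs relocating */
};

/* Queue ev only if it has relocations to process; a simplified stand-in for
 * the list_add_tail() call in eb_add_vma() (ordering is not preserved here). */
static void add_vma(struct demo_ev **reloc_list, struct demo_ev *ev)
{
	if (!ev->nr_relocs)
		return;
	ev->reloc_link = *reloc_list;
	*reloc_list = ev;
}

int main(void)
{
	struct demo_ev batch = { "batch", 4, NULL };
	struct demo_ev tex   = { "texture", 0, NULL };
	struct demo_ev *todo = NULL;

	add_vma(&todo, &batch);
	add_vma(&todo, &tex);           /* no relocs, so it is skipped */

	for (struct demo_ev *ev = todo; ev; ev = ev->reloc_link)
		printf("relocate %s (%u entries)\n", ev->name, ev->nr_relocs);
	return 0;
}
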
/drivers/gpu/drm/msm/
msm_gem_submit.c 98 kfree(submit->cmd[i].relocs); in __msm_gem_submit_destroy()
213 userptr = u64_to_user_ptr(submit_cmd.relocs); in submit_lookup_cmds()
222 submit->cmd[i].relocs = kmalloc(sz, GFP_KERNEL); in submit_lookup_cmds()
223 if (!submit->cmd[i].relocs) { in submit_lookup_cmds()
227 ret = copy_from_user(submit->cmd[i].relocs, userptr, sz); in submit_lookup_cmds()
439 uint32_t offset, uint32_t nr_relocs, struct drm_msm_gem_submit_reloc *relocs) in submit_reloc() argument
465 struct drm_msm_gem_submit_reloc submit_reloc = relocs[i]; in submit_reloc()
879 submit->cmd[i].nr_relocs, submit->cmd[i].relocs); in msm_ioctl_gem_submit()
msm_gem.h 345 struct drm_msm_gem_submit_reloc *relocs; member
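
msm splits ownership differently from etnaviv: every command buffer in a submit carries its own relocation array, copied per command from submit_cmd.relocs in submit_lookup_cmds() and freed per command in __msm_gem_submit_destroy() (line 98 above). A sketch of that per-command ownership follows; the types are illustrative, not the driver's.

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct demo_reloc { uint32_t submit_offset; uint64_t reloc_offset; };

struct demo_cmd {                       /* one command buffer in a submit */
	unsigned int nr_relocs;
	struct demo_reloc *relocs;      /* owned by this command           */
};

struct demo_submit {
	unsigned int nr_cmds;
	struct demo_cmd *cmd;
};

/* Copy one command's relocation table from a (simulated) user pointer. */
static int lookup_cmd(struct demo_cmd *cmd, const struct demo_reloc *user,
		      unsigned int nr_relocs)
{
	size_t sz = nr_relocs * sizeof(*cmd->relocs);

	cmd->relocs = malloc(sz);
	if (!cmd->relocs)
		return -1;
	memcpy(cmd->relocs, user, sz);  /* copy_from_user() in the driver */
	cmd->nr_relocs = nr_relocs;
	return 0;
}

/* Mirror of __msm_gem_submit_destroy(): every command frees its own table. */
static void submit_destroy(struct demo_submit *submit)
{
	for (unsigned int i = 0; i < submit->nr_cmds; i++)
		free(submit->cmd[i].relocs);
	free(submit->cmd);
}

int main(void)
{
	struct demo_reloc user[] = { { 8, 0x40 } };
	struct demo_submit submit = { 0 };

	submit.cmd = calloc(1, sizeof(struct demo_cmd));
	if (!submit.cmd)
		return 1;
	submit.nr_cmds = 1;
	if (lookup_cmd(&submit.cmd[0], user, 1)) {
		submit_destroy(&submit);
		return 1;
	}
	submit_destroy(&submit);
	return 0;
}
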
/drivers/gpu/host1x/
job.c 62 job->relocs = num_relocs ? mem : NULL; in host1x_job_alloc()
149 struct host1x_reloc *reloc = &job->relocs[i]; in pin_job()
321 struct host1x_reloc *reloc = &job->relocs[i]; in do_relocs()
568 fw.reloc = job->relocs; in copy_gathers()
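
host1x handles ownership in yet another way: host1x_job_alloc() makes one allocation covering the job and its trailing tables, and line 62 simply points job->relocs into that block (or leaves it NULL when num_relocs is zero) for pin_job() and do_relocs() to walk later. A userspace sketch of that carve-one-allocation pattern, with illustrative sizes and types:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct demo_reloc { uint32_t cmdbuf_offset; uint64_t target_offset; };

struct demo_job {
	unsigned int num_relocs;
	struct demo_reloc *relocs;   /* points into the same allocation as the job */
};

/* One allocation for the job header and its relocation table. */
static struct demo_job *job_alloc(unsigned int num_relocs)
{
	size_t total = sizeof(struct demo_job) + num_relocs * sizeof(struct demo_reloc);
	void *mem = calloc(1, total);
	struct demo_job *job = mem;

	if (!job)
		return NULL;

	job->num_relocs = num_relocs;
	/* Carve the table out of the tail of the block, like line 62 above. */
	mem = (char *)mem + sizeof(struct demo_job);
	job->relocs = num_relocs ? mem : NULL;
	return job;
}

int main(void)
{
	struct demo_job *job = job_alloc(3);

	if (!job)
		return 1;
	for (unsigned int i = 0; i < job->num_relocs; i++)
		job->relocs[i].cmdbuf_offset = i * 4;   /* stand-in for the pin/patch loops */
	printf("relocs table lives at job + %zu bytes\n",
	       (size_t)((char *)job->relocs - (char *)job));
	free(job);
	return 0;
}

The point of the pattern in the sketch is that a single free tears down the job and all of its tables together.
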
/drivers/gpu/drm/nouveau/
nouveau_gem.c 533 int ret, relocs = 0; in validate_list() local
574 relocs++; in validate_list()
578 return relocs; in validate_list()
809 reloc = u_memcpya(req->relocs, req->nr_relocs, sizeof(*reloc)); in nouveau_gem_ioctl_pushbuf()
/drivers/gpu/drm/tegra/
drm.c 186 user_relocs = u64_to_user_ptr(args->relocs); in tegra_drm_submit()
273 err = host1x_reloc_copy_from_user(&job->relocs[num_relocs], in tegra_drm_submit()
279 reloc = &job->relocs[num_relocs]; in tegra_drm_submit()
/drivers/gpu/drm/qxl/
qxl_ioctl.c 208 struct drm_qxl_reloc __user *u = u64_to_user_ptr(cmd->relocs); in qxl_process_single_command()
/drivers/scsi/
ncr53c8xx.c 3489 int relocs; in ncr_script_copy_and_bind() local
3526 relocs = 2; in ncr_script_copy_and_bind()
3556 relocs = 1; in ncr_script_copy_and_bind()
3565 relocs = 0; in ncr_script_copy_and_bind()
3567 relocs = 1; in ncr_script_copy_and_bind()
3574 relocs = 1; in ncr_script_copy_and_bind()
3578 relocs = 0; in ncr_script_copy_and_bind()
3582 if (relocs) { in ncr_script_copy_and_bind()
3583 while (relocs--) { in ncr_script_copy_and_bind()