/drivers/gpu/drm/tegra/

D | gem.c
     27  drm_gem_object_put_unlocked(&obj->gem);  in tegra_bo_put()
     49  else if (obj->gem.import_attach)  in tegra_bo_mmap()
     50  return dma_buf_vmap(obj->gem.import_attach->dmabuf);  in tegra_bo_mmap()
     62  else if (obj->gem.import_attach)  in tegra_bo_munmap()
     63  dma_buf_vunmap(obj->gem.import_attach->dmabuf, addr);  in tegra_bo_munmap()
     74  else if (obj->gem.import_attach)  in tegra_bo_kmap()
     75  return dma_buf_kmap(obj->gem.import_attach->dmabuf, page);  in tegra_bo_kmap()
     88  else if (obj->gem.import_attach)  in tegra_bo_kunmap()
     89  dma_buf_kunmap(obj->gem.import_attach->dmabuf, page, addr);  in tegra_bo_kunmap()
     98  drm_gem_object_get(&obj->gem);  in tegra_bo_get()
    [all …]

D | gem.h
     30  struct drm_gem_object gem;  member
     46  static inline struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem)  in to_tegra_bo() argument
     48  return container_of(gem, struct tegra_bo, gem);  in to_tegra_bo()
     63  void tegra_bo_free_object(struct drm_gem_object *gem);
     69  int __tegra_gem_mmap(struct drm_gem_object *gem, struct vm_area_struct *vma);
     72  struct dma_buf *tegra_gem_prime_export(struct drm_gem_object *gem,

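The gem.h hits show the embedding idiom used throughout these drivers: the driver-specific buffer object embeds a struct drm_gem_object, and a to_*_bo() helper recovers the outer structure with container_of(). The same idiom recurs in lima_object.h, nouveau_gem.h (with a NULL guard), and vkms_gem.c below. A minimal, userspace-runnable sketch of the downcast (the struct layouts here are simplified stand-ins, not the real Tegra definitions):

```c
#include <stdio.h>
#include <stddef.h>

/* Userspace stand-in for the kernel's container_of() macro. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct drm_gem_object { size_t size; };	/* simplified */

struct tegra_bo {
	int flags;
	struct drm_gem_object gem;	/* embedded, not a pointer */
};

static struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem)
{
	return container_of(gem, struct tegra_bo, gem);
}

int main(void)
{
	struct tegra_bo bo = { .flags = 42, .gem = { .size = 4096 } };
	struct drm_gem_object *gem = &bo.gem;	/* what the DRM core hands out */

	/* Recover the driver object from the embedded member. */
	printf("flags=%d size=%zu\n",
	       to_tegra_bo(gem)->flags, to_tegra_bo(gem)->gem.size);
	return 0;
}
```

Because the member is embedded rather than pointed to, the conversion is pure pointer arithmetic with no extra allocation or lookup.
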
D | drm.c
    275  struct drm_gem_object *gem;  in host1x_bo_lookup() local
    278  gem = drm_gem_object_lookup(file, handle);  in host1x_bo_lookup()
    279  if (!gem)  in host1x_bo_lookup()
    282  bo = to_tegra_bo(gem);  in host1x_bo_lookup()
    408  refs[num_refs++] = &obj->gem;  in tegra_drm_submit()
    415  if (offset & 3 || offset > obj->gem.size) {  in tegra_drm_submit()
    438  refs[num_refs++] = &obj->gem;  in tegra_drm_submit()
    446  reloc->cmdbuf.offset >= obj->gem.size) {  in tegra_drm_submit()
    452  refs[num_refs++] = &obj->gem;  in tegra_drm_submit()
    454  if (reloc->target.offset >= obj->gem.size) {  in tegra_drm_submit()
    [all …]

D | fb.c
    116  fb->obj[i] = &planes[i]->gem;  in tegra_fb_alloc()
    135  struct drm_gem_object *gem;  in tegra_fb_create() local
    145  gem = drm_gem_object_lookup(file, cmd->handles[i]);  in tegra_fb_create()
    146  if (!gem) {  in tegra_fb_create()
    156  if (gem->size < size) {  in tegra_fb_create()
    161  planes[i] = to_tegra_bo(gem);  in tegra_fb_create()
    174  drm_gem_object_put_unlocked(&planes[i]->gem);  in tegra_fb_create()
    188  err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma);  in tegra_fb_mmap()
    192  return __tegra_gem_mmap(&bo->gem, vma);  in tegra_fb_mmap()
    238  drm_gem_object_put_unlocked(&bo->gem);  in tegra_fbdev_probe()
    [all …]

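The fb.c hits trace the common framebuffer-create flow: look up each plane's GEM handle, check the object is large enough for the requested layout, and drop the lookup references on the error path. A condensed kernel-style sketch of that flow (the function name, bpp[] parameter, and error handling are simplified stand-ins for what tegra_fb_create() really does; the mtk_drm_mode_fb_create() hits further down follow the same shape):

```c
/* Sketch only: per-plane handle lookup with size validation. */
static int demo_fb_check_planes(struct drm_file *file,
				const struct drm_mode_fb_cmd2 *cmd,
				struct tegra_bo **planes,
				unsigned int num_planes,
				const unsigned int *bpp)
{
	unsigned int i;

	for (i = 0; i < num_planes; i++) {
		struct drm_gem_object *gem;
		u64 size;

		gem = drm_gem_object_lookup(file, cmd->handles[i]);
		if (!gem)
			goto unreference;

		/* Smallest object that can back the requested layout. */
		size = (u64)(cmd->height - 1) * cmd->pitches[i] +
		       (u64)cmd->width * bpp[i] + cmd->offsets[i];

		if (gem->size < size) {
			drm_gem_object_put_unlocked(gem);
			goto unreference;
		}

		planes[i] = to_tegra_bo(gem);
	}

	return 0;

unreference:
	/* Drop the references taken for planes already looked up. */
	while (i--)
		drm_gem_object_put_unlocked(&planes[i]->gem);

	return -EINVAL;
}
```
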
/drivers/gpu/drm/i915/

D | Makefile
    104  obj-y += gem/
    105  gem-y += \
    106  gem/i915_gem_busy.o \
    107  gem/i915_gem_clflush.o \
    108  gem/i915_gem_client_blt.o \
    109  gem/i915_gem_context.o \
    110  gem/i915_gem_dmabuf.o \
    111  gem/i915_gem_domain.o \
    112  gem/i915_gem_execbuffer.o \
    113  gem/i915_gem_fence.o \
    [all …]

/drivers/gpu/drm/vkms/

D | vkms_gem.c
     19  ret = drm_gem_object_init(dev, &obj->gem, size);  in __vkms_gem_create()
     32  struct vkms_gem_object *gem = container_of(obj, struct vkms_gem_object,  in vkms_gem_free_object() local
     33  gem);  in vkms_gem_free_object()
     35  WARN_ON(gem->pages);  in vkms_gem_free_object()
     36  WARN_ON(gem->vaddr);  in vkms_gem_free_object()
     38  mutex_destroy(&gem->pages_lock);  in vkms_gem_free_object()
     40  kfree(gem);  in vkms_gem_free_object()
     53  num_pages = DIV_ROUND_UP(obj->gem.size, PAGE_SIZE);  in vkms_gem_fault()
     69  mapping = file_inode(obj->gem.filp)->i_mapping;  in vkms_gem_fault()
    113  ret = drm_gem_handle_create(file, &obj->gem, handle);  in vkms_gem_create()
    [all …]

/drivers/gpu/drm/i915/gem/

D | i915_gem_pm.c
     48  container_of(work, typeof(*i915), gem.idle_work);  in idle_work_handler()
     51  cancel_delayed_work_sync(&i915->gem.retire_work);  in idle_work_handler()
     62  &i915->gem.retire_work,  in idle_work_handler()
     71  container_of(work, typeof(*i915), gem.retire_work.work);  in retire_work_handler()
     80  &i915->gem.retire_work,  in retire_work_handler()
     89  container_of(nb, typeof(*i915), gem.pm_notifier);  in pm_notifier()
     95  &i915->gem.retire_work,  in pm_notifier()
    100  queue_work(i915->wq, &i915->gem.idle_work);  in pm_notifier()
    277  INIT_WORK(&i915->gem.idle_work, idle_work_handler);  in i915_gem_init__pm()
    278  INIT_DELAYED_WORK(&i915->gem.retire_work, retire_work_handler);  in i915_gem_init__pm()
    [all …]

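i915_gem_pm.c wires GEM housekeeping into i915->gem as one delayed work item (periodic request retirement, re-armed about once a second) and one plain work item (idle teardown that cancels the retire work). A generic sketch of that arrangement (demo_* names are placeholders, not the i915 structures, and system_wq stands in for the driver's own workqueue):

```c
#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/jiffies.h>

/* Hypothetical state embedding the two work items, mirroring
 * i915->gem.retire_work / i915->gem.idle_work.
 */
struct demo_gem {
	struct delayed_work retire_work;
	struct work_struct idle_work;
};

static void retire_work_handler(struct work_struct *work)
{
	struct demo_gem *gem =
		container_of(work, struct demo_gem, retire_work.work);

	/* ... retire completed requests ... */

	/* Re-arm ourselves roughly once per second. */
	queue_delayed_work(system_wq, &gem->retire_work,
			   round_jiffies_up_relative(HZ));
}

static void idle_work_handler(struct work_struct *work)
{
	struct demo_gem *gem = container_of(work, struct demo_gem, idle_work);

	/* Once idle, stop the periodic retire work and wait it out. */
	cancel_delayed_work_sync(&gem->retire_work);
}

static void demo_gem_init_pm(struct demo_gem *gem)
{
	INIT_WORK(&gem->idle_work, idle_work_handler);
	INIT_DELAYED_WORK(&gem->retire_work, retire_work_handler);
}
```

Note the container_of() target for the delayed work is retire_work.work, since struct delayed_work itself embeds a struct work_struct.
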
/drivers/gpu/drm/lima/

D | lima_object.c
     14  drm_prime_gem_destroy(&bo->gem, bo->sgt);  in lima_bo_destroy()
     17  int i, npages = bo->gem.size >> PAGE_SHIFT;  in lima_bo_destroy()
     21  dma_unmap_page(bo->gem.dev->dev,  in lima_bo_destroy()
     28  drm_gem_put_pages(&bo->gem, bo->pages, true, true);  in lima_bo_destroy()
     32  drm_gem_object_release(&bo->gem);  in lima_bo_destroy()
     50  err = drm_gem_object_init(dev->ddev, &bo->gem, size);  in lima_bo_create_struct()
     70  npages = bo->gem.size >> PAGE_SHIFT;  in lima_bo_create()
     94  mapping_set_gfp_mask(bo->gem.filp->f_mapping, GFP_DMA32);  in lima_bo_create()
     95  bo->pages = drm_gem_get_pages(&bo->gem);  in lima_bo_create()

D | lima_gem.c
     31  err = drm_gem_handle_create(file, &bo->gem, handle);  in lima_gem_create_handle()
     34  drm_gem_object_put_unlocked(&bo->gem);  in lima_gem_create_handle()
    139  err = dma_resv_reserve_shared(bo->gem.resv, 1);  in lima_gem_sync_bo()
    148  return drm_gem_fence_array_add_implicit(&task->deps, &bo->gem, write);  in lima_gem_sync_bo()
    165  ret = ww_mutex_lock_interruptible(&bos[i]->gem.resv->lock, ctx);  in lima_gem_lock_bos()
    177  ww_mutex_unlock(&bos[i]->gem.resv->lock);  in lima_gem_lock_bos()
    180  ww_mutex_unlock(&bos[slow_locked]->gem.resv->lock);  in lima_gem_lock_bos()
    185  &bos[contended]->gem.resv->lock, ctx);  in lima_gem_lock_bos()
    202  ww_mutex_unlock(&bos[i]->gem.resv->lock);  in lima_gem_unlock_bos()
    299  dma_resv_add_excl_fence(bos[i]->gem.resv, fence);  in lima_gem_submit()
    [all …]

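The lima_gem_lock_bos() hits are a textbook wound/wait acquisition loop over the per-BO reservation locks: on -EDEADLK everything is dropped, the contended lock is taken with the slow path, and the loop retries. A stripped-down sketch of that loop under the same assumptions the hits show (struct lima_bo embeds its drm_gem_object as ->gem; the function name here is a placeholder):

```c
#include <linux/ww_mutex.h>
#include <linux/dma-resv.h>

static int demo_lock_bos(struct lima_bo **bos, u32 nr_bos,
			 struct ww_acquire_ctx *ctx)
{
	int i, ret = 0, contended, slow_locked = -1;

	ww_acquire_init(ctx, &reservation_ww_class);

retry:
	for (i = 0; i < nr_bos; i++) {
		if (i == slow_locked) {
			slow_locked = -1;	/* already taken during backoff */
			continue;
		}

		ret = ww_mutex_lock_interruptible(&bos[i]->gem.resv->lock, ctx);
		if (ret < 0) {
			contended = i;
			goto err;
		}
	}

	ww_acquire_done(ctx);
	return 0;

err:
	/* Back off: drop everything acquired so far, in reverse order. */
	while (i--)
		ww_mutex_unlock(&bos[i]->gem.resv->lock);

	if (slow_locked >= 0)
		ww_mutex_unlock(&bos[slow_locked]->gem.resv->lock);

	if (ret == -EDEADLK) {
		/* Lost a lock-order race: sleep on the contended lock, retry. */
		ret = ww_mutex_lock_slow_interruptible(
				&bos[contended]->gem.resv->lock, ctx);
		if (!ret) {
			slow_locked = contended;
			goto retry;
		}
	}
	ww_acquire_fini(ctx);
	return ret;
}
```

The ww_acquire_ctx gives every waiter a global ticket order, which is what makes the unlock-and-retry dance deadlock-free.
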
D | lima_object.h
     12  struct drm_gem_object gem;  member
     26  return container_of(obj, struct lima_bo, gem);  in to_lima_bo()

/drivers/gpu/drm/nouveau/

D | nouveau_gem.c
     40  nouveau_gem_object_del(struct drm_gem_object *gem)  in nouveau_gem_object_del() argument
     42  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_del()
     51  if (gem->import_attach)  in nouveau_gem_object_del()
     52  drm_prime_gem_destroy(gem, nvbo->bo.sg);  in nouveau_gem_object_del()
     61  nouveau_gem_object_open(struct drm_gem_object *gem, struct drm_file *file_priv)  in nouveau_gem_object_open() argument
     64  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_open()
    136  nouveau_gem_object_close(struct drm_gem_object *gem, struct drm_file *file_priv)  in nouveau_gem_object_close() argument
    139  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_close()
    221  nouveau_gem_info(struct drm_file *file_priv, struct drm_gem_object *gem,  in nouveau_gem_info() argument
    225  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_info()
    [all …]

D | nouveau_ttm.c
    246  drm->gem.vram_available = drm->client.device.info.ram_user;  in nouveau_ttm_init()
    252  drm->gem.vram_available >> PAGE_SHIFT);  in nouveau_ttm_init()
    263  drm->gem.gart_available = drm->client.vmm.vmm.limit;  in nouveau_ttm_init()
    265  drm->gem.gart_available = drm->agp.size;  in nouveau_ttm_init()
    269  drm->gem.gart_available >> PAGE_SHIFT);  in nouveau_ttm_init()
    275  NV_INFO(drm, "VRAM: %d MiB\n", (u32)(drm->gem.vram_available >> 20));  in nouveau_ttm_init()
    276  NV_INFO(drm, "GART: %d MiB\n", (u32)(drm->gem.gart_available >> 20));  in nouveau_ttm_init()

D | nouveau_gem.h
      9  nouveau_gem_object(struct drm_gem_object *gem)  in nouveau_gem_object() argument
     11  return gem ? container_of(gem, struct nouveau_bo, bo.base) : NULL;  in nouveau_gem_object()

D | nouveau_display.c
    276  struct drm_gem_object *gem;  in nouveau_user_framebuffer_create() local
    279  gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);  in nouveau_user_framebuffer_create()
    280  if (!gem)  in nouveau_user_framebuffer_create()
    282  nvbo = nouveau_gem_object(gem);  in nouveau_user_framebuffer_create()
    288  drm_gem_object_put_unlocked(gem);  in nouveau_user_framebuffer_create()
    675  struct drm_gem_object *gem;  in nouveau_display_dumb_map_offset() local
    677  gem = drm_gem_object_lookup(file_priv, handle);  in nouveau_display_dumb_map_offset()
    678  if (gem) {  in nouveau_display_dumb_map_offset()
    679  struct nouveau_bo *bo = nouveau_gem_object(gem);  in nouveau_display_dumb_map_offset()
    681  drm_gem_object_put_unlocked(gem);  in nouveau_display_dumb_map_offset()

/drivers/gpu/drm/

D | drm_gem_vram_helper.c
    476  static void drm_gem_vram_object_free(struct drm_gem_object *gem)  in drm_gem_vram_object_free() argument
    478  struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);  in drm_gem_vram_object_free()
    530  struct drm_gem_object *gem;  in drm_gem_vram_driver_dumb_mmap_offset() local
    533  gem = drm_gem_object_lookup(file, handle);  in drm_gem_vram_driver_dumb_mmap_offset()
    534  if (!gem)  in drm_gem_vram_driver_dumb_mmap_offset()
    537  gbo = drm_gem_vram_of_gem(gem);  in drm_gem_vram_driver_dumb_mmap_offset()
    540  drm_gem_object_put_unlocked(gem);  in drm_gem_vram_driver_dumb_mmap_offset()
    559  static int drm_gem_vram_object_pin(struct drm_gem_object *gem)  in drm_gem_vram_object_pin() argument
    561  struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);  in drm_gem_vram_object_pin()
    579  static void drm_gem_vram_object_unpin(struct drm_gem_object *gem)  in drm_gem_vram_object_unpin() argument
    [all …]

D | drm_fb_cma_helper.c
     41  struct drm_gem_object *gem;  in drm_fb_cma_get_gem_obj() local
     43  gem = drm_gem_fb_get_obj(fb, plane);  in drm_fb_cma_get_gem_obj()
     44  if (!gem)  in drm_fb_cma_get_gem_obj()
     47  return to_drm_gem_cma_obj(gem);  in drm_fb_cma_get_gem_obj()

D | drm_client.c
    237  drm_gem_vunmap(buffer->gem, buffer->vaddr);  in drm_client_buffer_delete()
    239  if (buffer->gem)  in drm_client_buffer_delete()
    240  drm_gem_object_put_unlocked(buffer->gem);  in drm_client_buffer_delete()
    280  buffer->gem = obj;  in drm_client_buffer_create()
    320  vaddr = drm_gem_vmap(buffer->gem);  in drm_client_buffer_vmap()
    340  drm_gem_vunmap(buffer->gem, buffer->vaddr);  in drm_client_buffer_vunmap()

/drivers/gpu/drm/gma500/

D | gem.c
     23  struct gtt_range *gtt = container_of(obj, struct gtt_range, gem);  in psb_gem_free_object()
     67  if (drm_gem_object_init(dev, &r->gem, size) != 0) {  in psb_gem_create()
     74  mapping_set_gfp_mask(r->gem.filp->f_mapping, GFP_KERNEL | __GFP_DMA32);  in psb_gem_create()
     76  ret = drm_gem_handle_create(file, &r->gem, &handle);  in psb_gem_create()
     79  &r->gem, size);  in psb_gem_create()
     80  drm_gem_object_release(&r->gem);  in psb_gem_create()
     85  drm_gem_object_put_unlocked(&r->gem);  in psb_gem_create()
    142  r = container_of(obj, struct gtt_range, gem);  /* Get the gtt range */  in psb_gem_fault()

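The psb_gem_create() hits show the canonical GEM lifecycle on the create path: initialize the object, create a userspace handle, then unconditionally drop the creation reference, since the handle now owns one; if handle creation fails, the object is released instead. A trimmed kernel-style sketch of that flow (demo_gem_create() and demo_alloc_range() are hypothetical names, and unwinding of the allocation itself is omitted for brevity):

```c
static int demo_gem_create(struct drm_device *dev, struct drm_file *file,
			   u64 size, u32 *handlep)
{
	struct gtt_range *r = demo_alloc_range(dev, size); /* hypothetical */
	u32 handle;
	int ret;

	if (!r)
		return -ENOSPC;

	if (drm_gem_object_init(dev, &r->gem, size) != 0)
		return -ENOMEM;

	/* Keep backing pages where this hardware can address them. */
	mapping_set_gfp_mask(r->gem.filp->f_mapping,
			     GFP_KERNEL | __GFP_DMA32);

	ret = drm_gem_handle_create(file, &r->gem, &handle);
	if (ret) {
		drm_gem_object_release(&r->gem);
		return ret;
	}

	/* The handle holds its own reference now; drop the creation one. */
	drm_gem_object_put_unlocked(&r->gem);

	*handlep = handle;
	return 0;
}
```
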
/drivers/net/ethernet/sun/

D | sungem.c
    117  static u16 __sungem_phy_read(struct gem *gp, int phy_addr, int reg)  in __sungem_phy_read()
    145  struct gem *gp = netdev_priv(dev);  in _sungem_phy_read()
    149  static inline u16 sungem_phy_read(struct gem *gp, int reg)  in sungem_phy_read()
    154  static void __sungem_phy_write(struct gem *gp, int phy_addr, int reg, u16 val)  in __sungem_phy_write()
    178  struct gem *gp = netdev_priv(dev);  in _sungem_phy_write()
    182  static inline void sungem_phy_write(struct gem *gp, int reg, u16 val)  in sungem_phy_write()
    187  static inline void gem_enable_ints(struct gem *gp)  in gem_enable_ints()
    193  static inline void gem_disable_ints(struct gem *gp)  in gem_disable_ints()
    200  static void gem_get_cell(struct gem *gp)  in gem_get_cell()
    214  static void gem_put_cell(struct gem *gp)  in gem_put_cell()
    [all …]

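Note that sungem.c is unrelated to DRM GEM: here struct gem is the private state of the Sun GEM Ethernet NIC, recovered from the net_device with netdev_priv(). The hits show a three-level PHY accessor layering, sketched below (the struct is a heavily trimmed stand-in and the MIF register protocol is omitted entirely; the mii_phy_addr field name is assumed from the driver's private struct):

```c
#include <linux/netdevice.h>

struct gem {
	void __iomem *regs;
	int mii_phy_addr;
	/* ... many more fields in the real driver ... */
};

/* Low level: explicit PHY address, talks to the MIF hardware. */
static u16 __sungem_phy_read(struct gem *gp, int phy_addr, int reg)
{
	/* ... program the MIF frame register, poll for completion ... */
	return 0xffff;	/* placeholder result */
}

/* Inline wrapper binds the PHY address stored in the driver state. */
static inline u16 sungem_phy_read(struct gem *gp, int reg)
{
	return __sungem_phy_read(gp, gp->mii_phy_addr, reg);
}

/* mii_if_info callback: recover the private state from the net_device. */
static int _sungem_phy_read(struct net_device *dev, int mii_id, int reg)
{
	struct gem *gp = netdev_priv(dev);

	return __sungem_phy_read(gp, mii_id, reg);
}
```
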
/drivers/gpu/drm/mediatek/

D | mtk_drm_fb.c
     59  struct drm_gem_object *gem;  in mtk_drm_mode_fb_create() local
     68  gem = drm_gem_object_lookup(file, cmd->handles[0]);  in mtk_drm_mode_fb_create()
     69  if (!gem)  in mtk_drm_mode_fb_create()
     76  if (gem->size < size) {  in mtk_drm_mode_fb_create()
     81  fb = mtk_drm_framebuffer_init(dev, cmd, gem);  in mtk_drm_mode_fb_create()
     90  drm_gem_object_put_unlocked(gem);  in mtk_drm_mode_fb_create()

/drivers/gpu/drm/qxl/

D | qxl_gem.c
     68  mutex_lock(&qdev->gem.mutex);  in qxl_gem_object_create()
     69  list_add_tail(&qbo->list, &qdev->gem.objects);  in qxl_gem_object_create()
     70  mutex_unlock(&qdev->gem.mutex);  in qxl_gem_object_create()
    116  INIT_LIST_HEAD(&qdev->gem.objects);  in qxl_gem_init()

D | qxl_object.c
     40  mutex_lock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
     42  mutex_unlock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
    308  if (list_empty(&qdev->gem.objects))  in qxl_bo_force_delete()
    311  list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {  in qxl_bo_force_delete()
    315  mutex_lock(&qdev->gem.mutex);  in qxl_bo_force_delete()
    317  mutex_unlock(&qdev->gem.mutex);  in qxl_bo_force_delete()

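Taken together, the qxl_gem.c and qxl_object.c hits show qxl (and radeon, below) keeping every GEM object on a device-wide, mutex-protected list so leftover objects can be force-deleted at teardown. The pattern in miniature (demo_* names are generic, not the qxl structures):

```c
#include <linux/list.h>
#include <linux/mutex.h>

struct demo_dev {
	struct mutex lock;	/* protects .objects */
	struct list_head objects;
};

struct demo_bo {
	struct list_head list;	/* link on demo_dev.objects */
};

static void demo_track(struct demo_dev *dev, struct demo_bo *bo)
{
	mutex_lock(&dev->lock);
	list_add_tail(&bo->list, &dev->objects);
	mutex_unlock(&dev->lock);
}

static void demo_force_delete(struct demo_dev *dev)
{
	struct demo_bo *bo, *n;

	if (list_empty(&dev->objects))
		return;

	/* _safe variant: entries are unlinked while iterating. */
	list_for_each_entry_safe(bo, n, &dev->objects, list) {
		mutex_lock(&dev->lock);
		list_del_init(&bo->list);
		mutex_unlock(&dev->lock);
		/* ... release the object ... */
	}
}
```
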
/drivers/gpu/drm/shmobile/

D | shmob_drm_plane.c
     43  struct drm_gem_cma_object *gem;  in shmob_drm_plane_compute_base() local
     47  gem = drm_fb_cma_get_gem_obj(fb, 0);  in shmob_drm_plane_compute_base()
     48  splane->dma[0] = gem->paddr + fb->offsets[0]  in shmob_drm_plane_compute_base()
     53  gem = drm_fb_cma_get_gem_obj(fb, 1);  in shmob_drm_plane_compute_base()
     54  splane->dma[1] = gem->paddr + fb->offsets[1]  in shmob_drm_plane_compute_base()

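shmob_drm_plane_compute_base() derives each plane's scanout DMA address from the CMA object's physical base: dma[i] = paddr + offsets[i] + y * pitches[i] + x * cpp[i]. A sketch of that computation (struct demo_plane and the src_x/src_y coordinates are assumed; chroma-plane coordinates would additionally be divided by the format's subsampling factors, which is omitted here):

```c
struct demo_plane { dma_addr_t dma[2]; };	/* hypothetical */

static void demo_compute_base(struct demo_plane *splane,
			      struct drm_framebuffer *fb,
			      unsigned int src_x, unsigned int src_y)
{
	struct drm_gem_cma_object *gem;

	/* dma[i] = paddr + offsets[i] + y * pitches[i] + x * cpp[i] */
	gem = drm_fb_cma_get_gem_obj(fb, 0);
	splane->dma[0] = gem->paddr + fb->offsets[0]
		       + src_y * fb->pitches[0]
		       + src_x * fb->format->cpp[0];

	if (fb->format->num_planes > 1) {
		gem = drm_fb_cma_get_gem_obj(fb, 1);
		splane->dma[1] = gem->paddr + fb->offsets[1]
			       + src_y * fb->pitches[1]
			       + src_x * fb->format->cpp[1];
	}
}
```

aspeed_gfx_pipe_update() below is the degenerate case of the same idea: the whole buffer scans out from offset zero, so gem->paddr is written directly to the CRT address register.
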
/drivers/gpu/drm/radeon/

D | radeon_prime.c
     78  mutex_lock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()
     79  list_add_tail(&bo->list, &rdev->gem.objects);  in radeon_gem_prime_import_sg_table()
     80  mutex_unlock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()

/drivers/gpu/drm/aspeed/

D | aspeed_gfx_crtc.c
    169  struct drm_gem_cma_object *gem;  in aspeed_gfx_pipe_update() local
    186  gem = drm_fb_cma_get_gem_obj(fb, 0);  in aspeed_gfx_pipe_update()
    187  if (!gem)  in aspeed_gfx_pipe_update()
    189  writel(gem->paddr, priv->base + CRT_ADDR);  in aspeed_gfx_pipe_update()