/drivers/gpu/drm/i915/gt/ |
D | intel_ggtt.c | 42 static int ggtt_init_hw(struct i915_ggtt *ggtt) in ggtt_init_hw() argument 44 struct drm_i915_private *i915 = ggtt->vm.i915; in ggtt_init_hw() 46 i915_address_space_init(&ggtt->vm, VM_CLASS_GGTT); in ggtt_init_hw() 48 ggtt->vm.is_ggtt = true; in ggtt_init_hw() 51 ggtt->vm.has_read_only = IS_VALLEYVIEW(i915); in ggtt_init_hw() 54 ggtt->vm.mm.color_adjust = i915_ggtt_color_adjust; in ggtt_init_hw() 56 if (ggtt->mappable_end) { in ggtt_init_hw() 57 if (!io_mapping_init_wc(&ggtt->iomap, in ggtt_init_hw() 58 ggtt->gmadr.start, in ggtt_init_hw() 59 ggtt->mappable_end)) { in ggtt_init_hw() [all …]
|
D | intel_ggtt_fencing.c | 63 return fence->ggtt->vm.i915; in fence_to_i915() 68 return fence->ggtt->vm.gt->uncore; in fence_to_uncore() 215 struct i915_ggtt *ggtt = fence->ggtt; in fence_update() local 264 list_move(&fence->link, &ggtt->fence_list); in fence_update() 288 list_move_tail(&fence->link, &ggtt->fence_list); in fence_update() 334 static struct i915_fence_reg *fence_find(struct i915_ggtt *ggtt) in fence_find() argument 338 list_for_each_entry(fence, &ggtt->fence_list, link) { in fence_find() 348 if (intel_has_pending_fb_unpin(ggtt->vm.i915)) in fence_find() 356 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vma->vm); in __i915_vma_pin_fence() local 369 list_move_tail(&fence->link, &ggtt->fence_list); in __i915_vma_pin_fence() [all …]
|
D | selftest_reset.c | 21 struct i915_ggtt *ggtt = &gt->i915->ggtt; in __igt_reset_stolen() local 33 if (!drm_mm_node_allocated(&ggtt->error_capture)) in __igt_reset_stolen() 86 ggtt->vm.insert_page(&ggtt->vm, dma, in __igt_reset_stolen() 87 ggtt->error_capture.start, in __igt_reset_stolen() 91 s = io_mapping_map_wc(&ggtt->iomap, in __igt_reset_stolen() 92 ggtt->error_capture.start, in __igt_reset_stolen() 108 ggtt->vm.clear_range(&ggtt->vm, ggtt->error_capture.start, PAGE_SIZE); in __igt_reset_stolen() 127 ggtt->vm.insert_page(&ggtt->vm, dma, in __igt_reset_stolen() 128 ggtt->error_capture.start, in __igt_reset_stolen() 132 s = io_mapping_map_wc(&ggtt->iomap, in __igt_reset_stolen() [all …]
|
D | intel_ggtt_fencing.h | 43 struct i915_ggtt *ggtt; member 63 struct i915_fence_reg *i915_reserve_fence(struct i915_ggtt *ggtt); 66 void intel_ggtt_restore_fences(struct i915_ggtt *ggtt); 73 void intel_ggtt_init_fences(struct i915_ggtt *ggtt); 74 void intel_ggtt_fini_fences(struct i915_ggtt *ggtt);
|
D | gen6_ppgtt.c | 174 gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt); in gen6_flush_pd() 307 struct i915_ggtt *ggtt = i915_vm_to_ggtt(vm); in pd_vma_bind() local 312 ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset; in pd_vma_bind() 348 struct i915_ggtt *ggtt = ppgtt->base.vm.gt->ggtt; in pd_vma_create() local 352 GEM_BUG_ON(size > ggtt->vm.total); in pd_vma_create() 362 vma->vm = i915_vm_get(&ggtt->vm); in pd_vma_create() 433 struct i915_ggtt * const ggtt = gt->ggtt; in gen6_ppgtt_create() local 455 ppgtt->base.vm.pte_encode = ggtt->vm.pte_encode; in gen6_ppgtt_create()
|
D | intel_gtt.h | 61 #define ggtt_total_entries(ggtt) ((ggtt)->vm.total >> PAGE_SHIFT) argument 307 void (*invalidate)(struct i915_ggtt *ggtt); 488 void i915_ggtt_enable_guc(struct i915_ggtt *ggtt); 489 void i915_ggtt_disable_guc(struct i915_ggtt *ggtt); 493 static inline bool i915_ggtt_has_aperture(const struct i915_ggtt *ggtt) in i915_ggtt_has_aperture() argument 495 return ggtt->mappable_end > 0; in i915_ggtt_has_aperture() 503 void i915_ggtt_resume(struct i915_ggtt *ggtt); 550 void gen6_ggtt_invalidate(struct i915_ggtt *ggtt);
|
D | intel_ring.c | 103 static struct i915_vma *create_ring_vma(struct i915_ggtt *ggtt, int size) in create_ring_vma() argument 105 struct i915_address_space *vm = &ggtt->vm; in create_ring_vma() 111 if (i915_ggtt_has_aperture(ggtt) && !HAS_LLC(i915)) in create_ring_vma() 165 vma = create_ring_vma(engine->gt->ggtt, size); in intel_engine_create_ring()
|
/drivers/gpu/drm/i915/selftests/ |
D | i915_gem_evict.c | 46 static int populate_ggtt(struct i915_ggtt *ggtt, struct list_head *objects) in populate_ggtt() argument 55 obj = i915_gem_object_create_internal(ggtt->vm.i915, in populate_ggtt() 73 count, ggtt->vm.total / PAGE_SIZE); in populate_ggtt() 75 if (list_empty(&ggtt->vm.bound_list)) { in populate_ggtt() 83 static void unpin_ggtt(struct i915_ggtt *ggtt) in unpin_ggtt() argument 87 list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link) in unpin_ggtt() 92 static void cleanup_objects(struct i915_ggtt *ggtt, struct list_head *list) in cleanup_objects() argument 102 i915_gem_drain_freed_objects(ggtt->vm.i915); in cleanup_objects() 108 struct i915_ggtt *ggtt = gt->ggtt; in igt_evict_something() local 114 err = populate_ggtt(ggtt, &objects); in igt_evict_something() [all …]
|
D | mock_gtt.c | 109 void mock_init_ggtt(struct drm_i915_private *i915, struct i915_ggtt *ggtt) in mock_init_ggtt() argument 111 memset(ggtt, 0, sizeof(*ggtt)); in mock_init_ggtt() 113 ggtt->vm.gt = &i915->gt; in mock_init_ggtt() 114 ggtt->vm.i915 = i915; in mock_init_ggtt() 115 ggtt->vm.is_ggtt = true; in mock_init_ggtt() 117 ggtt->gmadr = (struct resource) DEFINE_RES_MEM(0, 2048 * PAGE_SIZE); in mock_init_ggtt() 118 ggtt->mappable_end = resource_size(&ggtt->gmadr); in mock_init_ggtt() 119 ggtt->vm.total = 4096 * PAGE_SIZE; in mock_init_ggtt() 121 ggtt->vm.alloc_pt_dma = alloc_pt_dma; in mock_init_ggtt() 123 ggtt->vm.clear_range = mock_clear_range; in mock_init_ggtt() [all …]
|
D | i915_gem_gtt.c | 1095 struct i915_ggtt *ggtt = &i915->ggtt; in exercise_ggtt() local 1102 list_sort(NULL, &ggtt->vm.mm.hole_stack, sort_holes); in exercise_ggtt() 1103 drm_mm_for_each_hole(node, &ggtt->vm.mm, hole_start, hole_end) { in exercise_ggtt() 1107 if (ggtt->vm.mm.color_adjust) in exercise_ggtt() 1108 ggtt->vm.mm.color_adjust(node, 0, in exercise_ggtt() 1113 err = func(&ggtt->vm, hole_start, hole_end, end_time); in exercise_ggtt() 1155 struct i915_ggtt *ggtt = &i915->ggtt; in igt_ggtt_page() local 1162 if (!i915_ggtt_has_aperture(ggtt)) in igt_ggtt_page() 1174 mutex_lock(&ggtt->vm.mutex); in igt_ggtt_page() 1175 err = drm_mm_insert_node_in_range(&ggtt->vm.mm, &tmp, in igt_ggtt_page() [all …]
|
D | i915_gem.c | 43 struct i915_ggtt *ggtt = &i915->ggtt; in trash_stolen() local 44 const u64 slot = ggtt->error_capture.start; in trash_stolen() 50 if (!i915_ggtt_has_aperture(ggtt)) in trash_stolen() 58 ggtt->vm.insert_page(&ggtt->vm, dma, slot, I915_CACHE_NONE, 0); in trash_stolen() 60 s = io_mapping_map_atomic_wc(&ggtt->iomap, slot); in trash_stolen() 68 ggtt->vm.clear_range(&ggtt->vm, slot, PAGE_SIZE); in trash_stolen() 101 i915_ggtt_suspend(&i915->ggtt); in pm_suspend() 111 i915_ggtt_suspend(&i915->ggtt); in pm_hibernate() 127 i915_ggtt_resume(&i915->ggtt); in pm_resume()
|
D | i915_vma.c | 150 struct i915_ggtt *ggtt = arg; in igt_vma_create() local 151 struct drm_i915_private *i915 = ggtt->vm.i915; in igt_vma_create() 257 struct i915_ggtt *ggtt = arg; in igt_vma_pin1() local 268 VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)), in igt_vma_pin1() 269 VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)), in igt_vma_pin1() 270 VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)), in igt_vma_pin1() 272 VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)), in igt_vma_pin1() 273 INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end), in igt_vma_pin1() 274 VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)), in igt_vma_pin1() 275 INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | ggtt->vm.total), in igt_vma_pin1() [all …]
|
D | mock_gtt.h | 31 void mock_init_ggtt(struct drm_i915_private *i915, struct i915_ggtt *ggtt); 32 void mock_fini_ggtt(struct i915_ggtt *ggtt);
|
D | mock_gem_device.c | 72 mock_fini_ggtt(&i915->ggtt); in mock_device_release() 193 mock_init_ggtt(i915, &i915->ggtt); in mock_gem_device() 194 i915->gt.vm = i915_vm_get(&i915->ggtt.vm); in mock_gem_device()
|
/drivers/gpu/drm/i915/ |
D | i915_vgpu.c | 148 static void vgt_deballoon_space(struct i915_ggtt *ggtt, in vgt_deballoon_space() argument 151 struct drm_i915_private *dev_priv = ggtt->vm.i915; in vgt_deballoon_space() 161 ggtt->vm.reserved -= node->size; in vgt_deballoon_space() 172 void intel_vgt_deballoon(struct i915_ggtt *ggtt) in intel_vgt_deballoon() argument 174 struct drm_i915_private *dev_priv = ggtt->vm.i915; in intel_vgt_deballoon() 177 if (!intel_vgpu_active(ggtt->vm.i915)) in intel_vgt_deballoon() 183 vgt_deballoon_space(ggtt, &bl_info.space[i]); in intel_vgt_deballoon() 186 static int vgt_balloon_space(struct i915_ggtt *ggtt, in vgt_balloon_space() argument 190 struct drm_i915_private *dev_priv = ggtt->vm.i915; in vgt_balloon_space() 200 ret = i915_gem_gtt_reserve(&ggtt->vm, node, in vgt_balloon_space() [all …]
|
D | i915_gem.c | 60 insert_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node, u32 size) in insert_mappable_node() argument 64 err = mutex_lock_interruptible(&ggtt->vm.mutex); in insert_mappable_node() 69 err = drm_mm_insert_node_in_range(&ggtt->vm.mm, node, in insert_mappable_node() 71 0, ggtt->mappable_end, in insert_mappable_node() 74 mutex_unlock(&ggtt->vm.mutex); in insert_mappable_node() 80 remove_mappable_node(struct i915_ggtt *ggtt, struct drm_mm_node *node) in remove_mappable_node() argument 82 mutex_lock(&ggtt->vm.mutex); in remove_mappable_node() 84 mutex_unlock(&ggtt->vm.mutex); in remove_mappable_node() 91 struct i915_ggtt *ggtt = &to_i915(dev)->ggtt; in i915_gem_get_aperture_ioctl() local 96 if (mutex_lock_interruptible(&ggtt->vm.mutex)) in i915_gem_get_aperture_ioctl() [all …]
|
D | intel_region_lmem.c | 15 struct i915_ggtt *ggtt = &i915->ggtt; in init_fake_lmem_bar() local 25 ret = drm_mm_reserve_node(&ggtt->vm.mm, &mem->fake_mappable); in init_fake_lmem_bar() 40 ggtt->vm.insert_page(&ggtt->vm, in init_fake_lmem_bar() 113 GEM_BUG_ON(i915_ggtt_has_aperture(&i915->ggtt)); in intel_setup_fake_lmem()
|
D | i915_gem_gtt.c | 60 struct i915_ggtt *ggtt = &dev_priv->ggtt; in i915_gem_gtt_finish_pages() local 62 if (unlikely(ggtt->do_idle_maps)) { in i915_gem_gtt_finish_pages() 64 if (intel_gt_retire_requests_timeout(ggtt->vm.gt, in i915_gem_gtt_finish_pages() 112 GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm); in i915_gem_gtt_reserve() 210 GEM_BUG_ON(vm == &vm->i915->ggtt.alias->vm); in i915_gem_gtt_insert()
|
D | i915_vgpu.h | 39 int intel_vgt_balloon(struct i915_ggtt *ggtt); 40 void intel_vgt_deballoon(struct i915_ggtt *ggtt);
|
D | i915_gpu_error.c | 999 struct i915_ggtt *ggtt = gt->ggtt; in i915_vma_coredump_create() local 1000 const u64 slot = ggtt->error_capture.start; in i915_vma_coredump_create() 1033 if (drm_mm_node_allocated(&ggtt->error_capture)) { in i915_vma_coredump_create() 1038 ggtt->vm.insert_page(&ggtt->vm, dma, slot, in i915_vma_coredump_create() 1042 s = io_mapping_map_wc(&ggtt->iomap, slot, PAGE_SIZE); in i915_vma_coredump_create() 1097 struct i915_ggtt *ggtt = gt->_gt->ggtt; in gt_record_fences() local 1102 for (i = 0; i < ggtt->num_fences; i++) in gt_record_fences() 1107 for (i = 0; i < ggtt->num_fences; i++) in gt_record_fences() 1112 for (i = 0; i < ggtt->num_fences; i++) in gt_record_fences() 1519 struct i915_ggtt *ggtt = gt->_gt->ggtt; in gt_capture_prepare() local [all …]
|
/drivers/gpu/drm/i915/gem/ |
D | i915_gem_tiling.c | 184 struct i915_ggtt *ggtt = &to_i915(obj->base.dev)->ggtt; in i915_gem_object_fence_prepare() local 192 mutex_lock(&ggtt->vm.mutex); in i915_gem_object_fence_prepare() 196 GEM_BUG_ON(vma->vm != &ggtt->vm); in i915_gem_object_fence_prepare() 209 list_splice(&unbind, &ggtt->vm.bound_list); in i915_gem_object_fence_prepare() 214 mutex_unlock(&ggtt->vm.mutex); in i915_gem_object_fence_prepare() 342 if (!dev_priv->ggtt.num_fences) in i915_gem_set_tiling_ioctl() 368 args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_x; in i915_gem_set_tiling_ioctl() 370 args->swizzle_mode = to_i915(dev)->ggtt.bit_6_swizzle_y; in i915_gem_set_tiling_ioctl() 425 if (!dev_priv->ggtt.num_fences) in i915_gem_get_tiling_ioctl() 441 args->swizzle_mode = dev_priv->ggtt.bit_6_swizzle_x; in i915_gem_get_tiling_ioctl() [all …]
|
D | i915_gem_mman.c | 285 struct i915_ggtt *ggtt = &i915->ggtt; in vm_fault_gtt() local 317 ret = intel_gt_reset_trylock(ggtt->vm.gt, &srcu); in vm_fault_gtt() 369 (ggtt->gmadr.start + vma->node.start) >> PAGE_SHIFT, in vm_fault_gtt() 371 &ggtt->iomap); in vm_fault_gtt() 378 mutex_lock(&i915->ggtt.vm.mutex); in vm_fault_gtt() 380 list_add(&obj->userfault_link, &i915->ggtt.userfault_list); in vm_fault_gtt() 381 mutex_unlock(&i915->ggtt.vm.mutex); in vm_fault_gtt() 387 intel_wakeref_auto(&i915->ggtt.userfault_wakeref, in vm_fault_gtt() 401 intel_gt_reset_unlock(ggtt->vm.gt, srcu); in vm_fault_gtt() 482 mutex_lock(&i915->ggtt.vm.mutex); in i915_gem_object_release_mmap_gtt() [all …]
|
/drivers/gpu/drm/i915/gvt/ |
D | aperture_gm.c | 64 mutex_lock(&gt->ggtt->vm.mutex); in alloc_gm() 66 ret = i915_gem_gtt_insert(&gt->ggtt->vm, node, in alloc_gm() 71 mutex_unlock(&gt->ggtt->vm.mutex); in alloc_gm() 101 mutex_lock(&gt->ggtt->vm.mutex); in alloc_vgpu_gm() 103 mutex_unlock(&gt->ggtt->vm.mutex); in alloc_vgpu_gm() 112 mutex_lock(&gt->ggtt->vm.mutex); in free_vgpu_gm() 115 mutex_unlock(&gt->ggtt->vm.mutex); in free_vgpu_gm() 178 mutex_lock(&gvt->gt->ggtt->vm.mutex); in free_vgpu_fence() 185 mutex_unlock(&gvt->gt->ggtt->vm.mutex); in free_vgpu_fence() 201 mutex_lock(&gvt->gt->ggtt->vm.mutex); in alloc_vgpu_fence() [all …]
|
/drivers/gpu/drm/i915/gt/uc/ |
D | intel_uc_fw.c | 403 struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt; in uc_fw_ggtt_offset() local 404 struct drm_mm_node *node = &ggtt->uc_fw; in uc_fw_ggtt_offset() 416 struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt; in uc_fw_bind_ggtt() local 421 .vm = &ggtt->vm, in uc_fw_bind_ggtt() 425 GEM_BUG_ON(dummy.node.size > ggtt->uc_fw.size); in uc_fw_bind_ggtt() 430 ggtt->vm.insert_entries(&ggtt->vm, &dummy, I915_CACHE_NONE, 0); in uc_fw_bind_ggtt() 436 struct i915_ggtt *ggtt = __uc_fw_to_gt(uc_fw)->ggtt; in uc_fw_unbind_ggtt() local 439 ggtt->vm.clear_range(&ggtt->vm, start, obj->base.size); in uc_fw_unbind_ggtt()
|
/drivers/gpu/drm/i915/gem/selftests/ |
D | i915_gem_mman.c | 307 if (!i915_ggtt_has_aperture(&i915->ggtt)) in igt_partial_tiling() 320 (1 + next_prime_number(i915->ggtt.vm.total >> PAGE_SHIFT)) << PAGE_SHIFT); in igt_partial_tiling() 366 tile.swizzle = i915->ggtt.bit_6_swizzle_x; in igt_partial_tiling() 369 tile.swizzle = i915->ggtt.bit_6_swizzle_y; in igt_partial_tiling() 440 if (!i915_ggtt_has_aperture(&i915->ggtt)) in igt_smoke_tiling() 457 (1 + next_prime_number(i915->ggtt.vm.total >> PAGE_SHIFT)) << PAGE_SHIFT); in igt_smoke_tiling() 486 tile.swizzle = i915->ggtt.bit_6_swizzle_x; in igt_smoke_tiling() 489 tile.swizzle = i915->ggtt.bit_6_swizzle_y; in igt_smoke_tiling() 534 vma = i915_vma_instance(obj, &engine->gt->ggtt->vm, NULL); in make_obj_busy() 833 !i915_ggtt_has_aperture(&to_i915(obj->base.dev)->ggtt)) in can_mmap()
|