/drivers/gpu/drm/i915/gt/

gen6_ppgtt.c
      15  static void gen6_write_pde(const struct gen6_ppgtt *ppgtt,  [in gen6_write_pde(), argument]
      19  dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]);  [in gen6_write_pde()]
      23  ppgtt->pd_addr + pde);  [in gen6_write_pde()]
      73  struct gen6_ppgtt * const ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm));  [in gen6_ppgtt_clear_range(), local]
      82  i915_pt_entry(ppgtt->base.pd, pde++);  [in gen6_ppgtt_clear_range()]
      90  ppgtt->scan_for_unused_pt = true;  [in gen6_ppgtt_clear_range()]
     111  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);  [in gen6_ppgtt_insert_entries(), local]
     112  struct i915_page_directory * const pd = ppgtt->pd;  [in gen6_ppgtt_insert_entries()]
     146  static void gen6_flush_pd(struct gen6_ppgtt *ppgtt, u64 start, u64 end)  [in gen6_flush_pd(), argument]
     148  struct i915_page_directory * const pd = ppgtt->base.pd;  [in gen6_flush_pd()]
     [all …]

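Taken together, the gen6_write_pde() fragments above outline the whole helper: resolve the page table's DMA address (falling back to the scratch page table when the slot is empty) and write the encoded PDE through the mapped page directory. A minimal reconstruction, assuming the GEN6_PDE_ADDR_ENCODE()/GEN6_PDE_VALID helpers from the i915 GTT headers rather than anything shown in this listing:

    /* Sketch only: pieced together from the fragments listed above. */
    static void gen6_write_pde(const struct gen6_ppgtt *ppgtt,
                               const unsigned int pde,
                               const struct i915_page_table *pt)
    {
            /* Empty slots are pointed at the scratch page table. */
            dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]);

            /* Assumed encoding macros; the caller flushes the write if needed. */
            iowrite32(GEN6_PDE_ADDR_ENCODE(addr) | GEN6_PDE_VALID,
                      ppgtt->pd_addr + pde);
    }
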
gen8_ppgtt.c
      58  static void gen8_ppgtt_notify_vgt(struct i915_ppgtt *ppgtt, bool create)  [in gen8_ppgtt_notify_vgt(), argument]
      60  struct drm_i915_private *i915 = ppgtt->vm.i915;  [in gen8_ppgtt_notify_vgt()]
      61  struct intel_uncore *uncore = ppgtt->vm.gt->uncore;  [in gen8_ppgtt_notify_vgt()]
      66  atomic_inc(px_used(ppgtt->pd)); /* never remove */  [in gen8_ppgtt_notify_vgt()]
      68  atomic_dec(px_used(ppgtt->pd));  [in gen8_ppgtt_notify_vgt()]
      72  if (i915_vm_is_4lvl(&ppgtt->vm)) {  [in gen8_ppgtt_notify_vgt()]
      73  const u64 daddr = px_dma(ppgtt->pd);  [in gen8_ppgtt_notify_vgt()]
      85  const u64 daddr = i915_page_dir_dma_addr(ppgtt, i);  [in gen8_ppgtt_notify_vgt()]
     160  struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(vm);  [in gen8_pdp_for_page_index(), local]
     163  return ppgtt->pd;  [in gen8_pdp_for_page_index()]
     [all …]

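gen8_ppgtt_notify_vgt() above branches on i915_vm_is_4lvl(): a 4-level PPGTT has a single PML4 root, px_dma(ppgtt->pd), while a 3-level one exposes four page-directory-pointer entries through i915_page_dir_dma_addr(). A hedged sketch of that pattern, using a hypothetical collect_ppgtt_roots() helper that is not part of the driver:

    /* Illustrative helper only; the function name and roots[] layout are invented. */
    static int collect_ppgtt_roots(struct i915_ppgtt *ppgtt, u64 roots[4])
    {
            unsigned int i;

            if (i915_vm_is_4lvl(&ppgtt->vm)) {
                    roots[0] = px_dma(ppgtt->pd);   /* single PML4 root */
                    return 1;
            }

            for (i = 0; i < 4; i++)                 /* four PDP entries */
                    roots[i] = i915_page_dir_dma_addr(ppgtt, i);

            return 4;
    }
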
intel_ppgtt.c
     168  struct i915_ppgtt *ppgtt;  [in i915_ppgtt_create(), local]
     170  ppgtt = __ppgtt_create(gt);  [in i915_ppgtt_create()]
     171  if (IS_ERR(ppgtt))  [in i915_ppgtt_create()]
     172  return ppgtt;  [in i915_ppgtt_create()]
     174  trace_i915_ppgtt_create(&ppgtt->vm);  [in i915_ppgtt_create()]
     176  return ppgtt;  [in i915_ppgtt_create()]
     301  void ppgtt_init(struct i915_ppgtt *ppgtt, struct intel_gt *gt)  [in ppgtt_init(), argument]
     305  ppgtt->vm.gt = gt;  [in ppgtt_init()]
     306  ppgtt->vm.i915 = i915;  [in ppgtt_init()]
     307  ppgtt->vm.dma = i915->drm.dev;  [in ppgtt_init()]
     [all …]

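i915_ppgtt_create() wraps __ppgtt_create() and emits a tracepoint, while ppgtt_init() fills in the vm backpointers (gt, i915, DMA device). The callers elsewhere in this listing (the hangcheck selftest, the VM-create ioctl) all follow the same lifetime: create, use ppgtt->vm, then drop the reference with i915_vm_put(). A minimal sketch of that lifetime, using only calls that appear in this listing (the function name is illustrative):

    /* Sketch: create a full PPGTT on a GT, use its address space, drop it. */
    static int ppgtt_lifetime_example(struct intel_gt *gt)
    {
            struct i915_ppgtt *ppgtt;

            ppgtt = i915_ppgtt_create(gt);
            if (IS_ERR(ppgtt))
                    return PTR_ERR(ppgtt);

            /* ... bind objects into &ppgtt->vm and submit work here ... */

            i915_vm_put(&ppgtt->vm);        /* drop the reference when done */
            return 0;
    }
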
intel_lrc_reg.h
      33  #define ASSIGN_CTX_PDP(ppgtt, reg_state, n) do { \  [argument]
      35  const u64 addr__ = i915_page_dir_dma_addr((ppgtt), (n)); \
      40  #define ASSIGN_CTX_PML4(ppgtt, reg_state) do { \  [argument]
      42  const u64 addr__ = px_dma((ppgtt)->pd); \

intel_ggtt.c
     644  struct i915_ppgtt *ppgtt;  [in init_aliasing_ppgtt(), local]
     647  ppgtt = i915_ppgtt_create(ggtt->vm.gt);  [in init_aliasing_ppgtt()]
     648  if (IS_ERR(ppgtt))  [in init_aliasing_ppgtt()]
     649  return PTR_ERR(ppgtt);  [in init_aliasing_ppgtt()]
     651  if (GEM_WARN_ON(ppgtt->vm.total < ggtt->vm.total)) {  [in init_aliasing_ppgtt()]
     656  err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, ggtt->vm.total);  [in init_aliasing_ppgtt()]
     660  i915_gem_object_lock(ppgtt->vm.scratch[0], NULL);  [in init_aliasing_ppgtt()]
     661  err = i915_vm_map_pt_stash(&ppgtt->vm, &stash);  [in init_aliasing_ppgtt()]
     662  i915_gem_object_unlock(ppgtt->vm.scratch[0]);  [in init_aliasing_ppgtt()]
     672  ppgtt->vm.allocate_va_range(&ppgtt->vm, &stash, 0, ggtt->vm.total);  [in init_aliasing_ppgtt()]
     [all …]

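init_aliasing_ppgtt() pre-populates page tables for the whole global GTT range so later aliasing PTE writes never need to allocate: reserve a stash sized for ggtt->vm.total, map it while holding the scratch object's lock, then hand it to allocate_va_range(). A condensed sketch of that ordering, limited to the calls visible above; the helper name is illustrative and error unwinding is simplified:

    /* Sketch of the preallocation order used for the aliasing PPGTT. */
    static int prealloc_ppgtt_range(struct i915_ppgtt *ppgtt, u64 total)
    {
            struct i915_vm_pt_stash stash = {};
            int err;

            err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, total);
            if (err)
                    return err;

            i915_gem_object_lock(ppgtt->vm.scratch[0], NULL);
            err = i915_vm_map_pt_stash(&ppgtt->vm, &stash);
            i915_gem_object_unlock(ppgtt->vm.scratch[0]);

            if (!err) {
                    /* Populate page tables for the whole range up front. */
                    ppgtt->vm.allocate_va_range(&ppgtt->vm, &stash, 0, total);
            }

            i915_vm_free_pt_stash(&ppgtt->vm, &stash);
            return err;
    }
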
intel_gtt.h
     515  i915_page_dir_dma_addr(const struct i915_ppgtt *ppgtt, const unsigned int n)  [in i915_page_dir_dma_addr(), argument]
     517  struct i915_page_table *pt = ppgtt->pd->entry[n];  [in i915_page_dir_dma_addr()]
     519  return __px_dma(pt ? px_base(pt) : ppgtt->vm.scratch[ppgtt->vm.top]);  [in i915_page_dir_dma_addr()]
     522  void ppgtt_init(struct i915_ppgtt *ppgtt, struct intel_gt *gt);

intel_lrc.c
     720  static void init_ppgtt_regs(u32 *regs, const struct i915_ppgtt *ppgtt)  [in init_ppgtt_regs(), argument]
     722  if (i915_vm_is_4lvl(&ppgtt->vm)) {  [in init_ppgtt_regs()]
     727  ASSIGN_CTX_PML4(ppgtt, regs);  [in init_ppgtt_regs()]
     729  ASSIGN_CTX_PDP(ppgtt, regs, 3);  [in init_ppgtt_regs()]
     730  ASSIGN_CTX_PDP(ppgtt, regs, 2);  [in init_ppgtt_regs()]
     731  ASSIGN_CTX_PDP(ppgtt, regs, 1);  [in init_ppgtt_regs()]
     732  ASSIGN_CTX_PDP(ppgtt, regs, 0);  [in init_ppgtt_regs()]

intel_gtt.c
     126  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm);  [in i915_vm_lock_objects(), local]
     129  return i915_gem_object_lock(ppgtt->pd->pt.base, ww);  [in i915_vm_lock_objects()]

selftest_hangcheck.c
    1601  struct i915_ppgtt *ppgtt;  [in igt_reset_evict_ppgtt(), local]
    1608  ppgtt = i915_ppgtt_create(gt);  [in igt_reset_evict_ppgtt()]
    1609  if (IS_ERR(ppgtt))  [in igt_reset_evict_ppgtt()]
    1610  return PTR_ERR(ppgtt);  [in igt_reset_evict_ppgtt()]
    1612  err = __igt_reset_evict_vma(gt, &ppgtt->vm,  [in igt_reset_evict_ppgtt()]
    1614  i915_vm_put(&ppgtt->vm);  [in igt_reset_evict_ppgtt()]

intel_execlists_submission.c
    2643  struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(rq->context->vm);  [in emit_pdps(), local]
    2681  const dma_addr_t pd_daddr = i915_page_dir_dma_addr(ppgtt, i);  [in emit_pdps()]

/drivers/gpu/drm/i915/selftests/

mock_gtt.c
      67  struct i915_ppgtt *ppgtt;  [in mock_ppgtt(), local]
      69  ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL);  [in mock_ppgtt()]
      70  if (!ppgtt)  [in mock_ppgtt()]
      73  ppgtt->vm.gt = &i915->gt;  [in mock_ppgtt()]
      74  ppgtt->vm.i915 = i915;  [in mock_ppgtt()]
      75  ppgtt->vm.total = round_down(U64_MAX, PAGE_SIZE);  [in mock_ppgtt()]
      76  ppgtt->vm.dma = i915->drm.dev;  [in mock_ppgtt()]
      78  i915_address_space_init(&ppgtt->vm, VM_CLASS_PPGTT);  [in mock_ppgtt()]
      80  ppgtt->vm.alloc_pt_dma = alloc_pt_dma;  [in mock_ppgtt()]
      82  ppgtt->vm.clear_range = mock_clear_range;  [in mock_ppgtt()]
     [all …]

i915_gem_gtt.c
     148  struct i915_ppgtt *ppgtt;  [in igt_ppgtt_alloc(), local]
     158  ppgtt = i915_ppgtt_create(&dev_priv->gt);  [in igt_ppgtt_alloc()]
     159  if (IS_ERR(ppgtt))  [in igt_ppgtt_alloc()]
     160  return PTR_ERR(ppgtt);  [in igt_ppgtt_alloc()]
     162  if (!ppgtt->vm.allocate_va_range)  [in igt_ppgtt_alloc()]
     173  limit = min(ppgtt->vm.total, limit);  [in igt_ppgtt_alloc()]
     177  err = i915_vm_lock_objects(&ppgtt->vm, &ww);  [in igt_ppgtt_alloc()]
     185  err = i915_vm_alloc_pt_stash(&ppgtt->vm, &stash, size);  [in igt_ppgtt_alloc()]
     189  err = i915_vm_map_pt_stash(&ppgtt->vm, &stash);  [in igt_ppgtt_alloc()]
     191  i915_vm_free_pt_stash(&ppgtt->vm, &stash);  [in igt_ppgtt_alloc()]
     [all …]

/drivers/gpu/drm/i915/gem/selftests/

huge_pages.c
     402  struct i915_ppgtt *ppgtt = arg;  [in igt_mock_exhaust_device_supported_pages(), local]
     403  struct drm_i915_private *i915 = ppgtt->vm.i915;  [in igt_mock_exhaust_device_supported_pages()]
     439  vma = i915_vma_instance(obj, &ppgtt->vm, NULL);  [in igt_mock_exhaust_device_supported_pages()]
     478  struct i915_ppgtt *ppgtt = arg;  [in igt_mock_memory_region_huge_pages(), local]
     479  struct drm_i915_private *i915 = ppgtt->vm.i915;  [in igt_mock_memory_region_huge_pages()]
     507  vma = i915_vma_instance(obj, &ppgtt->vm, NULL);  [in igt_mock_memory_region_huge_pages()]
     556  struct i915_ppgtt *ppgtt = arg;  [in igt_mock_ppgtt_misaligned_dma(), local]
     557  struct drm_i915_private *i915 = ppgtt->vm.i915;  [in igt_mock_ppgtt_misaligned_dma()]
     599  vma = i915_vma_instance(obj, &ppgtt->vm, NULL);  [in igt_mock_ppgtt_misaligned_dma()]
     676  struct i915_ppgtt *ppgtt)  [in close_object_list(), argument]
     [all …]

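These mock huge-page tests all route objects into the same PPGTT address space through i915_vma_instance(obj, &ppgtt->vm, NULL). A hedged sketch of that step; i915_vma_pin()/i915_vma_unpin() and the PIN_USER flag are assumed from the wider VMA API, not taken from this listing, and the helper name is invented:

    /* Sketch: look up an object's VMA in a PPGTT address space and pin it. */
    static int bind_into_ppgtt(struct drm_i915_gem_object *obj,
                               struct i915_ppgtt *ppgtt)
    {
            struct i915_vma *vma;
            int err;

            /* Find (or create) the VMA for this object in ppgtt->vm. */
            vma = i915_vma_instance(obj, &ppgtt->vm, NULL);
            if (IS_ERR(vma))
                    return PTR_ERR(vma);

            /* PIN_USER binds through the PPGTT rather than the global GTT. */
            err = i915_vma_pin(vma, 0, 0, PIN_USER);
            if (err)
                    return err;

            i915_vma_unpin(vma);
            return 0;
    }
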
mock_context.c
      35  struct i915_ppgtt *ppgtt;  [in mock_context(), local]
      39  ppgtt = mock_ppgtt(i915, name);  [in mock_context()]
      40  if (!ppgtt)  [in mock_context()]
      43  ctx->vm = i915_vm_open(&ppgtt->vm);  [in mock_context()]
      44  i915_vm_put(&ppgtt->vm);  [in mock_context()]

/drivers/gpu/drm/i915/gvt/

scheduler.c
     436  struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ce->vm);  [in set_context_ppgtt_from_shadow(), local]
     440  set_dma_address(ppgtt->pd, mm->ppgtt_mm.shadow_pdps[0]);  [in set_context_ppgtt_from_shadow()]
     444  i915_pd_entry(ppgtt->pd, i);  [in set_context_ppgtt_from_shadow()]
     549  if (!bb->ppgtt) {  [in prepare_shadow_batch_buffer()]
    1294  struct i915_ppgtt *ppgtt)  [in i915_context_ppgtt_root_restore(), argument]
    1298  if (i915_vm_is_4lvl(&ppgtt->vm)) {  [in i915_context_ppgtt_root_restore()]
    1299  set_dma_address(ppgtt->pd, s->i915_context_pml4);  [in i915_context_ppgtt_root_restore()]
    1303  i915_pd_entry(ppgtt->pd, i);  [in i915_context_ppgtt_root_restore()]
    1355  struct i915_ppgtt *ppgtt)  [in i915_context_ppgtt_root_save(), argument]
    1359  if (i915_vm_is_4lvl(&ppgtt->vm)) {  [in i915_context_ppgtt_root_save()]
     [all …]

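The GVT-g scheduler swaps shadow PPGTT roots into the host context and back: i915_context_ppgtt_root_save()/_restore() patch either the single PML4 root (4-level) or each of the four page directories (3-level) with set_dma_address(). A hedged sketch of the restore direction; the saved_pml4/saved_pdps parameters stand in for the vGPU submission state and are not the driver's actual fields:

    /* Sketch of the restore direction; the 'saved_*' parameters are placeholders. */
    static void restore_ppgtt_roots(struct i915_ppgtt *ppgtt,
                                    dma_addr_t saved_pml4,
                                    const dma_addr_t *saved_pdps)
    {
            if (i915_vm_is_4lvl(&ppgtt->vm)) {
                    /* One PML4 root for a 4-level PPGTT. */
                    set_dma_address(ppgtt->pd, saved_pml4);
            } else {
                    unsigned int i;

                    /* Four page directories for a legacy 3-level PPGTT. */
                    for (i = 0; i < 4; i++) {
                            struct i915_page_directory *pd =
                                    i915_pd_entry(ppgtt->pd, i);

                            set_dma_address(pd, saved_pdps[i]);
                    }
            }
    }
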
scheduler.h
     133  bool ppgtt;  [member]

cmd_parser.c
    1914  bb->ppgtt = (s->buf_addr_type == GTT_BUFFER) ? false : true;  [in perform_bb_shadow()]
    1927  if (bb->ppgtt)  [in perform_bb_shadow()]

/drivers/gpu/drm/i915/gem/

i915_gem_context.c
    1361  struct i915_ppgtt *ppgtt;  [in i915_gem_create_context(), local]
    1363  ppgtt = i915_ppgtt_create(&i915->gt);  [in i915_gem_create_context()]
    1364  if (IS_ERR(ppgtt)) {  [in i915_gem_create_context()]
    1366  PTR_ERR(ppgtt));  [in i915_gem_create_context()]
    1367  err = PTR_ERR(ppgtt);  [in i915_gem_create_context()]
    1370  vm = &ppgtt->vm;  [in i915_gem_create_context()]
    1531  struct i915_ppgtt *ppgtt;  [in i915_gem_vm_create_ioctl(), local]
    1541  ppgtt = i915_ppgtt_create(&i915->gt);  [in i915_gem_vm_create_ioctl()]
    1542  if (IS_ERR(ppgtt))  [in i915_gem_vm_create_ioctl()]
    1543  return PTR_ERR(ppgtt);  [in i915_gem_vm_create_ioctl()]
    [all …]