Lines matching refs: mem (identifier cross-reference over drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c; each entry shows the source line, its containing function, and whether mem is an argument or a local)
70 struct kgd_mem *mem) in kfd_mem_is_attached() argument
74 list_for_each_entry(entry, &mem->attachments, list) in kfd_mem_is_attached()
88 uint64_t mem; in amdgpu_amdkfd_gpuvm_init_mem_limits() local
91 mem = si.freeram - si.freehigh; in amdgpu_amdkfd_gpuvm_init_mem_limits()
92 mem *= si.mem_unit; in amdgpu_amdkfd_gpuvm_init_mem_limits()
95 kfd_mem_limit.max_system_mem_limit = mem - (mem >> 4); in amdgpu_amdkfd_gpuvm_init_mem_limits()
96 kfd_mem_limit.max_ttm_mem_limit = (mem >> 1) - (mem >> 3); in amdgpu_amdkfd_gpuvm_init_mem_limits()
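
The arithmetic above sizes two global limits from the free low memory seen at init time: the system-memory limit is 15/16 of it and the TTM limit is 3/8 of it ((mem >> 1) - (mem >> 3) = 1/2 - 1/8). A minimal standalone sketch of that computation; the field names mirror struct sysinfo, but the sample values are made up:

    /* Model of the limit arithmetic in amdgpu_amdkfd_gpuvm_init_mem_limits().
     * The sysinfo snapshot below is illustrative, not a real measurement. */
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t freeram  = 16ULL << 30; /* 16 GiB free */
        uint64_t freehigh = 0;           /* no highmem on 64-bit */
        uint64_t mem_unit = 1;           /* bytes per sysinfo unit */

        uint64_t mem = (freeram - freehigh) * mem_unit;

        /* mem - (mem >> 4) == 15/16 of free low memory */
        uint64_t max_system_mem_limit = mem - (mem >> 4);
        /* (mem >> 1) - (mem >> 3) == 1/2 - 1/8 == 3/8 */
        uint64_t max_ttm_mem_limit = (mem >> 1) - (mem >> 3);

        printf("system limit: %llu MiB\n",
               (unsigned long long)(max_system_mem_limit >> 20));
        printf("ttm limit:    %llu MiB\n",
               (unsigned long long)(max_ttm_mem_limit >> 20));
        return 0;
    }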
401 static uint64_t get_pte_flags(struct amdgpu_device *adev, struct kgd_mem *mem) in get_pte_flags() argument
403 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in get_pte_flags()
404 bool coherent = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_COHERENT; in get_pte_flags()
405 bool uncached = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_UNCACHED; in get_pte_flags()
411 if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE) in get_pte_flags()
413 if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE) in get_pte_flags()
418 if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM) { in get_pte_flags()
433 !(mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM)) in get_pte_flags()
436 } else if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM) { in get_pte_flags()
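
get_pte_flags() translates the allocation flags tested above into page-table permission bits. A toy version of just that translation: the KFD_IOC_ALLOC_MEM_FLAGS_* names come from the listing, while the PTE_* bit values are invented placeholders for the driver's real mapping-flag bits:

    /* Toy model of get_pte_flags(): alloc flags -> PTE permission bits.
     * Bit values are illustrative only. */
    #include <stdint.h>

    #define KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE   (1u << 0)
    #define KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE (1u << 1)

    #define PTE_READABLE   (1ull << 0)
    #define PTE_WRITEABLE  (1ull << 1)
    #define PTE_EXECUTABLE (1ull << 2)

    uint64_t toy_get_pte_flags(uint32_t alloc_flags)
    {
        uint64_t pte = PTE_READABLE;    /* mappings are always readable */

        if (alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE)
            pte |= PTE_WRITEABLE;
        if (alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE)
            pte |= PTE_EXECUTABLE;
        return pte;
    }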
466 kfd_mem_dmamap_userptr(struct kgd_mem *mem, in kfd_mem_dmamap_userptr() argument
470 mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmamap_userptr()
475 struct ttm_tt *src_ttm = mem->bo->tbo.ttm; in kfd_mem_dmamap_userptr()
530 kfd_mem_dmamap_attachment(struct kgd_mem *mem, in kfd_mem_dmamap_attachment() argument
537 return kfd_mem_dmamap_userptr(mem, attachment); in kfd_mem_dmamap_attachment()
547 kfd_mem_dmaunmap_userptr(struct kgd_mem *mem, in kfd_mem_dmaunmap_userptr() argument
551 mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_dmaunmap_userptr()
581 kfd_mem_dmaunmap_attachment(struct kgd_mem *mem, in kfd_mem_dmaunmap_attachment() argument
588 kfd_mem_dmaunmap_userptr(mem, attachment); in kfd_mem_dmaunmap_attachment()
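
Both userptr helpers pick a DMA direction from the WRITABLE allocation flag before calling the DMA-mapping API; presumably a writable allocation needs bidirectional transfers while a read-only one only moves data to the device. A sketch with a local enum standing in for the kernel's enum dma_data_direction:

    /* Direction selection as suggested by the WRITABLE checks above.
     * toy_dma_dir stands in for the kernel's enum dma_data_direction. */
    enum toy_dma_dir { TOY_DMA_BIDIRECTIONAL, TOY_DMA_TO_DEVICE };

    static inline enum toy_dma_dir
    userptr_dma_direction(unsigned int alloc_flags, unsigned int writable_flag)
    {
        return (alloc_flags & writable_flag) ?
            TOY_DMA_BIDIRECTIONAL : TOY_DMA_TO_DEVICE;
    }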
599 kfd_mem_attach_userptr(struct amdgpu_device *adev, struct kgd_mem *mem, in kfd_mem_attach_userptr() argument
602 unsigned long bo_size = mem->bo->tbo.base.size; in kfd_mem_attach_userptr()
606 ret = amdgpu_bo_reserve(mem->bo, false); in kfd_mem_attach_userptr()
613 ttm_bo_type_sg, mem->bo->tbo.base.resv, in kfd_mem_attach_userptr()
615 amdgpu_bo_unreserve(mem->bo); in kfd_mem_attach_userptr()
620 (*bo)->parent = amdgpu_bo_ref(mem->bo); in kfd_mem_attach_userptr()
626 kfd_mem_attach_dmabuf(struct amdgpu_device *adev, struct kgd_mem *mem, in kfd_mem_attach_dmabuf() argument
632 if (!mem->dmabuf) { in kfd_mem_attach_dmabuf()
633 mem->dmabuf = amdgpu_gem_prime_export(&mem->bo->tbo.base, in kfd_mem_attach_dmabuf()
634 mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE ? in kfd_mem_attach_dmabuf()
636 if (IS_ERR(mem->dmabuf)) { in kfd_mem_attach_dmabuf()
637 ret = PTR_ERR(mem->dmabuf); in kfd_mem_attach_dmabuf()
638 mem->dmabuf = NULL; in kfd_mem_attach_dmabuf()
643 gobj = amdgpu_gem_prime_import(adev_to_drm(adev), mem->dmabuf); in kfd_mem_attach_dmabuf()
651 dma_buf_put(mem->dmabuf); in kfd_mem_attach_dmabuf()
655 (*bo)->parent = amdgpu_bo_ref(mem->bo); in kfd_mem_attach_dmabuf()
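
kfd_mem_attach_dmabuf() appears to export the BO as a dma-buf only on first use, caching the handle in mem->dmabuf and resetting it to NULL when the export fails so a later attach can retry. A generic create-once-and-cache sketch of that shape; the types and toy_export() are stand-ins:

    /* Create-on-first-use caching, as in kfd_mem_attach_dmabuf(). */
    struct toy_dmabuf;
    struct toy_mem { struct toy_dmabuf *dmabuf; };

    extern struct toy_dmabuf *toy_export(struct toy_mem *mem); /* may fail */

    static int toy_attach_dmabuf(struct toy_mem *mem)
    {
        if (!mem->dmabuf) {
            mem->dmabuf = toy_export(mem);
            if (!mem->dmabuf)
                return -1;  /* export failed, cache stays empty */
        }
        /* ... import mem->dmabuf into the peer device ... */
        return 0;
    }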
673 static int kfd_mem_attach(struct amdgpu_device *adev, struct kgd_mem *mem, in kfd_mem_attach() argument
676 struct amdgpu_device *bo_adev = amdgpu_ttm_adev(mem->bo->tbo.bdev); in kfd_mem_attach()
677 unsigned long bo_size = mem->bo->tbo.base.size; in kfd_mem_attach()
678 uint64_t va = mem->va; in kfd_mem_attach()
698 if (adev == bo_adev || (mem->domain == AMDGPU_GEM_DOMAIN_VRAM && in kfd_mem_attach()
704 bo[i] = mem->bo; in kfd_mem_attach()
711 } else if (amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm)) { in kfd_mem_attach()
714 ret = kfd_mem_attach_userptr(adev, mem, &bo[i]); in kfd_mem_attach()
717 } else if (mem->domain == AMDGPU_GEM_DOMAIN_GTT && in kfd_mem_attach()
718 mem->bo->tbo.type != ttm_bo_type_sg) { in kfd_mem_attach()
724 ret = kfd_mem_attach_dmabuf(adev, mem, &bo[i]); in kfd_mem_attach()
732 bo[i] = mem->bo; in kfd_mem_attach()
746 attachment[i]->pte_flags = get_pte_flags(adev, mem); in kfd_mem_attach()
748 list_add(&attachment[i]->list, &mem->attachments); in kfd_mem_attach()
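
From the branches visible in the listing, kfd_mem_attach() chooses one attachment strategy per GPU: reuse the original BO when mapping on the owning device (or when a VRAM BO is peer-accessible), wrap userptr memory in a per-GPU SG BO, import GTT BOs through a dma-buf, and otherwise fall back to the original BO (bo[i] = mem->bo). A hedged reconstruction of just that decision, with the real checks reduced to booleans:

    /* Rough shape of the per-GPU choice in kfd_mem_attach(); details elided. */
    enum attach_kind { ATTACH_SAME_BO, ATTACH_USERPTR_SG, ATTACH_DMABUF };

    enum attach_kind choose_attachment(int same_device, int vram_peer_ok,
                                       int is_userptr, int is_gtt)
    {
        if (same_device || vram_peer_ok)
            return ATTACH_SAME_BO;    /* map the original BO directly */
        if (is_userptr)
            return ATTACH_USERPTR_SG; /* per-GPU SG BO over user pages */
        if (is_gtt)
            return ATTACH_DMABUF;     /* import through a dma-buf */
        return ATTACH_SAME_BO;        /* fallback, mirrors bo[i] = mem->bo */
    }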
782 static void add_kgd_mem_to_kfd_bo_list(struct kgd_mem *mem, in add_kgd_mem_to_kfd_bo_list() argument
786 struct ttm_validate_buffer *entry = &mem->validate_list; in add_kgd_mem_to_kfd_bo_list()
787 struct amdgpu_bo *bo = mem->bo; in add_kgd_mem_to_kfd_bo_list()
800 static void remove_kgd_mem_from_kfd_bo_list(struct kgd_mem *mem, in remove_kgd_mem_from_kfd_bo_list() argument
805 bo_list_entry = &mem->validate_list; in remove_kgd_mem_from_kfd_bo_list()
823 static int init_user_pages(struct kgd_mem *mem, uint64_t user_addr) in init_user_pages() argument
825 struct amdkfd_process_info *process_info = mem->process_info; in init_user_pages()
826 struct amdgpu_bo *bo = mem->bo; in init_user_pages()
856 amdgpu_bo_placement_from_domain(bo, mem->domain); in init_user_pages()
899 static int reserve_bo_and_vm(struct kgd_mem *mem, in reserve_bo_and_vm() argument
903 struct amdgpu_bo *bo = mem->bo; in reserve_bo_and_vm()
910 ctx->sync = &mem->sync; in reserve_bo_and_vm()
949 static int reserve_bo_and_cond_vms(struct kgd_mem *mem, in reserve_bo_and_cond_vms() argument
953 struct amdgpu_bo *bo = mem->bo; in reserve_bo_and_cond_vms()
961 ctx->sync = &mem->sync; in reserve_bo_and_cond_vms()
966 list_for_each_entry(entry, &mem->attachments, list) { in reserve_bo_and_cond_vms()
988 list_for_each_entry(entry, &mem->attachments, list) { in reserve_bo_and_cond_vms()
1042 static void unmap_bo_from_gpuvm(struct kgd_mem *mem, in unmap_bo_from_gpuvm() argument
1056 kfd_mem_dmaunmap_attachment(mem, entry); in unmap_bo_from_gpuvm()
1059 static int update_gpuvm_pte(struct kgd_mem *mem, in update_gpuvm_pte() argument
1068 ret = kfd_mem_dmamap_attachment(mem, entry); in update_gpuvm_pte()
1082 static int map_bo_to_gpuvm(struct kgd_mem *mem, in map_bo_to_gpuvm() argument
1103 ret = update_gpuvm_pte(mem, entry, sync, table_freed); in map_bo_to_gpuvm()
1112 unmap_bo_from_gpuvm(mem, entry, sync); in map_bo_to_gpuvm()
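
map_bo_to_gpuvm() updates the page tables via update_gpuvm_pte() after establishing the mapping and, judging by the unmap_bo_from_gpuvm() call on its error path, unwinds the mapping when the PTE update fails. A generic sketch of that map-then-rollback shape; the toy_* helpers are stand-ins:

    /* Map, then roll back on a failed follow-up step. */
    extern int toy_map(void *entry);
    extern int toy_update_pte(void *entry);
    extern void toy_unmap(void *entry);

    static int toy_map_with_rollback(void *entry)
    {
        int ret = toy_map(entry);
        if (ret)
            return ret;

        ret = toy_update_pte(entry);
        if (ret) {
            toy_unmap(entry);   /* undo the partial mapping */
            return ret;
        }
        return 0;
    }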
1381 void *drm_priv, struct kgd_mem **mem, in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu() argument
1428 *mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1429 if (!*mem) { in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1433 INIT_LIST_HEAD(&(*mem)->attachments); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1434 mutex_init(&(*mem)->lock); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1435 (*mem)->aql_queue = !!(flags & KFD_IOC_ALLOC_MEM_FLAGS_AQL_QUEUE_MEM); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1441 if ((*mem)->aql_queue) in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1444 (*mem)->alloc_flags = flags; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1446 amdgpu_sync_create(&(*mem)->sync); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1474 bo->kfd_bo = *mem; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1475 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1479 (*mem)->va = va; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1480 (*mem)->domain = domain; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1481 (*mem)->mapped_to_gpu_memory = 0; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1482 (*mem)->process_info = avm->process_info; in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1483 add_kgd_mem_to_kfd_bo_list(*mem, avm->process_info, user_addr); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1486 ret = init_user_pages(*mem, user_addr); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1497 remove_kgd_mem_from_kfd_bo_list(*mem, avm->process_info); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1505 mutex_destroy(&(*mem)->lock); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
1509 kfree(*mem); in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
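
The allocation path follows the kernel's goto-unwind idiom: kzalloc the kgd_mem, initialize its list head, mutex, and sync object, and on any later failure release exactly what was set up so far, ending with kfree(*mem). A self-contained sketch of that pattern with stand-in steps; toy_create_bo() is hypothetical:

    /* Goto-unwind error handling, as in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu(). */
    #include <stdlib.h>

    struct toy_kgd_mem { int placeholder; };

    extern int toy_create_bo(struct toy_kgd_mem *mem); /* may fail */

    static int toy_alloc_memory_of_gpu(struct toy_kgd_mem **mem)
    {
        int ret;

        *mem = calloc(1, sizeof(**mem)); /* kzalloc analogue */
        if (!*mem)
            return -1;
        /* list/mutex/sync initialization would go here */

        ret = toy_create_bo(*mem);
        if (ret)
            goto err_bo_create;

        return 0;

    err_bo_create:
        /* unwind in reverse order of setup, as the real code does */
        free(*mem);
        *mem = NULL;
        return ret;
    }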
1519 struct kgd_dev *kgd, struct kgd_mem *mem, void *drm_priv, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu() argument
1522 struct amdkfd_process_info *process_info = mem->process_info; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1523 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1531 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1532 mapped_to_gpu_memory = mem->mapped_to_gpu_memory; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1533 is_imported = mem->is_imported; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1534 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1541 mem->va, bo_size); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1546 bo_list_entry = &mem->validate_list; in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1552 amdgpu_mn_unregister(mem->bo); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1554 ret = reserve_bo_and_cond_vms(mem, NULL, BO_VM_ALL, &ctx); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1562 amdgpu_amdkfd_remove_eviction_fence(mem->bo, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1564 pr_debug("Release VA 0x%llx - 0x%llx\n", mem->va, in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1565 mem->va + bo_size * (1 + mem->aql_queue)); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1570 list_for_each_entry_safe(entry, tmp, &mem->attachments, list) in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1574 amdgpu_sync_free(&mem->sync); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1579 if (mem->bo->tbo.sg) { in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1580 sg_free_table(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1581 kfree(mem->bo->tbo.sg); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1588 if ((mem->bo->preferred_domains == AMDGPU_GEM_DOMAIN_VRAM) && in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1596 drm_vma_node_revoke(&mem->bo->tbo.base.vma_node, drm_priv); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1597 if (mem->dmabuf) in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1598 dma_buf_put(mem->dmabuf); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1599 drm_gem_object_put(&mem->bo->tbo.base); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1600 mutex_destroy(&mem->lock); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
1601 kfree(mem); in amdgpu_amdkfd_gpuvm_free_memory_of_gpu()
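
The free path samples mapped_to_gpu_memory and is_imported under mem->lock before acting on them, so the teardown decisions don't race with a concurrent map or unmap. A C11-threads sketch of that snapshot-under-lock shape, with field names mirroring the listing:

    /* Snapshot shared state under the lock, act on the copy afterwards. */
    #include <threads.h>

    struct toy_mem {
        mtx_t lock;
        unsigned mapped_to_gpu_memory;
        int is_imported;
    };

    static void toy_snapshot(struct toy_mem *mem,
                             unsigned *mapped, int *imported)
    {
        mtx_lock(&mem->lock);
        *mapped = mem->mapped_to_gpu_memory;
        *imported = mem->is_imported;
        mtx_unlock(&mem->lock);
        /* act on the snapshot after dropping the lock */
    }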
1607 struct kgd_dev *kgd, struct kgd_mem *mem, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu() argument
1620 bo = mem->bo; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1630 mutex_lock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1638 is_invalid_userptr = atomic_read(&mem->invalid); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1642 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1644 domain = mem->domain; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1648 mem->va, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1649 mem->va + bo_size * (1 + mem->aql_queue), in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1652 if (!kfd_mem_is_attached(avm, mem)) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1653 ret = kfd_mem_attach(adev, mem, avm, mem->aql_queue); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1658 ret = reserve_bo_and_vm(mem, avm, &ctx); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1675 if (mem->mapped_to_gpu_memory == 0 && in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1688 list_for_each_entry(entry, &mem->attachments, list) { in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1695 ret = map_bo_to_gpuvm(mem, entry, ctx.sync, in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1709 mem->mapped_to_gpu_memory++; in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1711 mem->mapped_to_gpu_memory); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1731 mutex_unlock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1732 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_map_memory_to_gpu()
1737 struct kgd_dev *kgd, struct kgd_mem *mem, void *drm_priv) in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu() argument
1741 unsigned long bo_size = mem->bo->tbo.base.size; in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1746 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1748 ret = reserve_bo_and_cond_vms(mem, avm, BO_VM_MAPPED, &ctx); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1762 mem->va, in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1763 mem->va + bo_size * (1 + mem->aql_queue), in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1766 list_for_each_entry(entry, &mem->attachments, list) { in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1773 unmap_bo_from_gpuvm(mem, entry, ctx.sync); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1776 mem->mapped_to_gpu_memory--; in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1778 mem->mapped_to_gpu_memory); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1784 if (mem->mapped_to_gpu_memory == 0 && in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1785 !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1786 !mem->bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1787 amdgpu_amdkfd_remove_eviction_fence(mem->bo, in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
1793 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu()
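
Mapping and unmapping keep a per-BO count: the map path increments mem->mapped_to_gpu_memory once per GPU mapping, and the unmap path above decrements it, removing the eviction fence only on the last unmap of a BO that is neither a userptr nor pinned. A sketch of that bookkeeping with stand-in types:

    /* Last-unmap bookkeeping as suggested by the listing. */
    struct toy_bo { int pin_count; int is_userptr; };
    struct toy_mapped { struct toy_bo *bo; unsigned mapped_to_gpu_memory; };

    extern void toy_remove_eviction_fence(struct toy_bo *bo);

    static void toy_unmap_one(struct toy_mapped *mem)
    {
        mem->mapped_to_gpu_memory--;

        if (mem->mapped_to_gpu_memory == 0 &&
            !mem->bo->is_userptr && !mem->bo->pin_count)
            toy_remove_eviction_fence(mem->bo);
    }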
1798 struct kgd_dev *kgd, struct kgd_mem *mem, bool intr) in amdgpu_amdkfd_gpuvm_sync_memory() argument
1805 mutex_lock(&mem->lock); in amdgpu_amdkfd_gpuvm_sync_memory()
1806 amdgpu_sync_clone(&mem->sync, &sync); in amdgpu_amdkfd_gpuvm_sync_memory()
1807 mutex_unlock(&mem->lock); in amdgpu_amdkfd_gpuvm_sync_memory()
1815 struct kgd_mem *mem, void **kptr, uint64_t *size) in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel() argument
1818 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1825 mutex_lock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1846 bo, mem->process_info->eviction_fence); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1853 mutex_unlock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1861 mutex_unlock(&mem->process_info->lock); in amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel()
1867 struct kfd_vm_fault_info *mem) in amdgpu_amdkfd_gpuvm_get_vm_fault_info() argument
1873 *mem = *adev->gmc.vm_fault_info; in amdgpu_amdkfd_gpuvm_get_vm_fault_info()
1883 struct kgd_mem **mem, uint64_t *size, in amdgpu_amdkfd_gpuvm_import_dmabuf() argument
1907 *mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1908 if (!*mem) in amdgpu_amdkfd_gpuvm_import_dmabuf()
1913 kfree(*mem); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1923 INIT_LIST_HEAD(&(*mem)->attachments); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1924 mutex_init(&(*mem)->lock); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1926 (*mem)->alloc_flags = in amdgpu_amdkfd_gpuvm_import_dmabuf()
1933 (*mem)->bo = bo; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1934 (*mem)->va = va; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1935 (*mem)->domain = (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ? in amdgpu_amdkfd_gpuvm_import_dmabuf()
1937 (*mem)->mapped_to_gpu_memory = 0; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1938 (*mem)->process_info = avm->process_info; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1939 add_kgd_mem_to_kfd_bo_list(*mem, avm->process_info, false); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1940 amdgpu_sync_create(&(*mem)->sync); in amdgpu_amdkfd_gpuvm_import_dmabuf()
1941 (*mem)->is_imported = true; in amdgpu_amdkfd_gpuvm_import_dmabuf()
1957 int amdgpu_amdkfd_evict_userptr(struct kgd_mem *mem, in amdgpu_amdkfd_evict_userptr() argument
1960 struct amdkfd_process_info *process_info = mem->process_info; in amdgpu_amdkfd_evict_userptr()
1964 atomic_inc(&mem->invalid); in amdgpu_amdkfd_evict_userptr()
1987 struct kgd_mem *mem, *tmp_mem; in update_invalid_user_pages() local
1995 list_for_each_entry_safe(mem, tmp_mem, in update_invalid_user_pages()
1998 if (!atomic_read(&mem->invalid)) in update_invalid_user_pages()
2001 bo = mem->bo; in update_invalid_user_pages()
2014 list_move_tail(&mem->validate_list.head, in update_invalid_user_pages()
2022 list_for_each_entry(mem, &process_info->userptr_inval_list, in update_invalid_user_pages()
2024 invalid = atomic_read(&mem->invalid); in update_invalid_user_pages()
2031 bo = mem->bo; in update_invalid_user_pages()
2052 if (atomic_cmpxchg(&mem->invalid, invalid, 0) != invalid) in update_invalid_user_pages()
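
update_invalid_user_pages() clears mem->invalid with atomic_cmpxchg() only if the counter still holds the value sampled before the pages were refreshed, so an invalidation that raced with the update is not lost. A runnable C11 stdatomic sketch of that clear-if-unchanged step:

    /* Reset the counter to 0 only if nobody bumped it in the meantime. */
    #include <stdatomic.h>
    #include <stdbool.h>

    static bool clear_if_unchanged(atomic_uint *invalid, unsigned sampled)
    {
        unsigned expected = sampled;

        /* Returns false if another invalidation raced in; the caller
         * then leaves the BO marked invalid and retries later. */
        return atomic_compare_exchange_strong(invalid, &expected, 0);
    }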
2073 struct kgd_mem *mem, *tmp_mem; in validate_invalid_user_pages() local
2097 list_for_each_entry(mem, &process_info->userptr_inval_list, in validate_invalid_user_pages()
2099 list_add_tail(&mem->resv_list.head, &resv_list); in validate_invalid_user_pages()
2100 mem->resv_list.bo = mem->validate_list.bo; in validate_invalid_user_pages()
2101 mem->resv_list.num_shared = mem->validate_list.num_shared; in validate_invalid_user_pages()
2117 list_for_each_entry_safe(mem, tmp_mem, in validate_invalid_user_pages()
2122 bo = mem->bo; in validate_invalid_user_pages()
2126 amdgpu_bo_placement_from_domain(bo, mem->domain); in validate_invalid_user_pages()
2134 list_move_tail(&mem->validate_list.head, in validate_invalid_user_pages()
2143 list_for_each_entry(attachment, &mem->attachments, list) { in validate_invalid_user_pages()
2147 kfd_mem_dmaunmap_attachment(mem, attachment); in validate_invalid_user_pages()
2148 ret = update_gpuvm_pte(mem, attachment, &sync, NULL); in validate_invalid_user_pages()
2152 atomic_inc(&mem->invalid); in validate_invalid_user_pages()
2267 struct kgd_mem *mem; in amdgpu_amdkfd_gpuvm_restore_process_bos() local
2295 list_for_each_entry(mem, &process_info->kfd_bo_list, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2298 list_add_tail(&mem->resv_list.head, &ctx.list); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2299 mem->resv_list.bo = mem->validate_list.bo; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2300 mem->resv_list.num_shared = mem->validate_list.num_shared; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2324 list_for_each_entry(mem, &process_info->kfd_bo_list, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2327 struct amdgpu_bo *bo = mem->bo; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2328 uint32_t domain = mem->domain; in amdgpu_amdkfd_gpuvm_restore_process_bos()
2349 list_for_each_entry(attachment, &mem->attachments, list) { in amdgpu_amdkfd_gpuvm_restore_process_bos()
2356 kfd_mem_dmaunmap_attachment(mem, attachment); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2357 ret = update_gpuvm_pte(mem, attachment, &sync_obj, NULL); in amdgpu_amdkfd_gpuvm_restore_process_bos()
2396 list_for_each_entry(mem, &process_info->kfd_bo_list, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2398 if (mem->bo->tbo.pin_count) in amdgpu_amdkfd_gpuvm_restore_process_bos()
2401 amdgpu_bo_fence(mem->bo, in amdgpu_amdkfd_gpuvm_restore_process_bos()
2421 int amdgpu_amdkfd_add_gws_to_process(void *info, void *gws, struct kgd_mem **mem) in amdgpu_amdkfd_add_gws_to_process() argument
2430 *mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL); in amdgpu_amdkfd_add_gws_to_process()
2431 if (!*mem) in amdgpu_amdkfd_add_gws_to_process()
2434 mutex_init(&(*mem)->lock); in amdgpu_amdkfd_add_gws_to_process()
2435 INIT_LIST_HEAD(&(*mem)->attachments); in amdgpu_amdkfd_add_gws_to_process()
2436 (*mem)->bo = amdgpu_bo_ref(gws_bo); in amdgpu_amdkfd_add_gws_to_process()
2437 (*mem)->domain = AMDGPU_GEM_DOMAIN_GWS; in amdgpu_amdkfd_add_gws_to_process()
2438 (*mem)->process_info = process_info; in amdgpu_amdkfd_add_gws_to_process()
2439 add_kgd_mem_to_kfd_bo_list(*mem, process_info, false); in amdgpu_amdkfd_add_gws_to_process()
2440 amdgpu_sync_create(&(*mem)->sync); in amdgpu_amdkfd_add_gws_to_process()
2444 mutex_lock(&(*mem)->process_info->lock); in amdgpu_amdkfd_add_gws_to_process()
2465 mutex_unlock(&(*mem)->process_info->lock); in amdgpu_amdkfd_add_gws_to_process()
2473 mutex_unlock(&(*mem)->process_info->lock); in amdgpu_amdkfd_add_gws_to_process()
2474 amdgpu_sync_free(&(*mem)->sync); in amdgpu_amdkfd_add_gws_to_process()
2475 remove_kgd_mem_from_kfd_bo_list(*mem, process_info); in amdgpu_amdkfd_add_gws_to_process()
2477 mutex_destroy(&(*mem)->lock); in amdgpu_amdkfd_add_gws_to_process()
2478 kfree(*mem); in amdgpu_amdkfd_add_gws_to_process()
2479 *mem = NULL; in amdgpu_amdkfd_add_gws_to_process()
2483 int amdgpu_amdkfd_remove_gws_from_process(void *info, void *mem) in amdgpu_amdkfd_remove_gws_from_process() argument
2487 struct kgd_mem *kgd_mem = (struct kgd_mem *)mem; in amdgpu_amdkfd_remove_gws_from_process()
2507 kfree(mem); in amdgpu_amdkfd_remove_gws_from_process()