Lines matching references to page_size
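Standalone, hedged C sketches of the page-size arithmetic visible in these matches follow after the listing.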

60 u32 num_curr_pgs, page_size;  in alloc_device_memory()  local
65 page_size = hdev->asic_prop.dram_page_size; in alloc_device_memory()
66 num_pgs = DIV_ROUND_UP_ULL(args->alloc.mem_size, page_size); in alloc_device_memory()
67 total_size = num_pgs * page_size; in alloc_device_memory()
95 phys_pg_pack->page_size = page_size; in alloc_device_memory()
108 phys_pg_pack->pages[i] = paddr + i * page_size; in alloc_device_memory()
113 page_size); in alloc_device_memory()
153 page_size); in alloc_device_memory()
302 phys_pg_pack->page_size); in free_phys_pg_pack()
309 phys_pg_pack->page_size); in free_phys_pg_pack()
587 bool is_align_pow_2 = is_power_of_2(va_range->page_size); in get_va_block()
599 size = DIV_ROUND_UP_ULL(size, va_range->page_size) * in get_va_block()
600 va_range->page_size; in get_va_block()
607 do_div(tmp_hint_addr, va_range->page_size))) { in get_va_block()
737 max(alignment, ctx->va_range[type]->page_size), in hl_reserve_va_block()
820 u32 npages, page_size = PAGE_SIZE, in init_phys_pg_pack_from_userptr() local
821 huge_page_size = ctx->hdev->asic_prop.pmmu_huge.page_size; in init_phys_pg_pack_from_userptr()
822 u32 pgs_in_huge_page = huge_page_size >> __ffs(page_size); in init_phys_pg_pack_from_userptr()
859 page_size = huge_page_size; in init_phys_pg_pack_from_userptr()
863 page_mask = ~(((u64) page_size) - 1); in init_phys_pg_pack_from_userptr()
873 phys_pg_pack->page_size = page_size; in init_phys_pg_pack_from_userptr()
874 phys_pg_pack->total_size = total_npages * page_size; in init_phys_pg_pack_from_userptr()
883 phys_pg_pack->offset = dma_addr & (page_size - 1); in init_phys_pg_pack_from_userptr()
889 dma_addr += page_size; in init_phys_pg_pack_from_userptr()
924 u32 page_size = phys_pg_pack->page_size; in map_phys_pg_pack() local
931 rc = hl_mmu_map_page(ctx, next_vaddr, paddr, page_size, in map_phys_pg_pack()
942 next_vaddr += page_size; in map_phys_pg_pack()
952 if (hl_mmu_unmap_page(ctx, next_vaddr, page_size, in map_phys_pg_pack()
957 phys_pg_pack->pages[i], page_size); in map_phys_pg_pack()
959 next_vaddr += page_size; in map_phys_pg_pack()
988 u32 page_size; in unmap_phys_pg_pack() local
991 page_size = phys_pg_pack->page_size; in unmap_phys_pg_pack()
994 for (i = 0 ; i < phys_pg_pack->npages ; i++, next_vaddr += page_size) { in unmap_phys_pg_pack()
995 if (hl_mmu_unmap_page(ctx, next_vaddr, page_size, in unmap_phys_pg_pack()
1072 u32 page_size = hdev->asic_prop.pmmu.page_size, in map_device_va() local
1073 huge_page_size = hdev->asic_prop.pmmu_huge.page_size; in map_device_va()
1095 if (phys_pg_pack->page_size == page_size) { in map_device_va()
1103 va_block_align = page_size; in map_device_va()
1139 va_block_align = hdev->asic_prop.dmmu.page_size; in map_device_va()
1302 if (phys_pg_pack->page_size == in unmap_device_va()
1303 hdev->asic_prop.pmmu.page_size) in unmap_device_va()
1325 if (!is_userptr && !is_power_of_2(phys_pg_pack->page_size)) in unmap_device_va()
1328 phys_pg_pack->page_size) * in unmap_device_va()
1329 phys_pg_pack->page_size; in unmap_device_va()
1331 vaddr &= ~(((u64) phys_pg_pack->page_size) - 1); in unmap_device_va()
1864 u64 start, u64 end, u32 page_size) in va_range_init() argument
1876 if (is_power_of_2(page_size)) { in va_range_init()
1900 va_range->page_size = page_size; in va_range_init()
2060 prop->dram_page_size : prop->dmmu.page_size; in hl_vm_ctx_init()
2063 host_page_size = prop->pmmu.page_size; in hl_vm_ctx_init()
2066 host_huge_page_size = prop->pmmu_huge.page_size; in hl_vm_ctx_init()
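
The alloc_device_memory() matches (source lines 65-67) round the requested allocation up to a whole number of DRAM pages. Below is a minimal standalone sketch of that rounding step; the page size here is a hypothetical stand-in for hdev->asic_prop.dram_page_size, and plain integer arithmetic stands in for DIV_ROUND_UP_ULL().

#include <stdint.h>
#include <stdio.h>

/* Round mem_size up to a whole number of pages, mirroring the
 * DIV_ROUND_UP_ULL() / total_size arithmetic at source lines 66-67.
 * page_size is a stand-in value and need not be a power of two. */
static uint64_t round_up_to_pages(uint64_t mem_size, uint32_t page_size,
                                  uint64_t *num_pgs)
{
        *num_pgs = (mem_size + page_size - 1) / page_size;
        return *num_pgs * page_size;
}

int main(void)
{
        uint64_t num_pgs;
        uint64_t total_size = round_up_to_pages(5000, 4096, &num_pgs);

        printf("num_pgs=%llu total_size=%llu\n",
               (unsigned long long)num_pgs,
               (unsigned long long)total_size);
        return 0;
}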
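
The get_va_block() matches (587, 599-607) note whether the VA range's page size is a power of two, round the requested size up to that page size, and test, via the do_div() remainder, whether a hint address is a multiple of it. A small userspace sketch of those three expressions, with is_power_of_2() and the do_div() remainder replaced by portable equivalents and hypothetical example values:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Portable stand-in for the kernel's is_power_of_2(). */
static bool is_pow2(uint64_t n)
{
        return n != 0 && (n & (n - 1)) == 0;
}

int main(void)
{
        uint64_t range_page_size = 4096; /* stand-in for va_range->page_size */
        uint64_t size = 10000;           /* hypothetical requested size */
        uint64_t hint_addr = 0x10000800; /* hypothetical hint address */

        bool is_align_pow_2 = is_pow2(range_page_size);

        /* Round the requested size up to the range's page size
         * (source lines 599-600). */
        size = ((size + range_page_size - 1) / range_page_size) *
               range_page_size;

        /* do_div() in the original yields the remainder: a nonzero
         * remainder means the hint address is not page-aligned
         * (source line 607). */
        bool hint_aligned = (hint_addr % range_page_size) == 0;

        printf("pow2=%d size=%llu hint_aligned=%d\n",
               is_align_pow_2, (unsigned long long)size, hint_aligned);
        return 0;
}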
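
init_phys_pg_pack_from_userptr() (820-889) derives its values from power-of-two host page sizes: the number of regular pages per huge page via __ffs(), the alignment mask, and the in-page offset of a DMA address. A standalone illustration of those three expressions, with __ffs() replaced by the GCC/Clang builtin __builtin_ctz() and hypothetical example values:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint32_t page_size = 4096;          /* stand-in for PAGE_SIZE */
        uint32_t huge_page_size = 2u << 20; /* stand-in for pmmu_huge.page_size */
        uint64_t dma_addr = 0x12345678;     /* hypothetical DMA address */

        /* __ffs(page_size) is the index of the lowest set bit, so for a
         * power-of-two page size this shift divides huge_page_size by
         * page_size (source line 822). */
        uint32_t pgs_in_huge_page = huge_page_size >> __builtin_ctz(page_size);

        /* Alignment mask and in-page offset (source lines 863 and 883). */
        uint64_t page_mask = ~(((uint64_t)page_size) - 1);
        uint64_t offset = dma_addr & (page_size - 1);

        printf("pgs_in_huge_page=%u aligned=0x%llx offset=0x%llx\n",
               pgs_in_huge_page,
               (unsigned long long)(dma_addr & page_mask),
               (unsigned long long)offset);
        return 0;
}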
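
map_phys_pg_pack() (924-959) advances the virtual address by page_size per page and, if hl_mmu_map_page() fails partway through, walks back over the pages already mapped and unmaps them. A hedged sketch of that map-with-rollback pattern, using hypothetical callbacks in place of hl_mmu_map_page() and hl_mmu_unmap_page():

#include <stdint.h>

/* Hypothetical per-page callbacks standing in for hl_mmu_map_page()
 * and hl_mmu_unmap_page(); both return 0 on success. */
typedef int (*map_page_fn)(uint64_t vaddr, uint64_t paddr, uint32_t page_size);
typedef int (*unmap_page_fn)(uint64_t vaddr, uint32_t page_size);

/* Map npages physical pages at consecutive virtual addresses; on a
 * failure, unmap whatever was already mapped before returning. */
int map_pg_pack(uint64_t vaddr, const uint64_t *pages, uint64_t npages,
                uint32_t page_size, map_page_fn map_page,
                unmap_page_fn unmap_page)
{
        uint64_t next_vaddr = vaddr;
        uint64_t i, j;

        for (i = 0; i < npages; i++) {
                if (map_page(next_vaddr, pages[i], page_size))
                        goto rollback;
                next_vaddr += page_size;
        }
        return 0;

rollback:
        /* Roll back only the pages mapped before the failure. */
        next_vaddr = vaddr;
        for (j = 0; j < i; j++, next_vaddr += page_size)
                unmap_page(next_vaddr, page_size);
        return -1;
}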
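
The unmap_device_va() matches (1302-1331) show two alignment idioms used when tearing a mapping down: rounding a size up to a whole number of (possibly non-power-of-two) pages, and masking the virtual address down to the start of its page. A minimal sketch of both expressions with a hypothetical page size; the exact branch structure around them is not reproduced here.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint32_t page_size = 4096;          /* stand-in for phys_pg_pack->page_size */
        uint64_t vaddr = 0x7f0000012345ULL; /* hypothetical mapped address */
        uint64_t size = 10000;              /* hypothetical mapping size */

        /* Round the size up to a whole page count, as in the
         * non-power-of-two branch at source lines 1325-1329. */
        uint64_t unmap_size = ((size + page_size - 1) / page_size) * page_size;

        /* Mask the virtual address down to its page boundary, as at
         * source line 1331 (meaningful for power-of-two page sizes). */
        uint64_t unmap_vaddr = vaddr & ~(((uint64_t)page_size) - 1);

        printf("unmap 0x%llx, %llu bytes\n",
               (unsigned long long)unmap_vaddr,
               (unsigned long long)unmap_size);
        return 0;
}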