Searched refs:page_index (Results 1 – 12 of 12) sorted by relevance

/drivers/android/
binder_trace.h
319 TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
320 TP_ARGS(alloc, page_index),
323 __field(size_t, page_index)
327 __entry->page_index = page_index;
330 __entry->proc, __entry->page_index)
334 TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
335 TP_ARGS(alloc, page_index));
338 TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
339 TP_ARGS(alloc, page_index));
342 TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
[all …]
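The binder hits above are tracepoint definitions that record which process touched which page of the binder buffer area. Below is a minimal sketch of such an event class, reconstructed from the visible TP_PROTO/TP_ARGS/__field fragments; the class and event names, the alloc->pid member, and the omitted TRACE_SYSTEM boilerplate are assumptions, not the file's exact contents.

#include <linux/tracepoint.h>

DECLARE_EVENT_CLASS(binder_page_class,		/* hypothetical class name */
	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
	TP_ARGS(alloc, page_index),
	TP_STRUCT__entry(
		__field(int, proc)		/* owning process */
		__field(size_t, page_index)	/* page within the buffer area */
	),
	TP_fast_assign(
		__entry->proc = alloc->pid;	/* assumes a pid member on binder_alloc */
		__entry->page_index = page_index;
	),
	TP_printk("proc=%d page_index=%zu",
		  __entry->proc, __entry->page_index)
);

/* The repeated TP_PROTO/TP_ARGS pairs in the hits are events reusing the class. */
DEFINE_EVENT(binder_page_class, binder_alloc_page_start,
	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
	TP_ARGS(alloc, page_index));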
binder_alloc_selftest.c
106 int page_index; in check_buffer_pages_allocated() local
111 page_index = (page_addr - alloc->buffer) / PAGE_SIZE; in check_buffer_pages_allocated()
112 if (!alloc->pages[page_index].page_ptr || in check_buffer_pages_allocated()
113 !list_empty(&alloc->pages[page_index].lru)) { in check_buffer_pages_allocated()
115 alloc->pages[page_index].page_ptr ? in check_buffer_pages_allocated()
116 "lru" : "free", page_index); in check_buffer_pages_allocated()
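The selftest above recovers a page's index by subtracting the start of the allocator's buffer area from the page address and dividing by PAGE_SIZE, then checks that the page is allocated and not on the LRU list. A userspace sketch of that index arithmetic, with the buffer base address and the 4 KiB page size as stand-in assumptions:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096UL	/* assumed 4 KiB pages */

/* Mirrors "(page_addr - alloc->buffer) / PAGE_SIZE" from the selftest. */
static size_t page_index_of(uintptr_t buffer_start, uintptr_t page_addr)
{
	return (page_addr - buffer_start) / PAGE_SIZE;
}

int main(void)
{
	uintptr_t buffer = 0x100000;	/* pretend start of the buffer area */

	assert(page_index_of(buffer, buffer) == 0);
	assert(page_index_of(buffer, buffer + 3 * PAGE_SIZE) == 3);
	printf("page index arithmetic checks out\n");
	return 0;
}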
/drivers/gpu/drm/amd/amdkfd/
kfd_events.c
69 uint32_t page_index; /* Index into the mmap aperture. */ member
152 page->page_index = 0; in allocate_signal_page()
154 page->page_index = list_tail_entry(&p->signal_event_pages, in allocate_signal_page()
156 event_pages)->page_index + 1; in allocate_signal_page()
160 pr_debug("page index is %d\n", page->page_index); in allocate_signal_page()
201 unsigned int page_index) in lookup_signal_page_by_index() argument
210 if (page->page_index == page_index) in lookup_signal_page_by_index()
234 return page->page_index | in make_signal_event_id()
433 *event_page_offset = (ev->signal_page->page_index | in kfd_event_create()
812 unsigned int page_index; in kfd_event_mmap() local
[all …]
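kfd_events.c hands each signal page a sequential page_index (zero for the first page, previous tail plus one afterwards) and then folds that index into event IDs and mmap offsets so the page can be looked up again later. A sketch of the pack/unpack step under an assumed bit layout; the real field widths are not visible in these hits.

#include <assert.h>
#include <stdint.h>

#define SLOT_BITS 12u	/* assumed width of the per-page slot field */

/* Pack a page index and a slot within that page into one event id,
 * in the spirit of make_signal_event_id()'s "page_index | ..." hit. */
static uint32_t make_event_id(uint32_t page_index, uint32_t slot)
{
	return (page_index << SLOT_BITS) | slot;
}

/* Inverse used by a lookup_signal_page_by_index()-style search. */
static uint32_t event_id_to_page_index(uint32_t id)
{
	return id >> SLOT_BITS;
}

int main(void)
{
	uint32_t id = make_event_id(3, 42);

	assert(event_id_to_page_index(id) == 3);
	assert((id & ((1u << SLOT_BITS) - 1)) == 42);
	return 0;
}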
/drivers/infiniband/core/
umem_odp.c
437 int page_index, in ib_umem_odp_map_dma_single_page() argument
458 if (!(umem->odp_data->dma_list[page_index])) { in ib_umem_odp_map_dma_single_page()
467 umem->odp_data->dma_list[page_index] = dma_addr | access_mask; in ib_umem_odp_map_dma_single_page()
468 umem->odp_data->page_list[page_index] = page; in ib_umem_odp_map_dma_single_page()
470 } else if (umem->odp_data->page_list[page_index] == page) { in ib_umem_odp_map_dma_single_page()
471 umem->odp_data->dma_list[page_index] |= access_mask; in ib_umem_odp_map_dma_single_page()
474 umem->odp_data->page_list[page_index], page); in ib_umem_odp_map_dma_single_page()
488 base_virt_addr + (page_index * PAGE_SIZE), in ib_umem_odp_map_dma_single_page()
489 base_virt_addr + ((page_index+1)*PAGE_SIZE), in ib_umem_odp_map_dma_single_page()
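In umem_odp.c the per-page DMA address and its access flags share one dma_list entry: the DMA address is page aligned, so its low bits are free to carry the access mask, and remapping the same page simply ORs in additional access bits. A userspace sketch of that encoding; the flag values and the PAGE_SHIFT split are illustrative assumptions.

#include <assert.h>
#include <stdint.h>

#define PAGE_SHIFT 12
#define PAGE_MASK  (~((UINT64_C(1) << PAGE_SHIFT) - 1))

#define ACCESS_READ  UINT64_C(0x1)	/* assumed flag bits */
#define ACCESS_WRITE UINT64_C(0x2)

/* Mirrors "dma_list[page_index] = dma_addr | access_mask". */
static uint64_t pack_dma_entry(uint64_t dma_addr, uint64_t access_mask)
{
	assert((dma_addr & ~PAGE_MASK) == 0);	/* address must be page aligned */
	return dma_addr | access_mask;
}

int main(void)
{
	uint64_t entry = pack_dma_entry(UINT64_C(0xabcd000), ACCESS_READ);

	/* Same page mapped again with more rights: just OR in the mask. */
	entry |= ACCESS_WRITE;
	assert((entry & PAGE_MASK) == UINT64_C(0xabcd000));
	assert((entry & ~PAGE_MASK) == (ACCESS_READ | ACCESS_WRITE));
	return 0;
}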
/drivers/s390/block/
xpram.c
161 unsigned int page_index, add_bit; in xpram_highest_page_index() local
168 page_index = 0; in xpram_highest_page_index()
171 if (xpram_page_in(mem_page, page_index | add_bit) == 0) in xpram_highest_page_index()
172 page_index |= add_bit; in xpram_highest_page_index()
178 return page_index; in xpram_highest_page_index()
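xpram_highest_page_index() sizes the device by probing rather than scanning: it tries to set each bit of the candidate index from the highest bit down and keeps a bit only if a test read at that index succeeds, so the highest addressable page is found in at most 32 probes. A userspace sketch with a stubbed probe; probe_page() and the 2048-page device size are assumptions.

#include <assert.h>
#include <stdio.h>

#define DEVICE_PAGES 2048u	/* pretend device: valid indices 0..2047 */

/* Stand-in for xpram_page_in(): returns 0 if the page can be read. */
static int probe_page(unsigned int page_index)
{
	return page_index < DEVICE_PAGES ? 0 : -1;
}

/* Bit-by-bit search mirroring
 * "if (xpram_page_in(...) == 0) page_index |= add_bit;". */
static unsigned int highest_page_index(void)
{
	unsigned int page_index = 0;
	unsigned int add_bit;

	for (add_bit = 1u << 31; add_bit > 0; add_bit >>= 1)
		if (probe_page(page_index | add_bit) == 0)
			page_index |= add_bit;

	return page_index;
}

int main(void)
{
	assert(highest_page_index() == DEVICE_PAGES - 1);
	printf("highest page index: %u\n", highest_page_index());
	return 0;
}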
/drivers/staging/lustre/include/linux/libcfs/linux/
linux-mem.h
61 #define page_index(p) ((p)->index) macro
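The Lustre compat header above just maps page_index() onto struct page's index member; the vvp_page.c and osc_request.c hits further down use it when printing pages. A trivial userspace illustration of the expansion, with struct page reduced to the one field the macro touches:

#include <assert.h>

/* Minimal stand-in for the kernel's struct page. */
struct page {
	unsigned long index;	/* offset of the page within its mapping */
};

#define page_index(p) ((p)->index)

int main(void)
{
	struct page pg = { .index = 7 };

	assert(page_index(&pg) == 7);
	return 0;
}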
/drivers/infiniband/hw/nes/
nes_verbs.c
2304 int entry, page_index; in nes_reg_user_mr() local
2394 for (page_index = skip_pages; page_index < chunk_pages; page_index++) { in nes_reg_user_mr()
2467 (page_index*4096))) in nes_reg_user_mr()
2470 (page_index*4096); in nes_reg_user_mr()
2473 (page_index*4096); in nes_reg_user_mr()
2480 (page_index*4096))); in nes_reg_user_mr()
2483 (page_index*4096))) >> 32))); in nes_reg_user_mr()
2537 for (page_index=0; page_index<root_pbl_index; page_index++) { in nes_reg_user_mr()
2539 root_vpbl.leaf_vpbl[page_index].pbl_vbase, in nes_reg_user_mr()
2540 root_vpbl.leaf_vpbl[page_index].pbl_pbase); in nes_reg_user_mr()
[all …]
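nes_reg_user_mr() walks the pages of a user region and writes one page-buffer-list entry per page, computing each entry's bus address as the chunk base plus page_index * 4096 and splitting it into 32-bit low and high words for the hardware (the ">> 32" hit). A sketch of that address math; only the 4 KiB stride and the split come from the hits, the entry layout is an assumption.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical PBL entry holding the two 32-bit halves of a bus address. */
struct pbl_entry {
	uint32_t pa_low;
	uint32_t pa_high;
};

static struct pbl_entry make_pbl_entry(uint64_t chunk_base, int page_index)
{
	uint64_t pa = chunk_base + (uint64_t)page_index * 4096;	/* page N of the chunk */

	return (struct pbl_entry){
		.pa_low  = (uint32_t)(pa & 0xffffffffu),
		.pa_high = (uint32_t)(pa >> 32),
	};
}

int main(void)
{
	struct pbl_entry e = make_pbl_entry(UINT64_C(0x1ffff0000), 3);

	assert(e.pa_low == 0xffff3000u);
	assert(e.pa_high == 0x1u);
	printf("pbl entry: low=0x%x high=0x%x\n", e.pa_low, e.pa_high);
	return 0;
}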
/drivers/misc/vmw_vmci/
vmci_queue_pair.c
364 const u64 page_index = in __qp_memcpy_to_queue() local
372 va = kmap(kernel_if->u.h.page[page_index]); in __qp_memcpy_to_queue()
374 va = kernel_if->u.g.vas[page_index + 1]; in __qp_memcpy_to_queue()
392 kunmap(kernel_if->u.h.page[page_index]); in __qp_memcpy_to_queue()
402 kunmap(kernel_if->u.h.page[page_index]); in __qp_memcpy_to_queue()
424 const u64 page_index = in __qp_memcpy_from_queue() local
432 va = kmap(kernel_if->u.h.page[page_index]); in __qp_memcpy_from_queue()
434 va = kernel_if->u.g.vas[page_index + 1]; in __qp_memcpy_from_queue()
452 kunmap(kernel_if->u.h.page[page_index]); in __qp_memcpy_from_queue()
462 kunmap(kernel_if->u.h.page[page_index]); in __qp_memcpy_from_queue()
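__qp_memcpy_to_queue() and its read-side twin turn a byte offset within the queue into a page index (the cut-off initializer in the first hit), kmap() that page, copy up to the end of the page, kunmap(), and repeat until all bytes are moved. A userspace sketch of the chunking arithmetic, with plain arrays standing in for kmap()'d pages:

#include <assert.h>
#include <stdint.h>
#include <string.h>

#define PAGE_SIZE 4096u

/* Split the byte offset into a page index and an in-page offset before
 * each copy, the way the kmap()/kunmap() loop in the hits does. */
static void memcpy_to_paged_queue(uint8_t **pages, uint64_t queue_offset,
				  const uint8_t *src, size_t size)
{
	size_t copied = 0;

	while (copied < size) {
		uint64_t page_index = (queue_offset + copied) / PAGE_SIZE;
		size_t page_offset = (queue_offset + copied) % PAGE_SIZE;
		size_t chunk = size - copied;

		if (chunk > PAGE_SIZE - page_offset)
			chunk = PAGE_SIZE - page_offset;	/* stop at the page boundary */

		/* In the kernel: va = kmap(page[page_index]); memcpy; kunmap(). */
		memcpy(pages[page_index] + page_offset, src + copied, chunk);
		copied += chunk;
	}
}

int main(void)
{
	static uint8_t page0[PAGE_SIZE], page1[PAGE_SIZE];
	uint8_t *pages[] = { page0, page1 };
	const uint8_t msg[] = "spans a page boundary";

	memcpy_to_paged_queue(pages, PAGE_SIZE - 5, msg, sizeof(msg));
	assert(memcmp(page0 + PAGE_SIZE - 5, msg, 5) == 0);
	assert(memcmp(page1, msg + 5, sizeof(msg) - 5) == 0);
	return 0;
}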
/drivers/misc/mic/scif/
scif_rma.c
1011 s64 page_index; in scif_get_window_offset() local
1016 page_index = SCIF_IOVA_PFN(offset); in scif_get_window_offset()
1017 iova_ptr = reserve_iova(&ep->rma_info.iovad, page_index, in scif_get_window_offset()
1018 page_index + num_pages - 1); in scif_get_window_offset()
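scif_get_window_offset() converts a byte offset into a page-frame-style index and reserves the inclusive index range [page_index, page_index + num_pages - 1] in the endpoint's IOVA allocator. A small sketch of that range computation; treating SCIF_IOVA_PFN() as offset >> PAGE_SHIFT is an assumption about the macro.

#include <assert.h>
#include <stdint.h>

#define PAGE_SHIFT 12

/* Assumed expansion of SCIF_IOVA_PFN(): byte offset -> page index. */
static int64_t iova_pfn(int64_t offset)
{
	return offset >> PAGE_SHIFT;
}

int main(void)
{
	int64_t offset = 0x42000;	/* page-aligned window offset */
	int64_t num_pages = 4;
	int64_t first = iova_pfn(offset);
	int64_t last = first + num_pages - 1;	/* inclusive, as passed to reserve_iova() */

	assert(first == 0x42);
	assert(last == 0x45);
	return 0;
}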
/drivers/staging/lustre/lustre/llite/
vvp_page.c
392 page_index(vmpage), in vvp_page_print()
/drivers/net/ethernet/mellanox/mlxsw/
pci.c
371 int page_index) in __mlxsw_pci_queue_page_get() argument
373 return q->mem_item.mapaddr + MLXSW_PCI_PAGE_SIZE * page_index; in __mlxsw_pci_queue_page_get()
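The mlxsw helper is plain offset arithmetic: the queue's mem_item is one contiguous mapping and page N starts MLXSW_PCI_PAGE_SIZE * N bytes into it. A minimal sketch; the 4 KiB page size and the reduced struct are assumptions.

#include <assert.h>
#include <stddef.h>

#define MLXSW_PCI_PAGE_SIZE 4096u	/* assumed page size */

struct mem_item {
	char *mapaddr;	/* start of the contiguous queue mapping */
};

/* Mirror of __mlxsw_pci_queue_page_get(). */
static char *queue_page_get(const struct mem_item *item, int page_index)
{
	return item->mapaddr + (size_t)MLXSW_PCI_PAGE_SIZE * page_index;
}

int main(void)
{
	static char backing[4 * MLXSW_PCI_PAGE_SIZE];
	struct mem_item item = { .mapaddr = backing };

	assert(queue_page_get(&item, 2) == backing + 2 * MLXSW_PCI_PAGE_SIZE);
	return 0;
}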
/drivers/staging/lustre/lustre/osc/
osc_request.c
1902 pga[i]->pg, page_index(oap->oap_page), oap, in osc_build_rpc()