Searched refs:page_idx (Results 1 – 19 of 19) sorted by relevance

/drivers/net/ethernet/huawei/hinic/
hinic_hw_wq.c
45 ((void *)((wqs)->page_vaddr[(wq)->page_idx]) \
49 ((wqs)->page_paddr[(wq)->page_idx] \
53 ((void *)((wqs)->shadow_page_vaddr[(wq)->page_idx]) \
137 static int wqs_allocate_page(struct hinic_wqs *wqs, int page_idx) in wqs_allocate_page() argument
139 return queue_alloc_page(wqs->hwif, &wqs->page_vaddr[page_idx], in wqs_allocate_page()
140 &wqs->page_paddr[page_idx], in wqs_allocate_page()
141 &wqs->shadow_page_vaddr[page_idx], in wqs_allocate_page()
150 static void wqs_free_page(struct hinic_wqs *wqs, int page_idx) in wqs_free_page() argument
156 wqs->page_vaddr[page_idx], in wqs_free_page()
157 (dma_addr_t)wqs->page_paddr[page_idx]); in wqs_free_page()
[all …]
hinic_hw_wq.h
18 int page_idx; member
25 int page_idx; member
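
A userspace sketch of the pattern in the hinic hits above, where a work-queue set keeps parallel per-page arrays of virtual and physical addresses and page_idx selects one slot in each. All names here (struct wqs_demo, demo_alloc_page, demo_free_page) are hypothetical, and aligned_alloc() stands in for the driver's DMA-coherent page allocation:

#include <stdint.h>
#include <stdlib.h>

#define WQS_MAX_PAGES 8
#define WQS_PAGE_SIZE 4096

struct wqs_demo {
	void     *page_vaddr[WQS_MAX_PAGES];	/* CPU-visible addresses */
	uint64_t  page_paddr[WQS_MAX_PAGES];	/* device-visible addresses */
};

static int demo_alloc_page(struct wqs_demo *wqs, int page_idx)
{
	void *va = aligned_alloc(WQS_PAGE_SIZE, WQS_PAGE_SIZE);

	if (!va)
		return -1;
	wqs->page_vaddr[page_idx] = va;
	/* stand-in for the DMA address dma_alloc_coherent() would return */
	wqs->page_paddr[page_idx] = (uint64_t)(uintptr_t)va;
	return 0;
}

static void demo_free_page(struct wqs_demo *wqs, int page_idx)
{
	free(wqs->page_vaddr[page_idx]);
	wqs->page_vaddr[page_idx] = NULL;
	wqs->page_paddr[page_idx] = 0;
}

int main(void)
{
	struct wqs_demo wqs = { { NULL }, { 0 } };

	if (demo_alloc_page(&wqs, 0) == 0)
		demo_free_page(&wqs, 0);
	return 0;
}
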
/drivers/infiniband/hw/mlx5/
dm.c
24 u64 page_idx = 0; in mlx5_cmd_alloc_memic() local
44 while (page_idx < num_memic_hw_pages) { in mlx5_cmd_alloc_memic()
46 page_idx = bitmap_find_next_zero_area(dm->memic_alloc_pages, in mlx5_cmd_alloc_memic()
48 page_idx, in mlx5_cmd_alloc_memic()
51 if (page_idx < num_memic_hw_pages) in mlx5_cmd_alloc_memic()
53 page_idx, num_pages); in mlx5_cmd_alloc_memic()
57 if (page_idx >= num_memic_hw_pages) in mlx5_cmd_alloc_memic()
61 hw_start_addr + (page_idx * PAGE_SIZE)); in mlx5_cmd_alloc_memic()
67 page_idx, num_pages); in mlx5_cmd_alloc_memic()
71 page_idx++; in mlx5_cmd_alloc_memic()
[all …]
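
The dm.c hits show mlx5 allocating device-memory (MEMIC) pages by scanning a bitmap for a contiguous run of free pages, resuming the scan from page_idx after each failed attempt. A simplified userspace sketch, with find_zero_area() as a stand-in for the kernel's bitmap_find_next_zero_area() and a plain bool array in place of the kernel bitmap API:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

#define NUM_HW_PAGES 16

/* simplified stand-in for bitmap_find_next_zero_area() */
static size_t find_zero_area(const bool *map, size_t size,
			     size_t start, size_t nr)
{
	for (size_t i = start; i + nr <= size; i++) {
		size_t j;

		for (j = 0; j < nr && !map[i + j]; j++)
			;
		if (j == nr)
			return i;	/* found nr free pages at i */
	}
	return size;			/* no fit: return "past the end" */
}

int main(void)
{
	bool alloc_pages[NUM_HW_PAGES] = { false };
	size_t num_pages = 4, page_idx = 0;

	alloc_pages[1] = true;		/* pretend page 1 is taken */

	/* same shape as the while (page_idx < num_memic_hw_pages) loop */
	while (page_idx < NUM_HW_PAGES) {
		page_idx = find_zero_area(alloc_pages, NUM_HW_PAGES,
					  page_idx, num_pages);
		if (page_idx >= NUM_HW_PAGES)
			break;		/* out of space */

		/* the driver would try a firmware command here; on failure
		 * it advances page_idx and retries the scan */
		for (size_t i = 0; i < num_pages; i++)
			alloc_pages[page_idx + i] = true;
		printf("allocated %zu pages at index %zu\n",
		       num_pages, page_idx);
		break;
	}
	return 0;
}
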
main.c
2137 clear_bit(mentry->page_idx, var_table->bitmap); in mlx5_ib_mmap_free()
2143 mlx5_cmd_free_uar(dev->mdev, mentry->page_idx); in mlx5_ib_mmap_free()
3369 u32 page_idx; in alloc_var_entry() local
3378 page_idx = find_first_zero_bit(var_table->bitmap, in alloc_var_entry()
3380 if (page_idx >= var_table->num_var_hw_entries) { in alloc_var_entry()
3386 set_bit(page_idx, var_table->bitmap); in alloc_var_entry()
3390 (page_idx * var_table->stride_size); in alloc_var_entry()
3391 entry->page_idx = page_idx; in alloc_var_entry()
3403 clear_bit(page_idx, var_table->bitmap); in alloc_var_entry()
3440 &entry->page_idx, sizeof(entry->page_idx)); in UVERBS_HANDLER()
[all …]
mlx5_ib.h
619 u32 page_idx; member
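
The main.c hits pair alloc_var_entry(), which claims the first clear bit in a bitmap and derives a device offset from it, with mlx5_ib_mmap_free(), which clears the bit again. A minimal sketch of that slot allocator; all names are invented, and a bool array replaces the kernel's find_first_zero_bit()/set_bit()/clear_bit():

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define NUM_VAR_ENTRIES 8

static bool var_bitmap[NUM_VAR_ENTRIES];

static int alloc_var_slot(uint64_t base, uint32_t stride, uint64_t *offset)
{
	for (uint32_t page_idx = 0; page_idx < NUM_VAR_ENTRIES; page_idx++) {
		if (!var_bitmap[page_idx]) {
			var_bitmap[page_idx] = true;	/* set_bit() */
			*offset = base + (uint64_t)page_idx * stride;
			return (int)page_idx;
		}
	}
	return -1;				/* -ENOSPC in the driver */
}

static void free_var_slot(int page_idx)
{
	var_bitmap[page_idx] = false;		/* clear_bit() */
}

int main(void)
{
	uint64_t off;
	int idx = alloc_var_slot(0x1000, 0x100, &off);

	if (idx >= 0) {
		printf("page_idx=%d offset=0x%llx\n",
		       idx, (unsigned long long)off);
		free_var_slot(idx);
	}
	return 0;
}
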
/drivers/net/ethernet/mellanox/mlx4/
icm.h
75 int page_idx; member
103 iter->page_idx = 0; in mlx4_icm_first()
113 if (++iter->page_idx >= iter->chunk->nsg) { in mlx4_icm_next()
121 iter->page_idx = 0; in mlx4_icm_next()
128 return iter->chunk->buf[iter->page_idx].dma_addr; in mlx4_icm_addr()
130 return sg_dma_address(&iter->chunk->sg[iter->page_idx]); in mlx4_icm_addr()
136 return iter->chunk->buf[iter->page_idx].size; in mlx4_icm_size()
138 return sg_dma_len(&iter->chunk->sg[iter->page_idx]); in mlx4_icm_size()
/drivers/infiniband/hw/mthca/
mthca_memfree.h
77 int page_idx; member
106 iter->page_idx = 0; in mthca_icm_first()
116 if (++iter->page_idx >= iter->chunk->nsg) { in mthca_icm_next()
124 iter->page_idx = 0; in mthca_icm_next()
130 return sg_dma_address(&iter->chunk->mem[iter->page_idx]); in mthca_icm_addr()
135 return sg_dma_len(&iter->chunk->mem[iter->page_idx]); in mthca_icm_size()
/drivers/infiniband/hw/hns/
hns_roce_hem.h
91 int page_idx; member
144 iter->page_idx = 0; in hns_roce_hem_first()
154 if (++iter->page_idx >= iter->chunk->nsg) { in hns_roce_hem_next()
162 iter->page_idx = 0; in hns_roce_hem_next()
168 return sg_dma_address(&iter->chunk->mem[iter->page_idx]); in hns_roce_hem_addr()
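
The mlx4, mthca and hns entries above all implement the same two-level iterator: walk a chunk list, and within each chunk step page_idx through its nsg scatterlist entries, resetting page_idx to zero at each chunk boundary. A userspace sketch, assuming a simple linked list in place of the kernel scatterlist API (all demo_* names are invented):

#include <stddef.h>
#include <stdio.h>

struct demo_chunk {
	struct demo_chunk *next;
	int nsg;			/* entries in this chunk */
	unsigned long dma_addr[4];	/* stand-in for sg_dma_address() */
};

struct demo_iter {
	struct demo_chunk *chunk;
	int page_idx;
};

static void demo_first(struct demo_iter *it, struct demo_chunk *head)
{
	it->chunk = head;
	it->page_idx = 0;
}

static void demo_next(struct demo_iter *it)
{
	if (++it->page_idx >= it->chunk->nsg) {	/* chunk exhausted */
		it->chunk = it->chunk->next;
		it->page_idx = 0;
	}
}

int main(void)
{
	struct demo_chunk b = { NULL, 2, { 0x3000, 0x4000 } };
	struct demo_chunk a = { &b,   2, { 0x1000, 0x2000 } };
	struct demo_iter it;

	for (demo_first(&it, &a); it.chunk; demo_next(&it))
		printf("dma_addr 0x%lx\n", it.chunk->dma_addr[it.page_idx]);
	return 0;
}
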
/drivers/infiniband/sw/siw/
siw_mem.h
60 unsigned int page_idx = (addr - umem->fp_addr) >> PAGE_SHIFT, in siw_get_upage() local
61 chunk_idx = page_idx >> CHUNK_SHIFT, in siw_get_upage()
62 page_in_chunk = page_idx & ~CHUNK_MASK; in siw_get_upage()
64 if (likely(page_idx < umem->num_pages)) in siw_get_upage()
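
siw_get_upage() splits a page index into a chunk index plus an index within the chunk using shift and mask constants. A sketch with made-up DEMO_* values (the real CHUNK_SHIFT/CHUNK_MASK live in siw_mem.h):

#include <stdio.h>

#define DEMO_PAGE_SHIFT 12			/* 4 KiB pages */
#define DEMO_CHUNK_SHIFT 8			/* 256 pages per chunk */
#define DEMO_CHUNK_MASK (~((1u << DEMO_CHUNK_SHIFT) - 1))

int main(void)
{
	unsigned long fp_addr = 0x100000, addr = 0x234567;
	unsigned int page_idx = (addr - fp_addr) >> DEMO_PAGE_SHIFT;
	unsigned int chunk_idx = page_idx >> DEMO_CHUNK_SHIFT;
	unsigned int page_in_chunk = page_idx & ~DEMO_CHUNK_MASK;

	printf("page_idx=%u chunk_idx=%u page_in_chunk=%u\n",
	       page_idx, chunk_idx, page_in_chunk);
	return 0;
}
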
/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
rx.c
30 u32 page_idx) in mlx5e_xsk_skb_from_cqe_mpwrq_linear() argument
32 struct xdp_buff *xdp = wi->umr.dma_info[page_idx].xsk; in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
70 __set_bit(page_idx, wi->xdp_xmit_bitmap); /* non-atomic */ in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
rx.h
16 u32 page_idx);
/drivers/net/ethernet/mellanox/mlx5/core/
en_rx.c
59 u16 cqe_bcnt, u32 head_offset, u32 page_idx);
62 u16 cqe_bcnt, u32 head_offset, u32 page_idx);
1340 u32 page_idx = wqe_offset >> PAGE_SHIFT; in mlx5e_handle_rx_cqe_mpwrq_rep() local
1368 rq, wi, cqe_bcnt, head_offset, page_idx); in mlx5e_handle_rx_cqe_mpwrq_rep()
1402 u16 cqe_bcnt, u32 head_offset, u32 page_idx) in mlx5e_skb_from_cqe_mpwrq_nonlinear() argument
1405 struct mlx5e_dma_info *di = &wi->umr.dma_info[page_idx]; in mlx5e_skb_from_cqe_mpwrq_nonlinear()
1448 u16 cqe_bcnt, u32 head_offset, u32 page_idx) in mlx5e_skb_from_cqe_mpwrq_linear() argument
1450 struct mlx5e_dma_info *di = &wi->umr.dma_info[page_idx]; in mlx5e_skb_from_cqe_mpwrq_linear()
1476 __set_bit(page_idx, wi->xdp_xmit_bitmap); /* non-atomic */ in mlx5e_skb_from_cqe_mpwrq_linear()
1500 u32 page_idx = wqe_offset >> PAGE_SHIFT; in mlx5e_handle_rx_cqe_mpwrq() local
[all …]
en.h
584 u16 cqe_bcnt, u32 head_offset, u32 page_idx);
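
In the mlx5e receive-path hits, the byte offset of a packet within a multi-packet work-queue entry is turned into an index into the per-WQE dma_info array with wqe_offset >> PAGE_SHIFT, and the remainder becomes head_offset. A minimal sketch with invented dma_info contents:

#include <stdint.h>
#include <stdio.h>

#define DEMO_PAGE_SHIFT 12
#define PAGES_PER_WQE 8

struct demo_dma_info {
	uint64_t addr;
};

int main(void)
{
	struct demo_dma_info dma_info[PAGES_PER_WQE];
	uint32_t wqe_offset = 0x5234;	/* byte offset within the WQE */
	uint32_t page_idx = wqe_offset >> DEMO_PAGE_SHIFT;	/* = 5 */
	uint32_t head_offset = wqe_offset & ((1u << DEMO_PAGE_SHIFT) - 1);

	for (int i = 0; i < PAGES_PER_WQE; i++)
		dma_info[i].addr = 0x10000ull * (i + 1);

	printf("page_idx=%u head_offset=0x%x addr=0x%llx\n",
	       page_idx, head_offset,
	       (unsigned long long)dma_info[page_idx].addr);
	return 0;
}
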
/drivers/mmc/host/
usdhi6rol0.c
181 int page_idx; /* page index within an SG segment */ member
419 (host->page_idx << PAGE_SHIFT) + data->blksz - blk_head) in usdhi6_sg_unmap()
443 host->page_idx++; in usdhi6_sg_advance()
453 host->page_idx++; in usdhi6_sg_advance()
463 done = (host->page_idx << PAGE_SHIFT) + host->offset; in usdhi6_sg_advance()
489 host->page_idx = 0; in usdhi6_sg_advance()
506 host->pg.page = nth_page(sg_page(host->sg), host->page_idx); in usdhi6_sg_advance()
1011 host->page_idx = 0; in usdhi6_rq_start()
1722 data->flags & MMC_DATA_READ ? 'R' : 'W', host->page_idx, in usdhi6_timeout_work()
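
The usdhi6rol0 hits track progress through a scatter-gather segment as (page_idx << PAGE_SHIFT) + offset, bumping page_idx each time a page of the segment is consumed. An illustrative sketch only, assuming the block size divides the page size (the real driver also handles blocks that straddle page boundaries):

#include <stdio.h>

#define DEMO_PAGE_SHIFT 12
#define DEMO_PAGE_SIZE (1u << DEMO_PAGE_SHIFT)

int main(void)
{
	unsigned int seg_len = 3 * DEMO_PAGE_SIZE + 512;
	int page_idx = 0;
	unsigned int offset = 0, done = 0, blksz = 512;

	while (done < seg_len) {
		offset += blksz;
		if (offset == DEMO_PAGE_SIZE) {	/* crossed a page boundary */
			page_idx++;
			offset = 0;
		}
		done = (page_idx << DEMO_PAGE_SHIFT) + offset;
	}
	printf("consumed %u bytes across %d full pages\n", done, page_idx);
	return 0;
}
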
/drivers/input/touchscreen/
raydium_i2c_ts.c
612 u16 page_idx, const void *data, size_t len) in raydium_i2c_fw_write_page() argument
623 buf[BL_PAGE_STR] = page_idx ? 0xff : 0; in raydium_i2c_fw_write_page()
637 page_idx, i, error); in raydium_i2c_fw_write_page()
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ttm.c
840 uint64_t page_idx = 1; in amdgpu_ttm_gart_bind() local
842 r = amdgpu_gart_bind(adev, gtt->offset, page_idx, in amdgpu_ttm_gart_bind()
855 gtt->offset + (page_idx << PAGE_SHIFT), in amdgpu_ttm_gart_bind()
856 ttm->num_pages - page_idx, in amdgpu_ttm_gart_bind()
857 &(gtt->ttm.dma_address[page_idx]), flags); in amdgpu_ttm_gart_bind()
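
The amdgpu_ttm_gart_bind() hit binds a buffer in two ranges split at page_idx: the first page_idx pages with one set of flags, then the remaining num_pages - page_idx pages starting at dma_address[page_idx]. demo_bind() below is a hypothetical stand-in for amdgpu_gart_bind(), and the flag values are invented:

#include <stdint.h>
#include <stdio.h>

#define DEMO_PAGE_SHIFT 12

/* hypothetical stand-in for amdgpu_gart_bind() */
static void demo_bind(uint64_t gpu_offset, uint64_t num_pages,
		      const uint64_t *dma_address, unsigned int flags)
{
	printf("bind %llu pages at GPU offset 0x%llx, flags 0x%x\n",
	       (unsigned long long)num_pages,
	       (unsigned long long)gpu_offset, flags);
	(void)dma_address;
}

int main(void)
{
	uint64_t dma_address[16] = { 0 };
	uint64_t offset = 0x100000, num_pages = 16, page_idx = 1;

	/* first page(s) with one set of flags ... */
	demo_bind(offset, page_idx, &dma_address[0], 0x1);
	/* ... then the remainder, starting at dma_address[page_idx] */
	demo_bind(offset + (page_idx << DEMO_PAGE_SHIFT),
		  num_pages - page_idx, &dma_address[page_idx], 0x3);
	return 0;
}
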
/drivers/md/
raid1.c
2662 int page_idx = 0; in raid1_sync_request() local
2893 page = resync_fetch_page(rp, page_idx); in raid1_sync_request()
2905 } while (++page_idx < RESYNC_PAGES); in raid1_sync_request()
raid10.c
3311 int page_idx = 0; in raid10_sync_request() local
3823 page = resync_fetch_page(rp, page_idx); in raid10_sync_request()
3832 } while (++page_idx < RESYNC_PAGES); in raid10_sync_request()
/drivers/scsi/lpfc/
lpfc_sli.c
16603 int cnt, idx, numcq, page_idx = 0; in lpfc_cq_create_set() local
16780 cnt = page_idx + dmabuf->buffer_tag; in lpfc_cq_create_set()
16787 page_idx += rc; in lpfc_cq_create_set()
17620 int cnt, idx, numrq, page_idx = 0; in lpfc_mrq_create() local
17706 cnt = page_idx + dmabuf->buffer_tag; in lpfc_mrq_create()
17713 page_idx += rc; in lpfc_mrq_create()
17718 cnt = page_idx + dmabuf->buffer_tag; in lpfc_mrq_create()
17725 page_idx += rc; in lpfc_mrq_create()