
Searched refs: page_cnt (Results 1 – 17 of 17) sorted by relevance

/drivers/net/ethernet/qlogic/qed/
qed_chain.c
12 u32 page_cnt) in qed_chain_init() argument
34 chain->page_cnt = page_cnt; in qed_chain_init()
35 chain->capacity = chain->usable_per_page * page_cnt; in qed_chain_init()
36 chain->size = chain->elem_per_page * page_cnt; in qed_chain_init()
80 for (i = 0; i < chain->page_cnt; i++) { in qed_chain_free_next_ptr()
114 for (i = 0; i < chain->page_cnt; i++) { in qed_chain_free_pbl()
160 u32 page_cnt) in qed_chain_alloc_sanity_check() argument
165 chain_size *= page_cnt; in qed_chain_alloc_sanity_check()
206 for (i = 0; i < chain->page_cnt; i++) { in qed_chain_alloc_next_ptr()
255 u32 page_cnt, i; in qed_chain_alloc_pbl() local
[all …]
qed_sp_commands.c
309 u8 page_cnt, i; in qed_sp_pf_start() local
370 page_cnt = (u8)qed_chain_get_page_cnt(&p_hwfn->p_eq->chain); in qed_sp_pf_start()
371 p_ramrod->event_ring_num_pages = page_cnt; in qed_sp_pf_start()
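
The qed_chain.c hits show the chain-sizing arithmetic: qed_chain_init() records page_cnt and derives the usable capacity and total size from the per-page element geometry, and the alloc/free paths simply iterate page_cnt times. qed_sp_commands.c later reads the count back via qed_chain_get_page_cnt() and stores it, narrowed to u8, in the ramrod's event_ring_num_pages. A minimal userspace sketch of the sizing math; the field names mirror the hits, the example values are made up:

    #include <stdio.h>

    /* Sketch of the sizing math in qed_chain_init(); this example
     * reserves one slot per page (e.g. for a next-page pointer). */
    struct chain_geom {
        unsigned int usable_per_page; /* elements usable per page */
        unsigned int elem_per_page;   /* total elements per page */
        unsigned int page_cnt;
        unsigned int capacity;
        unsigned int size;
    };

    static void chain_init(struct chain_geom *c, unsigned int page_cnt)
    {
        c->page_cnt = page_cnt;
        c->capacity = c->usable_per_page * page_cnt; /* usable elements */
        c->size     = c->elem_per_page * page_cnt;   /* total elements */
    }

    int main(void)
    {
        struct chain_geom c = { .usable_per_page = 255, .elem_per_page = 256 };

        chain_init(&c, 8);
        printf("capacity=%u size=%u\n", c.capacity, c.size); /* 2040 2048 */
        return 0;
    }
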
/drivers/hv/
ring_buffer.c
184 struct page *pages, u32 page_cnt, u32 max_pkt_size) in hv_ringbuffer_init() argument
195 pages_wraparound = kcalloc(page_cnt * 2 - 1, sizeof(struct page *), in hv_ringbuffer_init()
201 for (i = 0; i < 2 * (page_cnt - 1); i++) in hv_ringbuffer_init()
202 pages_wraparound[i + 1] = &pages[i % (page_cnt - 1) + 1]; in hv_ringbuffer_init()
205 vmap(pages_wraparound, page_cnt * 2 - 1, VM_MAP, PAGE_KERNEL); in hv_ringbuffer_init()
219 ring_info->ring_size = page_cnt << PAGE_SHIFT; in hv_ringbuffer_init()
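
The ring_buffer.c hits capture the classic double-mapping trick: hv_ringbuffer_init() builds an array of 2 * page_cnt - 1 page pointers, inserting the page_cnt - 1 data pages twice (the remaining slot holds the header page, set up outside the lines shown), then vmap()s the whole array so a packet crossing the end of the ring is virtually contiguous. A userspace sketch of just the index arithmetic, no actual mapping:

    #include <stdio.h>

    /* Reproduce the pages_wraparound[] layout from hv_ringbuffer_init():
     * slot 0 is the header page, then the page_cnt - 1 data pages twice. */
    int main(void)
    {
        unsigned int page_cnt = 4; /* example ring size in pages */
        unsigned int i, total = page_cnt * 2 - 1;

        printf("slot 0 -> page 0 (header)\n");
        for (i = 0; i < 2 * (page_cnt - 1); i++)
            printf("slot %u -> page %u\n", i + 1, i % (page_cnt - 1) + 1);
        /* slots 1..3 and 4..6 both map data pages 1..3, so a read that
         * runs past page 3 lands back on page 1 without wrapping. */
        printf("total mapped slots: %u\n", total);
        return 0;
    }
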
hv_balloon.c
144 __u64 page_cnt:24; member
998 pfn_cnt = dm->ha_wrk.ha_page_range.finfo.page_cnt; in hot_add_req()
1001 rg_sz = dm->ha_wrk.ha_region_range.finfo.page_cnt; in hot_add_req()
1200 int num_pages = range_array->finfo.page_cnt; in free_balloon_pages()
1259 bl_resp->range_array[i].finfo.page_cnt = alloc_unit; in alloc_balloon_pages()
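
In hv_balloon.c, page_cnt is a 24-bit bitfield in the page-range descriptor exchanged with the host, so a single range can cover at most 2^24 - 1 pages; the hot-add and balloon paths read the count straight from finfo.page_cnt. A sketch of such a packed descriptor; the 40-bit start_page field is an assumption here (only the 24-bit count appears in the hits), and bitfield layout is compiler-dependent:

    #include <stdio.h>
    #include <stdint.h>

    /* Hypothetical page-range descriptor packed into one 64-bit word;
     * page_cnt:24 matches the hit above, start_page:40 is assumed. */
    union page_range {
        struct {
            uint64_t start_page:40;
            uint64_t page_cnt:24;
        } finfo;
        uint64_t raw;
    };

    int main(void)
    {
        union page_range r = { .finfo = { .start_page = 0x12345, .page_cnt = 512 } };

        printf("raw=0x%llx cnt=%llu max_cnt=%u\n",
               (unsigned long long)r.raw,
               (unsigned long long)r.finfo.page_cnt,
               (1u << 24) - 1);
        return 0;
    }
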
/drivers/infiniband/hw/hns/
hns_roce_mr.c
772 dma_addr_t *pages, unsigned int page_cnt) in hns_roce_mtr_map() argument
789 mapped_cnt < page_cnt; i++) { in hns_roce_mtr_map()
797 if (r->offset + r->count > page_cnt) { in hns_roce_mtr_map()
801 i, r->offset, r->count, page_cnt); in hns_roce_mtr_map()
806 page_cnt - mapped_cnt); in hns_roce_mtr_map()
817 if (mapped_cnt < page_cnt) { in hns_roce_mtr_map()
820 mapped_cnt, page_cnt); in hns_roce_mtr_map()
891 int page_cnt, region_cnt; in mtr_init_buf_cfg() local
923 for (page_cnt = 0, region_cnt = 0; region_cnt < attr->region_count && in mtr_init_buf_cfg()
926 r->offset = page_cnt; in mtr_init_buf_cfg()
[all …]
hns_roce_device.h
1155 dma_addr_t *pages, unsigned int page_cnt);
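
The hns_roce_mr.c hits sketch a region-based mapping scheme: mtr_init_buf_cfg() hands each region an offset that accumulates toward page_cnt, and hns_roce_mtr_map() walks the regions, rejecting any whose offset + count would overrun the caller's page_cnt and reporting an error if fewer than page_cnt pages end up mapped. A simplified userspace sketch of that bounds logic, with a hypothetical region struct:

    #include <stdio.h>

    struct region { unsigned int offset, count; };

    /* Map pages region by region, mirroring the checks in the hits:
     * reject regions past page_cnt, fail if the mapping is incomplete. */
    static int map_regions(const struct region *r, int region_cnt,
                           unsigned int page_cnt)
    {
        unsigned int mapped_cnt = 0;
        int i;

        for (i = 0; i < region_cnt && mapped_cnt < page_cnt; i++) {
            if (r[i].offset + r[i].count > page_cnt) {
                printf("region %d [%u,+%u) overruns %u pages\n",
                       i, r[i].offset, r[i].count, page_cnt);
                return -1;
            }
            mapped_cnt += r[i].count;
        }
        return mapped_cnt < page_cnt ? -1 : 0;
    }

    int main(void)
    {
        struct region rs[] = { { 0, 4 }, { 4, 4 } };

        printf("ok=%d\n", map_regions(rs, 2, 8)); /* ok=0 */
        return 0;
    }
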
/drivers/base/firmware_loader/
fallback.c
321 int page_cnt = min_t(size_t, PAGE_SIZE - page_ofs, count); in firmware_rw() local
326 memcpy(buffer, page_data + page_ofs, page_cnt); in firmware_rw()
328 memcpy(page_data + page_ofs, buffer, page_cnt); in firmware_rw()
331 buffer += page_cnt; in firmware_rw()
332 offset += page_cnt; in firmware_rw()
333 count -= page_cnt; in firmware_rw()
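
fallback.c's firmware_rw() is a textbook page-chunked copy: each pass handles at most the remainder of the current page (PAGE_SIZE - page_ofs) or the remaining count, whichever is smaller, then advances buffer, offset, and count in lockstep. A self-contained sketch of the same loop, using a flat buffer in place of the kmap'd firmware pages:

    #include <stdio.h>
    #include <string.h>

    #define PAGE_SIZE 4096UL

    /* Copy `count` bytes starting at `offset` out of a paged image,
     * one page-bounded chunk at a time, as in firmware_rw(). */
    static void paged_read(const char *image, char *buffer,
                           size_t offset, size_t count)
    {
        while (count) {
            size_t page_ofs = offset % PAGE_SIZE;
            size_t page_cnt = PAGE_SIZE - page_ofs;

            if (page_cnt > count)
                page_cnt = count; /* the min_t() in the original */
            memcpy(buffer, image + offset, page_cnt);
            buffer += page_cnt;
            offset += page_cnt;
            count  -= page_cnt;
        }
    }

    int main(void)
    {
        static char image[2 * PAGE_SIZE];
        char out[64];

        memset(image, 'x', sizeof(image));
        paged_read(image, out, PAGE_SIZE - 8, sizeof(out));
        printf("copied %zu bytes across a page boundary\n", sizeof(out));
        return 0;
    }
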
/drivers/staging/rts5208/
xd.c
1506 u8 reg_val, page_cnt; in xd_read_multiple_pages() local
1512 page_cnt = end_page - start_page; in xd_read_multiple_pages()
1538 rtsx_add_cmd(chip, WRITE_REG_CMD, XD_PAGE_CNT, 0xFF, page_cnt); in xd_read_multiple_pages()
1543 page_cnt * 512, DMA_512); in xd_read_multiple_pages()
1553 retval = rtsx_transfer_data_partial(chip, XD_CARD, buf, page_cnt * 512, in xd_read_multiple_pages()
1708 u8 page_cnt, reg_val; in xd_write_multiple_pages() local
1716 page_cnt = end_page - start_page; in xd_write_multiple_pages()
1738 rtsx_add_cmd(chip, WRITE_REG_CMD, XD_PAGE_CNT, 0xFF, page_cnt); in xd_write_multiple_pages()
1742 page_cnt * 512, DMA_512); in xd_write_multiple_pages()
1751 retval = rtsx_transfer_data_partial(chip, XD_CARD, buf, page_cnt * 512, in xd_write_multiple_pages()
[all …]
ms.c
3399 u8 start_page, end_page = 0, page_cnt; local
3525 page_cnt = end_page - start_page;
3528 start_page, end_page, page_cnt);
3565 total_sec_cnt -= page_cnt;
3567 ptr += page_cnt * 512;
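
Both rts5208 hits (xd.c and ms.c) use the same idiom: page_cnt = end_page - start_page selects a run of pages within one flash block, the count is programmed into a page-count register (XD_PAGE_CNT in the xD case), and the DMA length is page_cnt * 512 because each page carries one 512-byte sector. A sketch of the length math (the driver uses u8 for these counters):

    #include <stdio.h>

    #define SECTOR_SIZE 512u /* one xD/MS page holds one 512-byte sector */

    int main(void)
    {
        unsigned int start_page = 3, end_page = 11; /* u8 in the driver */
        unsigned int page_cnt = end_page - start_page;

        /* page_cnt is what xd.c writes to XD_PAGE_CNT; the DMA transfer
         * length follows directly from it. */
        printf("pages=%u dma_len=%u bytes\n",
               page_cnt, page_cnt * SECTOR_SIZE);
        return 0;
    }
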
/drivers/infiniband/hw/efa/
efa_verbs.c
1167 static struct scatterlist *efa_vmalloc_buf_to_sg(u64 *buf, int page_cnt) in efa_vmalloc_buf_to_sg() argument
1173 sglist = kmalloc_array(page_cnt, sizeof(*sglist), GFP_KERNEL); in efa_vmalloc_buf_to_sg()
1176 sg_init_table(sglist, page_cnt); in efa_vmalloc_buf_to_sg()
1177 for (i = 0; i < page_cnt; i++) { in efa_vmalloc_buf_to_sg()
1198 int page_cnt = pbl->phys.indirect.pbl_buf_size_in_pages; in pbl_chunk_list_create() local
1209 chunk_list_size = DIV_ROUND_UP(page_cnt, EFA_PTRS_PER_CHUNK); in pbl_chunk_list_create()
1220 page_cnt); in pbl_chunk_list_create()
1231 ((page_cnt % EFA_PTRS_PER_CHUNK) * EFA_CHUNK_PAYLOAD_PTR_SIZE) + in pbl_chunk_list_create()
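
efa_verbs.c splits an indirect page-buffer list into fixed-size chunks: the chunk count is DIV_ROUND_UP(page_cnt, EFA_PTRS_PER_CHUNK), and the final chunk carries page_cnt % EFA_PTRS_PER_CHUNK pointers when the count is not an exact multiple. A sketch of the sizing arithmetic with made-up constants standing in for the EFA ones:

    #include <stdio.h>

    #define PTRS_PER_CHUNK 502u /* stand-in for EFA_PTRS_PER_CHUNK */
    #define PTR_SIZE 8u         /* stand-in for EFA_CHUNK_PAYLOAD_PTR_SIZE */
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
        unsigned int page_cnt = 1203;
        unsigned int chunks = DIV_ROUND_UP(page_cnt, PTRS_PER_CHUNK);
        unsigned int last = page_cnt % PTRS_PER_CHUNK;

        if (!last)
            last = PTRS_PER_CHUNK; /* page_cnt is an exact multiple */
        printf("chunks=%u last_chunk_payload=%u bytes\n",
               chunks, last * PTR_SIZE);
        return 0;
    }
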
/drivers/target/
target_core_user.c
512 int page_cnt = DIV_ROUND_UP(length, PAGE_SIZE); in tcmu_get_empty_block() local
522 for (cnt = 0; xas_next(&xas) && cnt < page_cnt;) in tcmu_get_empty_block()
526 for (i = cnt; i < page_cnt; i++) { in tcmu_get_empty_block()
550 return i == page_cnt ? dbi : -1; in tcmu_get_empty_block()
709 int page_cnt, page_inx, dpi; in tcmu_copy_data() local
728 page_cnt = DIV_ROUND_UP(data_len, PAGE_SIZE); in tcmu_copy_data()
729 if (page_cnt > udev->data_pages_per_blk) in tcmu_copy_data()
730 page_cnt = udev->data_pages_per_blk; in tcmu_copy_data()
733 for (page_inx = 0; page_inx < page_cnt && data_len; in tcmu_copy_data()
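
target_core_user.c converts a byte length to whole pages with DIV_ROUND_UP(length, PAGE_SIZE), then clamps the result to the per-block budget data_pages_per_blk before touching any pages. A sketch of just that round-up-and-clamp step, with a hypothetical budget:

    #include <stdio.h>

    #define PAGE_SIZE 4096u
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
        unsigned int data_pages_per_blk = 128; /* hypothetical budget */
        unsigned int data_len = 1000000;
        unsigned int page_cnt = DIV_ROUND_UP(data_len, PAGE_SIZE);

        if (page_cnt > data_pages_per_blk)
            page_cnt = data_pages_per_blk; /* cap at one block's worth */
        printf("page_cnt=%u\n", page_cnt); /* 245 capped to 128 */
        return 0;
    }
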
/drivers/infiniband/hw/irdma/
verbs.h
98 u32 page_cnt; member
verbs.c
2319 status = irdma_get_pble(rf->pble_rsrc, palloc, iwmr->page_cnt, in irdma_setup_pbles()
2600 iwmr->page_cnt = max_num_sg; in irdma_alloc_mr()
2603 status = irdma_get_pble(iwdev->rf->pble_rsrc, palloc, iwmr->page_cnt, in irdma_alloc_mr()
2637 if (unlikely(iwmr->npages == iwmr->page_cnt)) in irdma_set_page()
2797 iwmr->page_cnt = ib_umem_num_dma_blocks(region, iwmr->page_size); in irdma_reg_user_mr()
2802 if (total > iwmr->page_cnt) { in irdma_reg_user_mr()
2823 if (total > iwmr->page_cnt) { in irdma_reg_user_mr()
2841 use_pbles = (iwmr->page_cnt != 1); in irdma_reg_user_mr()
2927 iwmr->page_cnt = 1; in irdma_reg_phys_mr()
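
The irdma hits show page_cnt fixing a memory region's capacity up front: it comes from max_num_sg or ib_umem_num_dma_blocks(), PBLEs are allocated for exactly that many entries, use_pbles = (page_cnt != 1) keeps single-page registrations inline, and irdma_set_page() refuses pages once npages reaches page_cnt. A hypothetical sketch of that bookkeeping:

    #include <stdio.h>
    #include <stdbool.h>

    /* Hypothetical mirror of the irdma bookkeeping: page_cnt is the
     * capacity fixed at registration, npages counts pages recorded so
     * far via the set_page callback. */
    struct mr {
        unsigned int page_cnt;
        unsigned int npages;
    };

    static int mr_set_page(struct mr *mr)
    {
        if (mr->npages == mr->page_cnt)
            return -1; /* table full, mirrors the unlikely() guard */
        mr->npages++;
        return 0;
    }

    int main(void)
    {
        struct mr mr = { .page_cnt = 2 };
        bool use_pbles = (mr.page_cnt != 1); /* single page stays inline */

        while (mr_set_page(&mr) == 0)
            ;
        printf("npages=%u use_pbles=%d\n", mr.npages, use_pbles);
        return 0;
    }
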
/drivers/infiniband/hw/qedr/
main.c
790 u32 page_cnt; in qedr_init_hw() local
804 page_cnt = qed_chain_get_page_cnt(&dev->cnq_array[i].pbl); in qedr_init_hw()
805 cur_pbl->num_pbl_pages = page_cnt; in qedr_init_hw()
verbs.c
842 int chain_entries, int page_cnt, in qedr_init_cq_params() argument
853 params->pbl_num_pages = page_cnt; in qedr_init_cq_params()
923 int page_cnt; in qedr_create_cq() local
972 page_cnt = cq->q.pbl_info.num_pbes; in qedr_create_cq()
984 page_cnt = qed_chain_get_page_cnt(&cq->pbl); in qedr_create_cq()
989 qedr_init_cq_params(cq, ctx, dev, vector, chain_entries, page_cnt, in qedr_create_cq()
1561 u32 page_cnt, page_size; in qedr_create_srq() local
1597 page_cnt = srq->usrq.pbl_info.num_pbes; in qedr_create_srq()
1609 page_cnt = qed_chain_get_page_cnt(pbl); in qedr_create_srq()
1618 in_params.num_pages = page_cnt; in qedr_create_srq()
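
qedr picks page_cnt from one of two sources depending on queue ownership: a user-mapped CQ or SRQ reports its umem PBL page count (pbl_info.num_pbes), while a kernel-owned queue asks the qed chain via qed_chain_get_page_cnt(); either way the value lands in the firmware parameters as num_pages. A sketch of that selection with hypothetical stand-ins for both sources:

    #include <stdio.h>
    #include <stdbool.h>

    /* Hypothetical stand-ins for the two page-count sources. */
    static unsigned int umem_pbl_pages(void) { return 16; }
    static unsigned int chain_page_cnt(void) { return 4; }

    int main(void)
    {
        bool user_queue = true; /* udata != NULL in the real code */
        unsigned int page_cnt = user_queue ? umem_pbl_pages()
                                           : chain_page_cnt();

        printf("num_pages handed to firmware: %u\n", page_cnt);
        return 0;
    }
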
/drivers/net/ethernet/qlogic/qede/
qede_main.c
2122 u32 page_cnt = qed_chain_get_page_cnt(&txq->tx_pbl); in qede_start_txq() local
2143 page_cnt, &ret_params); in qede_start_txq()
2212 u32 page_cnt; in qede_start_queues() local
2228 page_cnt = qed_chain_get_page_cnt(&rxq->rx_comp_ring); in qede_start_queues()
2234 page_cnt, &ret_params); in qede_start_queues()
/drivers/mmc/core/
mmc_test.c
343 unsigned long page_cnt = 0; in mmc_test_alloc_mem() local
380 if (page_cnt < min_page_cnt) in mmc_test_alloc_mem()
390 page_cnt += 1UL << order; in mmc_test_alloc_mem()
392 if (page_cnt < min_page_cnt) in mmc_test_alloc_mem()
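
mmc_test.c counts pages in buddy-allocator units: every successful order-`order` allocation contributes 1UL << order pages, and allocation keeps going (dropping to smaller orders on failure) until page_cnt reaches min_page_cnt. A userspace sketch of that accounting, where orders above 7 are pretended to fail the way large allocations can:

    #include <stdio.h>

    int main(void)
    {
        unsigned long page_cnt = 0, min_page_cnt = 1000;
        unsigned int order = 9; /* start at 512-page blocks */

        while (page_cnt < min_page_cnt) {
            /* Pretend orders above 7 fail, as a large allocation might. */
            if (order > 7) {
                order--;
                continue;
            }
            page_cnt += 1UL << order; /* one block of 2^order pages */
        }
        printf("allocated %lu pages in order-%u blocks\n", page_cnt, order);
        return 0;
    }
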