Lines matching refs: rmem — uses of struct bnxt_ring_mem_info in the bnxt Ethernet driver (bnxt.c)
2773 static void bnxt_free_ring(struct bnxt *bp, struct bnxt_ring_mem_info *rmem) in bnxt_free_ring() argument
2778 for (i = 0; i < rmem->nr_pages; i++) { in bnxt_free_ring()
2779 if (!rmem->pg_arr[i]) in bnxt_free_ring()
2782 dma_free_coherent(&pdev->dev, rmem->page_size, in bnxt_free_ring()
2783 rmem->pg_arr[i], rmem->dma_arr[i]); in bnxt_free_ring()
2785 rmem->pg_arr[i] = NULL; in bnxt_free_ring()
2787 if (rmem->pg_tbl) { in bnxt_free_ring()
2788 size_t pg_tbl_size = rmem->nr_pages * 8; in bnxt_free_ring()
2790 if (rmem->flags & BNXT_RMEM_USE_FULL_PAGE_FLAG) in bnxt_free_ring()
2791 pg_tbl_size = rmem->page_size; in bnxt_free_ring()
2793 rmem->pg_tbl, rmem->pg_tbl_map); in bnxt_free_ring()
2794 rmem->pg_tbl = NULL; in bnxt_free_ring()
2796 if (rmem->vmem_size && *rmem->vmem) { in bnxt_free_ring()
2797 vfree(*rmem->vmem); in bnxt_free_ring()
2798 *rmem->vmem = NULL; in bnxt_free_ring()
2802 static int bnxt_alloc_ring(struct bnxt *bp, struct bnxt_ring_mem_info *rmem) in bnxt_alloc_ring() argument
2808 if (rmem->flags & (BNXT_RMEM_VALID_PTE_FLAG | BNXT_RMEM_RING_PTE_FLAG)) in bnxt_alloc_ring()
2810 if ((rmem->nr_pages > 1 || rmem->depth > 0) && !rmem->pg_tbl) { in bnxt_alloc_ring()
2811 size_t pg_tbl_size = rmem->nr_pages * 8; in bnxt_alloc_ring()
2813 if (rmem->flags & BNXT_RMEM_USE_FULL_PAGE_FLAG) in bnxt_alloc_ring()
2814 pg_tbl_size = rmem->page_size; in bnxt_alloc_ring()
2815 rmem->pg_tbl = dma_alloc_coherent(&pdev->dev, pg_tbl_size, in bnxt_alloc_ring()
2816 &rmem->pg_tbl_map, in bnxt_alloc_ring()
2818 if (!rmem->pg_tbl) in bnxt_alloc_ring()
2822 for (i = 0; i < rmem->nr_pages; i++) { in bnxt_alloc_ring()
2825 rmem->pg_arr[i] = dma_alloc_coherent(&pdev->dev, in bnxt_alloc_ring()
2826 rmem->page_size, in bnxt_alloc_ring()
2827 &rmem->dma_arr[i], in bnxt_alloc_ring()
2829 if (!rmem->pg_arr[i]) in bnxt_alloc_ring()
2832 if (rmem->init_val) in bnxt_alloc_ring()
2833 memset(rmem->pg_arr[i], rmem->init_val, in bnxt_alloc_ring()
2834 rmem->page_size); in bnxt_alloc_ring()
2835 if (rmem->nr_pages > 1 || rmem->depth > 0) { in bnxt_alloc_ring()
2836 if (i == rmem->nr_pages - 2 && in bnxt_alloc_ring()
2837 (rmem->flags & BNXT_RMEM_RING_PTE_FLAG)) in bnxt_alloc_ring()
2839 else if (i == rmem->nr_pages - 1 && in bnxt_alloc_ring()
2840 (rmem->flags & BNXT_RMEM_RING_PTE_FLAG)) in bnxt_alloc_ring()
2842 rmem->pg_tbl[i] = in bnxt_alloc_ring()
2843 cpu_to_le64(rmem->dma_arr[i] | extra_bits); in bnxt_alloc_ring()
2847 if (rmem->vmem_size) { in bnxt_alloc_ring()
2848 *rmem->vmem = vzalloc(rmem->vmem_size); in bnxt_alloc_ring()
2849 if (!(*rmem->vmem)) in bnxt_alloc_ring()
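Taken together, the bnxt_free_ring() and bnxt_alloc_ring() fragments above describe one reusable layout: a ring made of per-page DMA-coherent buffers (pg_arr/dma_arr), an optional page table (pg_tbl) whose entries are the little-endian DMA addresses of those pages tagged with PTE flag bits, and an optional vzalloc'ed software array reached through *rmem->vmem. The block below is a minimal userspace sketch of that pattern, not the driver code: it omits the depth and BNXT_RMEM_* flag handling, calloc()/free() stand in for dma_alloc_coherent()/dma_free_coherent(), the page pointer doubles as a fake DMA address, and every *_sk name and constant is an illustrative assumption.

/*
 * Userspace sketch of the paged-ring pattern seen in bnxt_alloc_ring() and
 * bnxt_free_ring().  VALID_PTE_BIT_SK is a placeholder for the BNXT_RMEM_*
 * PTE bits (the real values live in bnxt.h); the driver additionally
 * converts page-table entries with cpu_to_le64().
 */
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#define MAX_PAGES_SK     8
#define VALID_PTE_BIT_SK 0x1ULL

struct ring_mem_sk {
	int       nr_pages;
	size_t    page_size;
	void     *pg_arr[MAX_PAGES_SK];   /* one buffer per page                  */
	uint64_t  dma_arr[MAX_PAGES_SK];  /* stand-in for the DMA addresses       */
	uint64_t *pg_tbl;                 /* table pointing at every page         */
	uint8_t   init_val;               /* optional fill byte, like rmem->init_val */
	size_t    vmem_size;              /* optional software bookkeeping array  */
	void     *vmem;
};

int alloc_ring_sk(struct ring_mem_sk *rmem)
{
	/* Multi-page rings also get a table that lists every page. */
	if (rmem->nr_pages > 1) {
		rmem->pg_tbl = calloc(rmem->nr_pages, sizeof(uint64_t));
		if (!rmem->pg_tbl)
			return -1;
	}

	for (int i = 0; i < rmem->nr_pages; i++) {
		rmem->pg_arr[i] = calloc(1, rmem->page_size);
		if (!rmem->pg_arr[i])
			return -1;  /* caller unwinds via free_ring_sk(), as bnxt does */
		if (rmem->init_val)
			memset(rmem->pg_arr[i], rmem->init_val, rmem->page_size);
		/* Fake "DMA address": in the driver this comes from dma_alloc_coherent(). */
		rmem->dma_arr[i] = (uint64_t)(uintptr_t)rmem->pg_arr[i];
		if (rmem->pg_tbl)
			rmem->pg_tbl[i] = rmem->dma_arr[i] | VALID_PTE_BIT_SK;
	}

	if (rmem->vmem_size) {
		rmem->vmem = calloc(1, rmem->vmem_size);
		if (!rmem->vmem)
			return -1;
	}
	return 0;
}

void free_ring_sk(struct ring_mem_sk *rmem)
{
	for (int i = 0; i < rmem->nr_pages; i++) {
		free(rmem->pg_arr[i]);
		rmem->pg_arr[i] = NULL;
	}
	free(rmem->pg_tbl);
	rmem->pg_tbl = NULL;
	free(rmem->vmem);
	rmem->vmem = NULL;
}

In the driver the same rmem is later handed back to bnxt_free_ring(), which is why every pointer is cleared after freeing: the free routine has to be safe to call on a partially allocated ring.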
3140 struct bnxt_ring_mem_info *rmem; in bnxt_alloc_cp_sub_ring() local
3150 rmem = &ring->ring_mem; in bnxt_alloc_cp_sub_ring()
3151 rmem->nr_pages = bp->cp_nr_pages; in bnxt_alloc_cp_sub_ring()
3152 rmem->page_size = HW_CMPD_RING_SIZE; in bnxt_alloc_cp_sub_ring()
3153 rmem->pg_arr = (void **)cpr->cp_desc_ring; in bnxt_alloc_cp_sub_ring()
3154 rmem->dma_arr = cpr->cp_desc_mapping; in bnxt_alloc_cp_sub_ring()
3155 rmem->flags = BNXT_RMEM_RING_PTE_FLAG; in bnxt_alloc_cp_sub_ring()
3156 rc = bnxt_alloc_ring(bp, rmem); in bnxt_alloc_cp_sub_ring()
3158 bnxt_free_ring(bp, rmem); in bnxt_alloc_cp_sub_ring()
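The bnxt_alloc_cp_sub_ring() lines show the calling convention for that allocator: rmem->pg_arr and rmem->dma_arr point at arrays that already live inside the completion ring (cp_desc_ring / cp_desc_mapping), so bnxt_alloc_ring() fills the ring's own storage, and a failed allocation is unwound by passing the same rmem straight to bnxt_free_ring(). Below is a trimmed-down sketch of that shape, under the same illustrative assumptions as the block above (calloc in place of dma_alloc_coherent, *_sk names and flag value hypothetical).

#include <stdlib.h>

#define CP_PAGES_SK  4       /* stands in for bp->cp_nr_pages   */
#define CP_PGSIZE_SK 4096    /* stands in for HW_CMPD_RING_SIZE */

struct rmem2_sk {
	int nr_pages;
	size_t page_size;
	void **pg_arr;           /* points at caller-owned storage           */
	unsigned int flags;      /* carries a ring-PTE flag in the driver    */
};

int alloc_ring2_sk(struct rmem2_sk *rmem)
{
	for (int i = 0; i < rmem->nr_pages; i++) {
		rmem->pg_arr[i] = calloc(1, rmem->page_size);
		if (!rmem->pg_arr[i])
			return -1;   /* earlier pages left for the caller to unwind */
	}
	return 0;
}

void free_ring2_sk(struct rmem2_sk *rmem)
{
	for (int i = 0; i < rmem->nr_pages; i++) {
		free(rmem->pg_arr[i]);
		rmem->pg_arr[i] = NULL;
	}
}

struct cp_ring_sk {
	void *cp_desc_ring[CP_PAGES_SK];   /* mirrors cpr->cp_desc_ring */
	struct rmem2_sk ring_mem;
};

int alloc_cp_sub_ring_sk(struct cp_ring_sk *cpr)
{
	struct rmem2_sk *rmem = &cpr->ring_mem;

	rmem->nr_pages  = CP_PAGES_SK;
	rmem->page_size = CP_PGSIZE_SK;
	rmem->pg_arr    = cpr->cp_desc_ring;   /* pages land in the ring struct itself */
	rmem->flags     = 1u << 1;             /* placeholder for BNXT_RMEM_RING_PTE_FLAG */

	int rc = alloc_ring2_sk(rmem);
	if (rc)
		free_ring2_sk(rmem);           /* same error path as the driver */
	return rc;
}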
3225 struct bnxt_ring_mem_info *rmem; in bnxt_init_ring_struct() local
3236 rmem = &ring->ring_mem; in bnxt_init_ring_struct()
3237 rmem->nr_pages = bp->cp_nr_pages; in bnxt_init_ring_struct()
3238 rmem->page_size = HW_CMPD_RING_SIZE; in bnxt_init_ring_struct()
3239 rmem->pg_arr = (void **)cpr->cp_desc_ring; in bnxt_init_ring_struct()
3240 rmem->dma_arr = cpr->cp_desc_mapping; in bnxt_init_ring_struct()
3241 rmem->vmem_size = 0; in bnxt_init_ring_struct()
3248 rmem = &ring->ring_mem; in bnxt_init_ring_struct()
3249 rmem->nr_pages = bp->rx_nr_pages; in bnxt_init_ring_struct()
3250 rmem->page_size = HW_RXBD_RING_SIZE; in bnxt_init_ring_struct()
3251 rmem->pg_arr = (void **)rxr->rx_desc_ring; in bnxt_init_ring_struct()
3252 rmem->dma_arr = rxr->rx_desc_mapping; in bnxt_init_ring_struct()
3253 rmem->vmem_size = SW_RXBD_RING_SIZE * bp->rx_nr_pages; in bnxt_init_ring_struct()
3254 rmem->vmem = (void **)&rxr->rx_buf_ring; in bnxt_init_ring_struct()
3257 rmem = &ring->ring_mem; in bnxt_init_ring_struct()
3258 rmem->nr_pages = bp->rx_agg_nr_pages; in bnxt_init_ring_struct()
3259 rmem->page_size = HW_RXBD_RING_SIZE; in bnxt_init_ring_struct()
3260 rmem->pg_arr = (void **)rxr->rx_agg_desc_ring; in bnxt_init_ring_struct()
3261 rmem->dma_arr = rxr->rx_agg_desc_mapping; in bnxt_init_ring_struct()
3262 rmem->vmem_size = SW_RXBD_AGG_RING_SIZE * bp->rx_agg_nr_pages; in bnxt_init_ring_struct()
3263 rmem->vmem = (void **)&rxr->rx_agg_ring; in bnxt_init_ring_struct()
3271 rmem = &ring->ring_mem; in bnxt_init_ring_struct()
3272 rmem->nr_pages = bp->tx_nr_pages; in bnxt_init_ring_struct()
3273 rmem->page_size = HW_RXBD_RING_SIZE; in bnxt_init_ring_struct()
3274 rmem->pg_arr = (void **)txr->tx_desc_ring; in bnxt_init_ring_struct()
3275 rmem->dma_arr = txr->tx_desc_mapping; in bnxt_init_ring_struct()
3276 rmem->vmem_size = SW_TXBD_RING_SIZE * bp->tx_nr_pages; in bnxt_init_ring_struct()
3277 rmem->vmem = (void **)&txr->tx_buf_ring; in bnxt_init_ring_struct()
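bnxt_init_ring_struct() fills the same descriptor for four ring types, varying only the page count, the hardware page size, the descriptor/mapping arrays, and whether a software ring exists: the completion ring sets vmem_size = 0, while the RX, RX aggregation, and TX rings size vmem as a per-page software-ring size times the number of pages. A small worked example of that sizing follows, with purely illustrative numbers; the real SW_*_RING_SIZE macros are derived in bnxt.h from the per-entry software state and the entries per page.

#include <stdio.h>
#include <stddef.h>

/* Illustrative stand-ins, not the driver's values. */
#define SW_RXBD_RING_SIZE_SK      (16 * 512)   /* per-entry sw state * entries per page */
#define SW_RXBD_AGG_RING_SIZE_SK  (16 * 512)
#define SW_TXBD_RING_SIZE_SK      (24 * 512)

int main(void)
{
	int rx_nr_pages = 4, rx_agg_nr_pages = 8, tx_nr_pages = 2;   /* example counts */

	/* Completion rings set vmem_size = 0: hardware descriptor pages only. */
	size_t rx_vmem  = (size_t)SW_RXBD_RING_SIZE_SK     * rx_nr_pages;
	size_t agg_vmem = (size_t)SW_RXBD_AGG_RING_SIZE_SK * rx_agg_nr_pages;
	size_t tx_vmem  = (size_t)SW_TXBD_RING_SIZE_SK     * tx_nr_pages;

	printf("rx vmem:  %zu bytes\n", rx_vmem);
	printf("agg vmem: %zu bytes\n", agg_vmem);
	printf("tx vmem:  %zu bytes\n", tx_vmem);
	return 0;
}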
5568 struct bnxt_ring_mem_info *rmem = &ring->ring_mem; in hwrm_ring_alloc_send_msg() local
5575 if (rmem->nr_pages > 1) { in hwrm_ring_alloc_send_msg()
5576 req.page_tbl_addr = cpu_to_le64(rmem->pg_tbl_map); in hwrm_ring_alloc_send_msg()
5581 req.page_tbl_addr = cpu_to_le64(rmem->dma_arr[0]); in hwrm_ring_alloc_send_msg()
6909 static void bnxt_hwrm_set_pg_attr(struct bnxt_ring_mem_info *rmem, u8 *pg_attr, in bnxt_hwrm_set_pg_attr() argument
6913 if (rmem->depth >= 1) { in bnxt_hwrm_set_pg_attr()
6914 if (rmem->depth == 2) in bnxt_hwrm_set_pg_attr()
6918 *pg_dir = cpu_to_le64(rmem->pg_tbl_map); in bnxt_hwrm_set_pg_attr()
6920 *pg_dir = cpu_to_le64(rmem->dma_arr[0]); in bnxt_hwrm_set_pg_attr()
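Both hwrm_ring_alloc_send_msg() and bnxt_hwrm_set_pg_attr() make the same decision when telling firmware where a ring lives: a multi-page or multi-level ring is reported through the DMA address of its page table (pg_tbl_map), while a single flat page is reported directly via dma_arr[0], with cpu_to_le64() applied in either case. The sketch below follows the depth-based form used by bnxt_hwrm_set_pg_attr(); the types and level names are illustrative.

#include <stdint.h>

enum pg_level_sk { PG_DIRECT_SK, PG_LVL_1_SK, PG_LVL_2_SK };

struct rmem3_sk {
	int depth;            /* 0: flat, 1: one table level, 2: table of tables */
	uint64_t pg_tbl_map;  /* DMA address of the (top-level) page table       */
	uint64_t dma_arr[1];  /* DMA address of the single data page             */
};

/* Return the address the firmware should be given, and report the level. */
uint64_t pick_pg_dir_sk(const struct rmem3_sk *rmem, enum pg_level_sk *lvl)
{
	if (rmem->depth >= 1) {
		*lvl = (rmem->depth == 2) ? PG_LVL_2_SK : PG_LVL_1_SK;
		return rmem->pg_tbl_map;      /* indirect: point at the table */
	}
	*lvl = PG_DIRECT_SK;
	return rmem->dma_arr[0];              /* flat: pass the page itself   */
}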
7035 struct bnxt_ring_mem_info *rmem = &ctx_pg->ring_mem; in bnxt_alloc_ctx_mem_blk() local
7037 rmem->page_size = BNXT_PAGE_SIZE; in bnxt_alloc_ctx_mem_blk()
7038 rmem->pg_arr = ctx_pg->ctx_pg_arr; in bnxt_alloc_ctx_mem_blk()
7039 rmem->dma_arr = ctx_pg->ctx_dma_arr; in bnxt_alloc_ctx_mem_blk()
7040 rmem->flags = BNXT_RMEM_VALID_PTE_FLAG; in bnxt_alloc_ctx_mem_blk()
7041 if (rmem->depth >= 1) in bnxt_alloc_ctx_mem_blk()
7042 rmem->flags |= BNXT_RMEM_USE_FULL_PAGE_FLAG; in bnxt_alloc_ctx_mem_blk()
7043 return bnxt_alloc_ring(bp, rmem); in bnxt_alloc_ctx_mem_blk()
7050 struct bnxt_ring_mem_info *rmem = &ctx_pg->ring_mem; in bnxt_alloc_ctx_pg_tbls() local
7064 rmem->depth = 2; in bnxt_alloc_ctx_pg_tbls()
7070 rmem->nr_pages = nr_tbls; in bnxt_alloc_ctx_pg_tbls()
7081 rmem = &pg_tbl->ring_mem; in bnxt_alloc_ctx_pg_tbls()
7082 rmem->pg_tbl = ctx_pg->ctx_pg_arr[i]; in bnxt_alloc_ctx_pg_tbls()
7083 rmem->pg_tbl_map = ctx_pg->ctx_dma_arr[i]; in bnxt_alloc_ctx_pg_tbls()
7084 rmem->depth = 1; in bnxt_alloc_ctx_pg_tbls()
7085 rmem->nr_pages = MAX_CTX_PAGES; in bnxt_alloc_ctx_pg_tbls()
7087 rmem->init_val = bp->ctx->ctx_kind_initializer; in bnxt_alloc_ctx_pg_tbls()
7092 rmem->nr_pages = rem; in bnxt_alloc_ctx_pg_tbls()
7099 rmem->nr_pages = DIV_ROUND_UP(mem_size, BNXT_PAGE_SIZE); in bnxt_alloc_ctx_pg_tbls()
7100 if (rmem->nr_pages > 1 || depth) in bnxt_alloc_ctx_pg_tbls()
7101 rmem->depth = 1; in bnxt_alloc_ctx_pg_tbls()
7103 rmem->init_val = bp->ctx->ctx_kind_initializer; in bnxt_alloc_ctx_pg_tbls()
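bnxt_alloc_ctx_pg_tbls() sizes a context block from mem_size: if the resulting page count fits one page table the block stays at depth 0 or 1, otherwise it goes to depth 2 with nr_tbls sub-tables of MAX_CTX_PAGES pages each, the last sub-table holding only the remainder, and each data page optionally pre-filled with ctx_kind_initializer. A worked example of that arithmetic, using an illustrative stand-in for MAX_CTX_PAGES:

#include <stdio.h>

#define BNXT_PAGE_SIZE_SK  4096
#define MAX_CTX_PAGES_SK   512                 /* entries per page table (illustrative) */
#define DIV_ROUND_UP_SK(n, d)  (((n) + (d) - 1) / (d))

int main(void)
{
	unsigned long mem_size = 5UL * 1024 * 1024;    /* example block size */
	unsigned int nr_pages = DIV_ROUND_UP_SK(mem_size, BNXT_PAGE_SIZE_SK);

	if (nr_pages > MAX_CTX_PAGES_SK) {
		/* depth = 2: a table of tables, as in the rmem->depth = 2 path */
		unsigned int nr_tbls = DIV_ROUND_UP_SK(nr_pages, MAX_CTX_PAGES_SK);
		unsigned int rem = nr_pages % MAX_CTX_PAGES_SK;

		printf("depth 2: %u sub-tables, last one holds %u pages\n",
		       nr_tbls, rem ? rem : MAX_CTX_PAGES_SK);
	} else {
		/* depth is 1 when more than one page is needed (or the caller
		 * asks for a table level), else 0 */
		printf("depth %d: %u pages\n", nr_pages > 1 ? 1 : 0, nr_pages);
	}
	return 0;
}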
7112 struct bnxt_ring_mem_info *rmem = &ctx_pg->ring_mem; in bnxt_free_ctx_pg_tbls() local
7114 if (rmem->depth > 1 || ctx_pg->nr_pages > MAX_CTX_PAGES || in bnxt_free_ctx_pg_tbls()
7116 int i, nr_tbls = rmem->nr_pages; in bnxt_free_ctx_pg_tbls()
7134 bnxt_free_ring(bp, rmem); in bnxt_free_ctx_pg_tbls()
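bnxt_free_ctx_pg_tbls() walks the same layout in reverse: for deep or oversized blocks it releases every sub-block through that sub-block's own ring_mem first, then frees the top level with bnxt_free_ring(). A compact sketch of that two-level teardown, again with calloc/free standing in for the DMA-coherent helpers and all *_sk names being assumptions:

#include <stdlib.h>

#define MAX_SUB_PAGES_SK 4   /* illustrative stand-in for MAX_CTX_PAGES */

struct ctx_blk_sk {
	int nr_pages;
	void *pages[MAX_SUB_PAGES_SK];        /* leaf data pages                 */
};

struct ctx_pg_sk {
	int nr_tbls;                          /* number of sub-tables at depth 2 */
	struct ctx_blk_sk *sub[MAX_SUB_PAGES_SK];
	void *top_tbl;                        /* top-level page table            */
};

void free_ctx_blk_sk(struct ctx_blk_sk *blk)
{
	for (int i = 0; i < blk->nr_pages; i++) {
		free(blk->pages[i]);
		blk->pages[i] = NULL;
	}
}

/* Mirrors the shape of bnxt_free_ctx_pg_tbls(): free the leaf blocks first,
 * then the table that pointed at them. */
void free_ctx_pg_sk(struct ctx_pg_sk *ctx_pg)
{
	for (int i = 0; i < ctx_pg->nr_tbls; i++) {
		struct ctx_blk_sk *blk = ctx_pg->sub[i];

		if (!blk)
			continue;
		free_ctx_blk_sk(blk);
		free(blk);
		ctx_pg->sub[i] = NULL;
	}
	free(ctx_pg->top_tbl);
	ctx_pg->top_tbl = NULL;
	ctx_pg->nr_tbls = 0;
}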