Searched refs:wb_offset (Results 1 – 14 of 14) sorted by relevance
192  u32 reg_offset, wb_offset;  in cayman_dma_resume() local
199  wb_offset = R600_WB_DMA_RPTR_OFFSET;  in cayman_dma_resume()
203  wb_offset = CAYMAN_WB_DMA1_RPTR_OFFSET;  in cayman_dma_resume()
223  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);  in cayman_dma_resume()
225  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cayman_dma_resume()
370  u32 reg_offset, wb_offset;  in cik_sdma_gfx_resume() local
377  wb_offset = R600_WB_DMA_RPTR_OFFSET;  in cik_sdma_gfx_resume()
381  wb_offset = CAYMAN_WB_DMA1_RPTR_OFFSET;  in cik_sdma_gfx_resume()
401  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in cik_sdma_gfx_resume()
403  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cik_sdma_gfx_resume()
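
A minimal sketch of the pattern in the two radeon results above: each DMA ring owns a fixed slot in the shared write-back buffer, and the bus address of that slot is programmed so the engine can publish its read pointer to host memory. The register arguments, wreg32() helper, and the two offset values are placeholders, not the real radeon layout.

/* Hedged sketch only; constants and register interface are illustrative. */
#include <stdint.h>

#define R600_WB_DMA_RPTR_OFFSET    1792   /* slot for DMA ring 0 (value illustrative) */
#define CAYMAN_WB_DMA1_RPTR_OFFSET 2304   /* slot for DMA ring 1 (value illustrative) */

struct wb_buf { uint64_t gpu_addr; };      /* bus address of the write-back buffer */

static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }
static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static void wreg32(uint32_t reg, uint32_t val) { (void)reg; (void)val; } /* stand-in MMIO write */

static void dma_program_rptr_addr(const struct wb_buf *wb, int ring_idx,
                                  uint32_t addr_hi_reg, uint32_t addr_lo_reg)
{
    /* each ring gets its own fixed write-back slot */
    uint32_t wb_offset = ring_idx ? CAYMAN_WB_DMA1_RPTR_OFFSET
                                  : R600_WB_DMA_RPTR_OFFSET;
    uint64_t addr = wb->gpu_addr + wb_offset;

    /* high bits masked to the register field width, low bits kept dword-aligned */
    wreg32(addr_hi_reg, upper_32_bits(addr) & 0xFF);
    wreg32(addr_lo_reg, lower_32_bits(addr) & 0xFFFFFFFC);
}
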
47   unsigned int wb_offset,  /* Offset & ~PAGE_MASK */  member
208  return (((loff_t)req->wb_index) << PAGE_SHIFT) + req->wb_offset;  in req_offset()
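
A simplified stand-in for the NFS request structure above, showing how the page index and the in-page wb_offset combine into an absolute file offset, mirroring the req_offset() helper at line 208. The struct and type names here are illustrative; 4 KiB pages are assumed.

#include <stdint.h>

#define PAGE_SHIFT 12                      /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

struct nfs_page_sketch {
    uint64_t     wb_index;    /* page index within the file */
    unsigned int wb_offset;   /* offset inside that page: pos & ~PAGE_MASK */
    unsigned int wb_bytes;    /* length of the request */
};

/* absolute file offset covered by the request */
static int64_t req_offset_sketch(const struct nfs_page_sketch *req)
{
    return ((int64_t)req->wb_index << PAGE_SHIFT) + req->wb_offset;
}
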
513   off = head->wb_offset;  in nfs_join_page_group()
529   head->wb_offset = off;  in nfs_join_page_group()
1107  rqend = req->wb_offset + req->wb_bytes;  in nfs_try_to_update_request()
1114  if (offset > rqend || end < req->wb_offset)  in nfs_try_to_update_request()
1118  if (offset < req->wb_offset) {  in nfs_try_to_update_request()
1119  req->wb_offset = offset;  in nfs_try_to_update_request()
1123  req->wb_bytes = end - req->wb_offset;  in nfs_try_to_update_request()
1125  req->wb_bytes = rqend - req->wb_offset;  in nfs_try_to_update_request()
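
A sketch of the coalescing logic visible in the nfs_try_to_update_request() hits above: an incoming byte range [offset, offset + bytes) is merged into an existing request by widening wb_offset/wb_bytes, but only if the two ranges touch. The real function also adjusts other fields (such as wb_pgbase) and takes locks; those details are omitted here.

#include <stdbool.h>

struct nfs_req_range {
    unsigned int wb_offset;   /* start of the request within its page */
    unsigned int wb_bytes;    /* length of the request */
};

static bool try_to_update_request(struct nfs_req_range *req,
                                  unsigned int offset, unsigned int bytes)
{
    unsigned int end   = offset + bytes;
    unsigned int rqend = req->wb_offset + req->wb_bytes;

    /* disjoint ranges cannot be coalesced into one request */
    if (offset > rqend || end < req->wb_offset)
        return false;

    /* grow the front if the new data starts earlier */
    if (offset < req->wb_offset)
        req->wb_offset = offset;

    /* grow the back to whichever range ends later */
    if (end > rqend)
        req->wb_bytes = end - req->wb_offset;
    else
        req->wb_bytes = rqend - req->wb_offset;

    return true;
}
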
461   req->wb_offset = offset;  in __nfs_create_request()
1185  req->wb_offset += size;  in __nfs_pageio_add_request()
1209  req->wb_offset, size);  in __nfs_pageio_add_request()
1291  offset = req->wb_offset;  in nfs_pageio_add_request()
388  req->wb_offset = pos & ~PAGE_MASK;  in nfs_direct_read_schedule_iovec()
840  req->wb_offset = pos & ~PAGE_MASK;  in nfs_direct_write_schedule_iovec()
1177 __entry->offset = req->wb_offset;
413  u32 wb_offset;  in sdma_v2_4_gfx_resume() local
418  wb_offset = (ring->rptr_offs * 4);  in sdma_v2_4_gfx_resume()
454  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v2_4_gfx_resume()
456  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v2_4_gfx_resume()
435  u32 wb_offset;  in cik_sdma_gfx_resume() local
440  wb_offset = (ring->rptr_offs * 4);  in cik_sdma_gfx_resume()
476  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in cik_sdma_gfx_resume()
478  ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));  in cik_sdma_gfx_resume()
1143  u32 wb_offset;  in sdma_v4_0_gfx_resume() local
1148  wb_offset = (ring->rptr_offs * 4);  in sdma_v4_0_gfx_resume()
1162  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v4_0_gfx_resume()
1164  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v4_0_gfx_resume()
1233  u32 wb_offset;  in sdma_v4_0_page_resume() local
1238  wb_offset = (ring->rptr_offs * 4);  in sdma_v4_0_page_resume()
1252  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v4_0_page_resume()
1254  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v4_0_page_resume()
648  u32 wb_offset;  in sdma_v3_0_gfx_resume() local
656  wb_offset = (ring->rptr_offs * 4);  in sdma_v3_0_gfx_resume()
693  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v3_0_gfx_resume()
695  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v3_0_gfx_resume()
585  u32 wb_offset;  in sdma_v5_2_gfx_resume() local
595  wb_offset = (ring->rptr_offs * 4);  in sdma_v5_2_gfx_resume()
632  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v5_2_gfx_resume()
634  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v5_2_gfx_resume()
654  u32 wb_offset;  in sdma_v5_0_gfx_resume() local
664  wb_offset = (ring->rptr_offs * 4);  in sdma_v5_0_gfx_resume()
702  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);  in sdma_v5_0_gfx_resume()
704  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);  in sdma_v5_0_gfx_resume()
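
A minimal sketch of the pattern repeated across the amdgpu SDMA results above (sdma_v2_4 through sdma_v5_2): the ring's rptr slot is a dword index into the shared write-back buffer, so its byte offset is rptr_offs * 4, and the resulting bus address is split into HI/LO read-pointer address registers. The struct names, register arguments, and wreg32() helper here are placeholders, not the amdgpu API.

#include <stdint.h>

struct ring_sketch { uint32_t rptr_offs; };   /* dword slot index for this ring */
struct wb_buf      { uint64_t gpu_addr;  };   /* bus address of the write-back buffer */

static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }
static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static void wreg32(uint32_t reg, uint32_t val) { (void)reg; (void)val; } /* stand-in MMIO write */

static void sdma_program_rptr_addr(const struct wb_buf *wb,
                                   const struct ring_sketch *ring,
                                   uint32_t addr_hi_reg, uint32_t addr_lo_reg)
{
    uint32_t wb_offset = ring->rptr_offs * 4;     /* dword index -> byte offset */
    uint64_t addr = wb->gpu_addr + wb_offset;

    wreg32(addr_hi_reg, upper_32_bits(addr) & 0xFFFFFFFF);
    wreg32(addr_lo_reg, lower_32_bits(addr) & 0xFFFFFFFC);   /* low bits dword-aligned */
}
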
834  if (!IS_ALIGNED(req->wb_offset, alignment))  in is_aligned_req()
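
A short sketch of the check in the last result: a request can only be handled on this path when its in-page offset is a multiple of the required alignment. The real is_aligned_req() applies further conditions (e.g. on wb_bytes) not shown here.

#include <stdbool.h>

#define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)   /* alignment must be a power of two */

static bool offset_is_aligned(unsigned int wb_offset, unsigned int alignment)
{
    return IS_ALIGNED(wb_offset, alignment);
}
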