Lines matching refs:ring in drivers/gpu/drm/radeon/ni_dma.c
54 struct radeon_ring *ring) in cayman_dma_get_rptr() argument
59 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cayman_dma_get_rptr()
61 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_get_rptr()
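The three hits above fall inside cayman_dma_get_rptr(). A sketch of the full helper, reconstructed around those lines; the writeback-disabled fallback and the DMA_RB_RPTR / DMA0_REGISTER_OFFSET / DMA1_REGISTER_OFFSET names do not appear in the hits and are quoted from the NI register headers from memory:

	/* sketch reconstructed around the hits; unseen register names from memory (nid.h) */
	uint32_t cayman_dma_get_rptr(struct radeon_device *rdev,
				     struct radeon_ring *ring)
	{
		u32 rptr, reg;

		if (rdev->wb.enabled) {
			/* the GPU mirrors the read pointer into the writeback page */
			rptr = rdev->wb.wb[ring->rptr_offs/4];
		} else {
			/* fall back to the per-engine register block */
			if (ring->idx == R600_RING_TYPE_DMA_INDEX)
				reg = DMA_RB_RPTR + DMA0_REGISTER_OFFSET;
			else
				reg = DMA_RB_RPTR + DMA1_REGISTER_OFFSET;
			rptr = RREG32(reg);
		}

		/* the hardware counts bytes; the ring indexes dwords */
		return (rptr & 0x3fffc) >> 2;
	}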
81 struct radeon_ring *ring) in cayman_dma_get_wptr() argument
85 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_get_wptr()
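Both hits belong to cayman_dma_get_wptr(). A sketch of the surrounding helper under the same assumptions (DMA_RB_WPTR and the per-engine offsets are reconstructed, not shown in the hits):

	/* sketch; DMA_RB_WPTR and the *_REGISTER_OFFSET names are reconstructed */
	uint32_t cayman_dma_get_wptr(struct radeon_device *rdev,
				     struct radeon_ring *ring)
	{
		u32 reg;

		/* each DMA engine has its own copy of the ring registers */
		if (ring->idx == R600_RING_TYPE_DMA_INDEX)
			reg = DMA_RB_WPTR + DMA0_REGISTER_OFFSET;
		else
			reg = DMA_RB_WPTR + DMA1_REGISTER_OFFSET;

		/* the register holds a byte offset; the ring works in dwords */
		return (RREG32(reg) & 0x3fffc) >> 2;
	}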
102 struct radeon_ring *ring) in cayman_dma_set_wptr() argument
106 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_set_wptr()
111 WREG32(reg, (ring->wptr << 2) & 0x3fffc); in cayman_dma_set_wptr()
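The WREG32() hit at line 111 is the tail of cayman_dma_set_wptr(); the register selection above it mirrors the get_wptr() helper. A sketch (again, only the ring->idx test and the WREG32 line are in the hits):

	/* sketch; register names outside the hits are reconstructed */
	void cayman_dma_set_wptr(struct radeon_device *rdev,
				 struct radeon_ring *ring)
	{
		u32 reg;

		if (ring->idx == R600_RING_TYPE_DMA_INDEX)
			reg = DMA_RB_WPTR + DMA0_REGISTER_OFFSET;
		else
			reg = DMA_RB_WPTR + DMA1_REGISTER_OFFSET;

		/* wptr is kept in dwords; the register wants a byte offset */
		WREG32(reg, (ring->wptr << 2) & 0x3fffc);
	}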
125 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_dma_ring_ib_execute() local
128 u32 next_rptr = ring->wptr + 4; in cayman_dma_ring_ib_execute()
132 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1)); in cayman_dma_ring_ib_execute()
133 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc); in cayman_dma_ring_ib_execute()
134 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff); in cayman_dma_ring_ib_execute()
135 radeon_ring_write(ring, next_rptr); in cayman_dma_ring_ib_execute()
141 while ((ring->wptr & 7) != 5) in cayman_dma_ring_ib_execute()
142 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0)); in cayman_dma_ring_ib_execute()
143 radeon_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, ib->vm ? ib->vm->id : 0, 0)); in cayman_dma_ring_ib_execute()
144 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
145 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
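These hits cover most of cayman_dma_ring_ib_execute(). A sketch filling in the control flow around them; the next_rptr alignment arithmetic in the writeback branch does not appear in the hits and is reconstructed from memory:

	/* sketch; the wb branch's alignment arithmetic is reconstructed */
	void cayman_dma_ring_ib_execute(struct radeon_device *rdev,
					struct radeon_ib *ib)
	{
		struct radeon_ring *ring = &rdev->ring[ib->ring];

		if (rdev->wb.enabled) {
			u32 next_rptr = ring->wptr + 4;
			/* account for the NOP padding and the 4-dword write packet */
			while ((next_rptr & 7) != 5)
				next_rptr++;
			next_rptr += 3;
			radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1));
			radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
			radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
			radeon_ring_write(ring, next_rptr);
		}

		/* the IB packet must end on an 8-dword boundary, so pad with
		 * NOPs until (wptr & 7) == 5, leaving room for the 3-dword packet
		 */
		while ((ring->wptr & 7) != 5)
			radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0));
		radeon_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER,
						      ib->vm ? ib->vm->id : 0, 0));
		radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
		radeon_ring_write(ring, (ib->length_dw << 12) |
					(upper_32_bits(ib->gpu_addr) & 0xFF));
	}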
174 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false; in cayman_dma_stop()
175 rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false; in cayman_dma_stop()
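Lines 174-175 are the tail of cayman_dma_stop(). A sketch of how both engines are quiesced first; the DMA_RB_CNTL reads and the DMA_RB_ENABLE bit are not in the hits and are assumptions from the NI register layout:

	/* sketch; the RB_CNTL/RB_ENABLE handling is reconstructed */
	void cayman_dma_stop(struct radeon_device *rdev)
	{
		u32 rb_cntl;

		/* clear the ring-buffer enable bit on both DMA engines */
		rb_cntl = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
		rb_cntl &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, rb_cntl);

		rb_cntl = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
		rb_cntl &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, rb_cntl);

		/* the two lines the search actually hit */
		rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false;
		rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false;
	}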
188 struct radeon_ring *ring; in cayman_dma_resume() local
196 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in cayman_dma_resume()
200 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in cayman_dma_resume()
209 rb_bufsz = order_base_2(ring->ring_size / 4); in cayman_dma_resume()
229 WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cayman_dma_resume()
242 ring->wptr = 0; in cayman_dma_resume()
243 WREG32(DMA_RB_WPTR + reg_offset, ring->wptr << 2); in cayman_dma_resume()
247 ring->ready = true; in cayman_dma_resume()
249 r = radeon_ring_test(rdev, ring->idx, ring); in cayman_dma_resume()
251 ring->ready = false; in cayman_dma_resume()
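The hits from lines 188 to 251 trace the shape of cayman_dma_resume(): select one of the two rings, program its ring buffer, enable it, and ring-test it. A condensed sketch under loud assumptions; the semaphore-timeout programming and big-endian swap handling are omitted, and every register or wb-offset name not present in the hits (DMA_RB_CNTL, DMA_RB_RPTR_ADDR_*, DMA_IB_CNTL, R600_WB_DMA_RPTR_OFFSET, CAYMAN_WB_DMA1_RPTR_OFFSET, and friends) is reconstructed from memory:

	/* condensed sketch; most register names here are reconstructed, not from the hits */
	int cayman_dma_resume(struct radeon_device *rdev)
	{
		struct radeon_ring *ring;
		u32 rb_cntl, ib_cntl;
		u32 rb_bufsz, reg_offset, wb_offset;
		int i, r;

		for (i = 0; i < 2; i++) {
			if (i == 0) {
				ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
				reg_offset = DMA0_REGISTER_OFFSET;
				wb_offset = R600_WB_DMA_RPTR_OFFSET;
			} else {
				ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
				reg_offset = DMA1_REGISTER_OFFSET;
				wb_offset = CAYMAN_WB_DMA1_RPTR_OFFSET;
			}

			/* ring size is programmed as log2 of the dword count */
			rb_bufsz = order_base_2(ring->ring_size / 4);
			rb_cntl = rb_bufsz << 1;
			WREG32(DMA_RB_CNTL + reg_offset, rb_cntl);

			/* reset both pointers and aim rptr writeback at the wb page */
			WREG32(DMA_RB_RPTR + reg_offset, 0);
			WREG32(DMA_RB_WPTR + reg_offset, 0);
			WREG32(DMA_RB_RPTR_ADDR_HI + reg_offset,
			       upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);
			WREG32(DMA_RB_RPTR_ADDR_LO + reg_offset,
			       (rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);
			if (rdev->wb.enabled)
				rb_cntl |= DMA_RPTR_WRITEBACK_ENABLE;

			WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8);

			/* enable indirect buffers on this engine */
			ib_cntl = DMA_IB_ENABLE | CMD_VMID_FORCE;
			WREG32(DMA_IB_CNTL + reg_offset, ib_cntl);

			ring->wptr = 0;
			WREG32(DMA_RB_WPTR + reg_offset, ring->wptr << 2);

			WREG32(DMA_RB_CNTL + reg_offset, rb_cntl | DMA_RB_ENABLE);

			ring->ready = true;

			r = radeon_ring_test(rdev, ring->idx, ring);
			if (r) {
				ring->ready = false;
				return r;
			}
		}

		return 0;
	}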
273 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]); in cayman_dma_fini()
274 radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]); in cayman_dma_fini()
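The two hits are essentially the whole body of cayman_dma_fini(); only the leading cayman_dma_stop() call is filled in here, by assumption from the usual stop-then-teardown pattern:

	/* sketch; the cayman_dma_stop() call is assumed, the rest is in the hits */
	void cayman_dma_fini(struct radeon_device *rdev)
	{
		cayman_dma_stop(rdev);
		radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]);
		radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]);
	}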
286 bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) in cayman_dma_is_lockup() argument
291 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_is_lockup()
297 radeon_ring_lockup_update(rdev, ring); in cayman_dma_is_lockup()
300 return radeon_ring_test_lockup(rdev, ring); in cayman_dma_is_lockup()
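These hits sketch cayman_dma_is_lockup(): pick a per-engine reset mask, and only report a lockup if the soft-reset check flags that engine. The cayman_gpu_check_soft_reset() call and the RADEON_RESET_DMA / RADEON_RESET_DMA1 masks are not in the hits and are reconstructed from memory:

	/* sketch; reset-mask plumbing outside the hits is reconstructed */
	bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
	{
		u32 reset_mask = cayman_gpu_check_soft_reset(rdev);
		u32 mask;

		if (ring->idx == R600_RING_TYPE_DMA_INDEX)
			mask = RADEON_RESET_DMA;
		else
			mask = RADEON_RESET_DMA1;

		if (!(reset_mask & mask)) {
			/* engine looks healthy; refresh the lockup tracker */
			radeon_ring_lockup_update(rdev, ring);
			return false;
		}
		return radeon_ring_test_lockup(rdev, ring);
	}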
451 struct radeon_ring *ring = &rdev->ring[ridx]; in cayman_dma_vm_flush() local
456 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0)); in cayman_dma_vm_flush()
457 radeon_ring_write(ring, (0xf << 16) | ((VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm->id << 2)) >> 2)); in cayman_dma_vm_flush()
458 radeon_ring_write(ring, vm->pd_gpu_addr >> 12); in cayman_dma_vm_flush()
461 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0)); in cayman_dma_vm_flush()
462 radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2)); in cayman_dma_vm_flush()
463 radeon_ring_write(ring, 1); in cayman_dma_vm_flush()
466 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0)); in cayman_dma_vm_flush()
467 radeon_ring_write(ring, (0xf << 16) | (VM_INVALIDATE_REQUEST >> 2)); in cayman_dma_vm_flush()
468 radeon_ring_write(ring, 1 << vm->id); in cayman_dma_vm_flush()
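The hits from lines 451 to 468 are the complete packet sequence of cayman_dma_vm_flush(): an SRBM write to update the page-directory base for the VM, an HDP cache flush, and a VM invalidate request. Only the signature (taking a ring index, judging by the hit at line 451) and a NULL guard are filled in here as assumptions:

	/* sketch; everything except the signature and the vm NULL check is in the hits */
	void cayman_dma_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm)
	{
		struct radeon_ring *ring = &rdev->ring[ridx];

		if (vm == NULL)
			return;

		/* update the page table base for this VM context */
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
		radeon_ring_write(ring, (0xf << 16) |
			((VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm->id << 2)) >> 2));
		radeon_ring_write(ring, vm->pd_gpu_addr >> 12);

		/* flush the HDP cache */
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
		radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
		radeon_ring_write(ring, 1);

		/* bits 0-7 select VM contexts 0-7 to invalidate */
		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0));
		radeon_ring_write(ring, (0xf << 16) | (VM_INVALIDATE_REQUEST >> 2));
		radeon_ring_write(ring, 1 << vm->id);
	}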