Lines Matching refs:ring
Cross-reference hits for "ring" in the cayman DMA functions. The leading number on each line is the line number in the source file; the trailing "in ...()" names the enclosing function, with "argument" or "local" marking how the matched symbol is used there.

54 struct radeon_ring *ring) in cayman_dma_get_rptr() argument
59 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cayman_dma_get_rptr()
61 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_get_rptr()

81 struct radeon_ring *ring) in cayman_dma_get_wptr() argument
85 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_get_wptr()

102 struct radeon_ring *ring) in cayman_dma_set_wptr() argument
106 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_set_wptr()
111 WREG32(reg, (ring->wptr << 2) & 0x3fffc); in cayman_dma_set_wptr()
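
The pointer helpers above work on ring->rptr/ring->wptr as DWORD indices, while the write at line 111 suggests the DMA_RB_RPTR/DMA_RB_WPTR registers hold a byte offset clamped to the DWORD-aligned 0x3fffc field. A minimal userspace sketch of that encoding; the helper names are made up and the read direction is an assumed mirror of the write shown:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical helpers, not radeon code: DWORD index <-> byte-offset field. */
static uint32_t wptr_to_reg(uint32_t wptr_dw)
{
        /* mirrors (ring->wptr << 2) & 0x3fffc from line 111 */
        return (wptr_dw << 2) & 0x3fffc;
}

static uint32_t reg_to_rptr(uint32_t reg_val)
{
        /* assumed inverse: byte offset in the register back to a DWORD index */
        return (reg_val & 0x3fffc) >> 2;
}

int main(void)
{
        uint32_t wptr = 0x234;              /* DWORD units; offsets past the 0x3fffc field are masked off */
        uint32_t reg  = wptr_to_reg(wptr);

        printf("wptr %#x -> reg %#x -> back to %#x\n",
               (unsigned)wptr, (unsigned)reg, (unsigned)reg_to_rptr(reg));
        return 0;
}
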
125 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_dma_ring_ib_execute() local
126 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_dma_ring_ib_execute()
129 u32 next_rptr = ring->wptr + 4; in cayman_dma_ring_ib_execute()
133 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1)); in cayman_dma_ring_ib_execute()
134 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc); in cayman_dma_ring_ib_execute()
135 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff); in cayman_dma_ring_ib_execute()
136 radeon_ring_write(ring, next_rptr); in cayman_dma_ring_ib_execute()
142 while ((ring->wptr & 7) != 5) in cayman_dma_ring_ib_execute()
143 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0)); in cayman_dma_ring_ib_execute()
144 radeon_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, vm_id, 0)); in cayman_dma_ring_ib_execute()
145 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
146 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
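
The loop at line 142 pads with NOPs until (wptr & 7) == 5, and lines 144-146 then emit a 3-DWORD indirect-buffer packet, so the packet ends exactly on an 8-DWORD boundary. A standalone sketch of just that padding arithmetic (not driver code):

#include <stdio.h>

/* NOP DWORDs needed so the next 3-DWORD IB packet ends on an 8-DWORD
 * boundary, i.e. until (wptr & 7) == 5 as in line 142. */
static unsigned nops_needed(unsigned wptr_dw)
{
        unsigned n = 0;

        while ((wptr_dw & 7) != 5) {
                wptr_dw++;
                n++;
        }
        return n;
}

int main(void)
{
        for (unsigned w = 0; w < 8; w++) {
                unsigned n = nops_needed(w);

                printf("wptr %% 8 == %u: %u NOPs, wptr after IB %% 8 == %u\n",
                       w, n, (w + n + 3) & 7);
        }
        return 0;
}
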
175 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false; in cayman_dma_stop()
176 rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX].ready = false; in cayman_dma_stop()

189 struct radeon_ring *ring; in cayman_dma_resume() local
197 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in cayman_dma_resume()
201 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in cayman_dma_resume()
210 rb_bufsz = order_base_2(ring->ring_size / 4); in cayman_dma_resume()
230 WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cayman_dma_resume()
243 ring->wptr = 0; in cayman_dma_resume()
244 WREG32(DMA_RB_WPTR + reg_offset, ring->wptr << 2); in cayman_dma_resume()
248 ring->ready = true; in cayman_dma_resume()
250 r = radeon_ring_test(rdev, ring->idx, ring); in cayman_dma_resume()
252 ring->ready = false; in cayman_dma_resume()
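
In the resume path, rb_bufsz at line 210 encodes the ring size as a power-of-two order: ring_size is in bytes, ring_size / 4 is the DWORD count, and the kernel's order_base_2() rounds the log2 up. Line 230 programs the ring base shifted right by 8, i.e. in 256-byte units. A userspace stand-in for the size calculation, for illustration only (the example size and address are made up):

#include <stdio.h>

/* Stand-in for the kernel's order_base_2(): smallest n with 2^n >= count. */
static unsigned order_base_2_demo(unsigned count)
{
        unsigned order = 0;

        while ((1u << order) < count)
                order++;
        return order;
}

int main(void)
{
        unsigned ring_size          = 64 * 1024;   /* example: 64 KiB ring */
        unsigned rb_bufsz           = order_base_2_demo(ring_size / 4);
        unsigned long long gpu_addr = 0x123400;    /* placeholder ring base */

        printf("ring_size=%u bytes -> %u DWORDs -> rb_bufsz=%u\n",
               ring_size, ring_size / 4, rb_bufsz);
        printf("DMA_RB_BASE value: %#llx (gpu_addr >> 8, 256-byte units)\n",
               gpu_addr >> 8);
        return 0;
}
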
274 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX]); in cayman_dma_fini()
275 radeon_ring_fini(rdev, &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]); in cayman_dma_fini()

287 bool cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) in cayman_dma_is_lockup() argument
292 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_is_lockup()
298 radeon_ring_lockup_update(rdev, ring); in cayman_dma_is_lockup()
301 return radeon_ring_test_lockup(rdev, ring); in cayman_dma_is_lockup()
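
The lockup check at lines 287-301 appears to follow the usual radeon pattern: when the relevant DMA engine is not flagged as hung, radeon_ring_lockup_update() refreshes the per-ring progress tracker and the function reports no lockup; otherwise it falls through to radeon_ring_test_lockup(). A stubbed, compilable illustration of that control flow (every helper below is a stand-in, not the radeon implementation):

#include <stdbool.h>
#include <stdio.h>

struct demo_ring { unsigned idx; unsigned long progress; };

/* Stubs standing in for the GPU busy check and the generic ring helpers. */
static bool dma_engine_busy(void)                  { return false; }
static void lockup_update(struct demo_ring *ring)  { ring->progress++; }
static bool test_lockup(struct demo_ring *ring)    { (void)ring; return true; }

static bool dma_is_lockup(struct demo_ring *ring)
{
        if (!dma_engine_busy()) {
                lockup_update(ring);    /* engine idle: note that it made progress */
                return false;
        }
        return test_lockup(ring);       /* busy with no progress: possible hang */
}

int main(void)
{
        struct demo_ring ring = { .idx = 0, .progress = 0 };

        printf("lockup detected: %s\n", dma_is_lockup(&ring) ? "yes" : "no");
        return 0;
}
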
449 void cayman_dma_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, in cayman_dma_vm_flush() argument
452 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0)); in cayman_dma_vm_flush()
453 radeon_ring_write(ring, (0xf << 16) | ((VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm_id << 2)) >> 2)); in cayman_dma_vm_flush()
454 radeon_ring_write(ring, pd_addr >> 12); in cayman_dma_vm_flush()
457 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0)); in cayman_dma_vm_flush()
458 radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2)); in cayman_dma_vm_flush()
459 radeon_ring_write(ring, 1); in cayman_dma_vm_flush()
462 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0, 0)); in cayman_dma_vm_flush()
463 radeon_ring_write(ring, (0xf << 16) | (VM_INVALIDATE_REQUEST >> 2)); in cayman_dma_vm_flush()
464 radeon_ring_write(ring, 1 << vm_id); in cayman_dma_vm_flush()
467 radeon_ring_write(ring, DMA_SRBM_READ_PACKET); in cayman_dma_vm_flush()
468 radeon_ring_write(ring, (0xff << 20) | (VM_INVALIDATE_REQUEST >> 2)); in cayman_dma_vm_flush()
469 radeon_ring_write(ring, 0); /* mask */ in cayman_dma_vm_flush()
470 radeon_ring_write(ring, 0); /* value */ in cayman_dma_vm_flush()
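
The whole flush sequence is built from SRBM_WRITE packets: a header, then a DWORD that combines what looks like a byte-enable in bits 16-19 (0xf enables all four bytes) with the target register's DWORD offset (byte offset >> 2), then the value, e.g. 1 << vm_id for VM_INVALIDATE_REQUEST. The closing DMA_SRBM_READ_PACKET with a zero mask and value looks like a dummy read-back so the invalidate has completed before the flush returns. A sketch of the payload encoding only, with a placeholder register offset:

#include <stdio.h>

#define DEMO_VM_INVALIDATE_REQUEST 0x1478u   /* placeholder byte offset, not authoritative */

/* Second DWORD of an SRBM_WRITE as seen at lines 453/458/463:
 * byte-enable in bits 16-19, register DWORD offset in the low bits. */
static unsigned srbm_write_addr(unsigned reg_byte_offset)
{
        return (0xfu << 16) | (reg_byte_offset >> 2);
}

int main(void)
{
        unsigned vm_id = 3;   /* example VM context */

        printf("addr dword: %#x\n", srbm_write_addr(DEMO_VM_INVALIDATE_REQUEST));
        printf("data dword: %#x (1 << vm_id)\n", 1u << vm_id);
        return 0;
}
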