Searched refs:ring (Results 1 – 25 of 415) sorted by relevance

/drivers/net/wireless/broadcom/b43legacy/
dma.c
45 struct b43legacy_dmadesc32 *op32_idx2desc(struct b43legacy_dmaring *ring, in op32_idx2desc() argument
51 *meta = &(ring->meta[slot]); in op32_idx2desc()
52 desc = ring->descbase; in op32_idx2desc()
58 static void op32_fill_descriptor(struct b43legacy_dmaring *ring, in op32_fill_descriptor() argument
63 struct b43legacy_dmadesc32 *descbase = ring->descbase; in op32_fill_descriptor()
70 B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in op32_fill_descriptor()
75 addr |= ring->dev->dma.translation; in op32_fill_descriptor()
76 ctl = (bufsize - ring->frameoffset) in op32_fill_descriptor()
78 if (slot == ring->nr_slots - 1) in op32_fill_descriptor()
93 static void op32_poke_tx(struct b43legacy_dmaring *ring, int slot) in op32_poke_tx() argument
[all …]
/drivers/thunderbolt/
nhi.c
22 #define RING_TYPE(ring) ((ring)->is_tx ? "TX ring" : "RX ring") argument
25 static int ring_interrupt_index(struct tb_ring *ring) in ring_interrupt_index() argument
27 int bit = ring->hop; in ring_interrupt_index()
28 if (!ring->is_tx) in ring_interrupt_index()
29 bit += ring->nhi->hop_count; in ring_interrupt_index()
38 static void ring_interrupt_active(struct tb_ring *ring, bool active) in ring_interrupt_active() argument
41 ring_interrupt_index(ring) / 32 * 4; in ring_interrupt_active()
42 int bit = ring_interrupt_index(ring) & 31; in ring_interrupt_active()
45 old = ioread32(ring->nhi->iobase + reg); in ring_interrupt_active()
51 dev_info(&ring->nhi->pdev->dev, in ring_interrupt_active()
[all …]
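
The nhi.c hits above show how the driver packs one interrupt-enable bit per ring into consecutive 32-bit registers: TX rings take bits 0..hop_count-1 and RX rings the following hop_count bits. A minimal userspace sketch of that index math (hop_count and the demo values are made up, not taken from hardware):

#include <stdio.h>

/* One enable bit per ring: all TX bits first, then all RX bits. */
static int ring_interrupt_index(int hop, int is_tx, int hop_count)
{
    int bit = hop;
    if (!is_tx)
        bit += hop_count;    /* RX block starts after the TX block */
    return bit;
}

int main(void)
{
    int hop_count = 12;    /* hypothetical hop count */
    int idx = ring_interrupt_index(3, 0, hop_count);  /* RX ring 3 -> bit 15 */
    int reg = idx / 32 * 4;    /* byte offset of the 32-bit enable register */
    int bit = idx & 31;        /* bit position within that register */

    printf("offset 0x%x, bit %d\n", reg, bit);
    return 0;
}
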
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ring.c
51 struct amdgpu_ring *ring);
52 static void amdgpu_debugfs_ring_fini(struct amdgpu_ring *ring);
64 int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw) in amdgpu_ring_alloc() argument
68 ndw = (ndw + ring->align_mask) & ~ring->align_mask; in amdgpu_ring_alloc()
73 if (WARN_ON_ONCE(ndw > ring->max_dw)) in amdgpu_ring_alloc()
76 ring->count_dw = ndw; in amdgpu_ring_alloc()
77 ring->wptr_old = ring->wptr; in amdgpu_ring_alloc()
79 if (ring->funcs->begin_use) in amdgpu_ring_alloc()
80 ring->funcs->begin_use(ring); in amdgpu_ring_alloc()
92 void amdgpu_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) in amdgpu_ring_insert_nop() argument
[all …]
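
amdgpu_ring_alloc rounds the requested dword count up with the ring's alignment mask, the standard (n + mask) & ~mask idiom for power-of-two alignment. A small sketch, assuming a hypothetical 16-dword alignment (the driver reads the real mask from ring->align_mask):

#include <stdint.h>
#include <stdio.h>

/* Round ndw up to a multiple of (mask + 1); mask must be 2^n - 1. */
static uint32_t align_dwords(uint32_t ndw, uint32_t mask)
{
    return (ndw + mask) & ~mask;
}

int main(void)
{
    uint32_t align_mask = 0xf;   /* assumed value, not the real ring->align_mask */

    printf("%u\n", align_dwords(5, align_mask));    /* 16 */
    printf("%u\n", align_dwords(32, align_mask));   /* 32, already aligned */
    return 0;
}
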
amdgpu_fence.c
54 struct amdgpu_ring *ring; member
96 static void amdgpu_fence_write(struct amdgpu_ring *ring, u32 seq) in amdgpu_fence_write() argument
98 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_write()
112 static u32 amdgpu_fence_read(struct amdgpu_ring *ring) in amdgpu_fence_read() argument
114 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_read()
134 int amdgpu_fence_emit(struct amdgpu_ring *ring, struct fence **f) in amdgpu_fence_emit() argument
136 struct amdgpu_device *adev = ring->adev; in amdgpu_fence_emit()
145 seq = ++ring->fence_drv.sync_seq; in amdgpu_fence_emit()
146 fence->ring = ring; in amdgpu_fence_emit()
148 &ring->fence_drv.lock, in amdgpu_fence_emit()
[all …]
uvd_v6_0.c
53 static uint32_t uvd_v6_0_ring_get_rptr(struct amdgpu_ring *ring) in uvd_v6_0_ring_get_rptr() argument
55 struct amdgpu_device *adev = ring->adev; in uvd_v6_0_ring_get_rptr()
67 static uint32_t uvd_v6_0_ring_get_wptr(struct amdgpu_ring *ring) in uvd_v6_0_ring_get_wptr() argument
69 struct amdgpu_device *adev = ring->adev; in uvd_v6_0_ring_get_wptr()
81 static void uvd_v6_0_ring_set_wptr(struct amdgpu_ring *ring) in uvd_v6_0_ring_set_wptr() argument
83 struct amdgpu_device *adev = ring->adev; in uvd_v6_0_ring_set_wptr()
85 WREG32(mmUVD_RBC_RB_WPTR, ring->wptr); in uvd_v6_0_ring_set_wptr()
104 struct amdgpu_ring *ring; in uvd_v6_0_sw_init() local
121 ring = &adev->uvd.ring; in uvd_v6_0_sw_init()
122 sprintf(ring->name, "uvd"); in uvd_v6_0_sw_init()
[all …]
uvd_v4_2.c
53 static uint32_t uvd_v4_2_ring_get_rptr(struct amdgpu_ring *ring) in uvd_v4_2_ring_get_rptr() argument
55 struct amdgpu_device *adev = ring->adev; in uvd_v4_2_ring_get_rptr()
67 static uint32_t uvd_v4_2_ring_get_wptr(struct amdgpu_ring *ring) in uvd_v4_2_ring_get_wptr() argument
69 struct amdgpu_device *adev = ring->adev; in uvd_v4_2_ring_get_wptr()
81 static void uvd_v4_2_ring_set_wptr(struct amdgpu_ring *ring) in uvd_v4_2_ring_set_wptr() argument
83 struct amdgpu_device *adev = ring->adev; in uvd_v4_2_ring_set_wptr()
85 WREG32(mmUVD_RBC_RB_WPTR, ring->wptr); in uvd_v4_2_ring_set_wptr()
100 struct amdgpu_ring *ring; in uvd_v4_2_sw_init() local
117 ring = &adev->uvd.ring; in uvd_v4_2_sw_init()
118 sprintf(ring->name, "uvd"); in uvd_v4_2_sw_init()
[all …]
uvd_v5_0.c
49 static uint32_t uvd_v5_0_ring_get_rptr(struct amdgpu_ring *ring) in uvd_v5_0_ring_get_rptr() argument
51 struct amdgpu_device *adev = ring->adev; in uvd_v5_0_ring_get_rptr()
63 static uint32_t uvd_v5_0_ring_get_wptr(struct amdgpu_ring *ring) in uvd_v5_0_ring_get_wptr() argument
65 struct amdgpu_device *adev = ring->adev; in uvd_v5_0_ring_get_wptr()
77 static void uvd_v5_0_ring_set_wptr(struct amdgpu_ring *ring) in uvd_v5_0_ring_set_wptr() argument
79 struct amdgpu_device *adev = ring->adev; in uvd_v5_0_ring_set_wptr()
81 WREG32(mmUVD_RBC_RB_WPTR, ring->wptr); in uvd_v5_0_ring_set_wptr()
96 struct amdgpu_ring *ring; in uvd_v5_0_sw_init() local
113 ring = &adev->uvd.ring; in uvd_v5_0_sw_init()
114 sprintf(ring->name, "uvd"); in uvd_v5_0_sw_init()
[all …]
/drivers/gpu/drm/radeon/
radeon_ring.c
45 static int radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring);
58 struct radeon_ring *ring) in radeon_ring_supports_scratch_reg() argument
60 switch (ring->idx) { in radeon_ring_supports_scratch_reg()
78 void radeon_ring_free_size(struct radeon_device *rdev, struct radeon_ring *ring) in radeon_ring_free_size() argument
80 uint32_t rptr = radeon_ring_get_rptr(rdev, ring); in radeon_ring_free_size()
83 ring->ring_free_dw = rptr + (ring->ring_size / 4); in radeon_ring_free_size()
84 ring->ring_free_dw -= ring->wptr; in radeon_ring_free_size()
85 ring->ring_free_dw &= ring->ptr_mask; in radeon_ring_free_size()
86 if (!ring->ring_free_dw) { in radeon_ring_free_size()
88 ring->ring_free_dw = ring->ring_size / 4; in radeon_ring_free_size()
[all …]
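
radeon_ring_free_size computes free space as the masked distance from the write pointer back around to the read pointer, treating a zero result as an empty ring. A hedged userspace rendition, assuming a power-of-two ring size in dwords (names are illustrative):

#include <stdint.h>
#include <stdio.h>

/* Free dwords between the write pointer and the read pointer. */
static uint32_t ring_free_dw(uint32_t rptr, uint32_t wptr, uint32_t ring_dw)
{
    uint32_t ptr_mask = ring_dw - 1;          /* ring_dw assumed a power of two */
    uint32_t free = (rptr + ring_dw - wptr) & ptr_mask;

    if (!free)
        free = ring_dw;                       /* rptr == wptr: ring is empty */
    return free;
}

int main(void)
{
    printf("%u\n", ring_free_dw(10, 4, 256)); /* 6 dwords until wptr reaches rptr */
    printf("%u\n", ring_free_dw(4, 4, 256));  /* empty: all 256 free */
    return 0;
}
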
cik_sdma.c
64 struct radeon_ring *ring) in cik_sdma_get_rptr() argument
69 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cik_sdma_get_rptr()
71 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cik_sdma_get_rptr()
91 struct radeon_ring *ring) in cik_sdma_get_wptr() argument
95 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cik_sdma_get_wptr()
112 struct radeon_ring *ring) in cik_sdma_set_wptr() argument
116 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cik_sdma_set_wptr()
121 WREG32(reg, (ring->wptr << 2) & 0x3fffc); in cik_sdma_set_wptr()
136 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cik_sdma_ring_ib_execute() local
137 u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf; in cik_sdma_ring_ib_execute()
[all …]
r600_dma.c
52 struct radeon_ring *ring) in r600_dma_get_rptr() argument
57 rptr = rdev->wb.wb[ring->rptr_offs/4]; in r600_dma_get_rptr()
73 struct radeon_ring *ring) in r600_dma_get_wptr() argument
87 struct radeon_ring *ring) in r600_dma_set_wptr() argument
89 WREG32(DMA_RB_WPTR, (ring->wptr << 2) & 0x3fffc); in r600_dma_set_wptr()
109 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false; in r600_dma_stop()
122 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in r600_dma_resume() local
131 rb_bufsz = order_base_2(ring->ring_size / 4); in r600_dma_resume()
151 WREG32(DMA_RB_BASE, ring->gpu_addr >> 8); in r600_dma_resume()
167 ring->wptr = 0; in r600_dma_resume()
[all …]
uvd_v1_0.c
40 struct radeon_ring *ring) in uvd_v1_0_get_rptr() argument
54 struct radeon_ring *ring) in uvd_v1_0_get_wptr() argument
68 struct radeon_ring *ring) in uvd_v1_0_set_wptr() argument
70 WREG32(UVD_RBC_RB_WPTR, ring->wptr); in uvd_v1_0_set_wptr()
84 struct radeon_ring *ring = &rdev->ring[fence->ring]; in uvd_v1_0_fence_emit() local
85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit()
87 radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA0, 0)); in uvd_v1_0_fence_emit()
88 radeon_ring_write(ring, addr & 0xffffffff); in uvd_v1_0_fence_emit()
89 radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA1, 0)); in uvd_v1_0_fence_emit()
90 radeon_ring_write(ring, fence->seq); in uvd_v1_0_fence_emit()
[all …]
radeon_fence.c
62 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) in radeon_fence_write() argument
64 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_write()
83 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) in radeon_fence_read() argument
85 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_read()
108 static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) in radeon_fence_schedule_check() argument
115 &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
131 int ring) in radeon_fence_emit() argument
141 (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
142 (*fence)->ring = ring; in radeon_fence_emit()
145 &rdev->fence_queue.lock, rdev->fence_context + ring, seq); in radeon_fence_emit()
[all …]
evergreen_dma.c
44 struct radeon_ring *ring = &rdev->ring[fence->ring]; in evergreen_dma_fence_ring_emit() local
45 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in evergreen_dma_fence_ring_emit()
47 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0)); in evergreen_dma_fence_ring_emit()
48 radeon_ring_write(ring, addr & 0xfffffffc); in evergreen_dma_fence_ring_emit()
49 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff)); in evergreen_dma_fence_ring_emit()
50 radeon_ring_write(ring, fence->seq); in evergreen_dma_fence_ring_emit()
52 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0)); in evergreen_dma_fence_ring_emit()
54 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0)); in evergreen_dma_fence_ring_emit()
55 radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2)); in evergreen_dma_fence_ring_emit()
56 radeon_ring_write(ring, 1); in evergreen_dma_fence_ring_emit()
[all …]
ni_dma.c
54 struct radeon_ring *ring) in cayman_dma_get_rptr() argument
59 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cayman_dma_get_rptr()
61 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_get_rptr()
81 struct radeon_ring *ring) in cayman_dma_get_wptr() argument
85 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_get_wptr()
102 struct radeon_ring *ring) in cayman_dma_set_wptr() argument
106 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cayman_dma_set_wptr()
111 WREG32(reg, (ring->wptr << 2) & 0x3fffc); in cayman_dma_set_wptr()
125 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_dma_ring_ib_execute() local
126 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_dma_ring_ib_execute()
[all …]
/drivers/net/wireless/broadcom/b43/
dma.c
85 struct b43_dmadesc_generic *op32_idx2desc(struct b43_dmaring *ring, in op32_idx2desc() argument
91 *meta = &(ring->meta[slot]); in op32_idx2desc()
92 desc = ring->descbase; in op32_idx2desc()
98 static void op32_fill_descriptor(struct b43_dmaring *ring, in op32_fill_descriptor() argument
103 struct b43_dmadesc32 *descbase = ring->descbase; in op32_fill_descriptor()
110 B43_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in op32_fill_descriptor()
112 addr = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW); in op32_fill_descriptor()
113 addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT); in op32_fill_descriptor()
116 if (slot == ring->nr_slots - 1) in op32_fill_descriptor()
131 static void op32_poke_tx(struct b43_dmaring *ring, int slot) in op32_poke_tx() argument
[all …]
/drivers/net/ethernet/apm/xgene/
xgene_enet_ring2.c
24 static void xgene_enet_ring_init(struct xgene_enet_desc_ring *ring) in xgene_enet_ring_init() argument
26 u32 *ring_cfg = ring->state; in xgene_enet_ring_init()
27 u64 addr = ring->dma; in xgene_enet_ring_init()
29 if (xgene_enet_ring_owner(ring->id) == RING_OWNER_CPU) { in xgene_enet_ring_init()
30 ring_cfg[0] |= SET_VAL(X2_INTLINE, ring->id & RING_BUFNUM_MASK); in xgene_enet_ring_init()
39 ring_cfg[3] |= SET_VAL(RINGSIZE, ring->cfgsize) in xgene_enet_ring_init()
46 static void xgene_enet_ring_set_type(struct xgene_enet_desc_ring *ring) in xgene_enet_ring_set_type() argument
48 u32 *ring_cfg = ring->state; in xgene_enet_ring_set_type()
52 is_bufpool = xgene_enet_is_bufpool(ring->id); in xgene_enet_ring_set_type()
59 static void xgene_enet_ring_set_recombbuf(struct xgene_enet_desc_ring *ring) in xgene_enet_ring_set_recombbuf() argument
[all …]
/drivers/net/ethernet/mellanox/mlx4/
en_tx.c
54 struct mlx4_en_tx_ring *ring; in mlx4_en_create_tx_ring() local
58 ring = kzalloc_node(sizeof(*ring), GFP_KERNEL, node); in mlx4_en_create_tx_ring()
59 if (!ring) { in mlx4_en_create_tx_ring()
60 ring = kzalloc(sizeof(*ring), GFP_KERNEL); in mlx4_en_create_tx_ring()
61 if (!ring) { in mlx4_en_create_tx_ring()
67 ring->size = size; in mlx4_en_create_tx_ring()
68 ring->size_mask = size - 1; in mlx4_en_create_tx_ring()
69 ring->stride = stride; in mlx4_en_create_tx_ring()
70 ring->full_size = ring->size - HEADROOM - MAX_DESC_TXBBS; in mlx4_en_create_tx_ring()
73 ring->tx_info = kmalloc_node(tmp, GFP_KERNEL | __GFP_NOWARN, node); in mlx4_en_create_tx_ring()
[all …]
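
mlx4_en_create_tx_ring depends on a power-of-two ring size so that size_mask = size - 1 can replace a modulo when indexing, and it reserves headroom so the producer never fully catches the consumer. A sketch of the masking idiom; HEADROOM and MAX_DESC_TXBBS here are placeholder values, not the driver's:

#include <stdint.h>
#include <stdio.h>

#define HEADROOM       4    /* placeholder value */
#define MAX_DESC_TXBBS 16   /* placeholder value */

int main(void)
{
    uint32_t size = 1024;                 /* ring size, must be a power of two */
    uint32_t size_mask = size - 1;
    uint32_t full_size = size - HEADROOM - MAX_DESC_TXBBS;
    uint32_t prod = 1023;

    prod = (prod + 1) & size_mask;        /* wraps to 0 without a modulo */
    printf("prod=%u full_size=%u\n", prod, full_size);
    return 0;
}
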
en_rx.c
158 struct mlx4_en_rx_ring *ring) in mlx4_en_init_allocator() argument
166 if (mlx4_alloc_pages(priv, &ring->page_alloc[i], in mlx4_en_init_allocator()
171 i, ring->page_alloc[i].page_size, in mlx4_en_init_allocator()
172 page_ref_count(ring->page_alloc[i].page)); in mlx4_en_init_allocator()
180 page_alloc = &ring->page_alloc[i]; in mlx4_en_init_allocator()
195 struct mlx4_en_rx_ring *ring) in mlx4_en_destroy_allocator() argument
203 page_alloc = &ring->page_alloc[i]; in mlx4_en_destroy_allocator()
219 struct mlx4_en_rx_ring *ring, int index) in mlx4_en_init_rx_desc() argument
221 struct mlx4_en_rx_desc *rx_desc = ring->buf + ring->stride * index; in mlx4_en_init_rx_desc()
235 possible_frags = (ring->stride - sizeof(struct mlx4_en_rx_desc)) / DS_SIZE; in mlx4_en_init_rx_desc()
[all …]
/drivers/crypto/qat/qat_common/
adf_transport.c
80 static int adf_reserve_ring(struct adf_etr_bank_data *bank, uint32_t ring) in adf_reserve_ring() argument
83 if (bank->ring_mask & (1 << ring)) { in adf_reserve_ring()
87 bank->ring_mask |= (1 << ring); in adf_reserve_ring()
92 static void adf_unreserve_ring(struct adf_etr_bank_data *bank, uint32_t ring) in adf_unreserve_ring() argument
95 bank->ring_mask &= ~(1 << ring); in adf_unreserve_ring()
99 static void adf_enable_ring_irq(struct adf_etr_bank_data *bank, uint32_t ring) in adf_enable_ring_irq() argument
102 bank->irq_mask |= (1 << ring); in adf_enable_ring_irq()
109 static void adf_disable_ring_irq(struct adf_etr_bank_data *bank, uint32_t ring) in adf_disable_ring_irq() argument
112 bank->irq_mask &= ~(1 << ring); in adf_disable_ring_irq()
117 int adf_send_message(struct adf_etr_ring_data *ring, uint32_t *msg) in adf_send_message() argument
[all …]
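
adf_reserve_ring treats the bank's ring_mask as a bitmap of in-use rings: test the bit, fail if it is already set, otherwise set it (the driver does this under a bank lock, elided here). A single-threaded sketch with a made-up return convention:

#include <stdint.h>
#include <stdio.h>

static uint32_t ring_mask;            /* bit n set => ring n is in use */

static int reserve_ring(uint32_t ring)
{
    if (ring_mask & (1u << ring))
        return -1;                    /* already reserved (error code made up) */
    ring_mask |= (1u << ring);
    return 0;
}

static void unreserve_ring(uint32_t ring)
{
    ring_mask &= ~(1u << ring);
}

int main(void)
{
    printf("%d\n", reserve_ring(3));  /* 0: reserved */
    printf("%d\n", reserve_ring(3));  /* -1: already taken */
    unreserve_ring(3);
    printf("%d\n", reserve_ring(3));  /* 0 again */
    return 0;
}
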
/drivers/gpu/drm/i915/
intel_ringbuffer.c
50 void intel_ring_update_space(struct intel_ring *ring) in intel_ring_update_space() argument
52 if (ring->last_retired_head != -1) { in intel_ring_update_space()
53 ring->head = ring->last_retired_head; in intel_ring_update_space()
54 ring->last_retired_head = -1; in intel_ring_update_space()
57 ring->space = __intel_ring_space(ring->head & HEAD_ADDR, in intel_ring_update_space()
58 ring->tail, ring->size); in intel_ring_update_space()
64 struct intel_ring *ring = req->ring; in gen2_render_ring_flush() local
77 intel_ring_emit(ring, cmd); in gen2_render_ring_flush()
78 intel_ring_emit(ring, MI_NOOP); in gen2_render_ring_flush()
79 intel_ring_advance(ring); in gen2_render_ring_flush()
[all …]
/drivers/block/xen-blkback/
blkback.c
126 static inline int get_free_page(struct xen_blkif_ring *ring, struct page **page) in get_free_page() argument
130 spin_lock_irqsave(&ring->free_pages_lock, flags); in get_free_page()
131 if (list_empty(&ring->free_pages)) { in get_free_page()
132 BUG_ON(ring->free_pages_num != 0); in get_free_page()
133 spin_unlock_irqrestore(&ring->free_pages_lock, flags); in get_free_page()
136 BUG_ON(ring->free_pages_num == 0); in get_free_page()
137 page[0] = list_first_entry(&ring->free_pages, struct page, lru); in get_free_page()
139 ring->free_pages_num--; in get_free_page()
140 spin_unlock_irqrestore(&ring->free_pages_lock, flags); in get_free_page()
145 static inline void put_free_pages(struct xen_blkif_ring *ring, struct page **page, in put_free_pages() argument
[all …]
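
get_free_page above pops the head of a spinlock-protected free-page list and maintains free_pages_num alongside it, returning nonzero when the pool is exhausted. A single-threaded sketch with an array standing in for the list (locking elided; names are illustrative):

#include <stdio.h>

#define POOL_MAX 8

static void *pool[POOL_MAX];    /* stand-in for the free_pages list */
static int free_pages_num;

static int get_free_page(void **page)
{
    if (free_pages_num == 0)
        return 1;                        /* pool empty, caller allocates elsewhere */
    *page = pool[--free_pages_num];      /* pop the most recently freed page */
    return 0;
}

static void put_free_page(void *page)
{
    if (free_pages_num < POOL_MAX)
        pool[free_pages_num++] = page;   /* push back onto the pool */
}

int main(void)
{
    int backing;
    void *p;

    put_free_page(&backing);
    printf("%d\n", get_free_page(&p));   /* 0: got a page */
    printf("%d\n", get_free_page(&p));   /* 1: pool exhausted */
    return 0;
}
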
/drivers/net/ethernet/hisilicon/hns/
hnae.c
40 static int hnae_alloc_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb) in hnae_alloc_buffer() argument
42 unsigned int order = hnae_page_order(ring); in hnae_alloc_buffer()
52 cb->length = hnae_page_size(ring); in hnae_alloc_buffer()
58 static void hnae_free_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb) in hnae_free_buffer() argument
62 else if (unlikely(is_rx_ring(ring))) in hnae_free_buffer()
67 static int hnae_map_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb) in hnae_map_buffer() argument
69 cb->dma = dma_map_page(ring_to_dev(ring), cb->priv, 0, in hnae_map_buffer()
70 cb->length, ring_to_dma_dir(ring)); in hnae_map_buffer()
72 if (dma_mapping_error(ring_to_dev(ring), cb->dma)) in hnae_map_buffer()
78 static void hnae_unmap_buffer(struct hnae_ring *ring, struct hnae_desc_cb *cb) in hnae_unmap_buffer() argument
[all …]
hnae.h
230 #define is_tx_ring(ring) ((ring)->flags & RINGF_DIR) argument
231 #define is_rx_ring(ring) (!is_tx_ring(ring)) argument
232 #define ring_to_dma_dir(ring) (is_tx_ring(ring) ? \ argument
291 #define ring_ptr_move_fw(ring, p) \ argument
292 ((ring)->p = ((ring)->p + 1) % (ring)->desc_num)
293 #define ring_ptr_move_bw(ring, p) \ argument
294 ((ring)->p = ((ring)->p - 1 + (ring)->desc_num) % (ring)->desc_num)
301 #define assert_is_ring_idx(ring, idx) \ argument
302 assert((idx) >= 0 && (idx) < (ring)->desc_num)
307 static inline int ring_dist(struct hnae_ring *ring, int begin, int end) in ring_dist() argument
[all …]
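
The hnae.h ring_ptr_move_fw/bw macros advance or retreat a ring index modulo desc_num; the backward form adds desc_num before the modulo so C's % operator never sees a negative operand. A tiny sketch of both moves:

#include <stdio.h>

/* Circular advance/retreat over desc_num slots, as in the macros above. */
static int move_fw(int p, int desc_num) { return (p + 1) % desc_num; }
static int move_bw(int p, int desc_num) { return (p - 1 + desc_num) % desc_num; }

int main(void)
{
    int desc_num = 8;    /* illustrative ring size */

    printf("fw(7) = %d\n", move_fw(7, desc_num));   /* wraps to 0 */
    printf("bw(0) = %d\n", move_bw(0, desc_num));   /* wraps to 7, stays non-negative */
    return 0;
}
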
/drivers/gpu/drm/msm/
msm_ringbuffer.c
23 struct msm_ringbuffer *ring; in msm_ringbuffer_new() local
29 ring = kzalloc(sizeof(*ring), GFP_KERNEL); in msm_ringbuffer_new()
30 if (!ring) { in msm_ringbuffer_new()
35 ring->gpu = gpu; in msm_ringbuffer_new()
36 ring->bo = msm_gem_new(gpu->dev, size, MSM_BO_WC); in msm_ringbuffer_new()
37 if (IS_ERR(ring->bo)) { in msm_ringbuffer_new()
38 ret = PTR_ERR(ring->bo); in msm_ringbuffer_new()
39 ring->bo = NULL; in msm_ringbuffer_new()
43 ring->start = msm_gem_get_vaddr_locked(ring->bo); in msm_ringbuffer_new()
44 if (IS_ERR(ring->start)) { in msm_ringbuffer_new()
[all …]
/drivers/net/ethernet/amd/xgbe/
xgbe-desc.c
123 struct xgbe_ring *ring) in xgbe_free_ring() argument
128 if (!ring) in xgbe_free_ring()
131 if (ring->rdata) { in xgbe_free_ring()
132 for (i = 0; i < ring->rdesc_count; i++) { in xgbe_free_ring()
133 rdata = XGBE_GET_DESC_DATA(ring, i); in xgbe_free_ring()
137 kfree(ring->rdata); in xgbe_free_ring()
138 ring->rdata = NULL; in xgbe_free_ring()
141 if (ring->rx_hdr_pa.pages) { in xgbe_free_ring()
142 dma_unmap_page(pdata->dev, ring->rx_hdr_pa.pages_dma, in xgbe_free_ring()
143 ring->rx_hdr_pa.pages_len, DMA_FROM_DEVICE); in xgbe_free_ring()
[all …]
