
Searched refs:ring (Results 1 – 25 of 641) sorted by relevance


/drivers/gpu/drm/amd/amdgpu/
amdgpu_ring.c
62 int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw) in amdgpu_ring_alloc() argument
66 ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask; in amdgpu_ring_alloc()
71 if (WARN_ON_ONCE(ndw > ring->max_dw)) in amdgpu_ring_alloc()
74 ring->count_dw = ndw; in amdgpu_ring_alloc()
75 ring->wptr_old = ring->wptr; in amdgpu_ring_alloc()
77 if (ring->funcs->begin_use) in amdgpu_ring_alloc()
78 ring->funcs->begin_use(ring); in amdgpu_ring_alloc()
90 void amdgpu_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count) in amdgpu_ring_insert_nop() argument
95 amdgpu_ring_write(ring, ring->funcs->nop); in amdgpu_ring_insert_nop()
105 void amdgpu_ring_generic_pad_ib(struct amdgpu_ring *ring, struct amdgpu_ib *ib) in amdgpu_ring_generic_pad_ib() argument
[all …]
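The alignment step on line 66 above is the standard mask-based round-up: for a power-of-two alignment, align_mask is (alignment - 1), and (n + mask) & ~mask rounds n up to the next aligned value. A minimal standalone sketch in plain C (names hypothetical, not the amdgpu API):

#include <stdint.h>
#include <stdio.h>

/* Round n up to the next multiple of a power-of-two alignment.
 * align_mask must be (alignment - 1), e.g. 0x3f for 64-dword alignment. */
static uint32_t round_up_pow2(uint32_t n, uint32_t align_mask)
{
	return (n + align_mask) & ~align_mask;
}

int main(void)
{
	printf("%u\n", round_up_pow2(100, 0x3f)); /* prints 128 */
	return 0;
}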
jpeg_v1_0.c
36 static void jpeg_v1_0_ring_begin_use(struct amdgpu_ring *ring);
38 static void jpeg_v1_0_decode_ring_patch_wreg(struct amdgpu_ring *ring, uint32_t *ptr, uint32_t reg_… in jpeg_v1_0_decode_ring_patch_wreg() argument
40 struct amdgpu_device *adev = ring->adev; in jpeg_v1_0_decode_ring_patch_wreg()
41 ring->ring[(*ptr)++] = PACKETJ(SOC15_REG_OFFSET(JPEG, 0, mmUVD_JRBC_EXTERNAL_REG_BASE), 0, 0, PACK… in jpeg_v1_0_decode_ring_patch_wreg()
44 ring->ring[(*ptr)++] = 0; in jpeg_v1_0_decode_ring_patch_wreg()
45 ring->ring[(*ptr)++] = PACKETJ((reg_offset >> 2), 0, 0, PACKETJ_TYPE0); in jpeg_v1_0_decode_ring_patch_wreg()
47 ring->ring[(*ptr)++] = reg_offset; in jpeg_v1_0_decode_ring_patch_wreg()
48 ring->ring[(*ptr)++] = PACKETJ(0, 0, 0, PACKETJ_TYPE0); in jpeg_v1_0_decode_ring_patch_wreg()
50 ring->ring[(*ptr)++] = val; in jpeg_v1_0_decode_ring_patch_wreg()
53 static void jpeg_v1_0_decode_ring_set_patch_ring(struct amdgpu_ring *ring, uint32_t ptr) in jpeg_v1_0_decode_ring_set_patch_ring() argument
[all …]
amdgpu_fence.c
57 struct amdgpu_ring *ring; member
99 static void amdgpu_fence_write(struct amdgpu_ring *ring, u32 seq) in amdgpu_fence_write() argument
101 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_write()
115 static u32 amdgpu_fence_read(struct amdgpu_ring *ring) in amdgpu_fence_read() argument
117 struct amdgpu_fence_driver *drv = &ring->fence_drv; in amdgpu_fence_read()
137 int amdgpu_fence_emit(struct amdgpu_ring *ring, struct dma_fence **f, in amdgpu_fence_emit() argument
140 struct amdgpu_device *adev = ring->adev; in amdgpu_fence_emit()
150 seq = ++ring->fence_drv.sync_seq; in amdgpu_fence_emit()
151 fence->ring = ring; in amdgpu_fence_emit()
153 &ring->fence_drv.lock, in amdgpu_fence_emit()
[all …]
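The fence driver above tracks completion with a monotonically increasing 32-bit sequence number per ring (line 150). A hedged sketch of the usual signaled test, written so it stays correct across wraparound; this is the generic idiom, not a copy of amdgpu's code:

#include <stdbool.h>
#include <stdint.h>

/* A fence is signaled once the value the GPU last wrote back has reached
 * the fence's sequence number. Signed subtraction keeps the comparison
 * valid across 32-bit wraparound. */
static bool fence_signaled(uint32_t fence_seq, uint32_t last_read_seq)
{
	return (int32_t)(last_read_seq - fence_seq) >= 0;
}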
jpeg_v2_0.c
89 struct amdgpu_ring *ring; in jpeg_v2_0_sw_init() local
106 ring = &adev->jpeg.inst->ring_dec; in jpeg_v2_0_sw_init()
107 ring->use_doorbell = true; in jpeg_v2_0_sw_init()
108 ring->doorbell_index = (adev->doorbell_index.vcn.vcn_ring0_1 << 1) + 1; in jpeg_v2_0_sw_init()
109 sprintf(ring->name, "jpeg_dec"); in jpeg_v2_0_sw_init()
110 r = amdgpu_ring_init(adev, ring, 512, &adev->jpeg.inst->irq, in jpeg_v2_0_sw_init()
151 struct amdgpu_ring *ring = &adev->jpeg.inst->ring_dec; in jpeg_v2_0_hw_init() local
154 adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell, in jpeg_v2_0_hw_init()
157 r = amdgpu_ring_test_helper(ring); in jpeg_v2_0_hw_init()
334 struct amdgpu_ring *ring = &adev->jpeg.inst->ring_dec; in jpeg_v2_0_start() local
[all …]
amdgpu_ring.h
111 void amdgpu_fence_driver_force_completion(struct amdgpu_ring *ring);
113 int amdgpu_fence_driver_init_ring(struct amdgpu_ring *ring,
115 int amdgpu_fence_driver_start_ring(struct amdgpu_ring *ring,
120 int amdgpu_fence_emit(struct amdgpu_ring *ring, struct dma_fence **fence,
122 int amdgpu_fence_emit_polling(struct amdgpu_ring *ring, uint32_t *s,
124 bool amdgpu_fence_process(struct amdgpu_ring *ring);
125 int amdgpu_fence_wait_empty(struct amdgpu_ring *ring);
126 signed long amdgpu_fence_wait_polling(struct amdgpu_ring *ring,
129 unsigned amdgpu_fence_count_emitted(struct amdgpu_ring *ring);
146 u64 (*get_rptr)(struct amdgpu_ring *ring);
[all …]
amdgpu_ib.c
124 int amdgpu_ib_schedule(struct amdgpu_ring *ring, unsigned num_ibs, in amdgpu_ib_schedule() argument
128 struct amdgpu_device *adev = ring->adev; in amdgpu_ib_schedule()
156 if (!ring->sched.ready) { in amdgpu_ib_schedule()
157 dev_err(adev->dev, "couldn't schedule ib on ring <%s>\n", ring->name); in amdgpu_ib_schedule()
167 (ring->funcs->type == AMDGPU_RING_TYPE_COMPUTE)) { in amdgpu_ib_schedule()
172 alloc_size = ring->funcs->emit_frame_size + num_ibs * in amdgpu_ib_schedule()
173 ring->funcs->emit_ib_size; in amdgpu_ib_schedule()
175 r = amdgpu_ring_alloc(ring, alloc_size); in amdgpu_ib_schedule()
181 need_ctx_switch = ring->current_ctx != fence_ctx; in amdgpu_ib_schedule()
182 if (ring->funcs->emit_pipeline_sync && job && in amdgpu_ib_schedule()
[all …]
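Note the sizing on lines 172–173: the driver reserves worst-case ring space for the whole submission up front, so later emission cannot overflow the ring. An illustrative helper with hypothetical field names:

#include <stdint.h>

/* Illustrative only: worst-case ring space (in dwords) for one submission,
 * mirroring lines 172-173 above. */
struct ring_caps {
	uint32_t emit_frame_size; /* fixed per-submission overhead */
	uint32_t emit_ib_size;    /* dwords needed to emit one indirect buffer */
};

static uint32_t job_ring_space(const struct ring_caps *caps, unsigned num_ibs)
{
	return caps->emit_frame_size + num_ibs * caps->emit_ib_size;
}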
uvd_v7_0.c
71 static uint64_t uvd_v7_0_ring_get_rptr(struct amdgpu_ring *ring) in uvd_v7_0_ring_get_rptr() argument
73 struct amdgpu_device *adev = ring->adev; in uvd_v7_0_ring_get_rptr()
75 return RREG32_SOC15(UVD, ring->me, mmUVD_RBC_RB_RPTR); in uvd_v7_0_ring_get_rptr()
85 static uint64_t uvd_v7_0_enc_ring_get_rptr(struct amdgpu_ring *ring) in uvd_v7_0_enc_ring_get_rptr() argument
87 struct amdgpu_device *adev = ring->adev; in uvd_v7_0_enc_ring_get_rptr()
89 if (ring == &adev->uvd.inst[ring->me].ring_enc[0]) in uvd_v7_0_enc_ring_get_rptr()
90 return RREG32_SOC15(UVD, ring->me, mmUVD_RB_RPTR); in uvd_v7_0_enc_ring_get_rptr()
92 return RREG32_SOC15(UVD, ring->me, mmUVD_RB_RPTR2); in uvd_v7_0_enc_ring_get_rptr()
102 static uint64_t uvd_v7_0_ring_get_wptr(struct amdgpu_ring *ring) in uvd_v7_0_ring_get_wptr() argument
104 struct amdgpu_device *adev = ring->adev; in uvd_v7_0_ring_get_wptr()
[all …]
/drivers/net/wireless/broadcom/b43legacy/
dma.c
32 struct b43legacy_dmadesc32 *op32_idx2desc(struct b43legacy_dmaring *ring, in op32_idx2desc() argument
38 *meta = &(ring->meta[slot]); in op32_idx2desc()
39 desc = ring->descbase; in op32_idx2desc()
45 static void op32_fill_descriptor(struct b43legacy_dmaring *ring, in op32_fill_descriptor() argument
50 struct b43legacy_dmadesc32 *descbase = ring->descbase; in op32_fill_descriptor()
57 B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in op32_fill_descriptor()
62 addr |= ring->dev->dma.translation; in op32_fill_descriptor()
63 ctl = (bufsize - ring->frameoffset) in op32_fill_descriptor()
65 if (slot == ring->nr_slots - 1) in op32_fill_descriptor()
80 static void op32_poke_tx(struct b43legacy_dmaring *ring, int slot) in op32_poke_tx() argument
[all …]
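A detail worth noting in op32_fill_descriptor: the final slot's control word carries a table-end flag (line 65's branch) so the DMA engine wraps back to slot 0. A sketch of that pattern; the flag value below is made up, not b43legacy's:

#include <stdint.h>

#define DCTL_TABLE_END 0x10000000u /* hypothetical flag value */

/* Mark the last descriptor in the table so the hardware wraps to slot 0. */
static uint32_t desc_ctl(uint32_t ctl, int slot, int nr_slots)
{
	if (slot == nr_slots - 1)
		ctl |= DCTL_TABLE_END;
	return ctl;
}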
/drivers/soc/ti/
k3-ringacc.c
106 int (*push_tail)(struct k3_ring *ring, void *elm);
107 int (*push_head)(struct k3_ring *ring, void *elm);
108 int (*pop_tail)(struct k3_ring *ring, void *elm);
109 int (*pop_head)(struct k3_ring *ring, void *elm);
221 static long k3_ringacc_ring_get_fifo_pos(struct k3_ring *ring) in k3_ringacc_ring_get_fifo_pos() argument
224 (4 << ring->elm_size); in k3_ringacc_ring_get_fifo_pos()
227 static void *k3_ringacc_get_elm_addr(struct k3_ring *ring, u32 idx) in k3_ringacc_get_elm_addr() argument
229 return (ring->ring_mem_virt + idx * (4 << ring->elm_size)); in k3_ringacc_get_elm_addr()
232 static int k3_ringacc_ring_push_mem(struct k3_ring *ring, void *elem);
233 static int k3_ringacc_ring_pop_mem(struct k3_ring *ring, void *elem);
[all …]
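The addressing on line 229 encodes the element size as a power-of-two multiple of 4 bytes, so one element occupies (4 << elm_size) bytes. A standalone sketch of the same computation (names illustrative):

#include <stddef.h>
#include <stdint.h>

/* elm_size stores log2 of the element size in 4-byte units, so an
 * element's address is base + idx * (4 << elm_size). */
static void *ring_elm_addr(void *ring_mem_virt, uint32_t idx, uint32_t elm_size)
{
	return (char *)ring_mem_virt + (size_t)idx * (4u << elm_size);
}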
/drivers/gpu/drm/radeon/
radeon_ring.c
49 static int radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring);
62 struct radeon_ring *ring) in radeon_ring_supports_scratch_reg() argument
64 switch (ring->idx) { in radeon_ring_supports_scratch_reg()
82 void radeon_ring_free_size(struct radeon_device *rdev, struct radeon_ring *ring) in radeon_ring_free_size() argument
84 uint32_t rptr = radeon_ring_get_rptr(rdev, ring); in radeon_ring_free_size()
87 ring->ring_free_dw = rptr + (ring->ring_size / 4); in radeon_ring_free_size()
88 ring->ring_free_dw -= ring->wptr; in radeon_ring_free_size()
89 ring->ring_free_dw &= ring->ptr_mask; in radeon_ring_free_size()
90 if (!ring->ring_free_dw) { in radeon_ring_free_size()
92 ring->ring_free_dw = ring->ring_size / 4; in radeon_ring_free_size()
[all …]
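radeon_ring_free_size above (lines 87–92) is the classic dword-granular free-space computation for a power-of-two ring, where ptr_mask is (ring_size/4 - 1). Restated as a standalone helper:

#include <stdint.h>

/* Free space in dwords between the read pointer and the write pointer. */
static uint32_t ring_free_dw(uint32_t rptr, uint32_t wptr,
			     uint32_t ring_size_bytes, uint32_t ptr_mask)
{
	uint32_t free_dw = (rptr + ring_size_bytes / 4 - wptr) & ptr_mask;

	/* rptr == wptr is ambiguous (empty or full); treat it as empty */
	if (!free_dw)
		free_dw = ring_size_bytes / 4;
	return free_dw;
}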
cik_sdma.c
64 struct radeon_ring *ring) in cik_sdma_get_rptr() argument
69 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cik_sdma_get_rptr()
71 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cik_sdma_get_rptr()
91 struct radeon_ring *ring) in cik_sdma_get_wptr() argument
95 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cik_sdma_get_wptr()
112 struct radeon_ring *ring) in cik_sdma_set_wptr() argument
116 if (ring->idx == R600_RING_TYPE_DMA_INDEX) in cik_sdma_set_wptr()
121 WREG32(reg, (ring->wptr << 2) & 0x3fffc); in cik_sdma_set_wptr()
136 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cik_sdma_ring_ib_execute() local
137 u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf; in cik_sdma_ring_ib_execute()
[all …]
r600_dma.c
52 struct radeon_ring *ring) in r600_dma_get_rptr() argument
57 rptr = rdev->wb.wb[ring->rptr_offs/4]; in r600_dma_get_rptr()
73 struct radeon_ring *ring) in r600_dma_get_wptr() argument
87 struct radeon_ring *ring) in r600_dma_set_wptr() argument
89 WREG32(DMA_RB_WPTR, (ring->wptr << 2) & 0x3fffc); in r600_dma_set_wptr()
109 rdev->ring[R600_RING_TYPE_DMA_INDEX].ready = false; in r600_dma_stop()
122 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in r600_dma_resume() local
131 rb_bufsz = order_base_2(ring->ring_size / 4); in r600_dma_resume()
151 WREG32(DMA_RB_BASE, ring->gpu_addr >> 8); in r600_dma_resume()
167 ring->wptr = 0; in r600_dma_resume()
[all …]
uvd_v1_0.c
40 struct radeon_ring *ring) in uvd_v1_0_get_rptr() argument
54 struct radeon_ring *ring) in uvd_v1_0_get_wptr() argument
68 struct radeon_ring *ring) in uvd_v1_0_set_wptr() argument
70 WREG32(UVD_RBC_RB_WPTR, ring->wptr); in uvd_v1_0_set_wptr()
84 struct radeon_ring *ring = &rdev->ring[fence->ring]; in uvd_v1_0_fence_emit() local
85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit()
87 radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA0, 0)); in uvd_v1_0_fence_emit()
88 radeon_ring_write(ring, addr & 0xffffffff); in uvd_v1_0_fence_emit()
89 radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA1, 0)); in uvd_v1_0_fence_emit()
90 radeon_ring_write(ring, fence->seq); in uvd_v1_0_fence_emit()
[all …]
evergreen_dma.c
44 struct radeon_ring *ring = &rdev->ring[fence->ring]; in evergreen_dma_fence_ring_emit() local
45 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in evergreen_dma_fence_ring_emit()
47 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0)); in evergreen_dma_fence_ring_emit()
48 radeon_ring_write(ring, addr & 0xfffffffc); in evergreen_dma_fence_ring_emit()
49 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff)); in evergreen_dma_fence_ring_emit()
50 radeon_ring_write(ring, fence->seq); in evergreen_dma_fence_ring_emit()
52 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0)); in evergreen_dma_fence_ring_emit()
54 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0)); in evergreen_dma_fence_ring_emit()
55 radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2)); in evergreen_dma_fence_ring_emit()
56 radeon_ring_write(ring, 1); in evergreen_dma_fence_ring_emit()
[all …]
radeon_fence.c
68 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) in radeon_fence_write() argument
70 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_write()
89 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) in radeon_fence_read() argument
91 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_read()
114 static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) in radeon_fence_schedule_check() argument
121 &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
137 int ring) in radeon_fence_emit() argument
147 (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
148 (*fence)->ring = ring; in radeon_fence_emit()
152 rdev->fence_context + ring, in radeon_fence_emit()
[all …]
/drivers/net/wireless/broadcom/b43/
dma.c
72 struct b43_dmadesc_generic *op32_idx2desc(struct b43_dmaring *ring, in op32_idx2desc() argument
78 *meta = &(ring->meta[slot]); in op32_idx2desc()
79 desc = ring->descbase; in op32_idx2desc()
85 static void op32_fill_descriptor(struct b43_dmaring *ring, in op32_fill_descriptor() argument
90 struct b43_dmadesc32 *descbase = ring->descbase; in op32_fill_descriptor()
97 B43_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in op32_fill_descriptor()
99 addr = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW); in op32_fill_descriptor()
100 addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT); in op32_fill_descriptor()
103 if (slot == ring->nr_slots - 1) in op32_fill_descriptor()
118 static void op32_poke_tx(struct b43_dmaring *ring, int slot) in op32_poke_tx() argument
[all …]
/drivers/thunderbolt/
nhi.c
26 #define RING_TYPE(ring) ((ring)->is_tx ? "TX ring" : "RX ring") argument
39 static int ring_interrupt_index(const struct tb_ring *ring) in ring_interrupt_index() argument
41 int bit = ring->hop; in ring_interrupt_index()
42 if (!ring->is_tx) in ring_interrupt_index()
43 bit += ring->nhi->hop_count; in ring_interrupt_index()
52 static void ring_interrupt_active(struct tb_ring *ring, bool active) in ring_interrupt_active() argument
55 ring_interrupt_index(ring) / 32 * 4; in ring_interrupt_active()
56 int bit = ring_interrupt_index(ring) & 31; in ring_interrupt_active()
60 if (ring->irq > 0) { in ring_interrupt_active()
65 if (ring->is_tx) in ring_interrupt_active()
[all …]
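The interrupt bookkeeping above packs TX rings into bits [0, hop_count) and RX rings into the next hop_count bits, then splits the bit index into a 32-bit register offset and a bit position (lines 55–56). A compact restatement:

#include <stdbool.h>

/* TX ring `hop` uses bit `hop`; RX ring `hop` uses bit `hop + hop_count`. */
static int ring_irq_bit(int hop, bool is_tx, int hop_count)
{
	return is_tx ? hop : hop + hop_count;
}

static int irq_reg_offset(int bit) { return bit / 32 * 4; } /* byte offset */
static int irq_bit_in_reg(int bit) { return bit & 31; }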
/drivers/net/ethernet/apm/xgene/
xgene_enet_ring2.c
12 static void xgene_enet_ring_init(struct xgene_enet_desc_ring *ring) in xgene_enet_ring_init() argument
14 u32 *ring_cfg = ring->state; in xgene_enet_ring_init()
15 u64 addr = ring->dma; in xgene_enet_ring_init()
17 if (xgene_enet_ring_owner(ring->id) == RING_OWNER_CPU) { in xgene_enet_ring_init()
18 ring_cfg[0] |= SET_VAL(X2_INTLINE, ring->id & RING_BUFNUM_MASK); in xgene_enet_ring_init()
27 ring_cfg[3] |= SET_VAL(RINGSIZE, ring->cfgsize) in xgene_enet_ring_init()
34 static void xgene_enet_ring_set_type(struct xgene_enet_desc_ring *ring) in xgene_enet_ring_set_type() argument
36 u32 *ring_cfg = ring->state; in xgene_enet_ring_set_type()
40 is_bufpool = xgene_enet_is_bufpool(ring->id); in xgene_enet_ring_set_type()
47 static void xgene_enet_ring_set_recombbuf(struct xgene_enet_desc_ring *ring) in xgene_enet_ring_set_recombbuf() argument
[all …]
/drivers/net/ethernet/mellanox/mlx4/
en_tx.c
55 struct mlx4_en_tx_ring *ring; in mlx4_en_create_tx_ring() local
59 ring = kzalloc_node(sizeof(*ring), GFP_KERNEL, node); in mlx4_en_create_tx_ring()
60 if (!ring) { in mlx4_en_create_tx_ring()
65 ring->size = size; in mlx4_en_create_tx_ring()
66 ring->size_mask = size - 1; in mlx4_en_create_tx_ring()
67 ring->sp_stride = stride; in mlx4_en_create_tx_ring()
68 ring->full_size = ring->size - HEADROOM - MAX_DESC_TXBBS; in mlx4_en_create_tx_ring()
71 ring->tx_info = kvmalloc_node(tmp, GFP_KERNEL, node); in mlx4_en_create_tx_ring()
72 if (!ring->tx_info) { in mlx4_en_create_tx_ring()
78 ring->tx_info, tmp); in mlx4_en_create_tx_ring()
[all …]
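Line 66 shows the usual power-of-two trick: with size_mask = size - 1, an ever-growing producer/consumer counter maps to a slot index with a single AND, no modulo. Sketch, e.g. slot_index(prod_counter, ring->size_mask):

#include <stdint.h>

/* Valid only when the ring size is a power of two. */
static uint32_t slot_index(uint32_t counter, uint32_t size_mask)
{
	return counter & size_mask;
}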
/drivers/crypto/qat/qat_common/
adf_transport.c
36 static int adf_reserve_ring(struct adf_etr_bank_data *bank, u32 ring) in adf_reserve_ring() argument
39 if (bank->ring_mask & (1 << ring)) { in adf_reserve_ring()
43 bank->ring_mask |= (1 << ring); in adf_reserve_ring()
48 static void adf_unreserve_ring(struct adf_etr_bank_data *bank, u32 ring) in adf_unreserve_ring() argument
51 bank->ring_mask &= ~(1 << ring); in adf_unreserve_ring()
55 static void adf_enable_ring_irq(struct adf_etr_bank_data *bank, u32 ring) in adf_enable_ring_irq() argument
58 bank->irq_mask |= (1 << ring); in adf_enable_ring_irq()
65 static void adf_disable_ring_irq(struct adf_etr_bank_data *bank, u32 ring) in adf_disable_ring_irq() argument
68 bank->irq_mask &= ~(1 << ring); in adf_disable_ring_irq()
73 int adf_send_message(struct adf_etr_ring_data *ring, u32 *msg) in adf_send_message() argument
[all …]
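Ring reservation here is a per-bank bitmask: one bit per ring, test-and-set to claim, clear to release. A simplified sketch; the real driver serializes this under a lock, and its exact error code may differ:

#include <errno.h>
#include <stdint.h>

static int reserve_ring(uint32_t *ring_mask, uint32_t ring)
{
	if (*ring_mask & (1u << ring))
		return -EBUSY; /* already reserved */
	*ring_mask |= 1u << ring;
	return 0;
}

static void unreserve_ring(uint32_t *ring_mask, uint32_t ring)
{
	*ring_mask &= ~(1u << ring);
}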
/drivers/gpu/drm/i915/gt/
intel_ring.c
14 unsigned int intel_ring_update_space(struct intel_ring *ring) in intel_ring_update_space() argument
18 space = __intel_ring_space(ring->head, ring->emit, ring->size); in intel_ring_update_space()
20 ring->space = space; in intel_ring_update_space()
24 void __intel_ring_pin(struct intel_ring *ring) in __intel_ring_pin() argument
26 GEM_BUG_ON(!atomic_read(&ring->pin_count)); in __intel_ring_pin()
27 atomic_inc(&ring->pin_count); in __intel_ring_pin()
30 int intel_ring_pin(struct intel_ring *ring, struct i915_gem_ww_ctx *ww) in intel_ring_pin() argument
32 struct i915_vma *vma = ring->vma; in intel_ring_pin()
37 if (atomic_fetch_inc(&ring->pin_count)) in intel_ring_pin()
65 intel_ring_reset(ring, ring->emit); in intel_ring_pin()
[all …]
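__intel_ring_space (used on line 18) computes how many bytes can be written at the emit pointer before catching up to the head. A generic, hedged version for a power-of-two ring; i915's real helper reserves additional slack and differs in detail:

#include <stdint.h>

/* Keeping one byte unused makes head == emit unambiguously "empty". */
static uint32_t ring_space(uint32_t head, uint32_t emit, uint32_t size)
{
	return (head - emit - 1) & (size - 1);
}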
selftest_ring.c
8 struct intel_ring *ring; in mock_ring() local
10 ring = kzalloc(sizeof(*ring) + sz, GFP_KERNEL); in mock_ring()
11 if (!ring) in mock_ring()
14 kref_init(&ring->ref); in mock_ring()
15 ring->size = sz; in mock_ring()
16 ring->wrap = BITS_PER_TYPE(ring->size) - ilog2(sz); in mock_ring()
17 ring->effective_size = sz; in mock_ring()
18 ring->vaddr = (void *)(ring + 1); in mock_ring()
19 atomic_set(&ring->pin_count, 1); in mock_ring()
21 intel_ring_update_space(ring); in mock_ring()
[all …]
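mock_ring allocates the struct and its backing storage in one kzalloc and points vaddr at the memory immediately after the struct (ring + 1). The same trailing-storage idiom in userspace C:

#include <stdint.h>
#include <stdlib.h>

struct mock_ring {
	uint32_t size;
	void *vaddr;
};

static struct mock_ring *mock_ring_new(size_t sz)
{
	/* one allocation holds both the struct and its sz-byte buffer */
	struct mock_ring *ring = calloc(1, sizeof(*ring) + sz);

	if (!ring)
		return NULL;
	ring->size = (uint32_t)sz;
	ring->vaddr = ring + 1; /* buffer lives right after the struct */
	return ring;
}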
/drivers/crypto/inside-secure/
safexcel_ring.c
78 struct safexcel_desc_ring *ring, in safexcel_ring_next_cwptr() argument
82 void *ptr = ring->write; in safexcel_ring_next_cwptr()
85 *atoken = ring->shwrite; in safexcel_ring_next_cwptr()
87 if ((ring->write == ring->read - ring->offset) || in safexcel_ring_next_cwptr()
88 (ring->read == ring->base && ring->write == ring->base_end)) in safexcel_ring_next_cwptr()
91 if (ring->write == ring->base_end) { in safexcel_ring_next_cwptr()
92 ring->write = ring->base; in safexcel_ring_next_cwptr()
93 ring->shwrite = ring->shbase; in safexcel_ring_next_cwptr()
95 ring->write += ring->offset; in safexcel_ring_next_cwptr()
96 ring->shwrite += ring->shoffset; in safexcel_ring_next_cwptr()
[all …]
/drivers/gpu/drm/msm/
msm_ringbuffer.c
13 struct msm_ringbuffer *ring; in msm_ringbuffer_new() local
20 ring = kzalloc(sizeof(*ring), GFP_KERNEL); in msm_ringbuffer_new()
21 if (!ring) { in msm_ringbuffer_new()
26 ring->gpu = gpu; in msm_ringbuffer_new()
27 ring->id = id; in msm_ringbuffer_new()
29 ring->start = msm_gem_kernel_new(gpu->dev, MSM_GPU_RINGBUFFER_SZ, in msm_ringbuffer_new()
31 gpu->aspace, &ring->bo, &ring->iova); in msm_ringbuffer_new()
33 if (IS_ERR(ring->start)) { in msm_ringbuffer_new()
34 ret = PTR_ERR(ring->start); in msm_ringbuffer_new()
35 ring->start = 0; in msm_ringbuffer_new()
[all …]
/drivers/net/wireless/ath/ath11k/
dbring.c
10 struct ath11k_dbring *ring, in ath11k_dbring_bufs_replenish() argument
21 srng = &ab->hal.srng_list[ring->refill_srng.ring_id]; in ath11k_dbring_bufs_replenish()
28 ptr_aligned = PTR_ALIGN(ptr_unaligned, ring->buf_align); in ath11k_dbring_bufs_replenish()
29 paddr = dma_map_single(ab->dev, ptr_aligned, ring->buf_sz, in ath11k_dbring_bufs_replenish()
36 spin_lock_bh(&ring->idr_lock); in ath11k_dbring_bufs_replenish()
37 buf_id = idr_alloc(&ring->bufs_idr, buff, 0, ring->bufs_max, GFP_ATOMIC); in ath11k_dbring_bufs_replenish()
38 spin_unlock_bh(&ring->idr_lock); in ath11k_dbring_bufs_replenish()
62 spin_lock_bh(&ring->idr_lock); in ath11k_dbring_bufs_replenish()
63 idr_remove(&ring->bufs_idr, buf_id); in ath11k_dbring_bufs_replenish()
64 spin_unlock_bh(&ring->idr_lock); in ath11k_dbring_bufs_replenish()
[all …]
