
Searched refs:wb (Results 1 – 25 of 117) sorted by relevance


/drivers/gpu/drm/amd/display/dc/dml/dcn2x/
dcn2x.c
81 pipes[pipe_cnt].dout.wb.wb_src_height = wb_info->dwb_params.cnv_params.crop_height; in dcn20_populate_dml_writeback_from_context()
82 pipes[pipe_cnt].dout.wb.wb_src_width = wb_info->dwb_params.cnv_params.crop_width; in dcn20_populate_dml_writeback_from_context()
83 pipes[pipe_cnt].dout.wb.wb_dst_width = wb_info->dwb_params.dest_width; in dcn20_populate_dml_writeback_from_context()
84 pipes[pipe_cnt].dout.wb.wb_dst_height = wb_info->dwb_params.dest_height; in dcn20_populate_dml_writeback_from_context()
85 pipes[pipe_cnt].dout.wb.wb_htaps_luma = 1; in dcn20_populate_dml_writeback_from_context()
86 pipes[pipe_cnt].dout.wb.wb_vtaps_luma = 1; in dcn20_populate_dml_writeback_from_context()
87 pipes[pipe_cnt].dout.wb.wb_htaps_chroma = wb_info->dwb_params.scaler_taps.h_taps_c; in dcn20_populate_dml_writeback_from_context()
88 pipes[pipe_cnt].dout.wb.wb_vtaps_chroma = wb_info->dwb_params.scaler_taps.v_taps_c; in dcn20_populate_dml_writeback_from_context()
89 pipes[pipe_cnt].dout.wb.wb_hratio = 1.0; in dcn20_populate_dml_writeback_from_context()
90 pipes[pipe_cnt].dout.wb.wb_vratio = 1.0; in dcn20_populate_dml_writeback_from_context()
[all …]
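
The dcn2x matches above copy a writeback job's DWB crop, destination, and chroma-tap parameters into one entry of the DML pipe array, hardwiring the luma taps and the scaling ratios. A minimal sketch of that mapping, using simplified stand-in structs (wb_params_sketch, dml_wb_sketch) rather than the real DC/DML types:

/* Hedged sketch of the population pattern above; these structs are
 * simplified stand-ins, not the kernel's DML types. */
struct wb_params_sketch {
        unsigned int crop_width, crop_height;   /* source rect after cropping */
        unsigned int dest_width, dest_height;   /* writeback destination size */
        unsigned int h_taps_c, v_taps_c;        /* chroma scaler taps */
};

struct dml_wb_sketch {
        unsigned int wb_src_width, wb_src_height;
        unsigned int wb_dst_width, wb_dst_height;
        unsigned int wb_htaps_luma, wb_vtaps_luma;
        unsigned int wb_htaps_chroma, wb_vtaps_chroma;
        double wb_hratio, wb_vratio;
};

static void populate_wb_sketch(struct dml_wb_sketch *out,
                               const struct wb_params_sketch *in)
{
        out->wb_src_width    = in->crop_width;
        out->wb_src_height   = in->crop_height;
        out->wb_dst_width    = in->dest_width;
        out->wb_dst_height   = in->dest_height;
        out->wb_htaps_luma   = 1;               /* luma taps fixed at 1, as in the excerpt */
        out->wb_vtaps_luma   = 1;
        out->wb_htaps_chroma = in->h_taps_c;    /* chroma taps from the scaler config */
        out->wb_vtaps_chroma = in->v_taps_c;
        out->wb_hratio = 1.0;                   /* 1:1 ratios, matching the excerpt */
        out->wb_vratio = 1.0;
}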
/drivers/staging/media/atomisp/pci/isp/kernels/wb/wb_1.0/
ia_css_wb.host.c
59 const struct sh_css_isp_wb_params *wb, in ia_css_wb_dump() argument
62 if (!wb) return; in ia_css_wb_dump()
65 "wb_gain_shift", wb->gain_shift); in ia_css_wb_dump()
67 "wb_gain_gr", wb->gain_gr); in ia_css_wb_dump()
69 "wb_gain_r", wb->gain_r); in ia_css_wb_dump()
71 "wb_gain_b", wb->gain_b); in ia_css_wb_dump()
73 "wb_gain_gb", wb->gain_gb); in ia_css_wb_dump()
ia_css_wb.host.h
32 const struct sh_css_isp_wb_params *wb,
37 const struct ia_css_wb_config *wb,
/drivers/gpu/drm/radeon/
r600_dma.c
55 if (rdev->wb.enabled) in r600_dma_get_rptr()
56 rptr = rdev->wb.wb[ring->rptr_offs/4]; in r600_dma_get_rptr()
143 upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF); in r600_dma_resume()
145 ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC)); in r600_dma_resume()
147 if (rdev->wb.enabled) in r600_dma_resume()
243 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ring_test()
246 rdev->wb.wb[index/4] = cpu_to_le32(tmp); in r600_dma_ring_test()
260 tmp = le32_to_cpu(rdev->wb.wb[index/4]); in r600_dma_ring_test()
350 gpu_addr = rdev->wb.gpu_addr + index; in r600_dma_ib_test()
381 tmp = le32_to_cpu(rdev->wb.wb[index/4]); in r600_dma_ib_test()
[all …]
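
r600_dma_ring_test() above follows the standard writeback self-test: the CPU seeds a token in the shared page with cpu_to_le32(), asks the DMA ring to overwrite it, then polls le32_to_cpu() until the value changes. A hedged sketch of the polling half; wb_token_poll_sketch and its parameters are illustrative, not radeon symbols:

#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <asm/byteorder.h>

/* Sketch: poll a little-endian word in the shared writeback page until the
 * engine stores the expected token, or time out. Mirrors the shape of
 * r600_dma_ring_test(), not the radeon function itself. */
static int wb_token_poll_sketch(const volatile __le32 *wb_cpu, u32 expect,
                                unsigned int timeout_us)
{
        unsigned int i;

        for (i = 0; i < timeout_us; i++) {
                if (le32_to_cpu(*wb_cpu) == expect)
                        return 0;       /* engine wrote the token: ring alive */
                udelay(1);
        }
        return -ETIMEDOUT;              /* ring never completed the store */
}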
radeon_device.c
423 rdev->wb.enabled = false; in radeon_wb_disable()
437 if (rdev->wb.wb_obj) { in radeon_wb_fini()
438 if (!radeon_bo_reserve(rdev->wb.wb_obj, false)) { in radeon_wb_fini()
439 radeon_bo_kunmap(rdev->wb.wb_obj); in radeon_wb_fini()
440 radeon_bo_unpin(rdev->wb.wb_obj); in radeon_wb_fini()
441 radeon_bo_unreserve(rdev->wb.wb_obj); in radeon_wb_fini()
443 radeon_bo_unref(&rdev->wb.wb_obj); in radeon_wb_fini()
444 rdev->wb.wb = NULL; in radeon_wb_fini()
445 rdev->wb.wb_obj = NULL; in radeon_wb_fini()
462 if (rdev->wb.wb_obj == NULL) { in radeon_wb_init()
[all …]
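
radeon_wb_fini() above shows the usual radeon BO teardown order: a kernel-mapped, pinned buffer object must be reserved before it is unmapped and unpinned, and only then is the last reference dropped. A condensed sketch of that sequence, assuming the radeon driver's internal headers (wb_obj_teardown_sketch is not a real symbol; the calls themselves all appear in the excerpt):

/* Sketch of the teardown order from radeon_wb_fini(). */
static void wb_obj_teardown_sketch(struct radeon_device *rdev)
{
        if (!rdev->wb.wb_obj)
                return;

        if (!radeon_bo_reserve(rdev->wb.wb_obj, false)) {
                radeon_bo_kunmap(rdev->wb.wb_obj);      /* drop kernel mapping */
                radeon_bo_unpin(rdev->wb.wb_obj);       /* allow eviction again */
                radeon_bo_unreserve(rdev->wb.wb_obj);
        }
        radeon_bo_unref(&rdev->wb.wb_obj);              /* drop the last reference */
        rdev->wb.wb = NULL;
        rdev->wb.wb_obj = NULL;
}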
cik_sdma.c
67 if (rdev->wb.enabled) { in cik_sdma_get_rptr()
68 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cik_sdma_get_rptr()
138 if (rdev->wb.enabled) { in cik_sdma_ring_ib_execute()
400 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
402 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
404 if (rdev->wb.enabled) in cik_sdma_gfx_resume()
658 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ring_test()
661 rdev->wb.wb[index/4] = cpu_to_le32(tmp); in cik_sdma_ring_test()
676 tmp = le32_to_cpu(rdev->wb.wb[index/4]); in cik_sdma_ring_test()
715 gpu_addr = rdev->wb.gpu_addr + index; in cik_sdma_ib_test()
[all …]
ni_dma.c
57 if (rdev->wb.enabled) { in cayman_dma_get_rptr()
58 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cayman_dma_get_rptr()
127 if (rdev->wb.enabled) { in cayman_dma_ring_ib_execute()
222 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF); in cayman_dma_resume()
224 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cayman_dma_resume()
226 if (rdev->wb.enabled) in cayman_dma_resume()
radeon_fence.c
70 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_write()
93 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_read()
838 if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) { in radeon_fence_driver_start_ring()
842 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
843 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + in radeon_fence_driver_start_ring()
862 rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4]; in radeon_fence_driver_start_ring()
863 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; in radeon_fence_driver_start_ring()
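
radeon_fence_read()/write() above pick between two homes for the fence value: the CPU-coherent writeback slot when writeback is enabled, or an MMIO scratch register otherwise. A hedged sketch of the read side; fence_value_read_sketch is illustrative, and RREG32 is assumed here to be radeon's usual MMIO read helper:

/* Sketch, assuming radeon's internal headers: prefer the coherent
 * writeback slot, fall back to the MMIO scratch register. */
static u32 fence_value_read_sketch(struct radeon_device *rdev, int ring)
{
        struct radeon_fence_driver *drv = &rdev->fence_drv[ring];

        if (likely(rdev->wb.enabled || !drv->scratch_reg))
                return le32_to_cpu(*drv->cpu_addr);     /* no register read needed */

        return RREG32(drv->scratch_reg);                /* slow MMIO fallback */
}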
radeon_ring.c
306 else if (rdev->wb.enabled) in radeon_ring_backup()
419 if (rdev->wb.enabled) { in radeon_ring_init()
421 ring->next_rptr_gpu_addr = rdev->wb.gpu_addr + index; in radeon_ring_init()
422 ring->next_rptr_cpu_addr = &rdev->wb.wb[index/4]; in radeon_ring_init()
/drivers/usb/class/
cdc-acm.c
156 usb_poison_urb(acm->wb[i].urb); in acm_poison_urbs()
168 usb_unpoison_urb(acm->wb[i].urb); in acm_unpoison_urbs()
181 struct acm_wb *wb; in acm_wb_alloc() local
186 wb = &acm->wb[wbn]; in acm_wb_alloc()
187 if (!wb->use) { in acm_wb_alloc()
188 wb->use = true; in acm_wb_alloc()
189 wb->len = 0; in acm_wb_alloc()
206 if(acm->wb[i].use) in acm_wb_is_avail()
215 static void acm_write_done(struct acm *acm, struct acm_wb *wb) in acm_write_done() argument
217 wb->use = false; in acm_write_done()
[all …]
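
acm_wb_alloc() above scans a small fixed pool of write buffers for a free slot and claims it; acm_write_done() is the matching release, simply clearing wb->use. A hedged, self-contained sketch of the claim-a-slot pattern with generic names (the real driver scans under a spinlock, omitted here):

#include <linux/types.h>

#define WB_POOL_SIZE 16 /* cdc-acm sizes its pool with ACM_NW; 16 is illustrative */

struct wb_slot_sketch {
        bool use;       /* slot claimed? */
        int len;        /* bytes queued in this slot */
};

/* Return a free slot marked in-use, or NULL if all are busy. */
static struct wb_slot_sketch *wb_alloc_sketch(struct wb_slot_sketch *pool)
{
        int i;

        for (i = 0; i < WB_POOL_SIZE; i++) {
                struct wb_slot_sketch *wb = &pool[i];

                if (!wb->use) {
                        wb->use = true;
                        wb->len = 0;
                        return wb;      /* caller now owns this slot */
                }
        }
        return NULL;    /* all busy: caller waits for the write-done path */
}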
/drivers/net/wireless/microchip/wilc1000/
spi.c
298 static int wilc_spi_tx_rx(struct wilc *wilc, u8 *wb, u8 *rb, u32 rlen) in wilc_spi_tx_rx() argument
307 .tx_buf = wb, in wilc_spi_tx_rx()
423 u8 wb[32], rb[32]; in wilc_spi_single_read() local
430 memset(wb, 0x0, sizeof(wb)); in wilc_spi_single_read()
432 c = (struct wilc_spi_cmd *)wb; in wilc_spi_single_read()
453 c->u.simple_cmd.crc[0] = wilc_get_crc7(wb, cmd_len); in wilc_spi_single_read()
458 if (cmd_len + resp_len > ARRAY_SIZE(wb)) { in wilc_spi_single_read()
461 cmd_len, resp_len, ARRAY_SIZE(wb)); in wilc_spi_single_read()
465 if (wilc_spi_tx_rx(wilc, wb, rb, cmd_len + resp_len)) { in wilc_spi_single_read()
518 u8 wb[32], rb[32]; in wilc_spi_write_cmd() local
[all …]
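
wilc_spi_tx_rx() above performs one full-duplex SPI transfer: command bytes shift out of wb while the response lands in rb, which is why the caller sizes a single transfer as cmd_len + resp_len. A hedged sketch of a full-duplex transfer using only the generic kernel SPI API (spi_xfer_sketch is illustrative; the wilc function also handles details such as a DMA-safe path not shown here):

#include <linux/spi/spi.h>

/* Sketch: one full-duplex transfer, command out of wb, reply into rb. */
static int spi_xfer_sketch(struct spi_device *spi, u8 *wb, u8 *rb, u32 len)
{
        struct spi_transfer tr = {
                .tx_buf = wb,           /* bytes shifted out */
                .rx_buf = rb,           /* bytes clocked in simultaneously */
                .len = len,
        };
        struct spi_message msg;

        spi_message_init(&msg);
        spi_message_add_tail(&tr, &msg);

        return spi_sync(spi, &msg);     /* blocking; returns 0 on success */
}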
/drivers/gpu/drm/amd/amdgpu/
sdma_v3_0.c
353 return ring->adev->wb.wb[ring->rptr_offs] >> 2; in sdma_v3_0_ring_get_rptr()
370 wptr = ring->adev->wb.wb[ring->wptr_offs] >> 2; in sdma_v3_0_ring_get_wptr()
390 u32 *wb = (u32 *)&adev->wb.wb[ring->wptr_offs]; in sdma_v3_0_ring_set_wptr() local
392 WRITE_ONCE(*wb, (lower_32_bits(ring->wptr) << 2)); in sdma_v3_0_ring_set_wptr()
395 u32 *wb = (u32 *)&adev->wb.wb[ring->wptr_offs]; in sdma_v3_0_ring_set_wptr() local
397 WRITE_ONCE(*wb, (lower_32_bits(ring->wptr) << 2)); in sdma_v3_0_ring_set_wptr()
697 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
699 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
718 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v3_0_gfx_resume()
834 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v3_0_ring_test_ring()
[all …]
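
The sdma_v3_0 getters and setters above shift by two because the hardware publishes ring pointers as byte offsets while the driver indexes the ring in dwords: reads shift right, writes shift left. A hedged sketch of the conversion pair (illustrative names, not amdgpu symbols):

#include <linux/compiler.h>
#include <linux/types.h>

/* Sketch of the rptr/wptr <-> writeback-slot conversion seen above. */
static u32 ring_rptr_from_wb_sketch(const u32 *wb_slot)
{
        return READ_ONCE(*wb_slot) >> 2;        /* bytes -> dwords */
}

static void ring_wptr_to_wb_sketch(u32 *wb_slot, u32 wptr_dw)
{
        WRITE_ONCE(*wb_slot, wptr_dw << 2);     /* dwords -> bytes */
}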
amdgpu_ih.c
97 ih->wptr_addr = adev->wb.gpu_addr + wptr_offs * 4; in amdgpu_ih_ring_init()
98 ih->wptr_cpu = &adev->wb.wb[wptr_offs]; in amdgpu_ih_ring_init()
99 ih->rptr_addr = adev->wb.gpu_addr + rptr_offs * 4; in amdgpu_ih_ring_init()
100 ih->rptr_cpu = &adev->wb.wb[rptr_offs]; in amdgpu_ih_ring_init()
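
amdgpu_ih_ring_init() above binds each IH pointer to the same writeback slot twice: once as a GPU bus address (adev->wb.gpu_addr + offs * 4) programmed into the hardware, and once as a CPU pointer (&adev->wb.wb[offs]) for the driver to poll. A hedged sketch of that pairing (illustrative names):

#include <linux/types.h>

/* Sketch of the dual addressing: one 32-bit dword at index `offs`, seen by
 * the GPU at base_gpu + offs*4 and by the CPU through the mapped array. */
struct wb_view_sketch {
        u64 gpu_addr;                   /* what the engine is programmed with */
        volatile u32 *cpu_addr;         /* what the driver polls or updates */
};

static struct wb_view_sketch wb_slot_view_sketch(u64 base_gpu, u32 *base_cpu,
                                                 u32 offs)
{
        struct wb_view_sketch v = {
                .gpu_addr = base_gpu + (u64)offs * 4,   /* dword index -> byte address */
                .cpu_addr = &base_cpu[offs],
        };
        return v;
}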
si_dma.c
43 return ring->adev->wb.wb[ring->rptr_offs>>2]; in si_dma_ring_get_rptr()
157 rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4); in si_dma_start()
216 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ring()
218 adev->wb.wb[index] = cpu_to_le32(tmp); in si_dma_ring_test_ring()
231 tmp = le32_to_cpu(adev->wb.wb[index]); in si_dma_ring_test_ring()
268 gpu_addr = adev->wb.gpu_addr + (index * 4); in si_dma_ring_test_ib()
270 adev->wb.wb[index] = cpu_to_le32(tmp); in si_dma_ring_test_ib()
293 tmp = le32_to_cpu(adev->wb.wb[index]); in si_dma_ring_test_ib()
amdgpu_ring.c
225 adev->wb.gpu_addr + (ring->trail_fence_offs * 4); in amdgpu_ring_init()
226 ring->trail_fence_cpu_addr = &adev->wb.wb[ring->trail_fence_offs]; in amdgpu_ring_init()
233 ring->cond_exe_gpu_addr = adev->wb.gpu_addr + (ring->cond_exe_offs * 4); in amdgpu_ring_init()
234 ring->cond_exe_cpu_addr = &adev->wb.wb[ring->cond_exe_offs]; in amdgpu_ring_init()
mes_v10_1.c
49 atomic64_set((atomic64_t *)&adev->wb.wb[ring->wptr_offs], in mes_v10_1_ring_set_wptr()
59 return ring->adev->wb.wb[ring->rptr_offs]; in mes_v10_1_ring_get_rptr()
68 &ring->adev->wb.wb[ring->wptr_offs]); in mes_v10_1_ring_get_wptr()
576 adev->wb.gpu_addr + (adev->mes.sch_ctx_offs * 4); in mes_v10_1_allocate_mem_slots()
578 (uint64_t *)&adev->wb.wb[adev->mes.sch_ctx_offs]; in mes_v10_1_allocate_mem_slots()
587 adev->wb.gpu_addr + (adev->mes.query_status_fence_offs * 4); in mes_v10_1_allocate_mem_slots()
589 (uint64_t *)&adev->wb.wb[adev->mes.query_status_fence_offs]; in mes_v10_1_allocate_mem_slots()
676 wb_gpu_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4); in mes_v10_1_mqd_init()
682 wb_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in mes_v10_1_mqd_init()
sdma_v4_0.c
723 rptr = ((u64 *)&ring->adev->wb.wb[ring->rptr_offs]); in sdma_v4_0_ring_get_rptr()
743 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v4_0_ring_get_wptr()
769 u64 *wb = (u64 *)&adev->wb.wb[ring->wptr_offs]; in sdma_v4_0_ring_set_wptr() local
779 WRITE_ONCE(*wb, (ring->wptr << 2)); in sdma_v4_0_ring_set_wptr()
812 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v4_0_page_ring_get_wptr()
834 u64 *wb = (u64 *)&adev->wb.wb[ring->wptr_offs]; in sdma_v4_0_page_ring_set_wptr() local
837 WRITE_ONCE(*wb, (ring->wptr << 2)); in sdma_v4_0_page_ring_set_wptr()
1194 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v4_0_gfx_resume()
1196 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v4_0_gfx_resume()
1226 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v4_0_gfx_resume()
[all …]
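
sdma_v4_0 widens the same scheme to a 64-bit write pointer and publishes it with a single WRITE_ONCE(), so a concurrent reader never sees a torn value; by contrast, the sdma_v5_x excerpts nearby store the two 32-bit halves separately. A hedged sketch of the 64-bit publish/read pair (illustrative names):

#include <linux/compiler.h>
#include <linux/types.h>

/* Sketch: one torn-free 64-bit store of the byte offset into the
 * writeback slot, done before the doorbell is rung. */
static void wptr64_publish_sketch(u64 *wb_slot, u64 wptr_dw)
{
        WRITE_ONCE(*wb_slot, wptr_dw << 2);     /* dwords -> bytes, single store */
}

static u64 wptr64_read_sketch(const u64 *wb_slot)
{
        return READ_ONCE(*wb_slot) >> 2;        /* bytes -> dwords */
}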
sdma_v5_2.c
243 rptr = ((u64 *)&ring->adev->wb.wb[ring->rptr_offs]); in sdma_v5_2_ring_get_rptr()
263 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v5_2_ring_get_wptr()
296 adev->wb.wb[ring->wptr_offs] = lower_32_bits(ring->wptr << 2); in sdma_v5_2_ring_set_wptr()
297 adev->wb.wb[ring->wptr_offs + 1] = upper_32_bits(ring->wptr << 2); in sdma_v5_2_ring_set_wptr()
633 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v5_2_gfx_resume()
648 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v5_2_gfx_resume()
650 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v5_2_gfx_resume()
918 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v5_2_ring_test_ring()
920 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v5_2_ring_test_ring()
938 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v5_2_ring_test_ring()
[all …]
sdma_v2_4.c
197 return ring->adev->wb.wb[ring->rptr_offs] >> 2; in sdma_v2_4_ring_get_rptr()
458 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v2_4_gfx_resume()
460 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v2_4_gfx_resume()
562 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v2_4_ring_test_ring()
564 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v2_4_ring_test_ring()
579 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v2_4_ring_test_ring()
616 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v2_4_ring_test_ib()
618 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v2_4_ring_test_ib()
647 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v2_4_ring_test_ib()
cik_sdma.c
167 rptr = ring->adev->wb.wb[ring->rptr_offs]; in cik_sdma_ring_get_rptr()
480 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
482 ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
628 gpu_addr = adev->wb.gpu_addr + (index * 4); in cik_sdma_ring_test_ring()
630 adev->wb.wb[index] = cpu_to_le32(tmp); in cik_sdma_ring_test_ring()
644 tmp = le32_to_cpu(adev->wb.wb[index]); in cik_sdma_ring_test_ring()
681 gpu_addr = adev->wb.gpu_addr + (index * 4); in cik_sdma_ring_test_ib()
683 adev->wb.wb[index] = cpu_to_le32(tmp); in cik_sdma_ring_test_ib()
708 tmp = le32_to_cpu(adev->wb.wb[index]); in cik_sdma_ring_test_ib()
sdma_v5_0.c
356 rptr = ((u64 *)&ring->adev->wb.wb[ring->rptr_offs]); in sdma_v5_0_ring_get_rptr()
376 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v5_0_ring_get_wptr()
409 adev->wb.wb[ring->wptr_offs] = lower_32_bits(ring->wptr << 2); in sdma_v5_0_ring_set_wptr()
410 adev->wb.wb[ring->wptr_offs + 1] = upper_32_bits(ring->wptr << 2); in sdma_v5_0_ring_set_wptr()
752 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in sdma_v5_0_gfx_resume()
767 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v5_0_gfx_resume()
769 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v5_0_gfx_resume()
996 gpu_addr = adev->wb.gpu_addr + (index * 4); in sdma_v5_0_ring_test_ring()
998 adev->wb.wb[index] = cpu_to_le32(tmp); in sdma_v5_0_ring_test_ring()
1016 tmp = le32_to_cpu(adev->wb.wb[index]); in sdma_v5_0_ring_test_ring()
[all …]
/drivers/media/platform/mtk-vcodec/venc/
venc_vp8_if.c
160 struct venc_vp8_vpu_buf *wb = inst->vsi->work_bufs; in vp8_enc_alloc_work_buf() local
165 if (wb[i].size == 0) in vp8_enc_alloc_work_buf()
177 inst->work_bufs[i].size = wb[i].size; in vp8_enc_alloc_work_buf()
197 wb[i].vpua); in vp8_enc_alloc_work_buf()
198 memcpy(inst->work_bufs[i].va, tmp_va, wb[i].size); in vp8_enc_alloc_work_buf()
200 wb[i].iova = inst->work_bufs[i].dma_addr; in vp8_enc_alloc_work_buf()
/drivers/gpu/drm/i915/
i915_active.c
678 struct wait_barrier *wb = container_of(wq, typeof(*wb), base); in barrier_wake() local
680 if (i915_active_is_idle(wb->ref)) { in barrier_wake()
691 struct wait_barrier *wb; in __await_barrier() local
693 wb = kmalloc(sizeof(*wb), GFP_KERNEL); in __await_barrier()
694 if (unlikely(!wb)) in __await_barrier()
699 kfree(wb); in __await_barrier()
703 wb->base.flags = 0; in __await_barrier()
704 wb->base.func = barrier_wake; in __await_barrier()
705 wb->base.private = fence; in __await_barrier()
706 wb->ref = ref; in __await_barrier()
[all …]
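
__await_barrier() above allocates a small wait_barrier wrapper, points its embedded wait-queue entry at a custom wake callback, and barrier_wake() later recovers the wrapper with container_of(). A hedged sketch of that embed-and-recover idiom using the standard wait-queue types (the callback body here is illustrative, not the i915 logic):

#include <linux/slab.h>
#include <linux/wait.h>

/* Sketch: embed a wait_queue_entry_t in a private struct, install a custom
 * wake function, recover the container in the callback. */
struct wait_wrapper_sketch {
        wait_queue_entry_t base;        /* must be embedded for container_of() */
        void *priv;                     /* whatever the callback needs */
};

static int wake_sketch(wait_queue_entry_t *wq, unsigned mode,
                       int flags, void *key)
{
        struct wait_wrapper_sketch *w = container_of(wq, typeof(*w), base);

        /* use w->priv here, then tear the entry down */
        list_del(&wq->entry);
        kfree(w);
        return 0;
}

static struct wait_wrapper_sketch *make_waiter_sketch(void *priv)
{
        struct wait_wrapper_sketch *w = kmalloc(sizeof(*w), GFP_KERNEL);

        if (unlikely(!w))
                return NULL;

        w->base.flags = 0;
        w->base.func = wake_sketch;     /* runs instead of default_wake_function */
        w->base.private = priv;
        w->priv = priv;
        INIT_LIST_HEAD(&w->base.entry);
        return w;
}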
/drivers/md/
dm-writecache.c
1644 struct writeback_struct *wb = container_of(bio, struct writeback_struct, bio); in writecache_writeback_endio() local
1645 struct dm_writecache *wc = wb->wc; in writecache_writeback_endio()
1651 list_add_tail(&wb->endio_entry, &wc->endio_list); in writecache_writeback_endio()
1672 struct writeback_struct *wb; in __writecache_endio_pmem() local
1677 wb = list_entry(list->next, struct writeback_struct, endio_entry); in __writecache_endio_pmem()
1678 list_del(&wb->endio_entry); in __writecache_endio_pmem()
1680 if (unlikely(wb->bio.bi_status != BLK_STS_OK)) in __writecache_endio_pmem()
1681 writecache_error(wc, blk_status_to_errno(wb->bio.bi_status), in __writecache_endio_pmem()
1682 "write error %d", wb->bio.bi_status); in __writecache_endio_pmem()
1685 e = wb->wc_list[i]; in __writecache_endio_pmem()
[all …]
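
writecache_writeback_endio() above recovers its per-writeback context from the completed bio with container_of() (the bio is embedded in struct writeback_struct) and defers the real completion work to a list drained elsewhere. A hedged sketch of the embedded-bio idiom; wb_ctx_sketch and endio_list_sketch are illustrative names:

#include <linux/bio.h>
#include <linux/list.h>

static LIST_HEAD(endio_list_sketch);

/* Sketch: the driver context wraps the bio, so the completion callback can
 * get back to it with container_of() and queue it for a worker. */
struct wb_ctx_sketch {
        struct list_head endio_entry;   /* linked onto the drain list */
        struct bio bio;                 /* the embedded bio, submitted as-is */
};

static void endio_sketch(struct bio *bio)
{
        struct wb_ctx_sketch *wb = container_of(bio, struct wb_ctx_sketch, bio);

        /* The real driver takes a lock and wakes its endio thread here;
         * only the recover-and-queue step is shown. */
        list_add_tail(&wb->endio_entry, &endio_list_sketch);
}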
/drivers/gpu/drm/vkms/
vkms_writeback.c
141 struct drm_writeback_connector *wb = &vkmsdev->output.wb_connector; in vkms_enable_writeback_connector() local
144 drm_connector_helper_add(&wb->base, &vkms_wb_conn_helper_funcs); in vkms_enable_writeback_connector()
146 return drm_writeback_connector_init(&vkmsdev->drm, wb, in vkms_enable_writeback_connector()
