/drivers/staging/wilc1000/
D | wilc_spi.c |
    245  static int wilc_spi_tx_rx(struct wilc *wilc, u8 *wb, u8 *rb, u32 rlen)   in wilc_spi_tx_rx() argument
    254  .tx_buf = wb,   in wilc_spi_tx_rx()
    284  u8 wb[32], rb[32];   in spi_cmd_complete() local
    291  wb[0] = cmd;   in spi_cmd_complete()
    294  wb[1] = (u8)(adr >> 16);   in spi_cmd_complete()
    295  wb[2] = (u8)(adr >> 8);   in spi_cmd_complete()
    296  wb[3] = (u8)adr;   in spi_cmd_complete()
    301  wb[1] = (u8)(adr >> 8);   in spi_cmd_complete()
    303  wb[1] |= BIT(7);   in spi_cmd_complete()
    304  wb[2] = (u8)adr;   in spi_cmd_complete()
    [all …]
/drivers/gpu/drm/radeon/
D | r600_dma.c |
    56   if (rdev->wb.enabled)   in r600_dma_get_rptr()
    57   rptr = rdev->wb.wb[ring->rptr_offs/4];   in r600_dma_get_rptr()
    144  upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);   in r600_dma_resume()
    146  ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));   in r600_dma_resume()
    148  if (rdev->wb.enabled)   in r600_dma_resume()
    244  gpu_addr = rdev->wb.gpu_addr + index;   in r600_dma_ring_test()
    247  rdev->wb.wb[index/4] = cpu_to_le32(tmp);   in r600_dma_ring_test()
    261  tmp = le32_to_cpu(rdev->wb.wb[index/4]);   in r600_dma_ring_test()
    351  gpu_addr = rdev->wb.gpu_addr + index;   in r600_dma_ib_test()
    382  tmp = le32_to_cpu(rdev->wb.wb[index/4]);   in r600_dma_ib_test()
    [all …]
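The r600_dma hits above show the writeback handshake that repeats through the radeon and amdgpu entries below: the driver keeps a CPU-visible writeback page (rdev->wb.wb, with a matching GPU address in rdev->wb.gpu_addr), seeds a slot with a dummy value, asks the engine to write a known token to that address, and polls the slot until the token appears (the *_ring_test() functions). A minimal userspace sketch of that handshake, with a thread standing in for the DMA engine; every name in it is hypothetical:

```c
/* Sketch of the writeback ring-test pattern from r600_dma_ring_test():
 * seed a shared slot, let the "engine" overwrite it, poll with a
 * timeout.  In the kernel the slot also goes through cpu_to_le32()/
 * le32_to_cpu() because the engine writes little-endian.
 */
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

#define SEED_PATTERN  0xCAFEDEADu
#define TEST_PATTERN  0xDEADBEEFu

static volatile uint32_t wb_slot;      /* stand-in for rdev->wb.wb[index/4] */

/* Stand-in for the DMA engine executing the test's write packet. */
static void *fake_engine(void *arg)
{
	(void)arg;
	usleep(1000);                  /* engine latency */
	wb_slot = TEST_PATTERN;        /* the packet's payload lands here */
	return NULL;
}

int main(void)
{
	pthread_t engine;
	unsigned int i, timeout = 100000;

	wb_slot = SEED_PATTERN;        /* seed so a stale value can't pass */
	pthread_create(&engine, NULL, fake_engine, NULL);

	for (i = 0; i < timeout; i++) {  /* the poll loop from *_ring_test() */
		if (wb_slot == TEST_PATTERN)
			break;
		usleep(1);
	}
	pthread_join(engine, NULL);

	if (i < timeout)
		printf("ring test succeeded in %u iterations\n", i);
	else
		printf("ring test failed: slot=0x%08X\n", (unsigned)wb_slot);
	return 0;
}
```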
D | radeon_device.c |
    445  rdev->wb.enabled = false;   in radeon_wb_disable()
    459  if (rdev->wb.wb_obj) {   in radeon_wb_fini()
    460  if (!radeon_bo_reserve(rdev->wb.wb_obj, false)) {   in radeon_wb_fini()
    461  radeon_bo_kunmap(rdev->wb.wb_obj);   in radeon_wb_fini()
    462  radeon_bo_unpin(rdev->wb.wb_obj);   in radeon_wb_fini()
    463  radeon_bo_unreserve(rdev->wb.wb_obj);   in radeon_wb_fini()
    465  radeon_bo_unref(&rdev->wb.wb_obj);   in radeon_wb_fini()
    466  rdev->wb.wb = NULL;   in radeon_wb_fini()
    467  rdev->wb.wb_obj = NULL;   in radeon_wb_fini()
    484  if (rdev->wb.wb_obj == NULL) {   in radeon_wb_init()
    [all …]
D | cik_sdma.c |
    68   if (rdev->wb.enabled) {   in cik_sdma_get_rptr()
    69   rptr = rdev->wb.wb[ring->rptr_offs/4];   in cik_sdma_get_rptr()
    139  if (rdev->wb.enabled) {   in cik_sdma_ring_ib_execute()
    401  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);   in cik_sdma_gfx_resume()
    403  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));   in cik_sdma_gfx_resume()
    405  if (rdev->wb.enabled)   in cik_sdma_gfx_resume()
    659  gpu_addr = rdev->wb.gpu_addr + index;   in cik_sdma_ring_test()
    662  rdev->wb.wb[index/4] = cpu_to_le32(tmp);   in cik_sdma_ring_test()
    677  tmp = le32_to_cpu(rdev->wb.wb[index/4]);   in cik_sdma_ring_test()
    716  gpu_addr = rdev->wb.gpu_addr + index;   in cik_sdma_ib_test()
    [all …]
D | ni_dma.c |
    58   if (rdev->wb.enabled) {   in cayman_dma_get_rptr()
    59   rptr = rdev->wb.wb[ring->rptr_offs/4];   in cayman_dma_get_rptr()
    128  if (rdev->wb.enabled) {   in cayman_dma_ring_ib_execute()
    223  upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF);   in cayman_dma_resume()
    225  ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));   in cayman_dma_resume()
    227  if (rdev->wb.enabled)   in cayman_dma_resume()
D | radeon_fence.c |
    65   if (likely(rdev->wb.enabled || !drv->scratch_reg)) {   in radeon_fence_write()
    88   if (likely(rdev->wb.enabled || !drv->scratch_reg)) {   in radeon_fence_read()
    831  if (rdev->wb.use_event || !radeon_ring_supports_scratch_reg(rdev, &rdev->ring[ring])) {   in radeon_fence_driver_start_ring()
    835  rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];   in radeon_fence_driver_start_ring()
    836  rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr +   in radeon_fence_driver_start_ring()
    855  rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];   in radeon_fence_driver_start_ring()
    856  rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index;   in radeon_fence_driver_start_ring()
D | radeon_ring.c |
    300  else if (rdev->wb.enabled)   in radeon_ring_backup()
    412  if (rdev->wb.enabled) {   in radeon_ring_init()
    414  ring->next_rptr_gpu_addr = rdev->wb.gpu_addr + index;   in radeon_ring_init()
    415  ring->next_rptr_cpu_addr = &rdev->wb.wb[index/4];   in radeon_ring_init()
/drivers/usb/class/
D | cdc-acm.c |
    169  struct acm_wb *wb;   in acm_wb_alloc() local
    174  wb = &acm->wb[wbn];   in acm_wb_alloc()
    175  if (!wb->use) {   in acm_wb_alloc()
    176  wb->use = 1;   in acm_wb_alloc()
    193  n -= acm->wb[i].use;   in acm_wb_is_avail()
    201  static void acm_write_done(struct acm *acm, struct acm_wb *wb)   in acm_write_done() argument
    203  wb->use = 0;   in acm_write_done()
    214  static int acm_start_wb(struct acm *acm, struct acm_wb *wb)   in acm_start_wb() argument
    220  wb->urb->transfer_buffer = wb->buf;   in acm_start_wb()
    221  wb->urb->transfer_dma = wb->dmah;   in acm_start_wb()
    [all …]
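The cdc-acm hits show a different use of the name: a fixed pool of USB write buffers, each tagged with a use flag. acm_wb_alloc() claims the next free slot, acm_write_done() clears the flag, and acm_wb_is_avail() counts what remains. A minimal single-threaded sketch of that pool, with hypothetical names and the driver's spinlock protection omitted:

```c
/* Sketch of the fixed write-buffer pool pattern from cdc-acm.  In the
 * driver the pool is guarded by acm->write_lock; no locking here.
 */
#include <stdio.h>

#define POOL_NW 16                  /* pool size for illustration */

struct pool_wb {
	unsigned char buf[64];      /* payload storage */
	int use;                    /* 1 while owned by a writer */
};

static struct pool_wb pool[POOL_NW];

/* Claim a free buffer; returns its index or -1 if all are busy. */
static int pool_wb_alloc(void)
{
	static int wbn;             /* rotates like acm->write_current */
	int i;

	for (i = 0; i < POOL_NW; i++) {
		struct pool_wb *wb = &pool[wbn];

		wbn = (wbn + 1) % POOL_NW;
		if (!wb->use) {
			wb->use = 1;
			return wb - pool;
		}
	}
	return -1;
}

/* Count free buffers, as acm_wb_is_avail() does. */
static int pool_wb_avail(void)
{
	int i, n = POOL_NW;

	for (i = 0; i < POOL_NW; i++)
		n -= pool[i].use;
	return n;
}

int main(void)
{
	int a = pool_wb_alloc(), b = pool_wb_alloc();

	printf("claimed %d and %d, %d free\n", a, b, pool_wb_avail());
	pool[a].use = 0;            /* release, as acm_write_done() does */
	printf("%d free after release\n", pool_wb_avail());
	return 0;
}
```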
/drivers/gpu/drm/amd/amdgpu/
D | amdgpu_device.c |
    444  if (adev->wb.wb_obj) {   in amdgpu_wb_fini()
    445  if (!amdgpu_bo_reserve(adev->wb.wb_obj, false)) {   in amdgpu_wb_fini()
    446  amdgpu_bo_kunmap(adev->wb.wb_obj);   in amdgpu_wb_fini()
    447  amdgpu_bo_unpin(adev->wb.wb_obj);   in amdgpu_wb_fini()
    448  amdgpu_bo_unreserve(adev->wb.wb_obj);   in amdgpu_wb_fini()
    450  amdgpu_bo_unref(&adev->wb.wb_obj);   in amdgpu_wb_fini()
    451  adev->wb.wb = NULL;   in amdgpu_wb_fini()
    452  adev->wb.wb_obj = NULL;   in amdgpu_wb_fini()
    469  if (adev->wb.wb_obj == NULL) {   in amdgpu_wb_init()
    472  &adev->wb.wb_obj);   in amdgpu_wb_init()
    [all …]
D | si_dma.c |
    42   return ring->adev->wb.wb[ring->rptr_offs>>2];   in si_dma_ring_get_rptr()
    166  rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);   in si_dma_start()
    229  gpu_addr = adev->wb.gpu_addr + (index * 4);   in si_dma_ring_test_ring()
    231  adev->wb.wb[index] = cpu_to_le32(tmp);   in si_dma_ring_test_ring()
    247  tmp = le32_to_cpu(adev->wb.wb[index]);   in si_dma_ring_test_ring()
    289  gpu_addr = adev->wb.gpu_addr + (index * 4);   in si_dma_ring_test_ib()
    291  adev->wb.wb[index] = cpu_to_le32(tmp);   in si_dma_ring_test_ib()
    317  tmp = le32_to_cpu(adev->wb.wb[index]);   in si_dma_ring_test_ib()
D | sdma_v3_0.c |
    339  return ring->adev->wb.wb[ring->rptr_offs] >> 2;   in sdma_v3_0_ring_get_rptr()
    356  wptr = ring->adev->wb.wb[ring->wptr_offs] >> 2;   in sdma_v3_0_ring_get_wptr()
    379  adev->wb.wb[ring->wptr_offs] = ring->wptr << 2;   in sdma_v3_0_ring_set_wptr()
    648  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);   in sdma_v3_0_gfx_resume()
    650  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);   in sdma_v3_0_gfx_resume()
    824  gpu_addr = adev->wb.gpu_addr + (index * 4);   in sdma_v3_0_ring_test_ring()
    826  adev->wb.wb[index] = cpu_to_le32(tmp);   in sdma_v3_0_ring_test_ring()
    844  tmp = le32_to_cpu(adev->wb.wb[index]);   in sdma_v3_0_ring_test_ring()
    886  gpu_addr = adev->wb.gpu_addr + (index * 4);   in sdma_v3_0_ring_test_ib()
    888  adev->wb.wb[index] = cpu_to_le32(tmp);   in sdma_v3_0_ring_test_ib()
    [all …]
D | cik_sdma.c |
    167  rptr = ring->adev->wb.wb[ring->rptr_offs];   in cik_sdma_ring_get_rptr()
    427  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);   in cik_sdma_gfx_resume()
    429  ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));   in cik_sdma_gfx_resume()
    577  gpu_addr = adev->wb.gpu_addr + (index * 4);   in cik_sdma_ring_test_ring()
    579  adev->wb.wb[index] = cpu_to_le32(tmp);   in cik_sdma_ring_test_ring()
    595  tmp = le32_to_cpu(adev->wb.wb[index]);   in cik_sdma_ring_test_ring()
    637  gpu_addr = adev->wb.gpu_addr + (index * 4);   in cik_sdma_ring_test_ib()
    639  adev->wb.wb[index] = cpu_to_le32(tmp);   in cik_sdma_ring_test_ib()
    667  tmp = le32_to_cpu(adev->wb.wb[index]);   in cik_sdma_ring_test_ib()
D | sdma_v2_4.c |
    194  return ring->adev->wb.wb[ring->rptr_offs] >> 2;   in sdma_v2_4_ring_get_rptr()
    461  upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);   in sdma_v2_4_gfx_resume()
    463  lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);   in sdma_v2_4_gfx_resume()
    621  gpu_addr = adev->wb.gpu_addr + (index * 4);   in sdma_v2_4_ring_test_ring()
    623  adev->wb.wb[index] = cpu_to_le32(tmp);   in sdma_v2_4_ring_test_ring()
    641  tmp = le32_to_cpu(adev->wb.wb[index]);   in sdma_v2_4_ring_test_ring()
    683  gpu_addr = adev->wb.gpu_addr + (index * 4);   in sdma_v2_4_ring_test_ib()
    685  adev->wb.wb[index] = cpu_to_le32(tmp);   in sdma_v2_4_ring_test_ib()
    717  tmp = le32_to_cpu(adev->wb.wb[index]);   in sdma_v2_4_ring_test_ib()
D | tonga_ih.c |
    142  wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4);   in tonga_ih_irq_init()
    203  wptr = le32_to_cpu(adev->wb.wb[adev->irq.ih.wptr_offs]);   in tonga_ih_get_wptr()
    265  adev->wb.wb[adev->irq.ih.rptr_offs] = adev->irq.ih.rptr;   in tonga_ih_set_rptr()
D | si_ih.c |
    79   wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4);   in si_ih_irq_init()
    107  wptr = le32_to_cpu(adev->wb.wb[adev->irq.ih.wptr_offs]);   in si_ih_get_wptr()
D | amdgpu_ring.c |
    208  ring->cond_exe_gpu_addr = adev->wb.gpu_addr + (ring->cond_exe_offs * 4);   in amdgpu_ring_init()
    209  ring->cond_exe_cpu_addr = &adev->wb.wb[ring->cond_exe_offs];   in amdgpu_ring_init()
D | cik_ih.c |
    134  wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4);   in cik_ih_irq_init()
    190  wptr = le32_to_cpu(adev->wb.wb[adev->irq.ih.wptr_offs]);   in cik_ih_get_wptr()
/drivers/media/platform/mtk-vcodec/venc/
D | venc_vp8_if.c |
    170  struct venc_vp8_vpu_buf *wb = inst->vsi->work_bufs;   in vp8_enc_alloc_work_buf() local
    175  if ((wb[i].size == 0))   in vp8_enc_alloc_work_buf()
    187  inst->work_bufs[i].size = wb[i].size;   in vp8_enc_alloc_work_buf()
    205  wb[i].vpua);   in vp8_enc_alloc_work_buf()
    206  memcpy(inst->work_bufs[i].va, tmp_va, wb[i].size);   in vp8_enc_alloc_work_buf()
    208  wb[i].iova = inst->work_bufs[i].dma_addr;   in vp8_enc_alloc_work_buf()
D | venc_h264_if.c |
    246  struct venc_h264_vpu_buf *wb = inst->vsi->work_bufs;   in h264_enc_alloc_work_buf() local
    267  inst->work_bufs[i].size = wb[i].size;   in h264_enc_alloc_work_buf()
    270  inst->vpu_inst.dev, wb[i].vpua);   in h264_enc_alloc_work_buf()
    290  wb[i].vpua);   in h264_enc_alloc_work_buf()
    292  wb[i].size);   in h264_enc_alloc_work_buf()
    295  wb[i].iova = inst->work_bufs[i].dma_addr;   in h264_enc_alloc_work_buf()
/drivers/staging/media/davinci_vpfe/
D | dm365_ipipe_hw.c |
    612  void ipipe_set_wb_regs(void __iomem *base_addr, struct vpfe_ipipe_wb *wb)   in ipipe_set_wb_regs() argument
    618  regw_ip(base_addr, wb->ofst_r & WB_OFFSET_MASK, WB2_OFT_R);   in ipipe_set_wb_regs()
    619  regw_ip(base_addr, wb->ofst_gr & WB_OFFSET_MASK, WB2_OFT_GR);   in ipipe_set_wb_regs()
    620  regw_ip(base_addr, wb->ofst_gb & WB_OFFSET_MASK, WB2_OFT_GB);   in ipipe_set_wb_regs()
    621  regw_ip(base_addr, wb->ofst_b & WB_OFFSET_MASK, WB2_OFT_B);   in ipipe_set_wb_regs()
    624  val = IPIPE_U13Q9(wb->gain_r.decimal, wb->gain_r.integer);   in ipipe_set_wb_regs()
    627  val = IPIPE_U13Q9(wb->gain_gr.decimal, wb->gain_gr.integer);   in ipipe_set_wb_regs()
    630  val = IPIPE_U13Q9(wb->gain_gb.decimal, wb->gain_gb.integer);   in ipipe_set_wb_regs()
    633  val = IPIPE_U13Q9(wb->gain_b.decimal, wb->gain_b.integer);   in ipipe_set_wb_regs()
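ipipe_set_wb_regs() programs each white-balance gain as a U13Q9 fixed-point value: 13 bits, of which 9 are fractional, so the encoded gain is integer + decimal/512. A small worked sketch; the packing macro below is my reading of IPIPE_U13Q9() and should be checked against the davinci_vpfe headers:

```c
/* Sketch of U13Q9 fixed-point packing for the ipipe white-balance
 * gains: 4 integer bits above 9 fractional bits (assumed layout).
 */
#include <stdio.h>

#define U13Q9(dec, integ)  ((((integ) & 0xF) << 9) | ((dec) & 0x1FF))

int main(void)
{
	/* gain of 1.5: integer 1, fraction 0.5 * 512 = 256 */
	unsigned int reg = U13Q9(256, 1);

	printf("packed = 0x%04X\n", reg);
	printf("gain   = %.4f\n", (reg >> 9) + (reg & 0x1FF) / 512.0);
	return 0;
}
```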
/drivers/media/i2c/m5mols/
D | m5mols_controls.c |
    281  static const unsigned short wb[][2] = {   in m5mols_set_white_balance() local
    296  for (i = 0; i < ARRAY_SIZE(wb); i++) {   in m5mols_set_white_balance()
    298  if (wb[i][0] != val)   in m5mols_set_white_balance()
    302  "Setting white balance to: %#x\n", wb[i][0]);   in m5mols_set_white_balance()
    304  awb = wb[i][0] == V4L2_WHITE_BALANCE_AUTO;   in m5mols_set_white_balance()
    311  ret = m5mols_write(sd, AWB_MANUAL, wb[i][1]);   in m5mols_set_white_balance()
/drivers/crypto/sunxi-ss/
D | sun4i-ss-hash.c |
    198  u32 wb = 0;   in sun4i_hash() local
    398  wb = *(u32 *)(op->buf + nwait * 4);   in sun4i_hash()
    399  wb &= (0xFFFFFFFF >> (4 - nbw) * 8);   in sun4i_hash()
    404  wb |= ((1 << 7) << (nbw * 8));   in sun4i_hash()
    405  bf[j++] = wb;   in sun4i_hash()
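The sun4i_hash() hits capture the tail-padding step of MD5/SHA-1: keep only the nbw valid bytes of the final partial little-endian word, then set the 0x80 terminator byte immediately above them. A standalone sketch of just that bit manipulation (pad_tail_word() is a hypothetical helper):

```c
/* Sketch of the tail-word padding seen in sun4i_hash(): mask the nbw
 * valid low-order bytes of the last partial word, then OR in the 0x80
 * padding byte one position above them.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t pad_tail_word(uint32_t last, unsigned int nbw)
{
	uint32_t wb = 0;

	if (nbw)                 /* nbw == 0: no partial bytes survive */
		wb = last & (0xFFFFFFFFu >> (4 - nbw) * 8);
	return wb | ((uint32_t)0x80 << (nbw * 8));
}

int main(void)
{
	unsigned int nbw;

	/* last partial word 0x44332211 with 0..3 valid bytes */
	for (nbw = 0; nbw < 4; nbw++)
		printf("nbw=%u -> 0x%08X\n", nbw,
		       (unsigned)pad_tail_word(0x44332211u, nbw));
	return 0;
}
```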
/drivers/char/
D | ps3flash.c |
    270  int wb;   in ps3flash_kernel_write() local
    277  wb = ps3flash_writeback(ps3flash_dev);   in ps3flash_kernel_write()
    278  if (wb)   in ps3flash_kernel_write()
    279  return wb;   in ps3flash_kernel_write()
/drivers/net/ethernet/intel/i40e/
D | i40e_txrx.c |
    486   qw = le64_to_cpu(rx_desc->wb.qword1.status_error_len);   in i40e_fd_handle_status()
    491   pf->fd_inv = le32_to_cpu(rx_desc->wb.qword0.hi_dword.fd_id);   in i40e_fd_handle_status()
    492   if ((rx_desc->wb.qword0.hi_dword.fd_id != 0) ||   in i40e_fd_handle_status()
    510   if ((rx_desc->wb.qword0.hi_dword.fd_id == 0) &&   in i40e_fd_handle_status()
    536   rx_desc->wb.qword0.hi_dword.fd_id);   in i40e_fd_handle_status()
    972   qw = le64_to_cpu(rx_desc->wb.qword1.status_error_len);   in i40e_clean_programming_status()
    1261  rx_desc->wb.qword1.status_error_len = 0;   in i40e_alloc_rx_buffers()
    1299  qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len);   in i40e_rx_checksum()
    1415  if ((rx_desc->wb.qword1.status_error_len & rss_mask) == rss_mask) {   in i40e_rx_hash()
    1416  hash = le32_to_cpu(rx_desc->wb.qword0.hi_dword.rss);   in i40e_rx_hash()
    [all …]
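The i40e hits all revolve around descriptor writeback: the NIC overwrites the receive descriptor in host memory with little-endian status words, and the driver byte-swaps qword1 once, then shifts and masks out the packed fields. A userspace sketch of that parsing; the field offsets follow my reading of the driver's I40E_RXD_QW1_* layout (status in bits 0..18, errors at 19, ptype at 30, packet length at 38) and should be verified against i40e_type.h:

```c
/* Sketch of the qword1 writeback parsing pattern from i40e_txrx.c:
 * one endianness conversion, then shift-and-mask field extraction.
 */
#include <endian.h>          /* le64toh()/htole64(): userspace stand-ins
                                for le64_to_cpu()/cpu_to_le64() */
#include <stdint.h>
#include <stdio.h>

struct rx_desc_wb {
	uint64_t status_error_len;   /* as written back by the NIC */
};

int main(void)
{
	/* A descriptor as it might land in memory: length 1514,
	 * ptype 24, DD status bit (bit 0) set.  Values invented. */
	struct rx_desc_wb desc = {
		.status_error_len = htole64(((uint64_t)1514 << 38) |
					    ((uint64_t)24 << 30) | 0x1),
	};
	uint64_t qw = le64toh(desc.status_error_len);

	printf("status = 0x%05llx\n", (unsigned long long)(qw & 0x7FFFF));
	printf("errors = 0x%02llx\n", (unsigned long long)((qw >> 19) & 0xFF));
	printf("ptype  = %llu\n", (unsigned long long)((qw >> 30) & 0xFF));
	printf("length = %llu\n", (unsigned long long)((qw >> 38) & 0x3FFF));
	return 0;
}
```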
/drivers/media/i2c/s5c73m3/
D | s5c73m3-ctrls.c |
    152  static const unsigned short wb[][2] = {   in s5c73m3_set_white_balance() local
    162  for (i = 0; i < ARRAY_SIZE(wb); i++) {   in s5c73m3_set_white_balance()
    163  if (wb[i][0] != val)   in s5c73m3_set_white_balance()
    170  return s5c73m3_isp_command(state, COMM_AWB_MODE, wb[i][1]);   in s5c73m3_set_white_balance()
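The m5mols and s5c73m3 white-balance handlers above share one idiom: a two-column const table mapping a V4L2 preset (column 0) to a device register value (column 1), scanned linearly for the first match. A minimal sketch of that lookup; the table contents here are invented for illustration:

```c
/* Sketch of the two-column lookup-table pattern from the m5mols and
 * s5c73m3 set_white_balance handlers.
 */
#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

enum { WB_AUTO = 1, WB_DAYLIGHT, WB_CLOUDY, WB_TUNGSTEN };  /* V4L2-like presets */

static const unsigned short wb[][2] = {
	{ WB_AUTO,     0x01 },
	{ WB_DAYLIGHT, 0x04 },
	{ WB_CLOUDY,   0x05 },
	{ WB_TUNGSTEN, 0x02 },
};

/* Returns the register value for a preset, or -1 if unsupported. */
static int wb_lookup(unsigned short val)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(wb); i++)
		if (wb[i][0] == val)
			return wb[i][1];
	return -1;
}

int main(void)
{
	printf("cloudy  -> reg 0x%02x\n", wb_lookup(WB_CLOUDY));
	printf("unknown -> %d\n", wb_lookup(99));
	return 0;
}
```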