/drivers/gpu/drm/radeon/

D | radeon_ib.c |
      56  struct radeon_ib *ib, struct radeon_vm *vm,  in radeon_ib_get() argument
      61  r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, &ib->sa_bo, size, 256);  in radeon_ib_get()
      67  r = radeon_semaphore_create(rdev, &ib->semaphore);  in radeon_ib_get()
      72  ib->ring = ring;  in radeon_ib_get()
      73  ib->fence = NULL;  in radeon_ib_get()
      74  ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo);  in radeon_ib_get()
      75  ib->vm = vm;  in radeon_ib_get()
      80  ib->gpu_addr = ib->sa_bo->soffset + RADEON_VA_IB_OFFSET;  in radeon_ib_get()
      82  ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo);  in radeon_ib_get()
      84  ib->is_const_ib = false;  in radeon_ib_get()
    [all …]
D | radeon_vce.c |
     332  struct radeon_ib ib;  in radeon_vce_get_create_msg() local
     336  r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4);  in radeon_vce_get_create_msg()
     342  dummy = ib.gpu_addr + 1024;  in radeon_vce_get_create_msg()
     345  ib.length_dw = 0;  in radeon_vce_get_create_msg()
     346  ib.ptr[ib.length_dw++] = 0x0000000c; /* len */  in radeon_vce_get_create_msg()
     347  ib.ptr[ib.length_dw++] = 0x00000001; /* session cmd */  in radeon_vce_get_create_msg()
     348  ib.ptr[ib.length_dw++] = handle;  in radeon_vce_get_create_msg()
     350  ib.ptr[ib.length_dw++] = 0x00000030; /* len */  in radeon_vce_get_create_msg()
     351  ib.ptr[ib.length_dw++] = 0x01000001; /* create cmd */  in radeon_vce_get_create_msg()
     352  ib.ptr[ib.length_dw++] = 0x00000000;  in radeon_vce_get_create_msg()
    [all …]
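The radeon_vce.c hits show the command-stream idiom that recurs across these
drivers: 32-bit words are appended to the indirect buffer through
ib.ptr[ib.length_dw++], here as the length/command pairs of a VCE session
message. Below is a minimal user-space sketch of that pattern; the struct and
helper are illustrative stand-ins, not the kernel's radeon_ib.

    /* Build a toy dword stream the way radeon_vce_get_create_msg() does. */
    #include <stdint.h>
    #include <stdio.h>

    struct fake_ib {
            uint32_t ptr[64];       /* CPU-visible command buffer */
            uint32_t length_dw;     /* next free dword slot */
    };

    static void ib_emit(struct fake_ib *ib, uint32_t dw)
    {
            ib->ptr[ib->length_dw++] = dw;
    }

    int main(void)
    {
            struct fake_ib ib = { .length_dw = 0 };
            uint32_t handle = 1;            /* session handle, value made up */

            ib_emit(&ib, 0x0000000c);       /* len, as in the hit at line 346 */
            ib_emit(&ib, 0x00000001);       /* session cmd */
            ib_emit(&ib, handle);

            printf("built %u dwords\n", ib.length_dw);
            return 0;
    }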
D | si_dma.c |
      70  struct radeon_ib *ib,  in si_dma_vm_copy_pages() argument
      79  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in si_dma_vm_copy_pages()
      81  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_copy_pages()
      82  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in si_dma_vm_copy_pages()
      83  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_copy_pages()
      84  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in si_dma_vm_copy_pages()
     106  struct radeon_ib *ib,  in si_dma_vm_write_pages() argument
     120  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);  in si_dma_vm_write_pages()
     121  ib->ptr[ib->length_dw++] = pe;  in si_dma_vm_write_pages()
     122  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_write_pages()
    [all …]
D | ni_dma.c |
     123  struct radeon_ib *ib)  in cayman_dma_ring_ib_execute() argument
     125  struct radeon_ring *ring = &rdev->ring[ib->ring];  in cayman_dma_ring_ib_execute()
     143  radeon_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, ib->vm ? ib->vm->id : 0, 0));  in cayman_dma_ring_ib_execute()
     144  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));  in cayman_dma_ring_ib_execute()
     145  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in cayman_dma_ring_ib_execute()
     315  struct radeon_ib *ib,  in cayman_dma_vm_copy_pages() argument
     326  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in cayman_dma_vm_copy_pages()
     328  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cayman_dma_vm_copy_pages()
     329  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cayman_dma_vm_copy_pages()
     330  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_copy_pages()
    [all …]
D | cik_sdma.c |
     134  struct radeon_ib *ib)  in cik_sdma_ring_ib_execute() argument
     136  struct radeon_ring *ring = &rdev->ring[ib->ring];  in cik_sdma_ring_ib_execute()
     137  u32 extra_bits = (ib->vm ? ib->vm->id : 0) & 0xf;  in cik_sdma_ring_ib_execute()
     155  radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */  in cik_sdma_ring_ib_execute()
     156  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));  in cik_sdma_ring_ib_execute()
     157  radeon_ring_write(ring, ib->length_dw);  in cik_sdma_ring_ib_execute()
     679  struct radeon_ib ib;  in cik_sdma_ib_test() local
     696  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);  in cik_sdma_ib_test()
     702  ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0);  in cik_sdma_ib_test()
     703  ib.ptr[1] = lower_32_bits(gpu_addr);  in cik_sdma_ib_test()
    [all …]
D | evergreen_cs.c |
     446  volatile u32 *ib = p->ib.ptr;  in evergreen_cs_track_validate_cb() local
     468  ib[track->cb_color_slice_idx[id]] = slice;  in evergreen_cs_track_validate_cb()
    1099  u32 m, i, tmp, *ib;  in evergreen_cs_check_reg() local
    1120  ib = p->ib.ptr;  in evergreen_cs_check_reg()
    1168  ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);  in evergreen_cs_check_reg()
    1197  ib[idx] &= ~Z_ARRAY_MODE(0xf);  in evergreen_cs_check_reg()
    1199  ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));  in evergreen_cs_check_reg()
    1207  ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));  in evergreen_cs_check_reg()
    1208  ib[idx] |= DB_TILE_SPLIT(tile_split) |  in evergreen_cs_check_reg()
    1240  ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);  in evergreen_cs_check_reg()
    [all …]
D | radeon_vm.c |
     360  struct radeon_ib *ib,  in radeon_vm_set_pages() argument
     369  radeon_asic_vm_copy_pages(rdev, ib, pe, src, count);  in radeon_vm_set_pages()
     372  radeon_asic_vm_write_pages(rdev, ib, pe, addr,  in radeon_vm_set_pages()
     376  radeon_asic_vm_set_pages(rdev, ib, pe, addr,  in radeon_vm_set_pages()
     390  struct radeon_ib ib;  in radeon_vm_clear_bo() local
     406  r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256);  in radeon_vm_clear_bo()
     410  ib.length_dw = 0;  in radeon_vm_clear_bo()
     412  radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0);  in radeon_vm_clear_bo()
     413  radeon_asic_vm_pad_ib(rdev, &ib);  in radeon_vm_clear_bo()
     414  WARN_ON(ib.length_dw > 64);  in radeon_vm_clear_bo()
    [all …]
D | r600_cs.c |
     356  volatile u32 *ib = p->ib.ptr;  in r600_cs_track_validate_cb() local
     467  ib[track->cb_color_size_idx[i]] = tmp;  in r600_cs_track_validate_cb()
     526  volatile u32 *ib = p->ib.ptr;  in r600_cs_track_validate_db() local
     564  ib[track->db_depth_size_idx] = S_028000_SLICE_TILE_MAX(tmp - 1) | (track->db_depth_size & 0x3FF);  in r600_cs_track_validate_db()
     834  volatile uint32_t *ib;  in r600_cs_common_vline_parse() local
     836  ib = p->ib.ptr;  in r600_cs_common_vline_parse()
     899  ib[h_idx + 2] = PACKET2(0);  in r600_cs_common_vline_parse()
     900  ib[h_idx + 3] = PACKET2(0);  in r600_cs_common_vline_parse()
     901  ib[h_idx + 4] = PACKET2(0);  in r600_cs_common_vline_parse()
     902  ib[h_idx + 5] = PACKET2(0);  in r600_cs_common_vline_parse()
    [all …]
D | r600_dma.c |
     339  struct radeon_ib ib;  in r600_dma_ib_test() local
     353  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);  in r600_dma_ib_test()
     359  ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);  in r600_dma_ib_test()
     360  ib.ptr[1] = lower_32_bits(gpu_addr);  in r600_dma_ib_test()
     361  ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;  in r600_dma_ib_test()
     362  ib.ptr[3] = 0xDEADBEEF;  in r600_dma_ib_test()
     363  ib.length_dw = 4;  in r600_dma_ib_test()
     365  r = radeon_ib_schedule(rdev, &ib, NULL, false);  in r600_dma_ib_test()
     367  radeon_ib_free(rdev, &ib);  in r600_dma_ib_test()
     371  r = radeon_fence_wait(ib.fence, false);  in r600_dma_ib_test()
    [all …]
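r600_dma_ib_test() above walks the whole IB lifecycle these files keep
referring to: radeon_ib_get(), fill ptr[] and set length_dw, submit with
radeon_ib_schedule(), wait on ib.fence (the radeon_ib_free() at line 367 is
the error path), then read the test pattern back. A self-contained user-space
model of that flow follows; every function here is a simplified stand-in for
the radeon API, and the packet header is a toy encoding, not the real
DMA_PACKET layout.

    #include <stdint.h>
    #include <stdio.h>

    struct model_ib {
            uint32_t ptr[256];
            uint32_t length_dw;
    };

    static uint32_t vram[1];        /* stands in for the GPU-visible dword */

    /* "Hardware" executes the IB: decode the toy one-dword write packet. */
    static int model_ib_schedule(struct model_ib *ib)
    {
            if (ib->length_dw < 4)
                    return -1;      /* malformed packet */
            /* ptr[1]/ptr[2] would carry the 40-bit address; our "VRAM"
             * has a single slot, so only the payload matters here */
            vram[0] = ib->ptr[3];
            return 0;
    }

    int main(void)
    {
            struct model_ib ib = { .length_dw = 0 };

            ib.ptr[0] = 0x40000001;         /* toy "DMA write, 1 dword" header */
            ib.ptr[1] = 0;                  /* lower 32 bits of dest address */
            ib.ptr[2] = 0;                  /* upper address bits */
            ib.ptr[3] = 0xDEADBEEF;         /* test pattern, as in the driver */
            ib.length_dw = 4;

            if (model_ib_schedule(&ib) == 0 && vram[0] == 0xDEADBEEF)
                    printf("ib test succeeded\n");
            else
                    printf("ib test failed\n");
            return 0;
    }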
D | radeon_cs.c |
     196  p->vm_bos = radeon_vm_get_bos(p->rdev, p->ib.vm,  in radeon_cs_parser_relocs()
     261  r = radeon_semaphore_sync_resv(p->rdev, p->ib.semaphore, resv,  in radeon_cs_sync_rings()
     286  p->ib.sa_bo = NULL;  in radeon_cs_parser_init()
     287  p->ib.semaphore = NULL;  in radeon_cs_parser_init()
     437  &parser->ib.fence->base);  in radeon_cs_parser_fini()
     457  radeon_ib_free(parser->rdev, &parser->ib);  in radeon_cs_parser_fini()
     491  r = radeon_ib_schedule(rdev, &parser->ib, NULL, true);  in radeon_cs_ib_chunk()
     564  r = radeon_ring_ib_parse(rdev, parser->ring, &parser->ib);  in radeon_cs_ib_vm_chunk()
     584  radeon_semaphore_sync_fence(parser->ib.semaphore, vm->fence);  in radeon_cs_ib_vm_chunk()
     588  r = radeon_ib_schedule(rdev, &parser->ib, &parser->const_ib, true);  in radeon_cs_ib_vm_chunk()
    [all …]
D | r200.c |
     151  volatile uint32_t *ib;  in r200_packet0_check() local
     159  ib = p->ib.ptr;  in r200_packet0_check()
     191  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
     204  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
     228  ib[idx] = tmp + ((u32)reloc->gpu_offset);  in r200_packet0_check()
     230  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
     274  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
     300  ib[idx] = tmp;  in r200_packet0_check()
     302  ib[idx] = idx_value;  in r200_packet0_check()
     368  ib[idx] = idx_value + ((u32)reloc->gpu_offset);  in r200_packet0_check()
D | radeon_uvd.c |
     553  p->ib.ptr[data0] = start & 0xFFFFFFFF;  in radeon_uvd_cs_reloc()
     554  p->ib.ptr[data1] = start >> 32;  in radeon_uvd_cs_reloc()
     700  struct radeon_ib ib;  in radeon_uvd_send_msg() local
     703  r = radeon_ib_get(rdev, ring, &ib, NULL, 64);  in radeon_uvd_send_msg()
     707  ib.ptr[0] = PACKET0(UVD_GPCOM_VCPU_DATA0, 0);  in radeon_uvd_send_msg()
     708  ib.ptr[1] = addr;  in radeon_uvd_send_msg()
     709  ib.ptr[2] = PACKET0(UVD_GPCOM_VCPU_DATA1, 0);  in radeon_uvd_send_msg()
     710  ib.ptr[3] = addr >> 32;  in radeon_uvd_send_msg()
     711  ib.ptr[4] = PACKET0(UVD_GPCOM_VCPU_CMD, 0);  in radeon_uvd_send_msg()
     712  ib.ptr[5] = 0;  in radeon_uvd_send_msg()
    [all …]
D | evergreen_dma.c |
      68  struct radeon_ib *ib)  in evergreen_dma_ring_ib_execute() argument
      70  struct radeon_ring *ring = &rdev->ring[ib->ring];  in evergreen_dma_ring_ib_execute()
      89  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));  in evergreen_dma_ring_ib_execute()
      90  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in evergreen_dma_ring_ib_execute()
D | radeon_asic.h |
      95  void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
     334  void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
     344  void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
     515  void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
     542  struct radeon_ib *ib);
     603  void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
     608  int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
     609  int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
     611  struct radeon_ib *ib);
     616  struct radeon_ib *ib,
    [all …]
/drivers/net/ethernet/amd/

D | 7990.c |
      99  t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
     100  ib->brx_ring[t].length, \
     101  ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
     105  t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
     106  ib->btx_ring[t].length, \
     107  ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
     139  volatile struct lance_init_block *ib = lp->init_block;  in lance_init_ring() local
     149  ib->mode = LE_MO_PROM; /* normal, enable Tx & Rx */  in lance_init_ring()
     162  ib->phys_addr[0] = dev->dev_addr[1];  in lance_init_ring()
     163  ib->phys_addr[1] = dev->dev_addr[0];  in lance_init_ring()
    [all …]
D | sunlance.c |
     323  struct lance_init_block *ib = lp->init_block_mem;  in lance_init_ring_dvma() local
     336  ib->phys_addr [0] = dev->dev_addr [1];  in lance_init_ring_dvma()
     337  ib->phys_addr [1] = dev->dev_addr [0];  in lance_init_ring_dvma()
     338  ib->phys_addr [2] = dev->dev_addr [3];  in lance_init_ring_dvma()
     339  ib->phys_addr [3] = dev->dev_addr [2];  in lance_init_ring_dvma()
     340  ib->phys_addr [4] = dev->dev_addr [5];  in lance_init_ring_dvma()
     341  ib->phys_addr [5] = dev->dev_addr [4];  in lance_init_ring_dvma()
     346  ib->btx_ring [i].tmd0 = leptr;  in lance_init_ring_dvma()
     347  ib->btx_ring [i].tmd1_hadr = leptr >> 16;  in lance_init_ring_dvma()
     348  ib->btx_ring [i].tmd1_bits = 0;  in lance_init_ring_dvma()
    [all …]
D | a2065.c |
     152  volatile struct lance_init_block *ib = lp->init_block;  in lance_init_ring() local
     163  ib->mode = 0;  in lance_init_ring()
     168  ib->phys_addr[0] = dev->dev_addr[1];  in lance_init_ring()
     169  ib->phys_addr[1] = dev->dev_addr[0];  in lance_init_ring()
     170  ib->phys_addr[2] = dev->dev_addr[3];  in lance_init_ring()
     171  ib->phys_addr[3] = dev->dev_addr[2];  in lance_init_ring()
     172  ib->phys_addr[4] = dev->dev_addr[5];  in lance_init_ring()
     173  ib->phys_addr[5] = dev->dev_addr[4];  in lance_init_ring()
     179  ib->btx_ring[i].tmd0 = leptr;  in lance_init_ring()
     180  ib->btx_ring[i].tmd1_hadr = leptr >> 16;  in lance_init_ring()
    [all …]
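7990.c, sunlance.c and a2065.c all load the MAC address into the LANCE init
block with the bytes of each pair exchanged (phys_addr[0] = dev_addr[1], and
so on). The likely reason, demonstrated by the runnable sketch below, is that
the chip fetches its init block as 16-bit little-endian words while these
drivers run on big-endian hosts (Amiga, SPARC), so pre-swapping each pair
makes the words come out in the intended order.

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            /* example address; any locally administered MAC works */
            const uint8_t dev_addr[6] = { 0x02, 0x60, 0x8c, 0x12, 0x34, 0x56 };
            uint8_t phys_addr[6];
            int i;

            /* byte-swapped copy, as in the lance_init_ring() hits */
            for (i = 0; i < 6; i += 2) {
                    phys_addr[i]     = dev_addr[i + 1];
                    phys_addr[i + 1] = dev_addr[i];
            }

            /* read each pair back as a little-endian 16-bit word, as the
             * chip would: the original byte order reappears */
            for (i = 0; i < 6; i += 2) {
                    uint16_t w = (uint16_t)((phys_addr[i + 1] << 8) | phys_addr[i]);
                    printf("word %d: %02x %02x\n", i / 2, w >> 8, w & 0xff);
            }
            return 0;
    }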
D | declance.c |
     234  #define lib_ptr(ib, rt, type) \  argument
     235  ((volatile u16 *)((u8 *)(ib) + lib_off(rt, type)))
     451  volatile u16 *ib = (volatile u16 *)dev->mem_start;  in lance_init_ring() local
     463  *lib_ptr(ib, phys_addr[0], lp->type) = (dev->dev_addr[1] << 8) |  in lance_init_ring()
     465  *lib_ptr(ib, phys_addr[1], lp->type) = (dev->dev_addr[3] << 8) |  in lance_init_ring()
     467  *lib_ptr(ib, phys_addr[2], lp->type) = (dev->dev_addr[5] << 8) |  in lance_init_ring()
     473  *lib_ptr(ib, rx_len, lp->type) = (LANCE_LOG_RX_BUFFERS << 13) |  in lance_init_ring()
     475  *lib_ptr(ib, rx_ptr, lp->type) = leptr;  in lance_init_ring()
     482  *lib_ptr(ib, tx_len, lp->type) = (LANCE_LOG_TX_BUFFERS << 13) |  in lance_init_ring()
     484  *lib_ptr(ib, tx_ptr, lp->type) = leptr;  in lance_init_ring()
    [all …]
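declance.c reaches init-block fields through lib_ptr(), which adds a computed
byte offset to the block's base address and casts the result to a volatile
16-bit pointer; lib_off(rt, type) supplies the offset and, per its extra type
argument, varies it with the board variant. A reduced model of the idiom,
with lib_off() collapsed to a plain offsetof() and a cut-down block layout:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* illustrative layout only; not the driver's full init block */
    struct lance_init_block {
            uint16_t mode;
            uint16_t phys_addr[3];
            uint16_t rx_len;
            uint16_t rx_ptr;
    };

    #define lib_off(rt)     offsetof(struct lance_init_block, rt)
    #define lib_ptr(ib, rt) \
            ((volatile uint16_t *)((uint8_t *)(ib) + lib_off(rt)))

    int main(void)
    {
            struct lance_init_block block = { 0 };

            *lib_ptr(&block, phys_addr[0]) = 0x0260;  /* first MAC word */
            printf("phys_addr[0] = %04x\n", block.phys_addr[0]);
            return 0;
    }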
/drivers/infiniband/hw/mlx4/

D | ah.c |
      47  ah->av.ib.port_pd = cpu_to_be32(to_mpd(pd)->pdn | (ah_attr->port_num << 24));  in create_ib_ah()
      48  ah->av.ib.g_slid = ah_attr->src_path_bits;  in create_ib_ah()
      49  ah->av.ib.sl_tclass_flowlabel = cpu_to_be32(ah_attr->sl << 28);  in create_ib_ah()
      51  ah->av.ib.g_slid |= 0x80;  in create_ib_ah()
      52  ah->av.ib.gid_index = ah_attr->grh.sgid_index;  in create_ib_ah()
      53  ah->av.ib.hop_limit = ah_attr->grh.hop_limit;  in create_ib_ah()
      54  ah->av.ib.sl_tclass_flowlabel |=  in create_ib_ah()
      57  memcpy(ah->av.ib.dgid, ah_attr->grh.dgid.raw, 16);  in create_ib_ah()
      60  ah->av.ib.dlid = cpu_to_be16(ah_attr->dlid);  in create_ib_ah()
      62  ah->av.ib.stat_rate = ah_attr->static_rate + MLX4_STAT_RATE_OFFSET;  in create_ib_ah()
    [all …]
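create_ib_ah() packs two values into one big-endian word: the protection
domain number in the low 24 bits of port_pd and the port number in the top
byte. A user-space model of that packing, with htonl()/ntohl() standing in
for the kernel's cpu_to_be32()/be32_to_cpu() and made-up values:

    #include <arpa/inet.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t pdn = 0x00abcd;        /* protection domain, low 24 bits */
            uint8_t port_num = 2;           /* physical port, top byte */
            uint32_t port_pd = htonl(pdn | ((uint32_t)port_num << 24));

            /* unpack again to show the layout */
            uint32_t host = ntohl(port_pd);
            printf("port=%u pdn=0x%06x\n", host >> 24, host & 0xffffff);
            return 0;
    }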
/drivers/s390/char/

D | fs3270.c |
     247  struct idal_buffer *ib;  in fs3270_read() local
     255  ib = idal_buffer_alloc(count, 0);  in fs3270_read()
     256  if (IS_ERR(ib))  in fs3270_read()
     263  raw3270_request_set_idal(rq, ib);  in fs3270_read()
     270  if (idal_buffer_to_user(ib, data, count) != 0)  in fs3270_read()
     280  idal_buffer_free(ib);  in fs3270_read()
     292  struct idal_buffer *ib;  in fs3270_write() local
     299  ib = idal_buffer_alloc(count, 0);  in fs3270_write()
     300  if (IS_ERR(ib))  in fs3270_write()
     304  if (idal_buffer_from_user(ib, data, count) == 0) {  in fs3270_write()
    [all …]
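fs3270_read() and fs3270_write() test idal_buffer_alloc() with IS_ERR():
allocators in this style return either a valid pointer or a negative errno
encoded into the pointer itself. A compact user-space model of the
convention; like the kernel macros, it assumes a pointer fits in a long.

    #include <errno.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define MAX_ERRNO       4095
    #define ERR_PTR(err)    ((void *)(long)(err))
    #define PTR_ERR(ptr)    ((long)(ptr))
    #define IS_ERR(ptr)     ((unsigned long)(ptr) >= (unsigned long)-MAX_ERRNO)

    static void *buffer_alloc(size_t size)
    {
            void *p = malloc(size);

            return p ? p : ERR_PTR(-ENOMEM);  /* errno rides in the pointer */
    }

    int main(void)
    {
            void *ib = buffer_alloc(128);

            if (IS_ERR(ib)) {
                    printf("alloc failed: %ld\n", PTR_ERR(ib));
                    return 1;
            }
            free(ib);
            return 0;
    }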
/drivers/isdn/hysdn/

D | hysdn_proclog.c |
     103  struct log_data *ib;  in put_log_buffer() local
     117  if (!(ib = kmalloc(sizeof(struct log_data) + strlen(cp), GFP_ATOMIC)))  in put_log_buffer()
     119  strcpy(ib->log_start, cp); /* set output string */  in put_log_buffer()
     120  ib->next = NULL;  in put_log_buffer()
     121  ib->proc_ctrl = pd; /* point to own control structure */  in put_log_buffer()
     123  ib->usage_cnt = pd->if_used;  in put_log_buffer()
     125  pd->log_head = ib; /* new head */  in put_log_buffer()
     127  pd->log_tail->next = ib; /* follows existing messages */  in put_log_buffer()
     128  pd->log_tail = ib; /* new tail */  in put_log_buffer()
     137  ib = pd->log_head;  in put_log_buffer()
    [all …]
/drivers/isdn/divert/

D | divert_procfs.c |
      43  struct divert_info *ib;  in put_info_buffer() local
      52  if (!(ib = kmalloc(sizeof(struct divert_info) + strlen(cp), GFP_ATOMIC)))  in put_info_buffer()
      54  strcpy(ib->info_start, cp); /* set output string */  in put_info_buffer()
      55  ib->next = NULL;  in put_info_buffer()
      57  ib->usage_cnt = if_used;  in put_info_buffer()
      59  divert_info_head = ib; /* new head */  in put_info_buffer()
      61  divert_info_tail->next = ib; /* follows existing messages */  in put_info_buffer()
      62  divert_info_tail = ib; /* new tail */  in put_info_buffer()
      68  ib = divert_info_head;  in put_info_buffer()
      70  kfree(ib);  in put_info_buffer()
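put_info_buffer() here and put_log_buffer() in hysdn_proclog.c share one
pattern: each record is allocated with sizeof(struct) + strlen(cp), the
struct's trailing one-element char array supplying the byte for the
terminator, and is appended at the tail of a singly linked list. A
user-space model with a C99 flexible array member, which instead makes the
terminator byte explicit in the allocation:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct info_buf {
            struct info_buf *next;
            char text[];            /* message follows the header */
    };

    static struct info_buf *head, *tail;

    static int put_info(const char *cp)
    {
            struct info_buf *ib = malloc(sizeof(*ib) + strlen(cp) + 1);

            if (!ib)
                    return -1;
            strcpy(ib->text, cp);
            ib->next = NULL;
            if (!head)
                    head = ib;              /* new head */
            else
                    tail->next = ib;        /* follows existing messages */
            tail = ib;                      /* new tail */
            return 0;
    }

    int main(void)
    {
            struct info_buf *ib;

            put_info("call diverted");
            put_info("call cleared");
            while ((ib = head) != NULL) {   /* drain and free, oldest first */
                    printf("%s\n", ib->text);
                    head = ib->next;
                    free(ib);
            }
            return 0;
    }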
/drivers/net/ethernet/brocade/bna/

D | bna_hw_defs.h |
     243  struct bna_ib *ib = _ib; \
     244  if ((ib->intr_type == BNA_INTR_T_INTX)) { \
     246  intx_mask &= ~(ib->intr_vector); \
     249  bna_ib_coalescing_timer_set(&ib->door_bell, \
     250  ib->coalescing_timeo); \
     252  bna_ib_ack(&ib->door_bell, 0); \
     258  struct bna_ib *ib = _ib; \
     260  ib->door_bell.doorbell_addr); \
     261  if (ib->intr_type == BNA_INTR_T_INTX) { \
     263  intx_mask |= ib->intr_vector; \
/drivers/infiniband/core/

D | cma.c |
     136  struct ib_cm_id *ib;  member
     154  struct ib_sa_multicast *ib;  member
     268  kfree(mc->multicast.ib);  in release_mc()
     740  if (!id_priv->cm_id.ib || (id_priv->id.qp_type == IB_QPT_UD))  in rdma_init_qp_attr()
     743  ret = ib_cm_init_qp_attr(id_priv->cm_id.ib, qp_attr,  in rdma_init_qp_attr()
     843  struct sockaddr_ib *listen_ib, *ib;  in cma_save_ib_info() local
     846  ib = (struct sockaddr_ib *) &id->route.addr.src_addr;  in cma_save_ib_info()
     847  ib->sib_family = listen_ib->sib_family;  in cma_save_ib_info()
     848  ib->sib_pkey = path->pkey;  in cma_save_ib_info()
     849  ib->sib_flowinfo = path->flow_label;  in cma_save_ib_info()
    [all …]
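The first two cma.c hits tag ib as a member: the rdma_cm id keeps
transport-specific CM handles side by side and picks the arm by transport
type. A reduced model of that shape (the enum and type names here are
illustrative, not the kernel's):

    #include <stdio.h>

    enum transport { TRANSPORT_IB, TRANSPORT_IW };

    struct ib_cm_id { int id; };
    struct iw_cm_id { int id; };

    struct cm_id_holder {
            enum transport type;
            union {
                    struct ib_cm_id *ib;    /* InfiniBand CM id */
                    struct iw_cm_id *iw;    /* iWARP CM id */
            } cm_id;
    };

    int main(void)
    {
            struct ib_cm_id real = { .id = 42 };
            struct cm_id_holder h = { .type = TRANSPORT_IB, .cm_id.ib = &real };

            if (h.type == TRANSPORT_IB)
                    printf("ib cm id %d\n", h.cm_id.ib->id);
            return 0;
    }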
/drivers/infiniband/hw/ehca/

D | ehca_mrmw.c |
     176  &e_maxmr->ib.ib_mr.lkey,  in ehca_get_dma_mr()
     177  &e_maxmr->ib.ib_mr.rkey);  in ehca_get_dma_mr()
     183  ib_mr = &e_maxmr->ib.ib_mr;  in ehca_get_dma_mr()
     260  e_pd, &e_mr->ib.ib_mr.lkey,  in ehca_reg_phys_mr()
     261  &e_mr->ib.ib_mr.rkey);  in ehca_reg_phys_mr()
     289  e_pd, &pginfo, &e_mr->ib.ib_mr.lkey,  in ehca_reg_phys_mr()
     290  &e_mr->ib.ib_mr.rkey, EHCA_REG_MR);  in ehca_reg_phys_mr()
     298  return &e_mr->ib.ib_mr;  in ehca_reg_phys_mr()
     405  e_pd, &pginfo, &e_mr->ib.ib_mr.lkey,  in ehca_reg_user_mr()
     406  &e_mr->ib.ib_mr.rkey, EHCA_REG_MR);  in ehca_reg_user_mr()
    [all …]