/drivers/gpu/drm/radeon/

D | radeon_ib.c |
    in radeon_ib_get() (argument):
        56   struct radeon_ib *ib, struct radeon_vm *vm,
        61   r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, &ib->sa_bo, size, 256);
        67   radeon_sync_create(&ib->sync);
        69   ib->ring = ring;
        70   ib->fence = NULL;
        71   ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo);
        72   ib->vm = vm;
        77   ib->gpu_addr = ib->sa_bo->soffset + RADEON_VA_IB_OFFSET;
        79   ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo);
        81   ib->is_const_ib = false;
    [all …]

D | ni_dma.c |
    in cayman_dma_ring_ib_execute() (argument):
        123  struct radeon_ib *ib)
        125  struct radeon_ring *ring = &rdev->ring[ib->ring];
        126  unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0;
        145  radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
        146  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
    in cayman_dma_vm_copy_pages() (argument):
        316  struct radeon_ib *ib,
        327  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
        329  ib->ptr[ib->length_dw++] = lower_32_bits(pe);
        330  ib->ptr[ib->length_dw++] = lower_32_bits(src);
        331  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
    [all …]

D | si_dma.c |
    in si_dma_vm_copy_pages() (argument):
        70   struct radeon_ib *ib,
        79   ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
        81   ib->ptr[ib->length_dw++] = lower_32_bits(pe);
        82   ib->ptr[ib->length_dw++] = lower_32_bits(src);
        83   ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
        84   ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;
    in si_dma_vm_write_pages() (argument):
        106  struct radeon_ib *ib,
        120  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);
        121  ib->ptr[ib->length_dw++] = pe;
        122  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
    [all …]

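Both DMA engines above (ni_dma.c and si_dma.c) update GPU page tables by appending fixed-layout packets to the IB's dword array. A sketch of the five-dword COPY packet that si_dma_vm_copy_pages() builds: the DMA_PACKET() header arguments after DMA_PACKET_COPY are truncated in the excerpt, so the `1, 0, 0, count * 8` encoding (trailing length in bytes, 8 bytes per PTE) is an assumption:

    /* copy `count` 8-byte PTEs from the GART table at `src` into the
     * page table at `pe`; addresses are 40-bit, split low/high */
    ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, 1, 0, 0, count * 8);
    ib->ptr[ib->length_dw++] = lower_32_bits(pe);         /* dst, bits 31:0  */
    ib->ptr[ib->length_dw++] = lower_32_bits(src);        /* src, bits 31:0  */
    ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  /* dst, bits 39:32 */
    ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; /* src, bits 39:32 */
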
D | radeon_vce.c |
    in radeon_vce_get_create_msg() (local):
        350  struct radeon_ib ib;
        354  r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4);
        360  dummy = ib.gpu_addr + 1024;
        363  ib.length_dw = 0;
        364  ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */
        365  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */
        366  ib.ptr[ib.length_dw++] = cpu_to_le32(handle);
        368  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000030); /* len */
        369  ib.ptr[ib.length_dw++] = cpu_to_le32(0x01000001); /* create cmd */
        370  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000000);
    [all …]

D | cik_sdma.c |
    in cik_sdma_ring_ib_execute() (argument):
        134  struct radeon_ib *ib)
        136  struct radeon_ring *ring = &rdev->ring[ib->ring];
        137  u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf;
        155  radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */
        156  radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));
        157  radeon_ring_write(ring, ib->length_dw);
    in cik_sdma_ib_test() (local):
        704  struct radeon_ib ib;
        721  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
        727  ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0);
        728  ib.ptr[1] = lower_32_bits(gpu_addr);
    [all …]

D | evergreen_cs.c |
    in evergreen_cs_track_validate_cb() (local):
        450   uint32_t *ib = p->ib.ptr;
        472   ib[track->cb_color_slice_idx[id]] = slice;
    in evergreen_cs_handle_reg() (local):
        1098  u32 tmp, *ib;
        1101  ib = p->ib.ptr;
        1149  ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
        1178  ib[idx] &= ~Z_ARRAY_MODE(0xf);
        1180  ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
        1188  ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));
        1189  ib[idx] |= DB_TILE_SPLIT(tile_split) |
        1221  ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
    [all …]

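The r600/evergreen command-stream checkers treat the submitted IB as a plain dword array and patch it in place before it reaches the hardware. Line 1149 above is the canonical relocation fix-up; a commented sketch of that pattern, with variable context as in the excerpt:

    /* base-address registers hold addresses in units of 256 bytes, so
     * the kernel-chosen GPU offset is shifted down by 8 before being
     * folded into the dword userspace left at `idx` */
    ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);

    /* tiling state is rewritten rather than trusted: clear the field,
     * then refill it from the BO's actual tiling flags */
    ib[idx] &= ~Z_ARRAY_MODE(0xf);
    ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
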
D | radeon_vm.c |
    in radeon_vm_set_pages() (argument):
        360  struct radeon_ib *ib,
        369  radeon_asic_vm_copy_pages(rdev, ib, pe, src, count);
        372  radeon_asic_vm_write_pages(rdev, ib, pe, addr,
        376  radeon_asic_vm_set_pages(rdev, ib, pe, addr,
    in radeon_vm_clear_bo() (local):
        390  struct radeon_ib ib;
        406  r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256);
        410  ib.length_dw = 0;
        412  radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0);
        413  radeon_asic_vm_pad_ib(rdev, &ib);
        414  WARN_ON(ib.length_dw > 64);
    [all …]

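radeon_vm_set_pages() is a dispatcher over the three per-ASIC page-table helpers seen in ni_dma.c and si_dma.c above. The selection conditions are cut from the excerpt; this sketch reconstructs the usual heuristic (the conditions and the GART source computation are assumptions): copy ready-made PTEs out of the GART table when every page is GART-backed, use discrete writes for system pages or very short runs, and use the packed set operation for long linear runs:

    if ((flags & R600_PTE_GART_MASK) == R600_PTE_GART_MASK) {
        /* all pages live in the GART table: DMA-copy the PTEs */
        uint64_t src = rdev->gart.table_addr + (addr >> 12) * 8;
        radeon_asic_vm_copy_pages(rdev, ib, pe, src, count);
    } else if ((flags & R600_PTE_SYSTEM) || (count < 3)) {
        /* scattered system pages or tiny runs: one write per PTE */
        radeon_asic_vm_write_pages(rdev, ib, pe, addr, count, incr, flags);
    } else {
        /* long linear VRAM runs: compact PTE_SET packet */
        radeon_asic_vm_set_pages(rdev, ib, pe, addr, count, incr, flags);
    }
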
D | r600_cs.c |
    in r600_cs_track_validate_cb() (local):
        356  volatile u32 *ib = p->ib.ptr;
        467  ib[track->cb_color_size_idx[i]] = tmp;
    in r600_cs_track_validate_db() (local):
        526  volatile u32 *ib = p->ib.ptr;
        564  ib[track->db_depth_size_idx] = S_028000_SLICE_TILE_MAX(tmp - 1) | (track->db_depth_size & 0x3FF);
    in r600_cs_common_vline_parse() (local):
        834  volatile uint32_t *ib;
        836  ib = p->ib.ptr;
        899  ib[h_idx + 2] = PACKET2(0);
        900  ib[h_idx + 3] = PACKET2(0);
        901  ib[h_idx + 4] = PACKET2(0);
        902  ib[h_idx + 5] = PACKET2(0);
    [all …]

D | r600_dma.c |
    in r600_dma_ib_test() (local):
        339  struct radeon_ib ib;
        353  r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
        359  ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);
        360  ib.ptr[1] = lower_32_bits(gpu_addr);
        361  ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
        362  ib.ptr[3] = 0xDEADBEEF;
        363  ib.length_dw = 4;
        365  r = radeon_ib_schedule(rdev, &ib, NULL, false);
        367  radeon_ib_free(rdev, &ib);
        371  r = radeon_fence_wait_timeout(ib.fence, false, usecs_to_jiffies(
    [all …]

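Taken together with the radeon_ib.c entry above, r600_dma_ib_test() shows the full life cycle of a radeon IB: allocate from the suballocator pool, fill ib.ptr[], submit, wait on the fence the submission attached, free. A condensed sketch of that flow with context (rdev, ring, gpu_addr) as in the excerpt; the write-back readback check and the timeout constant are not visible above and are assumptions:

    struct radeon_ib ib;
    int r;

    r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);  /* 256-byte IB */
    if (r)
        return r;

    /* single fenced DMA write: header, dst low, dst high, payload */
    ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);
    ib.ptr[1] = lower_32_bits(gpu_addr);
    ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
    ib.ptr[3] = 0xDEADBEEF;
    ib.length_dw = 4;

    r = radeon_ib_schedule(rdev, &ib, NULL, false);
    if (r)
        goto free;          /* line 367: free on submission failure */

    /* radeon_ib_schedule() set ib.fence; wait for the GPU to execute */
    r = radeon_fence_wait_timeout(ib.fence, false,
                                  usecs_to_jiffies(RADEON_USEC_IB_TEST_TIMEOUT));
    /* ... read gpu_addr back and compare with 0xDEADBEEF (elided) ... */
    free:
    radeon_ib_free(rdev, &ib);
    return r;
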
D | radeon_cs.c |
    in radeon_cs_parser_relocs():
        176  p->vm_bos = radeon_vm_get_bos(p->rdev, p->ib.vm,
    in radeon_cs_sync_rings():
        241  r = radeon_sync_resv(p->rdev, &p->ib.sync, resv,
    in radeon_cs_parser_init():
        266  p->ib.sa_bo = NULL;
    in radeon_cs_parser_fini():
        414  &parser->ib.fence->base);
        436  radeon_ib_free(parser->rdev, &parser->ib);
    in radeon_cs_ib_chunk():
        470  r = radeon_ib_schedule(rdev, &parser->ib, NULL, true);
    in radeon_bo_vm_update_pte():
        516  radeon_sync_fence(&p->ib.sync, bo_va->last_pt_update);
    in radeon_cs_ib_vm_chunk():
        541  r = radeon_ring_ib_parse(rdev, parser->ring, &parser->ib);
        564  r = radeon_ib_schedule(rdev, &parser->ib, &parser->const_ib, true);
        566  r = radeon_ib_schedule(rdev, &parser->ib, NULL, true);
    [all …]

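These hits trace the CS ioctl path: the parser owns an embedded radeon_ib that is initialized, pointed at the user's buffers, synchronized against other rings, scheduled, and freed on teardown. A condensed reading aid built from the functions above (not the driver's literal control flow; error handling, chunk parsing, and the VM/const-IB variant at lines 541-566 are elided):

    /* inside radeon_cs_ioctl(), roughly: */
    r = radeon_cs_parser_init(&parser, data);   /* 266: parser.ib.sa_bo = NULL */
    r = radeon_cs_parser_relocs(&parser);       /* 176: radeon_vm_get_bos(..., parser.ib.vm, ...) */
    r = radeon_cs_sync_rings(&parser);          /* 241: radeon_sync_resv(..., &parser.ib.sync, ...) */
    r = radeon_ib_schedule(rdev, &parser.ib, NULL, true);   /* 470 */
    radeon_cs_parser_fini(&parser, r, true);    /* 436: radeon_ib_free() */
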
/drivers/gpu/drm/amd/amdgpu/

D | amdgpu_vce.c |
    in amdgpu_vce_get_create_msg() (local):
        411  struct amdgpu_ib *ib;
        420  ib = &job->ibs[0];
        422  dummy = ib->gpu_addr + 1024;
        425  ib->length_dw = 0;
        426  ib->ptr[ib->length_dw++] = 0x0000000c; /* len */
        427  ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */
        428  ib->ptr[ib->length_dw++] = handle;
        431  ib->ptr[ib->length_dw++] = 0x00000040; /* len */
        433  ib->ptr[ib->length_dw++] = 0x00000030; /* len */
        434  ib->ptr[ib->length_dw++] = 0x01000001; /* create cmd */
    [all …]

D | si_dma.c |
    in si_dma_ring_emit_ib() (argument):
        62   struct amdgpu_ib *ib,
        71   amdgpu_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
        72   amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
    in si_dma_ring_test_ib() (local):
        276  struct amdgpu_ib ib;
        292  memset(&ib, 0, sizeof(ib));
        293  r = amdgpu_ib_get(adev, NULL, 256, &ib);
        299  ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, 1);
        300  ib.ptr[1] = lower_32_bits(gpu_addr);
        301  ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
        302  ib.ptr[3] = 0xDEADBEEF;
    [all …]

D | cik_sdma.c |
    in cik_sdma_ring_emit_ib() (argument):
        224  struct amdgpu_ib *ib,
        233  amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */
        234  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff);
        235  amdgpu_ring_write(ring, ib->length_dw);
    in cik_sdma_ring_test_ib() (local):
        624  struct amdgpu_ib ib;
        640  memset(&ib, 0, sizeof(ib));
        641  r = amdgpu_ib_get(adev, NULL, 256, &ib);
        647  ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE,
        649  ib.ptr[1] = lower_32_bits(gpu_addr);
        650  ib.ptr[2] = upper_32_bits(gpu_addr);
    [all …]

D | sdma_v2_4.c |
    in sdma_v2_4_ring_emit_ib() (argument):
        250  struct amdgpu_ib *ib,
        261  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
        262  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
        263  amdgpu_ring_write(ring, ib->length_dw);
    in sdma_v2_4_ring_test_ib() (local):
        670  struct amdgpu_ib ib;
        686  memset(&ib, 0, sizeof(ib));
        687  r = amdgpu_ib_get(adev, NULL, 256, &ib);
        693  ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
        695  ib.ptr[1] = lower_32_bits(gpu_addr);
        696  ib.ptr[2] = upper_32_bits(gpu_addr);
    [all …]

D | sdma_v3_0.c |
    in sdma_v3_0_ring_emit_ib() (argument):
        410  struct amdgpu_ib *ib,
        421  amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
        422  amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
        423  amdgpu_ring_write(ring, ib->length_dw);
    in sdma_v3_0_ring_test_ib() (local):
        873  struct amdgpu_ib ib;
        889  memset(&ib, 0, sizeof(ib));
        890  r = amdgpu_ib_get(adev, NULL, 256, &ib);
        896  ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
        898  ib.ptr[1] = lower_32_bits(gpu_addr);
        899  ib.ptr[2] = upper_32_bits(gpu_addr);
    [all …]

D | amdgpu_ib.c |
    in amdgpu_ib_get() (argument):
        61   unsigned size, struct amdgpu_ib *ib)
        67   &ib->sa_bo, size, 256);
        73   ib->ptr = amdgpu_sa_bo_cpu_addr(ib->sa_bo);
        76   ib->gpu_addr = amdgpu_sa_bo_gpu_addr(ib->sa_bo);
    in amdgpu_ib_free() (argument):
        91   void amdgpu_ib_free(struct amdgpu_device *adev, struct amdgpu_ib *ib,
        94   amdgpu_sa_bo_free(adev, &ib->sa_bo, f);
    in amdgpu_ib_schedule() (local):
        123  struct amdgpu_ib *ib = &ibs[0];
        191  ib = &ibs[i];
        194  if ((ib->flags & AMDGPU_IB_FLAG_PREAMBLE) &&
        199  amdgpu_ring_emit_ib(ring, ib, job ? job->vm_id : 0,

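The amdgpu side mirrors the radeon pattern: amdgpu_ib_get() carves the IB out of a suballocated BO, the ring tests fill ib.ptr[] with one SDMA write, and amdgpu_ib_schedule() walks the ibs[] array and emits each entry onto the ring via amdgpu_ring_emit_ib(). A sketch of the ring-test usage reconstructed from the sdma_v2_4/v3_0 excerpts; the second header dword, the count/payload dwords, and the submit call are not visible above and are assumptions (amdgpu_ib_schedule()'s signature changed repeatedly in this era, so submission is left as a comment):

    struct amdgpu_ib ib;
    int r;

    memset(&ib, 0, sizeof(ib));
    r = amdgpu_ib_get(adev, NULL, 256, &ib);   /* NULL vm: kernel-owned IB */
    if (r)
        return r;

    /* one linear SDMA write of 0xDEADBEEF to gpu_addr */
    ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
                SDMA_PKT_HEADER_SUB_OP(SDMA_SUBOP_WRITE_LINEAR); /* continuation assumed */
    ib.ptr[1] = lower_32_bits(gpu_addr);
    ib.ptr[2] = upper_32_bits(gpu_addr);
    ib.ptr[3] = 1;                 /* write count (encoding assumed) */
    ib.ptr[4] = 0xDEADBEEF;
    ib.length_dw = 5;

    /* submit via amdgpu_ib_schedule(), wait the returned fence, then: */
    amdgpu_ib_free(adev, &ib, NULL);
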
/drivers/net/ethernet/amd/

D | 7990.c |
    (macro body, no function context):
        99   t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
        100  ib->brx_ring[t].length, \
        101  ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
        105  t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
        106  ib->btx_ring[t].length, \
        107  ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
    in lance_init_ring() (local):
        139  volatile struct lance_init_block *ib = lp->init_block;
        149  ib->mode = LE_MO_PROM; /* normal, enable Tx & Rx */
        162  ib->phys_addr[0] = dev->dev_addr[1];
        163  ib->phys_addr[1] = dev->dev_addr[0];
    [all …]

D | sunlance.c |
    in lance_init_ring_dvma() (local):
        323  struct lance_init_block *ib = lp->init_block_mem;
        336  ib->phys_addr [0] = dev->dev_addr [1];
        337  ib->phys_addr [1] = dev->dev_addr [0];
        338  ib->phys_addr [2] = dev->dev_addr [3];
        339  ib->phys_addr [3] = dev->dev_addr [2];
        340  ib->phys_addr [4] = dev->dev_addr [5];
        341  ib->phys_addr [5] = dev->dev_addr [4];
        346  ib->btx_ring [i].tmd0 = leptr;
        347  ib->btx_ring [i].tmd1_hadr = leptr >> 16;
        348  ib->btx_ring [i].tmd1_bits = 0;
    [all …]

D | a2065.c |
    in lance_init_ring() (local):
        152  volatile struct lance_init_block *ib = lp->init_block;
        163  ib->mode = 0;
        168  ib->phys_addr[0] = dev->dev_addr[1];
        169  ib->phys_addr[1] = dev->dev_addr[0];
        170  ib->phys_addr[2] = dev->dev_addr[3];
        171  ib->phys_addr[3] = dev->dev_addr[2];
        172  ib->phys_addr[4] = dev->dev_addr[5];
        173  ib->phys_addr[5] = dev->dev_addr[4];
        179  ib->btx_ring[i].tmd0 = leptr;
        180  ib->btx_ring[i].tmd1_hadr = leptr >> 16;
    [all …]

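All of the LANCE drivers in this directory load the station address into the chip's init block with the two bytes of each 16-bit word swapped, because the LANCE fetches the init block as little-endian 16-bit words (declance.c below packs the same swapped bytes into explicit u16 words). An equivalent loop form of the longhand assignments shown in 7990.c, sunlance.c, and a2065.c; the helper name is hypothetical, the drivers write the six assignments out one by one:

    /* copy the 6-byte MAC into the init block, swapping within each
     * 16-bit word so the LANCE sees the address in the right order */
    static void lance_load_mac(volatile struct lance_init_block *ib,
                               const u8 *dev_addr)
    {
        int i;

        for (i = 0; i < 6; i += 2) {
            ib->phys_addr[i]     = dev_addr[i + 1];
            ib->phys_addr[i + 1] = dev_addr[i];
        }
    }
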
D | declance.c |
    lib_ptr macro (argument):
        234  #define lib_ptr(ib, rt, type) \
        235      ((volatile u16 *)((u8 *)(ib) + lib_off(rt, type)))
    in lance_init_ring() (local):
        451  volatile u16 *ib = (volatile u16 *)dev->mem_start;
        463  *lib_ptr(ib, phys_addr[0], lp->type) = (dev->dev_addr[1] << 8) |
        465  *lib_ptr(ib, phys_addr[1], lp->type) = (dev->dev_addr[3] << 8) |
        467  *lib_ptr(ib, phys_addr[2], lp->type) = (dev->dev_addr[5] << 8) |
        473  *lib_ptr(ib, rx_len, lp->type) = (LANCE_LOG_RX_BUFFERS << 13) |
        475  *lib_ptr(ib, rx_ptr, lp->type) = leptr;
        482  *lib_ptr(ib, tx_len, lp->type) = (LANCE_LOG_TX_BUFFERS << 13) |
        484  *lib_ptr(ib, tx_ptr, lp->type) = leptr;
    [all …]

/drivers/infiniband/hw/mlx4/

D | ah.c |
    in create_ib_ah():
        48   ah->av.ib.port_pd = cpu_to_be32(to_mpd(pd)->pdn | (ah_attr->port_num << 24));
        49   ah->av.ib.g_slid = ah_attr->src_path_bits;
        50   ah->av.ib.sl_tclass_flowlabel = cpu_to_be32(ah_attr->sl << 28);
        52   ah->av.ib.g_slid |= 0x80;
        53   ah->av.ib.gid_index = ah_attr->grh.sgid_index;
        54   ah->av.ib.hop_limit = ah_attr->grh.hop_limit;
        55   ah->av.ib.sl_tclass_flowlabel |=
        58   memcpy(ah->av.ib.dgid, ah_attr->grh.dgid.raw, 16);
        61   ah->av.ib.dlid = cpu_to_be16(ah_attr->dlid);
        63   ah->av.ib.stat_rate = ah_attr->static_rate + MLX4_STAT_RATE_OFFSET;
    [all …]

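Here `ib` is the InfiniBand member of the mlx4 address-vector union, and the GRH fields at lines 52-58 are only filled when the destination needs a Global Route Header. A sketch of the conditional structure flattened out of the excerpt; the `IB_AH_GRH` test is an assumption based on the standard ib_ah_attr flags:

    /* LID-routed fields are always set */
    ah->av.ib.g_slid = ah_attr->src_path_bits;

    if (ah_attr->ah_flags & IB_AH_GRH) {
        ah->av.ib.g_slid |= 0x80;            /* mark the AV as global */
        ah->av.ib.gid_index = ah_attr->grh.sgid_index;
        ah->av.ib.hop_limit = ah_attr->grh.hop_limit;
        memcpy(ah->av.ib.dgid, ah_attr->grh.dgid.raw, 16);
    }
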
/drivers/s390/char/

D | fs3270.c |
    in fs3270_read() (local):
        246  struct idal_buffer *ib;
        254  ib = idal_buffer_alloc(count, 0);
        255  if (IS_ERR(ib))
        262  raw3270_request_set_idal(rq, ib);
        269  if (idal_buffer_to_user(ib, data, count) != 0)
        279  idal_buffer_free(ib);
    in fs3270_write() (local):
        291  struct idal_buffer *ib;
        298  ib = idal_buffer_alloc(count, 0);
        299  if (IS_ERR(ib))
        303  if (idal_buffer_from_user(ib, data, count) == 0) {
    [all …]

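Both paths follow the same IDAL (indirect data address list) discipline: allocate an idal_buffer sized for the transfer, attach it to the raw3270 channel request, copy between it and user space, and free it. A sketch of the write direction assembled from the lines above; request construction and submission are elided:

    struct idal_buffer *ib;

    ib = idal_buffer_alloc(count, 0);       /* channel-addressable buffer */
    if (IS_ERR(ib))
        return -ENOMEM;

    if (idal_buffer_from_user(ib, data, count) == 0) {
        /* point the channel program at the IDAL and start I/O (elided) */
        raw3270_request_set_idal(rq, ib);
    }

    idal_buffer_free(ib);
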
/drivers/isdn/hysdn/

D | hysdn_proclog.c |
    in put_log_buffer() (local):
        103  struct log_data *ib;
        117  if (!(ib = kmalloc(sizeof(struct log_data) + strlen(cp), GFP_ATOMIC)))
        119  strcpy(ib->log_start, cp); /* set output string */
        120  ib->next = NULL;
        121  ib->proc_ctrl = pd; /* point to own control structure */
        123  ib->usage_cnt = pd->if_used;
        125  pd->log_head = ib; /* new head */
        127  pd->log_tail->next = ib; /* follows existing messages */
        128  pd->log_tail = ib; /* new tail */
        137  ib = pd->log_head;
    [all …]

/drivers/isdn/divert/

D | divert_procfs.c |
    in put_info_buffer() (local):
        43   struct divert_info *ib;
        52   if (!(ib = kmalloc(sizeof(struct divert_info) + strlen(cp), GFP_ATOMIC)))
        54   strcpy(ib->info_start, cp); /* set output string */
        55   ib->next = NULL;
        57   ib->usage_cnt = if_used;
        59   divert_info_head = ib; /* new head */
        61   divert_info_tail->next = ib; /* follows existing messages */
        62   divert_info_tail = ib; /* new tail */
        68   ib = divert_info_head;
        70   kfree(ib);

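Both ISDN excerpts (hysdn_proclog.c above and this one) use the same append pattern: a node with an inline string tail is kmalloc'ed under GFP_ATOMIC and linked at the tail of a singly-linked message list. A generic, self-contained sketch of that pattern; all names are hypothetical. Note the drivers declare a one-byte array, so their `sizeof(struct) + strlen(cp)` already covers the NUL, whereas a C99 flexible array needs the explicit `+ 1`:

    #include <linux/slab.h>
    #include <linux/string.h>

    struct info_node {
        struct info_node *next;
        unsigned long usage_cnt;   /* readers that still need this entry */
        char text[];               /* message stored inline */
    };

    static struct info_node *info_head, *info_tail;

    /* GFP_ATOMIC: callers may run in atomic context */
    static void append_info(const char *cp, unsigned long readers)
    {
        struct info_node *ib = kmalloc(sizeof(*ib) + strlen(cp) + 1, GFP_ATOMIC);

        if (!ib)
            return;
        strcpy(ib->text, cp);      /* set output string */
        ib->next = NULL;
        ib->usage_cnt = readers;
        if (!info_head)
            info_head = ib;        /* first message: new head */
        else
            info_tail->next = ib;  /* follows existing messages */
        info_tail = ib;            /* new tail */
    }
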
/drivers/net/ethernet/brocade/bna/

D | bna_hw_defs.h |
    (macro bodies, no function context):
        244  struct bna_ib *ib = _ib; \
        245  if ((ib->intr_type == BNA_INTR_T_INTX)) { \
        247  intx_mask &= ~(ib->intr_vector); \
        250  bna_ib_coalescing_timer_set(&ib->door_bell, \
        251  ib->coalescing_timeo); \
        253  bna_ib_ack(&ib->door_bell, 0); \
        259  struct bna_ib *ib = _ib; \
        261  ib->door_bell.doorbell_addr); \
        262  if (ib->intr_type == BNA_INTR_T_INTX) { \
        264  intx_mask |= ib->intr_vector; \