/drivers/gpu/drm/radeon/

radeon_vce.c
     345  ib.length_dw = 0;  in radeon_vce_get_create_msg()
     346  ib.ptr[ib.length_dw++] = 0x0000000c; /* len */  in radeon_vce_get_create_msg()
     347  ib.ptr[ib.length_dw++] = 0x00000001; /* session cmd */  in radeon_vce_get_create_msg()
     348  ib.ptr[ib.length_dw++] = handle;  in radeon_vce_get_create_msg()
     350  ib.ptr[ib.length_dw++] = 0x00000030; /* len */  in radeon_vce_get_create_msg()
     351  ib.ptr[ib.length_dw++] = 0x01000001; /* create cmd */  in radeon_vce_get_create_msg()
     352  ib.ptr[ib.length_dw++] = 0x00000000;  in radeon_vce_get_create_msg()
     353  ib.ptr[ib.length_dw++] = 0x00000042;  in radeon_vce_get_create_msg()
     354  ib.ptr[ib.length_dw++] = 0x0000000a;  in radeon_vce_get_create_msg()
     355  ib.ptr[ib.length_dw++] = 0x00000001;  in radeon_vce_get_create_msg()
     [all …]

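Every radeon_vce.c hit above is the same indirect-buffer (IB) construction idiom: length_dw is reset to zero and post-incremented as each command dword is appended, so it serves both as the write index while building and as the final packet size afterwards. A minimal userspace sketch of the pattern, using a simplified stand-in rather than the real struct radeon_ib:

    #include <stdint.h>
    #include <stdio.h>

    /* Simplified stand-in for the driver's IB; the real struct radeon_ib
     * (see the radeon.h hits below) carries more state. */
    struct ib_sketch {
            uint32_t *ptr;       /* CPU pointer to the command buffer */
            uint32_t length_dw;  /* dwords written so far */
    };

    int main(void)
    {
            uint32_t buf[16] = { 0 };
            struct ib_sketch ib = { .ptr = buf, .length_dw = 0 };
            uint32_t handle = 1;  /* hypothetical session handle */

            /* Append dwords as radeon_vce_get_create_msg() does:
             * each store bumps length_dw. */
            ib.ptr[ib.length_dw++] = 0x0000000c; /* len */
            ib.ptr[ib.length_dw++] = 0x00000001; /* session cmd */
            ib.ptr[ib.length_dw++] = handle;

            printf("IB now holds %u dwords\n", ib.length_dw);
            return 0;
    }

The same post-increment idiom recurs in the DMA and SDMA page-table writers below, which is why radeon_ib.c line 131 can treat a zero length_dw as "nothing to submit".
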
si_dma.c
      79  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in si_dma_vm_copy_pages()
      81  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_copy_pages()
      82  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in si_dma_vm_copy_pages()
      83  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_copy_pages()
      84  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in si_dma_vm_copy_pages()
     120  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);  in si_dma_vm_write_pages()
     121  ib->ptr[ib->length_dw++] = pe;  in si_dma_vm_write_pages()
     122  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_write_pages()
     134  ib->ptr[ib->length_dw++] = value;  in si_dma_vm_write_pages()
     135  ib->ptr[ib->length_dw++] = upper_32_bits(value);  in si_dma_vm_write_pages()
     [all …]

ni_dma.c
     145  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in cayman_dma_ring_ib_execute()
     326  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in cayman_dma_vm_copy_pages()
     328  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cayman_dma_vm_copy_pages()
     329  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cayman_dma_vm_copy_pages()
     330  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_copy_pages()
     331  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in cayman_dma_vm_copy_pages()
     367  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE,  in cayman_dma_vm_write_pages()
     369  ib->ptr[ib->length_dw++] = pe;  in cayman_dma_vm_write_pages()
     370  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_write_pages()
     382  ib->ptr[ib->length_dw++] = value;  in cayman_dma_vm_write_pages()
     [all …]

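The first ni_dma.c hit (line 145) shows how the Cayman DMA ring encodes an IB for execution: length_dw and the high byte of the GPU address share a single ring dword. A hedged sketch of that packing, with the kernel's upper_32_bits() helper open-coded and the sample values invented:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t gpu_addr = 0x0000001234567000ULL; /* hypothetical IB address */
            uint32_t length_dw = 10;                   /* hypothetical IB size */

            uint32_t hi = (uint32_t)(gpu_addr >> 32);  /* upper_32_bits() */

            /* As at ni_dma.c line 145: the size goes above bit 12 and
             * address bits 39:32 into the low byte of the same dword. */
            uint32_t dw = (length_dw << 12) | (hi & 0xFF);

            printf("ring dword: 0x%08x\n", dw);
            return 0;
    }

The field position is generation-specific: r600_dma.c line 422 below shifts the length by 16 where the Cayman and Evergreen DMA rings shift by 12, and the gfx rings in ni.c, si.c and cik.c instead OR length_dw into a control word.
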
cik_sdma.c
     157  radeon_ring_write(ring, ib->length_dw);  in cik_sdma_ring_ib_execute()
     707  ib.length_dw = 5;  in cik_sdma_ib_test()
     783  ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY,  in cik_sdma_vm_copy_pages()
     785  ib->ptr[ib->length_dw++] = bytes;  in cik_sdma_vm_copy_pages()
     786  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in cik_sdma_vm_copy_pages()
     787  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cik_sdma_vm_copy_pages()
     788  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in cik_sdma_vm_copy_pages()
     789  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cik_sdma_vm_copy_pages()
     790  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in cik_sdma_vm_copy_pages()
     826  ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE,  in cik_sdma_vm_write_pages()
     [all …]

radeon_cs.c
      90  p->nrelocs = chunk->length_dw / 4;  in radeon_cs_parser_relocs()
     319  p->chunks[i].length_dw = user_chunk.length_dw;  in radeon_cs_parser_init()
     327  if (p->chunks[i].length_dw == 0)  in radeon_cs_parser_init()
     333  if (p->chunks[i].length_dw == 0)  in radeon_cs_parser_init()
     339  if (p->chunks[i].length_dw == 0)  in radeon_cs_parser_init()
     343  size = p->chunks[i].length_dw;  in radeon_cs_parser_init()
     364  if (p->chunks[i].length_dw > 1)  in radeon_cs_parser_init()
     366  if (p->chunks[i].length_dw > 2)  in radeon_cs_parser_init()
     557  if (parser->const_ib.length_dw) {  in radeon_cs_ib_vm_chunk()
     624  if (ib_chunk->length_dw > RADEON_IB_VM_MAX_SIZE) {  in radeon_cs_ib_fill()
     [all …]

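radeon_cs_parser_init() copies length_dw straight from the userspace chunk descriptor (line 319) and then sanity-checks it: empty chunks are rejected at lines 327-339 and oversized IBs at line 624, while line 90 derives the relocation count as length_dw / 4, one relocation per four dwords. A hedged userspace sketch of that validation shape, with a placeholder value standing in for RADEON_IB_VM_MAX_SIZE:

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder; the real RADEON_IB_VM_MAX_SIZE is defined in radeon.h. */
    #define IB_VM_MAX_SIZE_DW 16384

    /* Mirrors the shape of the checks in radeon_cs_parser_init() and
     * radeon_cs_ib_fill(): a user-supplied dword count must be non-zero
     * and within the per-IB limit before it is trusted. */
    static int check_chunk_length(uint32_t length_dw)
    {
            if (length_dw == 0)
                    return -1;  /* nothing to parse */
            if (length_dw > IB_VM_MAX_SIZE_DW)
                    return -1;  /* larger than one IB may be */
            return 0;
    }

    int main(void)
    {
            printf("0 dw  -> %d\n", check_chunk_length(0));
            printf("8 dw  -> %d\n", check_chunk_length(8));
            printf("1M dw -> %d\n", check_chunk_length(1u << 20));
            return 0;
    }
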
radeon_vm.c
     410  ib.length_dw = 0;  in radeon_vm_clear_bo()
     414  WARN_ON(ib.length_dw > 64);  in radeon_vm_clear_bo()
     657  ib.length_dw = 0;  in radeon_vm_update_page_directory()
     694  if (ib.length_dw != 0) {  in radeon_vm_update_page_directory()
     699  WARN_ON(ib.length_dw > ndw);  in radeon_vm_update_page_directory()
     985  ib.length_dw = 0;  in radeon_vm_bo_update()
     992  WARN_ON(ib.length_dw > ndw);  in radeon_vm_bo_update()

radeon_uvd.c
     542  if (idx >= relocs_chunk->length_dw) {  in radeon_uvd_cs_reloc()
     544  idx, relocs_chunk->length_dw);  in radeon_uvd_cs_reloc()
     656  if (p->chunks[p->chunk_ib_idx].length_dw % 16) {  in radeon_uvd_cs_parse()
     658  p->chunks[p->chunk_ib_idx].length_dw);  in radeon_uvd_cs_parse()
     686  } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw);  in radeon_uvd_cs_parse()
     715  ib.length_dw = 16;  in radeon_uvd_send_msg()

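Two further checks appear in the UVD paths above: a relocation index is bounds-checked against the chunk's length_dw (line 542) and the IB itself must be a whole number of 16-dword blocks (line 656). A minimal sketch of both checks, in the same hedged userspace style:

    #include <stdint.h>
    #include <stdio.h>

    static int check_uvd_ib(uint32_t reloc_idx, uint32_t relocs_len_dw,
                            uint32_t ib_len_dw)
    {
            /* As at radeon_uvd.c line 542: index must fall inside the chunk. */
            if (reloc_idx >= relocs_len_dw) {
                    fprintf(stderr, "reloc %u out of %u dwords\n",
                            reloc_idx, relocs_len_dw);
                    return -1;
            }
            /* As at line 656: the IB length must be 16-dword aligned. */
            if (ib_len_dw % 16) {
                    fprintf(stderr, "IB length %u not a multiple of 16\n",
                            ib_len_dw);
                    return -1;
            }
            return 0;
    }

    int main(void)
    {
            printf("%d\n", check_uvd_ib(2, 8, 32));  /* ok */
            printf("%d\n", check_uvd_ib(9, 8, 32));  /* index out of range */
            printf("%d\n", check_uvd_ib(2, 8, 30));  /* misaligned length */
            return 0;
    }
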
r600_cs.c
    2319  } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw);  in r600_cs_parse()
    2321  for (r = 0; r < p->ib.length_dw; r++) {  in r600_cs_parse()
    2402  parser.ib.length_dw = ib_chunk->length_dw;  in r600_cs_legacy()
    2403  *l = parser.ib.length_dw;  in r600_cs_legacy()
    2404  if (copy_from_user(ib, ib_chunk->user_ptr, ib_chunk->length_dw * 4)) {  in r600_cs_legacy()
    2484  if (p->idx >= ib_chunk->length_dw) {  in r600_dma_cs_parse()
    2486  p->idx, ib_chunk->length_dw);  in r600_dma_cs_parse()
    2622  } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw);  in r600_dma_cs_parse()
    2624  for (r = 0; r < p->ib->length_dw; r++) {  in r600_dma_cs_parse()

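The r600_cs_legacy() hit at line 2404 is a reminder that length_dw counts 32-bit dwords, so the byte size handed to copy_from_user() is length_dw * 4; the parse loops at lines 2319 and 2622 then advance p->idx packet by packet until it reaches that dword count. A hedged sketch of the loop shape, with an invented fixed-size packet format:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            /* Hypothetical 6-dword command stream: two 3-dword packets. */
            uint32_t ib[] = { 0xC0001000, 0x1, 0x2, 0xC0001000, 0x3, 0x4 };
            uint32_t length_dw = sizeof(ib) / 4; /* bytes -> dwords */
            uint32_t idx = 0;

            /* Walk packet by packet until the dword count is exhausted,
             * as the do/while loops in r600_cs_parse() and
             * r600_dma_cs_parse() do with p->idx. */
            do {
                    printf("packet header 0x%08x at dw %u\n", ib[idx], idx);
                    idx += 3; /* this sketch's fixed packet length */
            } while (idx < length_dw);

            printf("consumed %u of %u dwords (%zu bytes)\n",
                   idx, length_dw, (size_t)length_dw * 4);
            return 0;
    }
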
r600_dma.c
     363  ib.length_dw = 4;  in r600_dma_ib_test()
     422  radeon_ring_write(ring, (ib->length_dw << 16) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in r600_dma_ring_ib_execute()

evergreen_dma.c
      90  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in evergreen_dma_ring_ib_execute()

radeon_ib.c
     131  if (!ib->length_dw || !ring->ready) {  in radeon_ib_schedule()

radeon_trace.h
      41  __entry->dw = p->chunks[p->chunk_ib_idx].length_dw;

evergreen_cs.c
    2664  } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw);  in evergreen_cs_parse()
    2666  for (r = 0; r < p->ib.length_dw; r++) {  in evergreen_cs_parse()
    2696  if (p->idx >= ib_chunk->length_dw) {  in evergreen_dma_cs_parse()
    2698  p->idx, ib_chunk->length_dw);  in evergreen_dma_cs_parse()
    3103  } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw);  in evergreen_dma_cs_parse()
    3105  for (r = 0; r < p->ib->length_dw; r++) {  in evergreen_dma_cs_parse()
    3412  } while (idx < ib->length_dw);  in evergreen_ib_parse()
    3514  } while (idx < ib->length_dw);  in evergreen_dma_ib_parse()

uvd_v1_0.c
     488  radeon_ring_write(ring, ib->length_dw);  in uvd_v1_0_ib_execute()

r100.c
    2074  } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw);  in r100_cs_parse()
    3688  radeon_ring_write(ring, ib->length_dw);  in r100_ring_ib_execute()
    3718  ib.length_dw = 8;  in r100_ib_test()

r300.c
    1292  } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw);  in r300_cs_parse()

radeon.h
     812  uint32_t length_dw;  member
    1043  uint32_t length_dw;  member

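radeon.h declares length_dw twice (lines 812 and 1043), once per structure. Judging from the call sites above, one counts the dwords written into a kernel IB and the other the dwords of a userspace CS chunk; a loosely hedged reconstruction of just those two roles, with every field and struct name other than length_dw assumed:

    #include <stdint.h>

    struct ib_like {             /* assumed shape, not the real struct */
            uint32_t *ptr;       /* dwords being appended by the driver */
            uint32_t length_dw;  /* radeon.h line 812 */
    };

    struct chunk_like {          /* assumed shape, not the real struct */
            void *user_ptr;      /* userspace data to validate and copy */
            uint32_t length_dw;  /* radeon.h line 1043 */
    };

    int main(void) { return 0; }
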
ni.c
    1397  radeon_ring_write(ring, ib->length_dw |  in cayman_ring_ib_execute()

si.c
    3403  radeon_ring_write(ring, ib->length_dw |  in si_ring_ib_execute()
    4701  for (i = 0; i < ib->length_dw; i++) {  in si_ib_parse()
    4740  } while (idx < ib->length_dw);  in si_ib_parse()

r600.c
    3283  radeon_ring_write(ring, ib->length_dw);  in r600_ring_ib_execute()
    3308  ib.length_dw = 3;  in r600_ib_test()

cik.c
    4091  control |= ib->length_dw |  in cik_ring_ib_execute()
    4137  ib.length_dw = 3;  in cik_ib_test()

evergreen.c
    2821  radeon_ring_write(ring, ib->length_dw);  in evergreen_ring_ib_execute()