
Searched refs: length_dw (Results 1 – 22 of 22) sorted by relevance

/drivers/gpu/drm/radeon/
radeon_vce.c   345 ib.length_dw = 0; in radeon_vce_get_create_msg()
346 ib.ptr[ib.length_dw++] = 0x0000000c; /* len */ in radeon_vce_get_create_msg()
347 ib.ptr[ib.length_dw++] = 0x00000001; /* session cmd */ in radeon_vce_get_create_msg()
348 ib.ptr[ib.length_dw++] = handle; in radeon_vce_get_create_msg()
350 ib.ptr[ib.length_dw++] = 0x00000030; /* len */ in radeon_vce_get_create_msg()
351 ib.ptr[ib.length_dw++] = 0x01000001; /* create cmd */ in radeon_vce_get_create_msg()
352 ib.ptr[ib.length_dw++] = 0x00000000; in radeon_vce_get_create_msg()
353 ib.ptr[ib.length_dw++] = 0x00000042; in radeon_vce_get_create_msg()
354 ib.ptr[ib.length_dw++] = 0x0000000a; in radeon_vce_get_create_msg()
355 ib.ptr[ib.length_dw++] = 0x00000001; in radeon_vce_get_create_msg()
[all …]
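
These hits show the standard radeon IB construction idiom: length_dw starts at zero and acts as a write cursor into the mapped dword buffer, post-incremented once per store. A minimal userspace sketch of the same pattern (the struct and command values here are illustrative, not the driver's exact types):

#include <stdint.h>

struct ib_sketch {
	uint32_t *ptr;      /* mapped command buffer */
	uint32_t length_dw; /* dwords written so far */
};

static void fill_create_msg_sketch(struct ib_sketch *ib, uint32_t handle)
{
	ib->length_dw = 0;                     /* start empty */
	ib->ptr[ib->length_dw++] = 0x0000000c; /* len */
	ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */
	ib->ptr[ib->length_dw++] = handle;
	/* length_dw now tells the submit path how many dwords to run */
}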
si_dma.c   79 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pages()
81 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
82 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
83 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
84 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pages()
120 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
121 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
122 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
134 ib->ptr[ib->length_dw++] = value; in si_dma_vm_write_pages()
135 ib->ptr[ib->length_dw++] = upper_32_bits(value); in si_dma_vm_write_pages()
[all …]
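
Note the & 0xff on the upper halves: these SI DMA packets carry 40-bit GPU addresses, so only bits 39:32 of the upper dword are significant. A sketch of that split (helper name is illustrative):

#include <stdint.h>

static void emit_addr40(uint32_t *ptr, uint32_t *length_dw, uint64_t addr)
{
	ptr[(*length_dw)++] = (uint32_t)addr;                /* bits 31:0 */
	ptr[(*length_dw)++] = (uint32_t)(addr >> 32) & 0xff; /* bits 39:32 */
}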
ni_dma.c   145 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
326 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in cayman_dma_vm_copy_pages()
328 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
329 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
330 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
331 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in cayman_dma_vm_copy_pages()
367 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, in cayman_dma_vm_write_pages()
369 ib->ptr[ib->length_dw++] = pe; in cayman_dma_vm_write_pages()
370 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_write_pages()
382 ib->ptr[ib->length_dw++] = value; in cayman_dma_vm_write_pages()
[all …]
cik_sdma.c   157 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
707 ib.length_dw = 5; in cik_sdma_ib_test()
783 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY, in cik_sdma_vm_copy_pages()
785 ib->ptr[ib->length_dw++] = bytes; in cik_sdma_vm_copy_pages()
786 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in cik_sdma_vm_copy_pages()
787 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cik_sdma_vm_copy_pages()
788 ib->ptr[ib->length_dw++] = upper_32_bits(src); in cik_sdma_vm_copy_pages()
789 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cik_sdma_vm_copy_pages()
790 ib->ptr[ib->length_dw++] = upper_32_bits(pe); in cik_sdma_vm_copy_pages()
826 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE, in cik_sdma_vm_write_pages()
[all …]
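
By contrast with the SI/NI hits above, the CIK SDMA copy packet stores upper_32_bits() unmasked: SDMA takes full 64-bit source and destination addresses. A sketch of that difference (packet header omitted):

#include <stdint.h>

static void emit_addr64(uint32_t *ptr, uint32_t *length_dw, uint64_t addr)
{
	ptr[(*length_dw)++] = (uint32_t)addr;         /* bits 31:0 */
	ptr[(*length_dw)++] = (uint32_t)(addr >> 32); /* bits 63:32, no mask */
}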
radeon_cs.c   90 p->nrelocs = chunk->length_dw / 4; in radeon_cs_parser_relocs()
319 p->chunks[i].length_dw = user_chunk.length_dw; in radeon_cs_parser_init()
327 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
333 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
339 if (p->chunks[i].length_dw == 0) in radeon_cs_parser_init()
343 size = p->chunks[i].length_dw; in radeon_cs_parser_init()
364 if (p->chunks[i].length_dw > 1) in radeon_cs_parser_init()
366 if (p->chunks[i].length_dw > 2) in radeon_cs_parser_init()
557 if (parser->const_ib.length_dw) { in radeon_cs_ib_vm_chunk()
624 if (ib_chunk->length_dw > RADEON_IB_VM_MAX_SIZE) { in radeon_cs_ib_fill()
[all …]
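
radeon_cs.c treats length_dw as untrusted input: it is copied from the userspace chunk descriptor, zero-length chunks are rejected, and the dword count is converted to bytes before the copy. A sketch of that validation (the cap value here is illustrative, not the driver's RADEON_IB_VM_MAX_SIZE):

#include <errno.h>
#include <stddef.h>
#include <stdint.h>

#define MAX_IB_DW 4096 /* illustrative limit only */

static int validate_chunk_len(uint32_t length_dw, size_t *bytes)
{
	if (length_dw == 0)
		return -EINVAL;         /* empty chunk */
	if (length_dw > MAX_IB_DW)
		return -EINVAL;         /* oversized IB */
	*bytes = (size_t)length_dw * 4; /* dwords -> bytes for the copy */
	return 0;
}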
radeon_vm.c   410 ib.length_dw = 0; in radeon_vm_clear_bo()
414 WARN_ON(ib.length_dw > 64); in radeon_vm_clear_bo()
657 ib.length_dw = 0; in radeon_vm_update_page_directory()
694 if (ib.length_dw != 0) { in radeon_vm_update_page_directory()
699 WARN_ON(ib.length_dw > ndw); in radeon_vm_update_page_directory()
985 ib.length_dw = 0; in radeon_vm_bo_update()
992 WARN_ON(ib.length_dw > ndw); in radeon_vm_bo_update()
radeon_uvd.c   542 if (idx >= relocs_chunk->length_dw) { in radeon_uvd_cs_reloc()
544 idx, relocs_chunk->length_dw); in radeon_uvd_cs_reloc()
656 if (p->chunks[p->chunk_ib_idx].length_dw % 16) { in radeon_uvd_cs_parse()
658 p->chunks[p->chunk_ib_idx].length_dw); in radeon_uvd_cs_parse()
686 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw); in radeon_uvd_cs_parse()
715 ib.length_dw = 16; in radeon_uvd_send_msg()
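
The UVD parser enforces 16-dword granularity on incoming IBs (the % 16 check at line 656), and the driver's own messages are built at exactly 16 dwords (line 715). A sketch of the check:

#include <errno.h>
#include <stdint.h>

static int check_uvd_ib_len(uint32_t length_dw)
{
	if (length_dw % 16)
		return -EINVAL; /* UVD IBs must be a multiple of 16 dwords */
	return 0;
}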
r600_cs.c   2319 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw); in r600_cs_parse()
2321 for (r = 0; r < p->ib.length_dw; r++) { in r600_cs_parse()
2402 parser.ib.length_dw = ib_chunk->length_dw; in r600_cs_legacy()
2403 *l = parser.ib.length_dw; in r600_cs_legacy()
2404 if (copy_from_user(ib, ib_chunk->user_ptr, ib_chunk->length_dw * 4)) { in r600_cs_legacy()
2484 if (p->idx >= ib_chunk->length_dw) { in r600_dma_cs_parse()
2486 p->idx, ib_chunk->length_dw); in r600_dma_cs_parse()
2622 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw); in r600_dma_cs_parse()
2624 for (r = 0; r < p->ib->length_dw; r++) { in r600_dma_cs_parse()
r600_dma.c   363 ib.length_dw = 4; in r600_dma_ib_test()
422 radeon_ring_write(ring, (ib->length_dw << 16) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in r600_dma_ring_ib_execute()
evergreen_dma.c   90 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
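
The DMA IB-execute hits pack length_dw and the top byte of the IB's GPU address into a single ring dword; the shift is generation-specific (16 on r600 DMA, 12 on evergreen/cayman DMA, per the hits above). A sketch:

#include <stdint.h>

static uint32_t dma_ib_execute_word(uint32_t length_dw, uint64_t gpu_addr,
				    unsigned int shift)
{
	/* shift = 16 (r600) or 12 (evergreen/cayman) */
	return (length_dw << shift) | ((uint32_t)(gpu_addr >> 32) & 0xFF);
}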
radeon_ib.c   131 if (!ib->length_dw || !ring->ready) { in radeon_ib_schedule()
radeon_trace.h   41 __entry->dw = p->chunks[p->chunk_ib_idx].length_dw;
evergreen_cs.c   2664 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw); in evergreen_cs_parse()
2666 for (r = 0; r < p->ib.length_dw; r++) { in evergreen_cs_parse()
2696 if (p->idx >= ib_chunk->length_dw) { in evergreen_dma_cs_parse()
2698 p->idx, ib_chunk->length_dw); in evergreen_dma_cs_parse()
3103 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw); in evergreen_dma_cs_parse()
3105 for (r = 0; r < p->ib->length_dw; r++) { in evergreen_dma_cs_parse()
3412 } while (idx < ib->length_dw); in evergreen_ib_parse()
3514 } while (idx < ib->length_dw); in evergreen_dma_ib_parse()
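
Across the *_cs_parse() and *_ib_parse() hits, length_dw is the hard upper bound for the packet walk. A simplified sketch covering PM4 type-3 packets only (the real parsers also handle type-0 and type-2 packets):

#include <stdint.h>

static void walk_ib_sketch(const uint32_t *ptr, uint32_t length_dw)
{
	uint32_t idx = 0;

	while (idx < length_dw) {
		uint32_t count = (ptr[idx] >> 16) & 0x3fff; /* PM4 count field */
		/* a type-3 packet spans the header plus count + 1 payload dwords */
		idx += count + 2;
	}
}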
uvd_v1_0.c   488 radeon_ring_write(ring, ib->length_dw); in uvd_v1_0_ib_execute()
r100.c   2074 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw); in r100_cs_parse()
3688 radeon_ring_write(ring, ib->length_dw); in r100_ring_ib_execute()
3718 ib.length_dw = 8; in r100_ib_test()
r300.c   1292 } while (p->idx < p->chunks[p->chunk_ib_idx].length_dw); in r300_cs_parse()
radeon.h   812 uint32_t length_dw; member
1043 uint32_t length_dw; member
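
The two radeon.h hits are the length_dw members on the IB and CS-chunk structures. A sketch of their roles (the field sets here are illustrative, not the exact radeon.h layout):

#include <stdint.h>

struct radeon_ib_sketch {
	uint32_t *ptr;      /* CPU mapping of the command buffer */
	uint64_t gpu_addr;  /* GPU address of the same buffer */
	uint32_t length_dw; /* dwords queued for execution */
};

struct radeon_cs_chunk_sketch {
	uint32_t length_dw; /* userspace-declared chunk size in dwords */
	uint32_t *kdata;    /* kernel-side copy of the chunk */
};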
ni.c   1397 radeon_ring_write(ring, ib->length_dw | in cayman_ring_ib_execute()
si.c   3403 radeon_ring_write(ring, ib->length_dw | in si_ring_ib_execute()
4701 for (i = 0; i < ib->length_dw; i++) { in si_ib_parse()
4740 } while (idx < ib->length_dw); in si_ib_parse()
r600.c   3283 radeon_ring_write(ring, ib->length_dw); in r600_ring_ib_execute()
3308 ib.length_dw = 3; in r600_ib_test()
cik.c   4091 control |= ib->length_dw | in cik_ring_ib_execute()
4137 ib.length_dw = 3; in cik_ib_test()
evergreen.c   2821 radeon_ring_write(ring, ib->length_dw); in evergreen_ring_ib_execute()