Lines Matching refs:hwdev
135 static dma_addr_t swiotlb_virt_to_bus(struct device *hwdev, in swiotlb_virt_to_bus() argument
138 return phys_to_dma(hwdev, virt_to_phys(address)); in swiotlb_virt_to_bus()
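The match at line 138 gives the whole body of swiotlb_virt_to_bus(): it simply composes virt_to_phys() with phys_to_dma() for the given hwdev. Below is a minimal user-space sketch of that composition; struct device, virt_to_phys() and phys_to_dma() here are hypothetical stand-ins, not the kernel definitions.

#include <stdint.h>
#include <stdio.h>

typedef uint64_t phys_addr_t;
typedef uint64_t dma_addr_t;

/* hypothetical stand-in for struct device: only a per-device bus offset */
struct device {
        uint64_t dma_offset;
};

/* stand-in for virt_to_phys(): identity mapping is enough for the sketch */
static phys_addr_t virt_to_phys(void *address)
{
        return (phys_addr_t)(uintptr_t)address;
}

/* stand-in for phys_to_dma(): apply the device's bus offset */
static dma_addr_t phys_to_dma(struct device *hwdev, phys_addr_t paddr)
{
        return paddr + hwdev->dma_offset;
}

/* same shape as the matched line 138: virt -> phys -> bus */
static dma_addr_t swiotlb_virt_to_bus(struct device *hwdev, void *address)
{
        return phys_to_dma(hwdev, virt_to_phys(address));
}

int main(void)
{
        struct device dev = { .dma_offset = 0x100000000ull };
        static char buf[64];

        printf("bus address: %#llx\n",
               (unsigned long long)swiotlb_virt_to_bus(&dev, buf));
        return 0;
}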
425 phys_addr_t swiotlb_tbl_map_single(struct device *hwdev, in swiotlb_tbl_map_single() argument
441 mask = dma_get_seg_boundary(hwdev); in swiotlb_tbl_map_single()
517 dev_warn(hwdev, "swiotlb buffer is full (sz: %zd bytes)\n", size); in swiotlb_tbl_map_single()
540 phys_addr_t map_single(struct device *hwdev, phys_addr_t phys, size_t size, in map_single() argument
543 dma_addr_t start_dma_addr = phys_to_dma(hwdev, io_tlb_start); in map_single()
545 return swiotlb_tbl_map_single(hwdev, start_dma_addr, phys, size, dir); in map_single()
551 void swiotlb_tbl_unmap_single(struct device *hwdev, phys_addr_t tlb_addr, in swiotlb_tbl_unmap_single() argument
595 void swiotlb_tbl_sync_single(struct device *hwdev, phys_addr_t tlb_addr, in swiotlb_tbl_sync_single() argument
628 swiotlb_alloc_coherent(struct device *hwdev, size_t size, in swiotlb_alloc_coherent() argument
636 if (hwdev && hwdev->coherent_dma_mask) in swiotlb_alloc_coherent()
637 dma_mask = hwdev->coherent_dma_mask; in swiotlb_alloc_coherent()
641 dev_addr = swiotlb_virt_to_bus(hwdev, ret); in swiotlb_alloc_coherent()
656 phys_addr_t paddr = map_single(hwdev, 0, size, DMA_FROM_DEVICE); in swiotlb_alloc_coherent()
661 dev_addr = phys_to_dma(hwdev, paddr); in swiotlb_alloc_coherent()
670 swiotlb_tbl_unmap_single(hwdev, paddr, in swiotlb_alloc_coherent()
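The swiotlb_alloc_coherent() matches show the decision shape: prefer hwdev->coherent_dma_mask when it is set, check the bus address of the normally allocated buffer, and fall back to a bounce slot obtained through map_single(); line 670 is the unwind path when even the bounce slot is unreachable. The sketch below models only that decision; the device fields, addresses, and allocator are made-up stand-ins, and the real page allocation and retry details are omitted.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

typedef uint64_t dma_addr_t;

/* hypothetical stand-in for struct device: coherent mask plus fake bus
 * addresses so the sketch can exercise both branches */
struct device {
        uint64_t coherent_dma_mask;
        dma_addr_t next_alloc_bus_addr;
        dma_addr_t bounce_bus_addr;
};

static void *sketch_alloc_coherent(struct device *hwdev, size_t size,
                                   dma_addr_t *dev_addr)
{
        static char bounce_slot[4096];      /* fixed-size stand-in for an io_tlb slot */
        uint64_t dma_mask = ~0ull;          /* default: no restriction */
        void *ret = malloc(size);           /* stand-in for the page allocator */

        if (!ret)
                return NULL;
        if (hwdev && hwdev->coherent_dma_mask)          /* as at line 636 */
                dma_mask = hwdev->coherent_dma_mask;

        *dev_addr = hwdev->next_alloc_bus_addr;         /* swiotlb_virt_to_bus(ret) */
        if (*dev_addr + size - 1 <= dma_mask)           /* roughly dma_capable() */
                return ret;                             /* directly addressable */

        /* not addressable: fall back to a bounce slot, as map_single() does */
        free(ret);
        *dev_addr = hwdev->bounce_bus_addr;             /* phys_to_dma(hwdev, paddr) */
        if (*dev_addr + size - 1 > dma_mask)
                return NULL;    /* unwind, mirroring the unmap at line 670 */
        return bounce_slot;
}

int main(void)
{
        struct device dev = {
                .coherent_dma_mask = 0xffffffffull,     /* 32-bit device */
                .next_alloc_bus_addr = 0x180000000ull,  /* above 4 GiB: unreachable */
                .bounce_bus_addr = 0x20000000ull,       /* bounce pool below 4 GiB */
        };
        dma_addr_t bus;

        if (sketch_alloc_coherent(&dev, 4096, &bus))
                printf("coherent buffer reachable at bus %#llx\n",
                       (unsigned long long)bus);
        return 0;
}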
684 swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr, in swiotlb_free_coherent() argument
687 phys_addr_t paddr = dma_to_phys(hwdev, dev_addr); in swiotlb_free_coherent()
694 swiotlb_tbl_unmap_single(hwdev, paddr, size, DMA_TO_DEVICE); in swiotlb_free_coherent()
776 static void unmap_single(struct device *hwdev, dma_addr_t dev_addr, in unmap_single() argument
779 phys_addr_t paddr = dma_to_phys(hwdev, dev_addr); in unmap_single()
784 swiotlb_tbl_unmap_single(hwdev, paddr, size, dir); in unmap_single()
800 void swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr, in swiotlb_unmap_page() argument
804 unmap_single(hwdev, dev_addr, size, dir); in swiotlb_unmap_page()
819 swiotlb_sync_single(struct device *hwdev, dma_addr_t dev_addr, in swiotlb_sync_single() argument
823 phys_addr_t paddr = dma_to_phys(hwdev, dev_addr); in swiotlb_sync_single()
828 swiotlb_tbl_sync_single(hwdev, paddr, size, dir, target); in swiotlb_sync_single()
839 swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr, in swiotlb_sync_single_for_cpu() argument
842 swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU); in swiotlb_sync_single_for_cpu()
847 swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr, in swiotlb_sync_single_for_device() argument
850 swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE); in swiotlb_sync_single_for_device()
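Lines 819-850 show the sync path: the bus address is converted back with dma_to_phys(), the common helper forwards to swiotlb_tbl_sync_single(), and the _for_cpu/_for_device entry points differ only in the dma_sync_target they pass. A self-contained sketch of that wrapper pattern follows, with the kernel helpers replaced by stubs that just report what they would do.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t dma_addr_t;
typedef uint64_t phys_addr_t;

enum dma_data_direction { DMA_TO_DEVICE, DMA_FROM_DEVICE, DMA_BIDIRECTIONAL };
enum dma_sync_target { SYNC_FOR_CPU, SYNC_FOR_DEVICE };

struct device { uint64_t dma_offset; };   /* hypothetical stand-in */

static phys_addr_t dma_to_phys(struct device *hwdev, dma_addr_t dev_addr)
{
        return dev_addr - hwdev->dma_offset;    /* stand-in */
}

/* stub for swiotlb_tbl_sync_single(): just show which way data moves */
static void swiotlb_tbl_sync_single(struct device *hwdev, phys_addr_t tlb_addr,
                                    size_t size, enum dma_data_direction dir,
                                    enum dma_sync_target target)
{
        (void)hwdev; (void)dir;
        printf("sync %zu bytes at %#llx %s\n", size,
               (unsigned long long)tlb_addr,
               target == SYNC_FOR_CPU ? "for CPU" : "for device");
}

/* the common helper: bus address back to physical, then sync the slot */
static void swiotlb_sync_single(struct device *hwdev, dma_addr_t dev_addr,
                                size_t size, enum dma_data_direction dir,
                                enum dma_sync_target target)
{
        phys_addr_t paddr = dma_to_phys(hwdev, dev_addr);

        swiotlb_tbl_sync_single(hwdev, paddr, size, dir, target);
}

/* the two exported entry points only differ in the target they pass */
static void swiotlb_sync_single_for_cpu(struct device *hwdev,
                                        dma_addr_t dev_addr, size_t size,
                                        enum dma_data_direction dir)
{
        swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU);
}

static void swiotlb_sync_single_for_device(struct device *hwdev,
                                           dma_addr_t dev_addr, size_t size,
                                           enum dma_data_direction dir)
{
        swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE);
}

int main(void)
{
        struct device dev = { .dma_offset = 0x100000000ull };

        swiotlb_sync_single_for_cpu(&dev, 0x140000000ull, 512, DMA_FROM_DEVICE);
        swiotlb_sync_single_for_device(&dev, 0x140000000ull, 512, DMA_TO_DEVICE);
        return 0;
}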
871 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems, in swiotlb_map_sg_attrs() argument
881 dma_addr_t dev_addr = phys_to_dma(hwdev, paddr); in swiotlb_map_sg_attrs()
884 !dma_capable(hwdev, dev_addr, sg->length)) { in swiotlb_map_sg_attrs()
885 phys_addr_t map = map_single(hwdev, sg_phys(sg), in swiotlb_map_sg_attrs()
890 swiotlb_full(hwdev, sg->length, dir, 0); in swiotlb_map_sg_attrs()
891 swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, in swiotlb_map_sg_attrs()
896 sg->dma_address = phys_to_dma(hwdev, map); in swiotlb_map_sg_attrs()
906 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, in swiotlb_map_sg() argument
909 return swiotlb_map_sg_attrs(hwdev, sgl, nelems, dir, NULL); in swiotlb_map_sg()
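The swiotlb_map_sg_attrs() matches outline the per-segment decision: compute the segment's bus address with phys_to_dma(), bounce it through map_single() when swiotlb_force is set or dma_capable() says the device cannot reach it, and publish the bounce slot's bus address in sg->dma_address. A simplified sketch of that loop follows; the scatterlist and device types are stand-ins, and the failure path visible at lines 890-891 (swiotlb_full() plus unmapping the already-mapped entries) is left out.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t phys_addr_t;
typedef uint64_t dma_addr_t;

/* hypothetical stand-ins: only the fields this decision loop needs */
struct device { uint64_t dma_mask; uint64_t dma_offset; };
struct scatterlist { phys_addr_t phys; size_t length; dma_addr_t dma_address; };

static bool swiotlb_force;                              /* force bouncing if set */
static phys_addr_t next_bounce_slot = 0x20000000ull;    /* assumed pool base */

static dma_addr_t phys_to_dma(struct device *hwdev, phys_addr_t paddr)
{
        return paddr + hwdev->dma_offset;
}

/* rough equivalent of dma_capable(): does the buffer fit under the mask? */
static bool dma_capable(struct device *hwdev, dma_addr_t addr, size_t size)
{
        return addr + size - 1 <= hwdev->dma_mask;
}

/* stand-in for map_single(): hand out the next bounce slot */
static phys_addr_t bounce_map(struct device *hwdev, phys_addr_t phys,
                              size_t size)
{
        phys_addr_t slot = next_bounce_slot;

        (void)hwdev; (void)phys;
        next_bounce_slot += size;
        return slot;
}

/* per-segment decision shape seen in the swiotlb_map_sg_attrs() matches */
static int sketch_map_sg(struct device *hwdev, struct scatterlist *sgl,
                         int nelems)
{
        for (int i = 0; i < nelems; i++) {
                struct scatterlist *sg = &sgl[i];
                dma_addr_t dev_addr = phys_to_dma(hwdev, sg->phys);

                if (swiotlb_force ||
                    !dma_capable(hwdev, dev_addr, sg->length)) {
                        phys_addr_t map = bounce_map(hwdev, sg->phys,
                                                     sg->length);
                        sg->dma_address = phys_to_dma(hwdev, map);
                } else {
                        sg->dma_address = dev_addr;
                }
        }
        return nelems;
}

int main(void)
{
        struct device dev = { .dma_mask = 0xffffffffull, .dma_offset = 0 };
        struct scatterlist sgl[2] = {
                { .phys = 0x01000000ull,  .length = 4096 },     /* reachable    */
                { .phys = 0x180000000ull, .length = 4096 },     /* needs bounce */
        };

        sketch_map_sg(&dev, sgl, 2);
        for (int i = 0; i < 2; i++)
                printf("sg[%d] -> bus %#llx\n", i,
                       (unsigned long long)sgl[i].dma_address);
        return 0;
}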
918 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in swiotlb_unmap_sg_attrs() argument
927 unmap_single(hwdev, sg->dma_address, sg_dma_len(sg), dir); in swiotlb_unmap_sg_attrs()
933 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, in swiotlb_unmap_sg() argument
936 return swiotlb_unmap_sg_attrs(hwdev, sgl, nelems, dir, NULL); in swiotlb_unmap_sg()
948 swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl, in swiotlb_sync_sg() argument
956 swiotlb_sync_single(hwdev, sg->dma_address, in swiotlb_sync_sg()
961 swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg, in swiotlb_sync_sg_for_cpu() argument
964 swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_CPU); in swiotlb_sync_sg_for_cpu()
969 swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg, in swiotlb_sync_sg_for_device() argument
972 swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_DEVICE); in swiotlb_sync_sg_for_device()
977 swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr) in swiotlb_dma_mapping_error() argument
979 return (dma_addr == phys_to_dma(hwdev, io_tlb_overflow_buffer)); in swiotlb_dma_mapping_error()
990 swiotlb_dma_supported(struct device *hwdev, u64 mask) in swiotlb_dma_supported() argument
992 return phys_to_dma(hwdev, io_tlb_end - 1) <= mask; in swiotlb_dma_supported()
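The last two matches give complete one-line bodies: a mapping error is detected by comparing the returned address against the bus address of io_tlb_overflow_buffer, and swiotlb_dma_supported() only asks whether the device can address the last byte of the bounce pool. A sketch of both checks, with an assumed pool layout and a stand-in phys_to_dma():

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t phys_addr_t;
typedef uint64_t dma_addr_t;

struct device { uint64_t dma_offset; };   /* hypothetical stand-in */

/* assumed pool layout for the sketch */
static phys_addr_t io_tlb_end = 0x24000000ull;
static phys_addr_t io_tlb_overflow_buffer = 0x24000000ull;

static dma_addr_t phys_to_dma(struct device *hwdev, phys_addr_t paddr)
{
        return paddr + hwdev->dma_offset;
}

/* line 979: a mapping failed iff it landed on the overflow buffer */
static bool swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr)
{
        return dma_addr == phys_to_dma(hwdev, io_tlb_overflow_buffer);
}

/* line 992: the device is supported iff it can reach the last byte
 * of the bounce pool through its bus offset */
static bool swiotlb_dma_supported(struct device *hwdev, uint64_t mask)
{
        return phys_to_dma(hwdev, io_tlb_end - 1) <= mask;
}

int main(void)
{
        struct device dev = { .dma_offset = 0 };

        printf("32-bit mask supported: %d\n",
               swiotlb_dma_supported(&dev, 0xffffffffull));
        printf("overflow address is an error: %d\n",
               swiotlb_dma_mapping_error(&dev,
                        phys_to_dma(&dev, io_tlb_overflow_buffer)));
        return 0;
}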