Lines Matching refs:ops

185 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_single_attrs() local
190 addr = ops->map_page(dev, virt_to_page(ptr), in dma_map_single_attrs()
204 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_single_attrs() local
207 if (ops->unmap_page) in dma_unmap_single_attrs()
208 ops->unmap_page(dev, addr, size, dir, attrs); in dma_unmap_single_attrs()
220 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_sg_attrs() local
227 ents = ops->map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
238 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_sg_attrs() local
242 if (ops->unmap_sg) in dma_unmap_sg_attrs()
243 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
250 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_page() local
255 addr = ops->map_page(dev, page, offset, size, dir, 0); in dma_map_page()
264 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_page() local
267 if (ops->unmap_page) in dma_unmap_page()
268 ops->unmap_page(dev, addr, size, dir, 0); in dma_unmap_page()
278 struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_resource() local
287 if (ops->map_resource) in dma_map_resource()
288 addr = ops->map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
299 struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_resource() local
302 if (ops->unmap_resource) in dma_unmap_resource()
303 ops->unmap_resource(dev, addr, size, dir, attrs); in dma_unmap_resource()
311 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_cpu() local
314 if (ops->sync_single_for_cpu) in dma_sync_single_for_cpu()
315 ops->sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
323 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_device() local
326 if (ops->sync_single_for_device) in dma_sync_single_for_device()
327 ops->sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
337 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_range_for_cpu() local
340 if (ops->sync_single_for_cpu) in dma_sync_single_range_for_cpu()
341 ops->sync_single_for_cpu(dev, addr + offset, size, dir); in dma_sync_single_range_for_cpu()
351 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_range_for_device() local
354 if (ops->sync_single_for_device) in dma_sync_single_range_for_device()
355 ops->sync_single_for_device(dev, addr + offset, size, dir); in dma_sync_single_range_for_device()
363 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_cpu() local
366 if (ops->sync_sg_for_cpu) in dma_sync_sg_for_cpu()
367 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
375 struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_device() local
378 if (ops->sync_sg_for_device) in dma_sync_sg_for_device()
379 ops->sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
418 struct dma_map_ops *ops = get_dma_ops(dev); in dma_mmap_attrs() local
419 BUG_ON(!ops); in dma_mmap_attrs()
420 if (ops->mmap) in dma_mmap_attrs()
421 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
436 struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_sgtable_attrs() local
437 BUG_ON(!ops); in dma_get_sgtable_attrs()
438 if (ops->get_sgtable) in dma_get_sgtable_attrs()
439 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, in dma_get_sgtable_attrs()
454 struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_attrs() local
457 BUG_ON(!ops); in dma_alloc_attrs()
464 if (!ops->alloc) in dma_alloc_attrs()
467 cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
476 struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_attrs() local
478 BUG_ON(!ops); in dma_free_attrs()
484 if (!ops->free || !cpu_addr) in dma_free_attrs()
488 ops->free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
534 struct dma_map_ops *ops = get_dma_ops(dev); in dma_supported() local
536 if (!ops) in dma_supported()
538 if (!ops->dma_supported) in dma_supported()
540 return ops->dma_supported(dev, mask); in dma_supported()
547 struct dma_map_ops *ops = get_dma_ops(dev); in dma_set_mask() local
549 if (ops->set_dma_mask) in dma_set_mask()
550 return ops->set_dma_mask(dev, mask); in dma_set_mask()
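
All of the listed lines follow the same dispatch pattern: each inline dma_* wrapper looks up the device's operation table with get_dma_ops(dev) and then calls the matching dma_map_ops callback, NULL-checking the optional ones (unmap_page, the sync_* hooks, mmap, get_sgtable, and so on) before calling them. The stand-alone C sketch below models that pattern outside the kernel; the callback names mirror the ones referenced above, but every type and the demo backend here are simplified assumptions, not the real kernel definitions.

/* Simplified model of the dma_map_ops dispatch pattern shown above.
 * Not kernel code: the types and the demo backend are illustrative only. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef unsigned long dma_addr_t;
enum dma_data_direction { DMA_TO_DEVICE, DMA_FROM_DEVICE };

struct device;	/* forward declaration; defined below */

struct dma_map_ops {
	/* mandatory in this model, like ops->map_page in the listing */
	dma_addr_t (*map_page)(struct device *dev, void *page, size_t offset,
			       size_t size, enum dma_data_direction dir);
	/* optional callbacks are NULL-checked before use, as in the listing */
	void (*unmap_page)(struct device *dev, dma_addr_t addr, size_t size,
			   enum dma_data_direction dir);
	void (*sync_single_for_cpu)(struct device *dev, dma_addr_t addr,
				    size_t size, enum dma_data_direction dir);
};

struct device {
	const struct dma_map_ops *dma_ops;
};

static const struct dma_map_ops *get_dma_ops(struct device *dev)
{
	return dev->dma_ops;
}

/* Mirrors dma_map_page(): always dispatches through ops->map_page. */
static dma_addr_t dma_map_page(struct device *dev, void *page, size_t offset,
			       size_t size, enum dma_data_direction dir)
{
	const struct dma_map_ops *ops = get_dma_ops(dev);

	return ops->map_page(dev, page, offset, size, dir);
}

/* Mirrors dma_unmap_page(): optional callback, only called if present. */
static void dma_unmap_page(struct device *dev, dma_addr_t addr, size_t size,
			   enum dma_data_direction dir)
{
	const struct dma_map_ops *ops = get_dma_ops(dev);

	if (ops->unmap_page)
		ops->unmap_page(dev, addr, size, dir);
}

/* Demo backend: a trivial identity mapping (purely an assumption). */
static dma_addr_t demo_map_page(struct device *dev, void *page, size_t offset,
				size_t size, enum dma_data_direction dir)
{
	(void)dev; (void)size; (void)dir;
	return (dma_addr_t)(uintptr_t)page + offset;
}

static const struct dma_map_ops demo_ops = {
	.map_page = demo_map_page,
	/* .unmap_page left NULL: dma_unmap_page() then does nothing */
};

int main(void)
{
	struct device dev = { .dma_ops = &demo_ops };
	char buf[64];

	dma_addr_t addr = dma_map_page(&dev, buf, 0, sizeof(buf), DMA_TO_DEVICE);
	dma_unmap_page(&dev, addr, sizeof(buf), DMA_TO_DEVICE);
	printf("mapped at %#lx\n", addr);
	return 0;
}

The same split between mandatory and optional callbacks explains the two shapes seen in the listing: dma_map_page() and dma_alloc_attrs() assume their callback exists (guarded only by BUG_ON(!ops)), while the unmap, sync, mmap, and get_sgtable wrappers tolerate a missing callback and simply skip the call.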