Lines matching refs:dev, i.e. cross-reference hits for the identifier dev in the ARM dmabounce code (evidently arch/arm/common/dmabounce.c in the Linux kernel). Each entry gives the source line number, the matching line, and its enclosing scope (member, argument, local, or the containing function).
70 struct device *dev; member
87 static ssize_t dmabounce_show(struct device *dev, struct device_attribute *attr, in dmabounce_show() argument
90 struct dmabounce_device_info *device_info = dev->archdata.dmabounce; in dmabounce_show()
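Source lines 87-90 are the sysfs show handler for the driver's bounce statistics. Assuming the usual build of this file (the counters sit behind its STATS/DO_STATS define), they become readable through the per-device attribute file dmabounce_stats created at source line 523 below.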
112 struct device *dev = device_info->dev; in alloc_safe_buffer() local
115 dev_dbg(dev, "%s(ptr=%p, size=%d, dir=%d)\n", in alloc_safe_buffer()
128 dev_warn(dev, "%s: kmalloc failed\n", __func__); in alloc_safe_buffer()
141 buf->safe = dma_alloc_coherent(dev, size, &buf->safe_dma_addr, in alloc_safe_buffer()
146 dev_warn(dev, in alloc_safe_buffer()
191 dev_dbg(device_info->dev, "%s(buf=%p)\n", __func__, buf); in free_safe_buffer()
202 dma_free_coherent(device_info->dev, buf->size, buf->safe, in free_safe_buffer()
210 static struct safe_buffer *find_safe_buffer_dev(struct device *dev, in find_safe_buffer_dev() argument
213 if (!dev || !dev->archdata.dmabounce) in find_safe_buffer_dev()
215 if (dma_mapping_error(dev, dma_addr)) { in find_safe_buffer_dev()
216 dev_err(dev, "Trying to %s invalid mapping\n", where); in find_safe_buffer_dev()
219 return find_safe_buffer(dev->archdata.dmabounce, dma_addr); in find_safe_buffer_dev()
222 static int needs_bounce(struct device *dev, dma_addr_t dma_addr, size_t size) in needs_bounce() argument
224 if (!dev || !dev->archdata.dmabounce) in needs_bounce()
227 if (dev->dma_mask) { in needs_bounce()
228 unsigned long limit, mask = *dev->dma_mask; in needs_bounce()
232 dev_err(dev, "DMA mapping too big (requested %#x " in needs_bounce()
233 "mask %#Lx)\n", size, *dev->dma_mask); in needs_bounce()
242 return !!dev->archdata.dmabounce->needs_bounce(dev, dma_addr, size); in needs_bounce()
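Source line 242 shows that, once the generic mask and size checks pass, the bounce decision is delegated to a per-device needs_bounce callback taking (struct device *, dma_addr_t, size_t). A minimal sketch of such a predicate, assuming a device that simply cannot reach addresses beyond its DMA mask (real callbacks typically encode controller-specific errata, and the function name is illustrative):

/*
 * Illustrative bounce predicate: return nonzero when the mapping at
 * dma_addr .. dma_addr + size - 1 must go through a safe buffer.
 * The policy below is an assumption for the example only.
 */
static int example_needs_bounce(struct device *dev, dma_addr_t dma_addr,
                                size_t size)
{
        return (dma_addr + size - 1) > *dev->dma_mask;
}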
245 static inline dma_addr_t map_single(struct device *dev, void *ptr, size_t size, in map_single() argument
248 struct dmabounce_device_info *device_info = dev->archdata.dmabounce; in map_single()
256 dev_err(dev, "%s: unable to map unsafe buffer %p!\n", in map_single()
261 dev_dbg(dev, "%s: unsafe buffer %p (dma=%#x) mapped to %p (dma=%#x)\n", in map_single()
262 __func__, buf->ptr, virt_to_dma(dev, buf->ptr), in map_single()
266 dev_dbg(dev, "%s: copy unsafe %p to safe %p, size %d\n", in map_single()
274 static inline void unmap_single(struct device *dev, struct safe_buffer *buf, in unmap_single() argument
280 dev_dbg(dev, "%s: unsafe buffer %p (dma=%#x) mapped to %p (dma=%#x)\n", in unmap_single()
281 __func__, buf->ptr, virt_to_dma(dev, buf->ptr), in unmap_single()
284 DO_STATS(dev->archdata.dmabounce->bounce_count++); in unmap_single()
289 dev_dbg(dev, "%s: copy back safe %p to unsafe %p size %d\n", in unmap_single()
300 free_safe_buffer(dev->archdata.dmabounce, buf); in unmap_single()
311 static dma_addr_t dmabounce_map_page(struct device *dev, struct page *page, in dmabounce_map_page() argument
318 dev_dbg(dev, "%s(page=%p,off=%#lx,size=%zx,dir=%x)\n", in dmabounce_map_page()
321 dma_addr = pfn_to_dma(dev, page_to_pfn(page)) + offset; in dmabounce_map_page()
323 ret = needs_bounce(dev, dma_addr, size); in dmabounce_map_page()
328 arm_dma_ops.sync_single_for_device(dev, dma_addr, size, dir); in dmabounce_map_page()
333 dev_err(dev, "DMA buffer bouncing of HIGHMEM pages is not supported\n"); in dmabounce_map_page()
337 return map_single(dev, page_address(page) + offset, size, dir); in dmabounce_map_page()
346 static void dmabounce_unmap_page(struct device *dev, dma_addr_t dma_addr, size_t size, in dmabounce_unmap_page() argument
351 dev_dbg(dev, "%s(dma=%#x,size=%d,dir=%x)\n", in dmabounce_unmap_page()
354 buf = find_safe_buffer_dev(dev, dma_addr, __func__); in dmabounce_unmap_page()
356 arm_dma_ops.sync_single_for_cpu(dev, dma_addr, size, dir); in dmabounce_unmap_page()
360 unmap_single(dev, buf, size, dir); in dmabounce_unmap_page()
363 static int __dmabounce_sync_for_cpu(struct device *dev, dma_addr_t addr, in __dmabounce_sync_for_cpu() argument
369 dev_dbg(dev, "%s(dma=%#x,sz=%zx,dir=%x)\n", in __dmabounce_sync_for_cpu()
372 buf = find_safe_buffer_dev(dev, addr, __func__); in __dmabounce_sync_for_cpu()
380 dev_dbg(dev, "%s: unsafe buffer %p (dma=%#x off=%#lx) mapped to %p (dma=%#x)\n", in __dmabounce_sync_for_cpu()
381 __func__, buf->ptr, virt_to_dma(dev, buf->ptr), off, in __dmabounce_sync_for_cpu()
384 DO_STATS(dev->archdata.dmabounce->bounce_count++); in __dmabounce_sync_for_cpu()
387 dev_dbg(dev, "%s: copy back safe %p to unsafe %p size %d\n", in __dmabounce_sync_for_cpu()
394 static void dmabounce_sync_for_cpu(struct device *dev, in dmabounce_sync_for_cpu() argument
397 if (!__dmabounce_sync_for_cpu(dev, handle, size, dir)) in dmabounce_sync_for_cpu()
400 arm_dma_ops.sync_single_for_cpu(dev, handle, size, dir); in dmabounce_sync_for_cpu()
403 static int __dmabounce_sync_for_device(struct device *dev, dma_addr_t addr, in __dmabounce_sync_for_device() argument
409 dev_dbg(dev, "%s(dma=%#x,sz=%zx,dir=%x)\n", in __dmabounce_sync_for_device()
412 buf = find_safe_buffer_dev(dev, addr, __func__); in __dmabounce_sync_for_device()
420 dev_dbg(dev, "%s: unsafe buffer %p (dma=%#x off=%#lx) mapped to %p (dma=%#x)\n", in __dmabounce_sync_for_device()
421 __func__, buf->ptr, virt_to_dma(dev, buf->ptr), off, in __dmabounce_sync_for_device()
424 DO_STATS(dev->archdata.dmabounce->bounce_count++); in __dmabounce_sync_for_device()
427 dev_dbg(dev, "%s: copy out unsafe %p to safe %p, size %d\n", in __dmabounce_sync_for_device()
434 static void dmabounce_sync_for_device(struct device *dev, in dmabounce_sync_for_device() argument
437 if (!__dmabounce_sync_for_device(dev, handle, size, dir)) in dmabounce_sync_for_device()
440 arm_dma_ops.sync_single_for_device(dev, handle, size, dir); in dmabounce_sync_for_device()
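Source lines 394-400 and 434-440 show that both sync hooks try the bounce path first and fall through to arm_dma_ops only for mappings that were never bounced, so the standard streaming-DMA sequence works unchanged on a dmabounce-managed device. A caller-side sketch, assuming <linux/dma-mapping.h> and illustrative names:

/*
 * Streaming-DMA round trip: once set_dma_ops(dev, &dmabounce_ops) has
 * run (source line 527), these generic calls resolve to the dmabounce
 * hooks listed above.
 */
static int example_rx(struct device *dev, void *buf, size_t len)
{
        dma_addr_t handle;

        handle = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, handle))
                return -ENOMEM;

        /* ... device DMAs a first burst into the (possibly bounced) buffer ... */

        dma_sync_single_for_cpu(dev, handle, len, DMA_FROM_DEVICE);
        /* CPU may inspect buf here; dmabounce copied the safe buffer back. */
        dma_sync_single_for_device(dev, handle, len, DMA_FROM_DEVICE);

        /* ... device may DMA again; ownership is back with the hardware ... */

        dma_unmap_single(dev, handle, len, DMA_FROM_DEVICE);
        return 0;
}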
443 static int dmabounce_set_mask(struct device *dev, u64 dma_mask) in dmabounce_set_mask() argument
445 if (dev->archdata.dmabounce) in dmabounce_set_mask()
448 return arm_dma_ops.set_dma_mask(dev, dma_mask); in dmabounce_set_mask()
467 static int dmabounce_init_pool(struct dmabounce_pool *pool, struct device *dev, in dmabounce_init_pool() argument
472 pool->pool = dma_pool_create(name, dev, size, in dmabounce_init_pool()
479 int dmabounce_register_dev(struct device *dev, unsigned long small_buffer_size, in dmabounce_register_dev() argument
488 dev_err(dev, in dmabounce_register_dev()
493 ret = dmabounce_init_pool(&device_info->small, dev, in dmabounce_register_dev()
496 dev_err(dev, in dmabounce_register_dev()
503 ret = dmabounce_init_pool(&device_info->large, dev, in dmabounce_register_dev()
507 dev_err(dev, in dmabounce_register_dev()
514 device_info->dev = dev; in dmabounce_register_dev()
523 device_info->attr_res = device_create_file(dev, &dev_attr_dmabounce_stats); in dmabounce_register_dev()
526 dev->archdata.dmabounce = device_info; in dmabounce_register_dev()
527 set_dma_ops(dev, &dmabounce_ops); in dmabounce_register_dev()
529 dev_info(dev, "dmabounce: registered device\n"); in dmabounce_register_dev()
541 void dmabounce_unregister_dev(struct device *dev) in dmabounce_unregister_dev() argument
543 struct dmabounce_device_info *device_info = dev->archdata.dmabounce; in dmabounce_unregister_dev()
545 dev->archdata.dmabounce = NULL; in dmabounce_unregister_dev()
546 set_dma_ops(dev, NULL); in dmabounce_unregister_dev()
549 dev_warn(dev, in dmabounce_unregister_dev()
556 dev_err(dev, in dmabounce_unregister_dev()
568 device_remove_file(dev, &dev_attr_dmabounce_stats); in dmabounce_unregister_dev()
573 dev_info(dev, "dmabounce: device unregistered\n"); in dmabounce_unregister_dev()
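Source lines 479-573 are the module's public entry points. A registration sketch for a bus or platform driver, assuming the mainline signature in which the needs_bounce callback (the one invoked at source line 242) is passed as the fourth argument of dmabounce_register_dev(); pool sizes and function names are illustrative:

/* Hypothetical probe/remove pair wiring a child device into dmabounce. */
static int example_probe(struct device *dev)
{
        /*
         * Small pool serves buffers up to 512 bytes, large pool up to
         * 4 KiB; bigger requests fall back to dma_alloc_coherent() as
         * in alloc_safe_buffer() (source line 141).
         */
        return dmabounce_register_dev(dev, 512, 4096, example_needs_bounce);
}

static void example_remove(struct device *dev)
{
        /* Drops back to the default DMA ops and frees the pools. */
        dmabounce_unregister_dev(dev);
}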