Lines Matching refs:dev
70 struct device *dev; member
178 static bool driver_filter(struct device *dev) in driver_filter() argument
189 if (current_driver && dev && dev->driver == current_driver) in driver_filter()
193 if (!dev) in driver_filter()
200 drv = dev->driver; in driver_filter()
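The driver_filter() hits above (lines 178-200) implement the debugfs driver-name filter: DMA-API errors are only reported for devices bound to the driver named in the filter, and the filter is a no-op when no name is set. This listing appears to be cross-reference hits for `dev` in the DMA API debug code (kernel/dma/debug.c); the sketch below is a simplified reconstruction of that helper, with the lock protecting current_driver_name omitted:

    /*
     * Simplified sketch of the driver filter; the real helper also takes
     * driver_name_lock around the name comparison.
     */
    static bool driver_filter(struct device *dev)
    {
            struct device_driver *drv;

            /* No filter configured: report errors for every device. */
            if (likely(!current_driver_name[0]))
                    return true;

            /* Filter already resolved to a driver: compare pointers. */
            if (current_driver && dev && dev->driver == current_driver)
                    return true;

            /* A NULL device can never match a named driver. */
            if (!dev)
                    return false;

            /* Filter named but not yet resolved: match by driver name. */
            drv = dev->driver;
            if (drv && drv->name &&
                strncmp(current_driver_name, drv->name, NAME_MAX_LEN - 1) == 0) {
                    current_driver = drv;   /* cache for the fast path above */
                    return true;
            }

            return false;
    }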
219 #define err_printk(dev, entry, format, arg...) do { \ argument
221 if (driver_filter(dev) && \
224 dev ? dev_driver_string(dev) : "NULL", \
225 dev ? dev_name(dev) : "NULL", ## arg); \
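err_printk() (lines 219-225) is the reporting macro that every check below funnels through: it bumps the error count, gates the report on driver_filter() and the show_all_errors/show_num_errors limits, and prints the driver and device names, falling back to "NULL" when no device is known. A hedged reconstruction of its shape:

    /*
     * Sketch of the reporting macro; the error-count bookkeeping and the
     * stack-trace dump of the offending entry follow debug.c only
     * approximately.
     */
    #define err_printk(dev, entry, format, arg...) do {                    \
                    error_count += 1;                                      \
                    if (driver_filter(dev) &&                              \
                        (show_all_errors || show_num_errors > 0)) {        \
                            WARN(1, pr_fmt("%s %s: ") format,              \
                                 dev ? dev_driver_string(dev) : "NULL",    \
                                 dev ? dev_name(dev) : "NULL", ## arg);    \
                            dump_entry_trace(entry);                       \
                    }                                                      \
                    if (!show_all_errors && show_num_errors > 0)           \
                            show_num_errors -= 1;                          \
            } while (0);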
275 (a->dev == b->dev)) ? true : false; in exact_match()
281 if (a->dev != b->dev) in containing_match()
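The two match predicates (lines 275-281) show why the device pointer is part of every lookup key: entries are matched on (dev, dev_addr), with exact_match() used when unmapping and containing_match() when a sync may refer to a sub-range of a mapping. Roughly:

    /* Entries are keyed on the device and the bus address it handed out. */
    static bool exact_match(struct dma_debug_entry *a, struct dma_debug_entry *b)
    {
            return ((a->dev_addr == b->dev_addr) &&
                    (a->dev == b->dev)) ? true : false;
    }

    /* A sync may target any sub-range of an existing mapping. */
    static bool containing_match(struct dma_debug_entry *a, struct dma_debug_entry *b)
    {
            if (a->dev != b->dev)
                    return false;

            if ((b->dev_addr <= a->dev_addr) &&
                ((b->dev_addr + b->size) >= (a->dev_addr + a->size)))
                    return true;

            return false;
    }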
518 void debug_dma_dump_mappings(struct device *dev) in debug_dma_dump_mappings() argument
530 if (!dev || dev == entry->dev) { in debug_dma_dump_mappings()
532 dev_info(entry->dev, in debug_dma_dump_mappings()
565 dev_driver_string(entry->dev), in dump_show()
566 dev_name(entry->dev), in dump_show()
598 err_printk(entry->dev, entry, in add_dma_entry()
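debug_dma_dump_mappings() (lines 518-532) and the debugfs dump_show() both walk the hash table and print every tracked entry; passing a NULL device dumps everything, otherwise only that device's mappings. A condensed sketch, with the printed fields abbreviated:

    /*
     * Sketch: a NULL dev dumps every tracked mapping, otherwise only the
     * entries belonging to that device.
     */
    void debug_dma_dump_mappings(struct device *dev)
    {
            int idx;

            for (idx = 0; idx < HASH_SIZE; idx++) {
                    struct hash_bucket *bucket = &dma_entry_hash[idx];
                    struct dma_debug_entry *entry;
                    unsigned long flags;

                    spin_lock_irqsave(&bucket->lock, flags);
                    list_for_each_entry(entry, &bucket->list, list) {
                            if (!dev || dev == entry->dev)
                                    dev_info(entry->dev,
                                             "%s idx %d [dev_addr=0x%llx] [size=%llu] %s\n",
                                             type2name[entry->type], idx,
                                             (u64)entry->dev_addr, (u64)entry->size,
                                             dir2name[entry->direction]);
                    }
                    spin_unlock_irqrestore(&bucket->lock, flags);
            }
    }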
825 static int device_dma_allocations(struct device *dev, struct dma_debug_entry **out_entry) in device_dma_allocations() argument
834 if (entry->dev == dev) { in device_dma_allocations()
847 struct device *dev = data; in dma_debug_device_change() local
856 count = device_dma_allocations(dev, &entry); in dma_debug_device_change()
859 err_printk(dev, entry, "device driver has pending " in dma_debug_device_change()
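Lines 825-859 are the leak detector: a bus notifier fires when a driver is unbound, counts that device's outstanding entries with device_dma_allocations(), and reports any mappings the driver left behind. Roughly, with the error text abbreviated:

    /* Sketch of the unbind-time leak check. */
    static int dma_debug_device_change(struct notifier_block *nb,
                                       unsigned long action, void *data)
    {
            struct device *dev = data;
            struct dma_debug_entry *entry;
            int count;

            if (dma_debug_disabled())
                    return 0;

            if (action == BUS_NOTIFY_UNBOUND_DRIVER) {
                    count = device_dma_allocations(dev, &entry);
                    if (count)
                            err_printk(dev, entry, "device driver has pending "
                                       "DMA allocations while released from device "
                                       "[count=%d]\n", count);
            }

            return 0;
    }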
969 if (dma_mapping_error(ref->dev, ref->dev_addr)) { in check_unmap()
970 err_printk(ref->dev, NULL, in check_unmap()
974 err_printk(ref->dev, NULL, in check_unmap()
984 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
992 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
1001 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
1013 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
1024 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
1039 err_printk(ref->dev, entry, in check_unmap()
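check_unmap() (lines 969-1039) looks the unmap request up in the hash table and cross-checks it against the entry recorded at map time: address, size, mapping type, direction, and whether dma_mapping_error() was ever consulted. A trimmed sketch showing the lookup, one representative check and the entry teardown; the hash/bucket helpers are debug.c internals whose exact signatures vary between kernel versions:

    /* Trimmed sketch: only the lookup, one check and the teardown are shown. */
    static void check_unmap(struct dma_debug_entry *ref)
    {
            struct dma_debug_entry *entry;
            struct hash_bucket *bucket;
            unsigned long flags;

            bucket = get_hash_bucket(ref, &flags);
            entry = bucket_find_exact(bucket, ref);
            if (!entry) {
                    err_printk(ref->dev, NULL, "device driver tries to free DMA "
                               "memory it has not allocated [device address=0x%016llx] "
                               "[size=%llu bytes]\n", ref->dev_addr, ref->size);
                    goto out;
            }

            if (ref->size != entry->size)
                    err_printk(ref->dev, entry, "device driver frees "
                               "DMA memory with different size "
                               "[map size=%llu bytes] [unmap size=%llu bytes]\n",
                               entry->size, ref->size);

            /* ...further checks on type, direction and map_err_type... */

            hash_bucket_del(entry);
            dma_entry_free(entry);
    out:
            put_hash_bucket(bucket, flags);
    }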
1057 static void check_for_stack(struct device *dev, in check_for_stack() argument
1069 err_printk(dev, NULL, "device driver maps memory from stack [addr=%p]\n", addr); in check_for_stack()
1079 err_printk(dev, NULL, "device driver maps memory from stack [probable addr=%p]\n", addr); in check_for_stack()
1085 static void check_for_illegal_area(struct device *dev, void *addr, unsigned long len) in check_for_illegal_area() argument
1089 …err_printk(dev, NULL, "device driver maps memory from kernel text or rodata [addr=%p] [len=%lu]\n"… in check_for_illegal_area()
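check_for_stack() and check_for_illegal_area() (lines 1057-1089) catch two classic driver bugs: DMA-mapping an on-stack buffer and DMA-mapping kernel text or rodata. A sketch of the simpler of the two; the real check_for_stack() additionally walks the pages of a vmalloc'ed stack, which is where the "probable addr" message at line 1079 comes from:

    /*
     * Sketch; overlap() is a small debug.c helper comparing [addr, addr+len)
     * against the section bounds exported via <asm/sections.h>.
     */
    static void check_for_illegal_area(struct device *dev, void *addr,
                                       unsigned long len)
    {
            if (overlap(addr, len, _stext, _etext) ||
                overlap(addr, len, __start_rodata, __end_rodata))
                    err_printk(dev, NULL, "device driver maps memory from kernel "
                               "text or rodata [addr=%p] [len=%lu]\n", addr, len);
    }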
1092 static void check_sync(struct device *dev, in check_sync() argument
1105 err_printk(dev, NULL, "device driver tries " in check_sync()
1113 err_printk(dev, entry, "device driver syncs" in check_sync()
1126 err_printk(dev, entry, "device driver syncs " in check_sync()
1137 err_printk(dev, entry, "device driver syncs " in check_sync()
1147 err_printk(dev, entry, "device driver syncs " in check_sync()
1158 err_printk(ref->dev, entry, "device driver syncs " in check_sync()
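Beyond the "has not allocated" case, check_sync() (lines 1092-1158) verifies that the synced range lies inside a recorded mapping and that the sync direction is compatible with the mapping direction. A condensed illustration of the direction checks, factored here into a hypothetical check_sync_direction() helper purely for readability; the exact conditions and messages in debug.c differ slightly:

    /*
     * Hypothetical helper used only for this illustration; in debug.c these
     * checks live inline in check_sync().
     */
    static void check_sync_direction(struct device *dev,
                                     struct dma_debug_entry *ref,
                                     struct dma_debug_entry *entry,
                                     bool to_cpu)
    {
            if (ref->direction != entry->direction)
                    err_printk(dev, entry, "device driver syncs "
                               "DMA memory with different direction\n");

            /* A *_for_cpu sync on memory the device never writes is suspect... */
            if (to_cpu && entry->direction == DMA_TO_DEVICE)
                    err_printk(dev, entry, "device driver syncs "
                               "device read-only DMA memory for cpu\n");

            /* ...as is a *_for_device sync on memory the device never reads. */
            if (!to_cpu && entry->direction == DMA_FROM_DEVICE)
                    err_printk(dev, entry, "device driver syncs "
                               "device write-only DMA memory for device\n");
    }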
1168 static void check_sg_segment(struct device *dev, struct scatterlist *sg) in check_sg_segment() argument
1171 unsigned int max_seg = dma_get_max_seg_size(dev); in check_sg_segment()
1172 u64 start, end, boundary = dma_get_seg_boundary(dev); in check_sg_segment()
1179 …err_printk(dev, NULL, "mapping sg segment longer than device claims to support [len=%u] [max=%u]\n… in check_sg_segment()
1189 …err_printk(dev, NULL, "mapping sg segment across boundary [start=0x%016llx] [end=0x%016llx] [bound… in check_sg_segment()
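check_sg_segment() (lines 1168-1189) validates each mapped scatterlist segment against the device's DMA constraints: a segment must not exceed dma_get_max_seg_size() and must not straddle the dma_get_seg_boundary() mask. A sketch (the real function is compiled in only under the SG-checking config option):

    /* Sketch of the per-segment constraint checks. */
    static void check_sg_segment(struct device *dev, struct scatterlist *sg)
    {
            unsigned int max_seg = dma_get_max_seg_size(dev);
            u64 start, end, boundary = dma_get_seg_boundary(dev);

            if (sg->length > max_seg)
                    err_printk(dev, NULL, "mapping sg segment longer than device "
                               "claims to support [len=%u] [max=%u]\n",
                               sg->length, max_seg);

            /*
             * The segment crosses the boundary if its first and last byte
             * differ in any address bit above the boundary mask.
             */
            start = sg_dma_address(sg);
            end = start + sg_dma_len(sg) - 1;
            if ((start ^ end) & ~boundary)
                    err_printk(dev, NULL, "mapping sg segment across boundary "
                               "[start=0x%016llx] [end=0x%016llx] [boundary=0x%016llx]\n",
                               start, end, boundary);
    }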
1194 void debug_dma_map_single(struct device *dev, const void *addr, in debug_dma_map_single() argument
1201 err_printk(dev, NULL, "device driver maps memory from invalid area [addr=%p] [len=%lu]\n", in debug_dma_map_single()
1205 err_printk(dev, NULL, "device driver maps memory from vmalloc area [addr=%p] [len=%lu]\n", in debug_dma_map_single()
1210 void debug_dma_map_page(struct device *dev, struct page *page, size_t offset, in debug_dma_map_page() argument
1219 if (dma_mapping_error(dev, dma_addr)) in debug_dma_map_page()
1226 entry->dev = dev; in debug_dma_map_page()
1234 check_for_stack(dev, page, offset); in debug_dma_map_page()
1239 check_for_illegal_area(dev, addr, size); in debug_dma_map_page()
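debug_dma_map_page() (lines 1210-1239) is representative of every map-side hook: skip failed mappings, allocate a dma_debug_entry, record device, address, size and direction, run the stack and text/rodata checks, and insert the entry into the hash table. Roughly, with dma_entry_alloc()/add_dma_entry() taken on faith as debug.c internals whose signatures vary across kernel versions:

    /* Sketch of the map-side bookkeeping. */
    void debug_dma_map_page(struct device *dev, struct page *page, size_t offset,
                            size_t size, int direction, dma_addr_t dma_addr)
    {
            struct dma_debug_entry *entry;

            if (unlikely(dma_debug_disabled()))
                    return;

            if (dma_mapping_error(dev, dma_addr))
                    return;         /* failed mappings are not tracked */

            entry = dma_entry_alloc();
            if (!entry)
                    return;

            entry->dev       = dev;
            entry->type      = dma_debug_single;
            entry->pfn       = page_to_pfn(page);
            entry->offset    = offset;
            entry->dev_addr  = dma_addr;
            entry->size      = size;
            entry->direction = direction;

            check_for_stack(dev, page, offset);
            if (!PageHighMem(page))
                    check_for_illegal_area(dev, page_address(page) + offset, size);

            add_dma_entry(entry);
    }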
1245 void debug_dma_mapping_error(struct device *dev, dma_addr_t dma_addr) in debug_dma_mapping_error() argument
1255 ref.dev = dev; in debug_dma_mapping_error()
1283 void debug_dma_unmap_page(struct device *dev, dma_addr_t dma_addr, in debug_dma_unmap_page() argument
1288 .dev = dev, in debug_dma_unmap_page()
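The unmap side (lines 1283-1288), like debug_dma_mapping_error() just above it, builds a stack-local dma_debug_entry purely as a lookup key and hands it to check_unmap(), which removes the matching entry. Sketch:

    /* Sketch: the on-stack entry is only a search key for check_unmap(). */
    void debug_dma_unmap_page(struct device *dev, dma_addr_t dma_addr,
                              size_t size, int direction)
    {
            struct dma_debug_entry ref = {
                    .type      = dma_debug_single,
                    .dev       = dev,
                    .dev_addr  = dma_addr,
                    .size      = size,
                    .direction = direction,
            };

            if (unlikely(dma_debug_disabled()))
                    return;
            check_unmap(&ref);
    }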
1299 void debug_dma_map_sg(struct device *dev, struct scatterlist *sg, in debug_dma_map_sg() argument
1311 check_for_stack(dev, sg_page(s), s->offset); in debug_dma_map_sg()
1313 check_for_illegal_area(dev, sg_virt(s), s->length); in debug_dma_map_sg()
1322 entry->dev = dev; in debug_dma_map_sg()
1330 check_sg_segment(dev, s); in debug_dma_map_sg()
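debug_dma_map_sg() (lines 1299-1330) walks the scatterlist, creating one entry per mapped segment and recording both the segment count passed in (sg_call_ents) and the count actually mapped (sg_mapped_ents), so a later unmap or sync with a different count can be flagged. A condensed sketch of the per-segment loop; the separate stack/rodata pass over the original list is omitted:

    /* Condensed sketch of the per-segment bookkeeping loop. */
    void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
                          int nents, int mapped_ents, int direction)
    {
            struct dma_debug_entry *entry;
            struct scatterlist *s;
            int i;

            if (unlikely(dma_debug_disabled()))
                    return;

            for_each_sg(sg, s, mapped_ents, i) {
                    entry = dma_entry_alloc();
                    if (!entry)
                            return;

                    entry->type           = dma_debug_sg;
                    entry->dev            = dev;
                    entry->pfn            = page_to_pfn(sg_page(s));
                    entry->offset         = s->offset;
                    entry->size           = sg_dma_len(s);
                    entry->dev_addr       = sg_dma_address(s);
                    entry->direction      = direction;
                    entry->sg_call_ents   = nents;
                    entry->sg_mapped_ents = mapped_ents;

                    check_sg_segment(dev, s);
                    add_dma_entry(entry);
            }
    }

The matching debug_dma_unmap_sg() (lines 1355-1380) builds one ref per segment and calls get_nr_mapped_entries() on the first iteration to learn how many entries it should expect to remove.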
1336 static int get_nr_mapped_entries(struct device *dev, in get_nr_mapped_entries() argument
1355 void debug_dma_unmap_sg(struct device *dev, struct scatterlist *sglist, in debug_dma_unmap_sg() argument
1368 .dev = dev, in debug_dma_unmap_sg()
1380 mapped_ents = get_nr_mapped_entries(dev, &ref); in debug_dma_unmap_sg()
1398 void debug_dma_alloc_coherent(struct device *dev, size_t size, in debug_dma_alloc_coherent() argument
1419 entry->dev = dev; in debug_dma_alloc_coherent()
1428 void debug_dma_free_coherent(struct device *dev, size_t size, in debug_dma_free_coherent() argument
1433 .dev = dev, in debug_dma_free_coherent()
1451 void debug_dma_map_resource(struct device *dev, phys_addr_t addr, size_t size, in debug_dma_map_resource() argument
1465 entry->dev = dev; in debug_dma_map_resource()
1475 void debug_dma_unmap_resource(struct device *dev, dma_addr_t dma_addr, in debug_dma_unmap_resource() argument
1480 .dev = dev, in debug_dma_unmap_resource()
1492 void debug_dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, in debug_dma_sync_single_for_cpu() argument
1501 ref.dev = dev; in debug_dma_sync_single_for_cpu()
1507 check_sync(dev, &ref, true); in debug_dma_sync_single_for_cpu()
1510 void debug_dma_sync_single_for_device(struct device *dev, in debug_dma_sync_single_for_device() argument
1520 ref.dev = dev; in debug_dma_sync_single_for_device()
1526 check_sync(dev, &ref, false); in debug_dma_sync_single_for_device()
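The sync hooks all follow the same pattern: fill a ref on the stack and call check_sync(), passing true for the *_for_cpu variants and false for *_for_device. For example:

    /*
     * Sketch of one sync-side hook; the *_for_device variant differs only
     * in passing false to check_sync().
     */
    void debug_dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
                                       size_t size, int direction)
    {
            struct dma_debug_entry ref;

            if (unlikely(dma_debug_disabled()))
                    return;

            ref.type         = dma_debug_single;
            ref.dev          = dev;
            ref.dev_addr     = dma_handle;
            ref.size         = size;
            ref.direction    = direction;
            ref.sg_call_ents = 0;

            check_sync(dev, &ref, true);
    }

The scatterlist sync variants (lines 1529-1586) do the same per segment, calling get_nr_mapped_entries() on the first iteration to bound the loop.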
1529 void debug_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in debug_dma_sync_sg_for_cpu() argument
1542 .dev = dev, in debug_dma_sync_sg_for_cpu()
1551 mapped_ents = get_nr_mapped_entries(dev, &ref); in debug_dma_sync_sg_for_cpu()
1556 check_sync(dev, &ref, true); in debug_dma_sync_sg_for_cpu()
1560 void debug_dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in debug_dma_sync_sg_for_device() argument
1573 .dev = dev, in debug_dma_sync_sg_for_device()
1581 mapped_ents = get_nr_mapped_entries(dev, &ref); in debug_dma_sync_sg_for_device()
1586 check_sync(dev, &ref, false); in debug_dma_sync_sg_for_device()
1590 void debug_dma_alloc_pages(struct device *dev, struct page *page, in debug_dma_alloc_pages() argument
1605 entry->dev = dev; in debug_dma_alloc_pages()
1614 void debug_dma_free_pages(struct device *dev, struct page *page, in debug_dma_free_pages() argument
1620 .dev = dev, in debug_dma_free_pages()