Lines matching refs:s (references to the scatterlist cursor "s" in the DMA-debug scatterlist helpers)
1308 struct scatterlist *s; in debug_dma_map_sg() local
1314 for_each_sg(sg, s, nents, i) { in debug_dma_map_sg()
1315 check_for_stack(dev, sg_page(s), s->offset); in debug_dma_map_sg()
1316 if (!PageHighMem(sg_page(s))) in debug_dma_map_sg()
1317 check_for_illegal_area(dev, sg_virt(s), s->length); in debug_dma_map_sg()
1320 for_each_sg(sg, s, mapped_ents, i) { in debug_dma_map_sg()
1327 entry->pfn = page_to_pfn(sg_page(s)); in debug_dma_map_sg()
1328 entry->offset = s->offset; in debug_dma_map_sg()
1329 entry->size = sg_dma_len(s); in debug_dma_map_sg()
1330 entry->dev_addr = sg_dma_address(s); in debug_dma_map_sg()
1335 check_sg_segment(dev, s); in debug_dma_map_sg()
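Read together, the debug_dma_map_sg() hits above imply a two-pass walk: a first for_each_sg() over all nents entries that sanity-checks each segment's backing page, then a second pass over only the mapped_ents entries that records one tracking entry per mapping the DMA API actually produced. A minimal sketch of that shape follows, assuming the file-local struct dma_debug_entry and dma_entry_alloc()/add_dma_entry() helpers; only the calls shown in the listing are taken from the source.

/*
 * Sketch of the loop structure implied by the debug_dma_map_sg() lines
 * above. dma_entry_alloc(), add_dma_entry() and the dma_debug_entry
 * layout are assumptions for illustration, not copied from the listing.
 */
#include <linux/scatterlist.h>
#include <linux/highmem.h>

static void sketch_debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
				    int nents, int mapped_ents)
{
	struct dma_debug_entry *entry;
	struct scatterlist *s;
	int i;

	/* Pass 1: validate every segment handed in by the caller. */
	for_each_sg(sg, s, nents, i) {
		check_for_stack(dev, sg_page(s), s->offset);
		if (!PageHighMem(sg_page(s)))
			check_for_illegal_area(dev, sg_virt(s), s->length);
	}

	/* Pass 2: track only the segments that were actually mapped. */
	for_each_sg(sg, s, mapped_ents, i) {
		entry = dma_entry_alloc();		/* assumed helper */
		if (!entry)
			return;
		entry->pfn      = page_to_pfn(sg_page(s));
		entry->offset   = s->offset;
		entry->size     = sg_dma_len(s);	/* DMA length, not s->length */
		entry->dev_addr = sg_dma_address(s);
		check_sg_segment(dev, s);
		add_dma_entry(entry);			/* assumed helper */
	}
}

Note the asymmetry: the validation pass uses the CPU-side view of each segment (s->offset, s->length, sg_virt()), while the tracking pass records the device-side view (sg_dma_address(), sg_dma_len()), which can differ once the mapping layer has merged segments.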
1363 struct scatterlist *s; in debug_dma_unmap_sg() local
1369 for_each_sg(sglist, s, nelems, i) { in debug_dma_unmap_sg()
1374 .pfn = page_to_pfn(sg_page(s)), in debug_dma_unmap_sg()
1375 .offset = s->offset, in debug_dma_unmap_sg()
1376 .dev_addr = sg_dma_address(s), in debug_dma_unmap_sg()
1377 .size = sg_dma_len(s), in debug_dma_unmap_sg()
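The debug_dma_unmap_sg() hits show the inverse operation: for each segment, a stack-local reference entry is built with designated initializers from the same accessors and then used to look up and retire the tracked mapping. A hedged sketch, assuming a check_unmap() lookup helper that is not part of the listing:

/*
 * Sketch of the per-segment reference built in debug_dma_unmap_sg().
 * The four initializers are taken from the listing; check_unmap() and
 * any remaining fields of struct dma_debug_entry are assumptions.
 */
static void sketch_debug_dma_unmap_sg(struct device *dev,
				      struct scatterlist *sglist, int nelems)
{
	struct scatterlist *s;
	int i;

	for_each_sg(sglist, s, nelems, i) {
		struct dma_debug_entry ref = {
			.pfn      = page_to_pfn(sg_page(s)),
			.offset   = s->offset,
			.dev_addr = sg_dma_address(s),
			.size     = sg_dma_len(s),
		};

		check_unmap(&ref);	/* assumed: find and release the tracked entry */
	}
}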
1534 struct scatterlist *s; in debug_dma_sync_sg_for_cpu() local
1540 for_each_sg(sg, s, nelems, i) { in debug_dma_sync_sg_for_cpu()
1545 .pfn = page_to_pfn(sg_page(s)), in debug_dma_sync_sg_for_cpu()
1546 .offset = s->offset, in debug_dma_sync_sg_for_cpu()
1547 .dev_addr = sg_dma_address(s), in debug_dma_sync_sg_for_cpu()
1548 .size = sg_dma_len(s), in debug_dma_sync_sg_for_cpu()
1566 struct scatterlist *s; in debug_dma_sync_sg_for_device() local
1572 for_each_sg(sg, s, nelems, i) { in debug_dma_sync_sg_for_device()
1577 .pfn = page_to_pfn(sg_page(s)), in debug_dma_sync_sg_for_device()
1578 .offset = s->offset, in debug_dma_sync_sg_for_device()
1579 .dev_addr = sg_dma_address(s), in debug_dma_sync_sg_for_device()
1580 .size = sg_dma_len(s), in debug_dma_sync_sg_for_device()
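debug_dma_sync_sg_for_cpu() and debug_dma_sync_sg_for_device() repeat the same per-segment pattern: rebuild a reference entry from the sg accessors and hand it to a sync checker. Under that assumption the two bodies differ only in direction, so a shared helper captures the common shape; check_sync() and its for_cpu flag are illustrative names, not taken from the listing.

/*
 * Sketch of the pattern common to both sync paths above.
 * check_sync() is an assumed name for the per-entry checker.
 */
static void sketch_debug_dma_sync_sg(struct device *dev, struct scatterlist *sg,
				     int nelems, bool for_cpu)
{
	struct scatterlist *s;
	int i;

	for_each_sg(sg, s, nelems, i) {
		struct dma_debug_entry ref = {
			.pfn      = page_to_pfn(sg_page(s)),
			.offset   = s->offset,
			.dev_addr = sg_dma_address(s),
			.size     = sg_dma_len(s),
		};

		check_sync(dev, &ref, for_cpu);	/* assumed checker */
	}
}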