/kernel/sched/ |
D | topology.c |
 563 static void free_sched_groups(struct sched_group *sg, int free_sgc) in free_sched_groups() argument
 567     if (!sg) in free_sched_groups()
 570     first = sg; in free_sched_groups()
 572         tmp = sg->next; in free_sched_groups()
 574         if (free_sgc && atomic_dec_and_test(&sg->sgc->ref)) in free_sched_groups()
 575             kfree(sg->sgc); in free_sched_groups()
 577         if (atomic_dec_and_test(&sg->ref)) in free_sched_groups()
 578             kfree(sg); in free_sched_groups()
 579         sg = tmp; in free_sched_groups()
 580     } while (sg != first); in free_sched_groups()
[all …]
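The excerpt above is the teardown path for scheduler groups: sched_group objects form a circular singly linked list (the last group points back at the first), and both the group and its shared sched_group_capacity are reference counted, so the walk makes exactly one lap, dropping one reference per node and freeing only on the final put. Below is a minimal userspace mimic of that pattern; the types are hypothetical and plain integer refcounts stand in for the kernel's atomic_dec_and_test() on atomic_t.

#include <stdlib.h>

struct group_capacity { int ref; };

struct group {
    struct group *next;          /* circular: last->next == first */
    struct group_capacity *gc;   /* may be shared between groups  */
    int ref;
};

static void free_groups(struct group *g, int free_gc)
{
    struct group *first, *tmp;

    if (!g)
        return;

    first = g;
    do {
        tmp = g->next;           /* grab next before freeing g */

        if (free_gc && --g->gc->ref == 0)
            free(g->gc);

        if (--g->ref == 0)
            free(g);

        g = tmp;
    } while (g != first);        /* stop after one full lap */
}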
|
D | sched.h |
1548 static inline struct cpumask *sched_group_span(struct sched_group *sg) in sched_group_span() argument
1550     return to_cpumask(sg->cpumask); in sched_group_span()
1556 static inline struct cpumask *group_balance_mask(struct sched_group *sg) in group_balance_mask() argument
1558     return to_cpumask(sg->sgc->cpumask); in group_balance_mask()
1570 extern int group_balance_cpu(struct sched_group *sg);
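These accessors are the sanctioned way at a group's CPU sets: sched_group_span() is the cpumask the group covers, while group_balance_mask() (reached through the shared sgc) is the subset of CPUs allowed to load-balance on the group's behalf. The sketch below shows the intended use of sched_group_span(); the helper itself is hypothetical, and since this sched.h is kernel/sched/sched.h, it would only build inside the scheduler.

/* Hypothetical scheduler-internal helper: walk a group's span and
 * sum a per-CPU runqueue metric. */
static unsigned long group_runnable_sum(struct sched_group *sg)
{
    unsigned long sum = 0;
    int cpu;

    /* sched_group_span(sg) is the cpumask covered by this group */
    for_each_cpu(cpu, sched_group_span(sg))
        sum += cpu_rq(cpu)->nr_running;

    return sum;
}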
|
D | fair.c |
8689 group_smaller_min_cpu_capacity(struct sched_group *sg, struct sched_group *ref) in group_smaller_min_cpu_capacity() argument
8691     return fits_capacity(sg->sgc->min_capacity, ref->sgc->min_capacity); in group_smaller_min_cpu_capacity()
8699 group_smaller_max_cpu_capacity(struct sched_group *sg, struct sched_group *ref) in group_smaller_max_cpu_capacity() argument
8701     return fits_capacity(sg->sgc->max_capacity, ref->sgc->max_capacity); in group_smaller_max_cpu_capacity()
8846         struct sched_group *sg, in update_sd_pick_busiest() argument
8862         (!group_smaller_max_cpu_capacity(sg, sds->local) || in update_sd_pick_busiest()
8893     if (sched_asym_prefer(sg->asym_prefer_cpu, sds->busiest->asym_prefer_cpu)) in update_sd_pick_busiest()
8946         (group_smaller_min_cpu_capacity(sds->local, sg))) in update_sd_pick_busiest()
9300     struct sched_group *sg = env->sd->groups; in update_sd_lb_stats() local
9314     local_group = cpumask_test_cpu(env->dst_cpu, sched_group_span(sg)); in update_sd_lb_stats()
[all …]
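Both group_smaller_*_cpu_capacity() helpers reduce to fits_capacity(), which in fair.c of this vintage is a macro comparing with a fixed ~25% margin, so a group only counts as "smaller" when its capacity sits well below the reference and the busiest-group choice does not flip-flop between near-equal groups. A standalone restatement of the margin test (the kernel version is a macro over unsigned long):

#include <stdbool.h>

/* cap "fits" in max only if cap < max * 1024/1280, i.e. ~80% of max */
static bool fits_capacity(unsigned long cap, unsigned long max)
{
    return cap * 1280 < max * 1024;
}

So group_smaller_min_cpu_capacity(sg, ref) holds only when sg->sgc->min_capacity is below roughly 80% of ref's, not merely strictly less.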
|
/kernel/dma/ |
D | direct.c |
338     struct scatterlist *sg; in dma_direct_sync_sg_for_device() local
341     for_each_sg(sgl, sg, nents, i) { in dma_direct_sync_sg_for_device()
342         phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg)); in dma_direct_sync_sg_for_device()
345             swiotlb_tbl_sync_single(dev, paddr, sg->length, in dma_direct_sync_sg_for_device()
349             arch_sync_dma_for_device(paddr, sg->length, in dma_direct_sync_sg_for_device()
361     struct scatterlist *sg; in dma_direct_sync_sg_for_cpu() local
364     for_each_sg(sgl, sg, nents, i) { in dma_direct_sync_sg_for_cpu()
365         phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg)); in dma_direct_sync_sg_for_cpu()
368         arch_sync_dma_for_cpu(paddr, sg->length, dir); in dma_direct_sync_sg_for_cpu()
371         swiotlb_tbl_sync_single(dev, paddr, sg->length, dir, in dma_direct_sync_sg_for_cpu()
[all …]
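dma-direct's sync helpers walk the scatterlist with for_each_sg(), translating each segment's DMA address back to a physical address and doing the per-direction cache maintenance (plus a swiotlb bounce-buffer copy when the segment was bounced). From a driver's point of view the contract looks like the sketch below, assuming dev, sgl and nents come from an earlier successful dma_map_sg().

#include <linux/dma-mapping.h>

static void process_completed_rx(struct device *dev,
                                 struct scatterlist *sgl, int nents)
{
    /* give the CPU a coherent view of what the device wrote */
    dma_sync_sg_for_cpu(dev, sgl, nents, DMA_FROM_DEVICE);

    /* ... inspect the buffers through the kernel mapping ... */

    /* return ownership to the device for the next transfer */
    dma_sync_sg_for_device(dev, sgl, nents, DMA_FROM_DEVICE);
}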
|
D | virt.c |
42     struct scatterlist *sg; in dma_virt_map_sg() local
44     for_each_sg(sgl, sg, nents, i) { in dma_virt_map_sg()
45         BUG_ON(!sg_page(sg)); in dma_virt_map_sg()
46         sg_dma_address(sg) = (uintptr_t)sg_virt(sg); in dma_virt_map_sg()
47         sg_dma_len(sg) = sg->length; in dma_virt_map_sg()
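dma_virt_ops serves "devices" that are really software (some RDMA providers, for instance): mapping is an identity transform, so after the loop above sg_dma_address() simply holds the kernel virtual address of each segment. A sketch of building a scatterlist such a consumer would then map; buffer sizes and names are made up.

#include <linux/scatterlist.h>
#include <linux/slab.h>

static int build_two_segment_sgl(struct scatterlist *sgl)
{
    void *a = kmalloc(PAGE_SIZE, GFP_KERNEL);
    void *b = kmalloc(PAGE_SIZE, GFP_KERNEL);

    if (!a || !b) {
        kfree(a);
        kfree(b);
        return -ENOMEM;
    }

    sg_init_table(sgl, 2);              /* also sets the end marker */
    sg_set_buf(&sgl[0], a, PAGE_SIZE);
    sg_set_buf(&sgl[1], b, PAGE_SIZE);
    return 0;
}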
|
D | mapping.c |
180 int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg_attrs() argument
192         ents = dma_direct_map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
194         ents = ops->map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
196     debug_dma_map_sg(dev, sg, nents, ents, dir); in dma_map_sg_attrs()
202 void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_unmap_sg_attrs() argument
209     debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs()
211         dma_direct_unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
213         ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
282 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
289         dma_direct_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
[all …]
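dma_map_sg_attrs() is the common entry point: it dispatches either to dma-direct or to the bus's dma_map_ops, then records the mapping with the debug layer. The returned count can be smaller than nents (an IOMMU may coalesce segments) and 0 signals failure; the matching unmap must pass the original nents. A sketch of the usual driver-side call sequence, with the descriptor-ring step left hypothetical:

#include <linux/dma-mapping.h>

static int start_tx(struct device *dev, struct scatterlist *sgl, int nents)
{
    struct scatterlist *sg;
    int i, mapped;

    mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
    if (!mapped)
        return -ENOMEM;             /* 0 means the mapping failed */

    /* program hardware from the *mapped* count, not nents */
    for_each_sg(sgl, sg, mapped, i)
        ; /* e.g. feed sg_dma_address(sg) / sg_dma_len(sg)
             into a (hypothetical) descriptor ring */

    /* ... later, on completion, unmap with the original nents: */
    dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
    return 0;
}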
|
D | debug.h |
 19 extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
 47                     struct scatterlist *sg,
 51                     struct scatterlist *sg,
 65 static inline void debug_dma_map_sg(struct device *dev, struct scatterlist *sg, in debug_dma_map_sg() argument
111                     struct scatterlist *sg, in debug_dma_sync_sg_for_cpu() argument
117                     struct scatterlist *sg, in debug_dma_sync_sg_for_device() argument
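The header pairs real prototypes with static inline no-op stubs so callers of the debug hooks need no #ifdef CONFIG_DMA_API_DEBUG of their own; with the option off, the stubs compile away entirely. The pattern in miniature (the config symbol and hook below are illustrative, not the real names):

#ifdef CONFIG_EXAMPLE_DEBUG
extern void example_debug_hook(struct device *dev, int arg);
#else
static inline void example_debug_hook(struct device *dev, int arg)
{
}
#endif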
|
D | debug.c |
1173 static void check_sg_segment(struct device *dev, struct scatterlist *sg) in check_sg_segment() argument
1183     if (sg->length > max_seg) in check_sg_segment()
1185             sg->length, max_seg); in check_sg_segment()
1191     start = sg_dma_address(sg); in check_sg_segment()
1192     end = start + sg_dma_len(sg) - 1; in check_sg_segment()
1304 void debug_dma_map_sg(struct device *dev, struct scatterlist *sg, in debug_dma_map_sg() argument
1314     for_each_sg(sg, s, nents, i) { in debug_dma_map_sg()
1320     for_each_sg(sg, s, mapped_ents, i) { in debug_dma_map_sg()
1531 void debug_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in debug_dma_sync_sg_for_cpu() argument
1540     for_each_sg(sg, s, nelems, i) { in debug_dma_sync_sg_for_cpu()
[all …]
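check_sg_segment() validates each mapped segment against two device limits: the maximum segment length from dma_get_max_seg_size(), and the boundary mask from dma_get_seg_boundary(). A segment must not cross a boundary-mask window, which happens exactly when its first and last byte differ in some bit above the mask. The two checks restated as a standalone sketch:

#include <stdbool.h>
#include <stdint.h>

static bool segment_ok(uint64_t start, uint64_t len,
                       uint64_t max_seg, uint64_t boundary_mask)
{
    uint64_t end = start + len - 1;

    if (len > max_seg)
        return false;    /* exceeds the device's max segment size */

    /* same boundary window iff all bits above the mask match */
    return ((start ^ end) & ~boundary_mask) == 0;
}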
|