/kernel/sched/
topology.c — free_sched_groups():
  577  static void free_sched_groups(struct sched_group *sg, int free_sgc)
  581      if (!sg)
  584      first = sg;
  586          tmp = sg->next;
  588          if (free_sgc && atomic_dec_and_test(&sg->sgc->ref))
  589              kfree(sg->sgc);
  591          if (atomic_dec_and_test(&sg->ref))
  592              kfree(sg);
  593          sg = tmp;
  594      } while (sg != first);
  [all …]
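The excerpt above walks a circular singly linked list of sched_groups exactly once, dropping a reference count per node and freeing a node (and optionally its shared sgc) only when the count hits zero. A minimal, self-contained sketch of the same pattern, using hypothetical userspace types rather than the kernel's:

#include <stdatomic.h>
#include <stdlib.h>

struct node {
	struct node *next;	/* circular: the last node points back to the first */
	atomic_int ref;
};

static void free_ring(struct node *n)
{
	struct node *first, *tmp;

	if (!n)
		return;

	first = n;
	do {
		tmp = n->next;	/* grab ->next before the node may be freed */
		/* atomic_fetch_sub() returns the old value: old == 1 means we
		 * dropped the last reference, mirroring atomic_dec_and_test() */
		if (atomic_fetch_sub(&n->ref, 1) == 1)
			free(n);
		n = tmp;
	} while (n != first);
}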
sched.h:
  1189  static inline struct cpumask *sched_group_span(struct sched_group *sg);
  1875  static inline struct cpumask *sched_group_span(struct sched_group *sg)
  1877      return to_cpumask(sg->cpumask);
  1883  static inline struct cpumask *group_balance_mask(struct sched_group *sg)
  1885      return to_cpumask(sg->sgc->cpumask);
  1897  extern int group_balance_cpu(struct sched_group *sg);
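sched_group_span() returns the cpumask of CPUs a group covers, while group_balance_mask() returns the subset allowed to run load balancing for it. A minimal sketch of the usual consumer pattern, assuming kernel context and a valid sg:

int cpu;

/* visit every CPU covered by the group, e.g. to sum per-CPU capacity */
for_each_cpu(cpu, sched_group_span(sg)) {
	/* per-CPU work here */
}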
fair.c — update_sd_pick_busiest():
  9150      struct sched_group *sg,
  9166      (!capacity_greater(capacity_of(env->dst_cpu), sg->sgc->max_capacity) ||
  9197      if (sched_asym_prefer(sg->asym_prefer_cpu, sds->busiest->asym_prefer_cpu))
  9250      (capacity_greater(sg->sgc->min_capacity, capacity_of(env->dst_cpu))))

fair.c — update_sd_lb_stats():
  9693      struct sched_group *sg = env->sd->groups;
  9703      local_group = cpumask_test_cpu(env->dst_cpu, sched_group_span(sg));
  9705      sds->local = sg;
  9709          time_after_eq(jiffies, sg->sgc->next_update))
  9713      update_sg_lb_stats(env, sg, sgs, &sg_status);
  9719      if (update_sd_pick_busiest(env, sds, sg, sgs)) {
  [all …]
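update_sd_lb_stats() starts at env->sd->groups and follows ->next around the circular group list, gathering stats for each group and letting update_sd_pick_busiest() decide whether it beats the current busiest candidate. A hedged sketch of that loop shape, with the per-group work elided:

struct sched_group *sg = env->sd->groups;

do {
	/* update_sg_lb_stats(env, sg, sgs, ...): gather this group's stats */
	/* if (update_sd_pick_busiest(env, sds, sg, sgs)) remember sg */
	sg = sg->next;
} while (sg != env->sd->groups);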
/kernel/dma/
direct.c — dma_direct_sync_sg_for_device():
  382      struct scatterlist *sg;
  385      for_each_sg(sgl, sg, nents, i) {
  386          phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg));
  389          swiotlb_sync_single_for_device(dev, paddr, sg->length,
  393          arch_sync_dma_for_device(paddr, sg->length,

direct.c — dma_direct_sync_sg_for_cpu():
  405      struct scatterlist *sg;
  408      for_each_sg(sgl, sg, nents, i) {
  409          phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg));
  412          arch_sync_dma_for_cpu(paddr, sg->length, dir);
  415          swiotlb_sync_single_for_cpu(dev, paddr, sg->length,
  [all …]
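Both sync helpers iterate a mapped scatterlist with for_each_sg(), translating each segment's DMA address back to a physical address before syncing it. The same idiom is how a driver typically walks its mapped list; a minimal sketch, where sgl and nents are assumed to come from an earlier dma_map_sg():

struct scatterlist *sg;
int i;

for_each_sg(sgl, sg, nents, i) {
	dma_addr_t addr = sg_dma_address(sg);	/* bus address of this segment */
	unsigned int len = sg_dma_len(sg);	/* mapped length of this segment */
	/* program one hardware descriptor per segment here */
}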
mapping.c — __dma_map_sg_attrs():
  180  static int __dma_map_sg_attrs(struct device *dev, struct scatterlist *sg,
  192          arch_dma_map_sg_direct(dev, sg, nents))
  193          ents = dma_direct_map_sg(dev, sg, nents, dir, attrs);
  195          ents = ops->map_sg(dev, sg, nents, dir, attrs);
  198          debug_dma_map_sg(dev, sg, nents, ents, dir, attrs);

mapping.c — dma_map_sg_attrs():
  223  unsigned int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg,
  228      ret = __dma_map_sg_attrs(dev, sg, nents, dir, attrs);

mapping.c — dma_unmap_sg_attrs():
  272  void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg,
  279      debug_dma_unmap_sg(dev, sg, nents, dir);
  281          arch_dma_unmap_sg_direct(dev, sg, nents))
  [all …]
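dma_map_sg_attrs() returns the number of DMA segments actually mapped (0 on failure), which may be smaller than nents if the IOMMU merged segments. A hedged sketch of the standard driver pattern built on this API; the device-programming step is a hypothetical placeholder:

static int xfer_to_device(struct device *dev, struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	int mapped, i;

	mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
	if (mapped == 0)
		return -ENOMEM;	/* dma_map_sg() returns 0 on failure */

	/* only the first 'mapped' entries are valid; it may be < nents */
	for_each_sg(sgl, sg, mapped, i) {
		/* hypothetical: queue sg_dma_address(sg) / sg_dma_len(sg) */
	}

	/* unmap with the ORIGINAL nents, not the returned count */
	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}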
debug.h:
  20   extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
  51       struct scatterlist *sg,
  55       struct scatterlist *sg,
  70   static inline void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
  119      struct scatterlist *sg,    (in debug_dma_sync_sg_for_cpu() stub)
  125      struct scatterlist *sg,    (in debug_dma_sync_sg_for_device() stub)
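The header pairs each extern declaration with an empty static inline stub so call sites compile away when CONFIG_DMA_API_DEBUG is disabled; that is why debug_dma_map_sg() appears twice above. A sketch of the pattern's shape, with the parameter list inferred from the call in mapping.c above (treat the exact signature as an assumption):

#ifdef CONFIG_DMA_API_DEBUG
extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
			     int nents, int mapped_ents, int direction,
			     unsigned long attrs);
#else
static inline void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
				    int nents, int mapped_ents, int direction,
				    unsigned long attrs)
{
}
#endif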
debug.c — check_sg_segment():
  1161  static void check_sg_segment(struct device *dev, struct scatterlist *sg)
  1171      if (sg->length > max_seg)
  1173          sg->length, max_seg);
  1179      start = sg_dma_address(sg);
  1180      end = start + sg_dma_len(sg) - 1;

debug.c — debug_dma_map_sg():
  1293  void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
  1304      for_each_sg(sg, s, nents, i) {
  1310      for_each_sg(sg, s, mapped_ents, i) {

debug.c — debug_dma_sync_sg_for_cpu():
  1523  void debug_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
  1532      for_each_sg(sg, s, nelems, i) {
  [all …]
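check_sg_segment() validates each mapped segment against the device's per-segment limits. Drivers can query the same limits up front; a hedged sketch of the counterpart check when building a scatterlist:

unsigned int max_seg = dma_get_max_seg_size(dev);	/* per-device segment size cap */
unsigned long boundary = dma_get_seg_boundary(dev);	/* address boundary mask */

/* split buffers so no segment exceeds max_seg or crosses (boundary + 1) */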