Home
last modified time | relevance | path

Searched refs:sg (Results 1 – 7 of 7) sorted by relevance

/kernel/sched/
topology.c:597 static void free_sched_groups(struct sched_group *sg, int free_sgc) in free_sched_groups() argument
601 if (!sg) in free_sched_groups()
604 first = sg; in free_sched_groups()
606 tmp = sg->next; in free_sched_groups()
608 if (free_sgc && atomic_dec_and_test(&sg->sgc->ref)) in free_sched_groups()
609 kfree(sg->sgc); in free_sched_groups()
611 if (atomic_dec_and_test(&sg->ref)) in free_sched_groups()
612 kfree(sg); in free_sched_groups()
613 sg = tmp; in free_sched_groups()
614 } while (sg != first); in free_sched_groups()
[all …]
sched.h:1238 static inline struct cpumask *sched_group_span(struct sched_group *sg);
1895 static inline struct cpumask *sched_group_span(struct sched_group *sg) in sched_group_span() argument
1897 return to_cpumask(sg->cpumask); in sched_group_span()
1903 static inline struct cpumask *group_balance_mask(struct sched_group *sg) in group_balance_mask() argument
1905 return to_cpumask(sg->sgc->cpumask); in group_balance_mask()
1908 extern int group_balance_cpu(struct sched_group *sg);
fair.c:9264 struct sched_group *sg) in asym_smt_can_pull_tasks() argument
9271 sg_is_smt = sg->flags & SD_SHARE_CPUCAPACITY; in asym_smt_can_pull_tasks()
9290 return sched_asym_prefer(dst_cpu, sg->asym_prefer_cpu); in asym_smt_can_pull_tasks()
9301 return sched_asym_prefer(dst_cpu, sg->asym_prefer_cpu); in asym_smt_can_pull_tasks()
9312 return sched_asym_prefer(dst_cpu, sg->asym_prefer_cpu); in asym_smt_can_pull_tasks()
9448 struct sched_group *sg, in update_sd_pick_busiest() argument
9465 (!capacity_greater(capacity_of(env->dst_cpu), sg->sgc->max_capacity) || in update_sd_pick_busiest()
9496 if (sched_asym_prefer(sg->asym_prefer_cpu, sds->busiest->asym_prefer_cpu)) in update_sd_pick_busiest()
9549 (capacity_greater(sg->sgc->min_capacity, capacity_of(env->dst_cpu)))) in update_sd_pick_busiest()
9997 struct sched_group *sg = env->sd->groups; in update_sd_lb_stats() local
[all …]
/kernel/dma/
direct.c:417 struct scatterlist *sg; in dma_direct_sync_sg_for_device() local
420 for_each_sg(sgl, sg, nents, i) { in dma_direct_sync_sg_for_device()
421 phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg)); in dma_direct_sync_sg_for_device()
424 swiotlb_sync_single_for_device(dev, paddr, sg->length, in dma_direct_sync_sg_for_device()
428 arch_sync_dma_for_device(paddr, sg->length, in dma_direct_sync_sg_for_device()
440 struct scatterlist *sg; in dma_direct_sync_sg_for_cpu() local
443 for_each_sg(sgl, sg, nents, i) { in dma_direct_sync_sg_for_cpu()
444 phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg)); in dma_direct_sync_sg_for_cpu()
447 arch_sync_dma_for_cpu(paddr, sg->length, dir); in dma_direct_sync_sg_for_cpu()
450 swiotlb_sync_single_for_cpu(dev, paddr, sg->length, in dma_direct_sync_sg_for_cpu()
[all …]
mapping.c:182 static int __dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, in __dma_map_sg_attrs() argument
194 arch_dma_map_sg_direct(dev, sg, nents)) in __dma_map_sg_attrs()
195 ents = dma_direct_map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
197 ents = ops->map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
200 kmsan_handle_dma_sg(sg, nents, dir); in __dma_map_sg_attrs()
201 debug_dma_map_sg(dev, sg, nents, ents, dir, attrs); in __dma_map_sg_attrs()
227 unsigned int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_map_sg_attrs() argument
232 ret = __dma_map_sg_attrs(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
279 void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_unmap_sg_attrs() argument
286 debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs()
[all …]
debug.h:20 extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
51 struct scatterlist *sg,
55 struct scatterlist *sg,
70 static inline void debug_dma_map_sg(struct device *dev, struct scatterlist *sg, in debug_dma_map_sg() argument
119 struct scatterlist *sg, in debug_dma_sync_sg_for_cpu() argument
125 struct scatterlist *sg, in debug_dma_sync_sg_for_device() argument
debug.c:1160 static void check_sg_segment(struct device *dev, struct scatterlist *sg) in check_sg_segment() argument
1170 if (sg->length > max_seg) in check_sg_segment()
1172 sg->length, max_seg); in check_sg_segment()
1178 start = sg_dma_address(sg); in check_sg_segment()
1179 end = start + sg_dma_len(sg) - 1; in check_sg_segment()
1292 void debug_dma_map_sg(struct device *dev, struct scatterlist *sg, in debug_dma_map_sg() argument
1303 for_each_sg(sg, s, nents, i) { in debug_dma_map_sg()
1309 for_each_sg(sg, s, mapped_ents, i) { in debug_dma_map_sg()
1522 void debug_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in debug_dma_sync_sg_for_cpu() argument
1531 for_each_sg(sg, s, nelems, i) { in debug_dma_sync_sg_for_cpu()
[all …]