Searched refs:sg (Results 1 – 6 of 6) sorted by relevance
/kernel/sched/
topology.c
  561  static void free_sched_groups(struct sched_group *sg, int free_sgc)	in free_sched_groups() argument
  565  	if (!sg)	in free_sched_groups()
  568  	first = sg;	in free_sched_groups()
  570  		tmp = sg->next;	in free_sched_groups()
  572  		if (free_sgc && atomic_dec_and_test(&sg->sgc->ref))	in free_sched_groups()
  573  			kfree(sg->sgc);	in free_sched_groups()
  575  		if (atomic_dec_and_test(&sg->ref))	in free_sched_groups()
  576  			kfree(sg);	in free_sched_groups()
  577  		sg = tmp;	in free_sched_groups()
  578  	} while (sg != first);	in free_sched_groups()
  [all …]
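The topology.c hit above is the teardown path for a circular, reference-counted ring of sched_groups: it saves sg->next before the current node can be freed, drops a reference on each node, and stops once the walk comes back around to the first node. Below is a minimal userspace sketch of the same one-pass ring-free pattern; struct group, free_groups() and the C11 atomics are illustrative stand-ins for the kernel's types and atomic_dec_and_test():

    #include <stdatomic.h>
    #include <stdlib.h>

    struct group {
            struct group *next;     /* circular: last->next == first */
            atomic_int ref;
    };

    static void free_groups(struct group *g)
    {
            struct group *tmp, *first;

            if (!g)
                    return;

            first = g;
            do {
                    tmp = g->next;  /* grab before g may be freed */
                    /* fetch_sub returns the old value: 1 means we
                     * just dropped the last reference */
                    if (atomic_fetch_sub(&g->ref, 1) == 1)
                            free(g);
                    g = tmp;
            } while (g != first);
    }

    int main(void)
    {
            /* two-node ring, one reference held on each node */
            struct group *a = malloc(sizeof(*a)), *b = malloc(sizeof(*b));

            a->next = b;
            b->next = a;
            atomic_init(&a->ref, 1);
            atomic_init(&b->ref, 1);
            free_groups(a);         /* frees both nodes */
            return 0;
    }
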
fair.c
  8267  group_smaller_min_cpu_capacity(struct sched_group *sg, struct sched_group *ref)	in group_smaller_min_cpu_capacity() argument
  8269  	return fits_capacity(sg->sgc->min_capacity, ref->sgc->min_capacity);	in group_smaller_min_cpu_capacity()
  8277  group_smaller_max_cpu_capacity(struct sched_group *sg, struct sched_group *ref)	in group_smaller_max_cpu_capacity() argument
  8279  	return fits_capacity(sg->sgc->max_capacity, ref->sgc->max_capacity);	in group_smaller_max_cpu_capacity()
  8398  		      struct sched_group *sg,	in update_sd_pick_busiest() argument
  8410  	    (!group_smaller_max_cpu_capacity(sg, sds->local) ||	in update_sd_pick_busiest()
  8433  	    group_smaller_min_cpu_capacity(sds->local, sg))	in update_sd_pick_busiest()
  8457  	    sched_asym_prefer(env->dst_cpu, sg->asym_prefer_cpu)) {	in update_sd_pick_busiest()
  8463  			       sg->asym_prefer_cpu))	in update_sd_pick_busiest()
  8508  	struct sched_group *sg = env->sd->groups;	in update_sd_lb_stats() local
  [all …]
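Both group_smaller_*_cpu_capacity() helpers in the fair.c hit reduce to fits_capacity(), which in kernels of this vintage is the macro ((cap) * 1280 < (max) * 1024): a capacity only "fits" if it stays below 80% of the reference, leaving roughly 20% headroom. A standalone sketch of that arithmetic; the capacity values are made up for illustration:

    #include <stdbool.h>
    #include <stdio.h>

    /* same arithmetic as the kernel macro: fits iff cap < 80% of max */
    static bool fits_capacity(unsigned long cap, unsigned long max)
    {
            return cap * 1280 < max * 1024;
    }

    int main(void)
    {
            /* e.g. a LITTLE core's min_capacity vs a big core's */
            printf("430 fits 1024: %d\n", fits_capacity(430, 1024));  /* 1 */
            printf("900 fits 1024: %d\n", fits_capacity(900, 1024));  /* 0 */
            return 0;
    }
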
sched.h
  1434  static inline struct cpumask *sched_group_span(struct sched_group *sg)	in sched_group_span() argument
  1436  	return to_cpumask(sg->cpumask);	in sched_group_span()
  1442  static inline struct cpumask *group_balance_mask(struct sched_group *sg)	in group_balance_mask() argument
  1444  	return to_cpumask(sg->sgc->cpumask);	in group_balance_mask()
  1456  extern int group_balance_cpu(struct sched_group *sg);
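The sched.h hit shows the accessor pattern: the CPU span lives as a bitmap at the tail of the structure, and to_cpumask() merely casts the unsigned long array, so sched_group_span() and group_balance_mask() are the sanctioned ways to reach it. A simplified userspace stand-in for the same flexible-array accessor idiom, with illustrative names:

    #include <stdio.h>
    #include <stdlib.h>

    struct group {
            int id;
            unsigned long span[];   /* flexible tail, like sched_group::cpumask */
    };

    /* thin accessor, analogous to sched_group_span() */
    static inline unsigned long *group_span(struct group *g)
    {
            return g->span;
    }

    int main(void)
    {
            struct group *g = calloc(1, sizeof(*g) + sizeof(unsigned long));

            *group_span(g) = 0x5UL; /* CPUs 0 and 2 in the span */
            printf("span word: %#lx\n", *group_span(g));
            free(g);
            return 0;
    }
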
/kernel/dma/
direct.c
  245  	struct scatterlist *sg;	in dma_direct_sync_sg_for_device() local
  248  	for_each_sg(sgl, sg, nents, i) {	in dma_direct_sync_sg_for_device()
  249  		phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg));	in dma_direct_sync_sg_for_device()
  252  			swiotlb_tbl_sync_single(dev, paddr, sg->length,	in dma_direct_sync_sg_for_device()
  256  			arch_sync_dma_for_device(paddr, sg->length,	in dma_direct_sync_sg_for_device()
  284  	struct scatterlist *sg;	in dma_direct_sync_sg_for_cpu() local
  287  	for_each_sg(sgl, sg, nents, i) {	in dma_direct_sync_sg_for_cpu()
  288  		phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg));	in dma_direct_sync_sg_for_cpu()
  291  		arch_sync_dma_for_cpu(paddr, sg->length, dir);	in dma_direct_sync_sg_for_cpu()
  294  			swiotlb_tbl_sync_single(dev, paddr, sg->length, dir,	in dma_direct_sync_sg_for_cpu()
  [all …]
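Both direct.c sync paths walk every segment with for_each_sg() and act on the segment's physical address and length, with mirrored ordering: toward the device, the swiotlb bounce buffer is synced before the CPU caches (lines 252 then 256); toward the CPU, caches come first (291 then 294). A stub sketch of that loop shape only, with no-op helpers standing in for swiotlb and arch cache maintenance:

    #include <stdint.h>

    struct seg {
            uint64_t paddr;         /* stand-in for dma_to_phys(...) */
            unsigned int length;
    };

    static void bounce_sync(uint64_t paddr, unsigned int len)       { /* stub */ }
    static void cache_clean(uint64_t paddr, unsigned int len)       { /* stub */ }
    static void cache_invalidate(uint64_t paddr, unsigned int len)  { /* stub */ }

    static void sync_segs_for_device(struct seg *segs, int nents)
    {
            for (int i = 0; i < nents; i++) {
                    bounce_sync(segs[i].paddr, segs[i].length);     /* swiotlb first */
                    cache_clean(segs[i].paddr, segs[i].length);     /* then caches */
            }
    }

    static void sync_segs_for_cpu(struct seg *segs, int nents)
    {
            for (int i = 0; i < nents; i++) {
                    cache_invalidate(segs[i].paddr, segs[i].length); /* caches first */
                    bounce_sync(segs[i].paddr, segs[i].length);      /* then swiotlb */
            }
    }

    int main(void)
    {
            struct seg segs[1] = { { .paddr = 0x80000000ULL, .length = 4096 } };

            sync_segs_for_device(segs, 1);
            sync_segs_for_cpu(segs, 1);
            return 0;
    }
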
virt.c
  42  	struct scatterlist *sg;	in dma_virt_map_sg() local
  44  	for_each_sg(sgl, sg, nents, i) {	in dma_virt_map_sg()
  45  		BUG_ON(!sg_page(sg));	in dma_virt_map_sg()
  46  		sg_dma_address(sg) = (uintptr_t)sg_virt(sg);	in dma_virt_map_sg()
  47  		sg_dma_len(sg) = sg->length;	in dma_virt_map_sg()
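dma_virt_map_sg() in the virt.c hit is about the simplest scatterlist mapper possible: each entry's DMA address is just its kernel virtual address, and its DMA length is its byte length. A userspace sketch of the same loop; the struct and the for_each_sg() macro below are simplified stand-ins (the kernel's macro follows sg_next() and so also handles chained tables, which this plain array walk does not):

    #include <stdint.h>
    #include <stdio.h>

    struct scatterlist {
            void *vaddr;            /* stand-in for sg_virt(sg) */
            unsigned int length;
            uintptr_t dma_address;
            unsigned int dma_length;
    };

    /* simplified: plain array walk, no chaining */
    #define for_each_sg(sgl, sg, nents, i) \
            for ((i) = 0, (sg) = (sgl); (i) < (nents); (i)++, (sg)++)

    static int map_sg(struct scatterlist *sgl, int nents)
    {
            struct scatterlist *sg;
            int i;

            for_each_sg(sgl, sg, nents, i) {
                    sg->dma_address = (uintptr_t)sg->vaddr; /* identity "mapping" */
                    sg->dma_length = sg->length;
            }
            return nents;
    }

    int main(void)
    {
            static char buf[2][64];
            struct scatterlist sgl[2] = {
                    { .vaddr = buf[0], .length = sizeof(buf[0]) },
                    { .vaddr = buf[1], .length = sizeof(buf[1]) },
            };
            int n = map_sg(sgl, 2);

            printf("mapped %d segments, first at %#lx\n",
                   n, (unsigned long)sgl[0].dma_address);
            return 0;
    }
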
debug.c
  1214  static void check_sg_segment(struct device *dev, struct scatterlist *sg)	in check_sg_segment() argument
  1224  	if (sg->length > max_seg)	in check_sg_segment()
  1226  		       sg->length, max_seg);	in check_sg_segment()
  1232  	start = sg_dma_address(sg);	in check_sg_segment()
  1233  	end = start + sg_dma_len(sg) - 1;	in check_sg_segment()
  1347  void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,	in debug_dma_map_sg() argument
  1357  	for_each_sg(sg, s, nents, i) {	in debug_dma_map_sg()
  1363  	for_each_sg(sg, s, mapped_ents, i) {	in debug_dma_map_sg()
  1580  void debug_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,	in debug_dma_sync_sg_for_cpu() argument
  1589  	for_each_sg(sg, s, nelems, i) {	in debug_dma_sync_sg_for_cpu()
  [all …]
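check_sg_segment() in the debug.c hit enforces two per-segment device limits: the length must not exceed the device's maximum segment size (line 1224), and the segment must not cross the device's segment boundary (the start/end computed at lines 1232 and 1233 feed that test). The boundary check is an XOR trick: start and end can only differ in the bits above the boundary mask if they fall in different boundary-sized windows. A standalone sketch; the 64 KiB / 4 GiB limits are assumed typical values, not read from any device:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    static bool sg_segment_ok(uint64_t start, uint32_t len,
                              uint32_t max_seg, uint64_t boundary_mask)
    {
            uint64_t end = start + len - 1; /* inclusive, as in check_sg_segment() */

            if (len > max_seg)
                    return false;           /* longer than the device allows */

            /* bits above the mask differ => segment crosses a boundary window */
            return ((start ^ end) & ~boundary_mask) == 0;
    }

    int main(void)
    {
            uint32_t max_seg = 0x10000;             /* 64 KiB segments */
            uint64_t boundary = 0xffffffffULL;      /* 4 GiB boundary mask */

            /* fits entirely inside one window */
            printf("%d\n", sg_segment_ok(0x1000, 0x1000, max_seg, boundary));
            /* straddles the 4 GiB line: rejected */
            printf("%d\n", sg_segment_ok(0xfffff000ULL, 0x2000, max_seg, boundary));
            return 0;
    }
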