
Lines Matching refs:entry

227 static void node_affinity_destroy(struct hfi1_affinity_node *entry)  in node_affinity_destroy()  argument
229 free_percpu(entry->comp_vect_affinity); in node_affinity_destroy()
230 kfree(entry); in node_affinity_destroy()
236 struct hfi1_affinity_node *entry; in node_affinity_destroy_all() local
240 entry = list_entry(pos, struct hfi1_affinity_node, in node_affinity_destroy_all()
243 node_affinity_destroy(entry); in node_affinity_destroy_all()
251 struct hfi1_affinity_node *entry; in node_affinity_allocate() local
253 entry = kzalloc(sizeof(*entry), GFP_KERNEL); in node_affinity_allocate()
254 if (!entry) in node_affinity_allocate()
256 entry->node = node; in node_affinity_allocate()
257 entry->comp_vect_affinity = alloc_percpu(u16); in node_affinity_allocate()
258 INIT_LIST_HEAD(&entry->list); in node_affinity_allocate()
260 return entry; in node_affinity_allocate()
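These hits all come from the hfi1 InfiniBand driver's NUMA affinity code (the function names match drivers/infiniband/hw/hfi1/affinity.c). The fragments at file lines 227-260 are the allocation/teardown pair for the per-node entry. Below is a minimal sketch of how those pieces fit together, reconstructed from the matched lines only; the struct layout, the global list head, and the locking the real driver takes around the list are assumptions.

#include <linux/cpumask.h>
#include <linux/list.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/types.h>

/* Assumed: a CPU pool plus "already handed out" bookkeeping
 * (see the gen/used ternaries at file lines 1117-1122). */
struct cpu_mask_set {
        struct cpumask mask;
        struct cpumask used;
        u32 gen;
};

/* Assumed shape of the per-NUMA-node entry; only the fields the
 * matched lines actually touch. */
struct hfi1_affinity_node {
        int node;                          /* NUMA node id (line 256) */
        u16 __percpu *comp_vect_affinity;  /* per-CPU use counts (line 257) */
        struct cpu_mask_set def_intr;      /* SDMA/default interrupts */
        struct cpu_mask_set rcv_intr;      /* receive-context interrupts */
        struct cpumask general_intr_mask;  /* general-interrupt CPU(s) */
        struct cpumask comp_vect_mask;     /* completion-vector CPUs */
        struct list_head list;             /* linkage (line 258) */
};

/* Assumed global holding one entry per NUMA node. */
static struct {
        struct list_head list;
} node_affinity = { .list = LIST_HEAD_INIT(node_affinity.list) };

static void node_affinity_destroy(struct hfi1_affinity_node *entry)
{
        free_percpu(entry->comp_vect_affinity);   /* line 229 */
        kfree(entry);                             /* line 230 */
}

static void node_affinity_destroy_all(void)
{
        struct list_head *pos, *q;
        struct hfi1_affinity_node *entry;

        /* _safe walk: each entry is unlinked and freed mid-iteration. */
        list_for_each_safe(pos, q, &node_affinity.list) {
                entry = list_entry(pos, struct hfi1_affinity_node, list);
                list_del(pos);
                node_affinity_destroy(entry);     /* line 243 */
        }
}

static struct hfi1_affinity_node *node_affinity_allocate(int node)
{
        struct hfi1_affinity_node *entry;

        entry = kzalloc(sizeof(*entry), GFP_KERNEL);
        if (!entry)
                return NULL;              /* caller checks NULL (line 655) */
        entry->node = node;
        entry->comp_vect_affinity = alloc_percpu(u16);
        INIT_LIST_HEAD(&entry->list);

        return entry;
}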
267 static void node_affinity_add_tail(struct hfi1_affinity_node *entry) in node_affinity_add_tail() argument
269 list_add_tail(&entry->list, &node_affinity.list); in node_affinity_add_tail()
276 struct hfi1_affinity_node *entry; in node_affinity_lookup() local
279 entry = list_entry(pos, struct hfi1_affinity_node, list); in node_affinity_lookup()
280 if (entry->node == node) in node_affinity_lookup()
281 return entry; in node_affinity_lookup()
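File lines 267-281 are the list maintenance pair: node_affinity_add_tail() appends under the caller's lock, and node_affinity_lookup() does a linear scan keyed on the NUMA node id. Every consumer below (lines 503, 647, 764, 795, 891, 971, 1115) goes through this lookup. A sketch against the assumed node_affinity global above:

/* Append a node entry; assumed to be called with the list lock held. */
static void node_affinity_add_tail(struct hfi1_affinity_node *entry)
{
        list_add_tail(&entry->list, &node_affinity.list);
}

/* Linear scan by NUMA node id; NULL when the node has no entry yet. */
static struct hfi1_affinity_node *node_affinity_lookup(int node)
{
        struct list_head *pos;
        struct hfi1_affinity_node *entry;

        list_for_each(pos, &node_affinity.list) {
                entry = list_entry(pos, struct hfi1_affinity_node, list);
                if (entry->node == node)
                        return entry;
        }
        return NULL;    /* assumed fall-through; every caller checks !entry */
}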
365 struct hfi1_affinity_node *entry, in _dev_comp_vect_cpu_get() argument
390 &entry->def_intr.used); in _dev_comp_vect_cpu_get()
444 struct hfi1_affinity_node *entry) in _dev_comp_vect_mappings_create() argument
472 cpu = _dev_comp_vect_cpu_get(dd, entry, non_intr_cpus, in _dev_comp_vect_mappings_create()
500 struct hfi1_affinity_node *entry; in hfi1_comp_vectors_set_up() local
503 entry = node_affinity_lookup(dd->node); in hfi1_comp_vectors_set_up()
504 if (!entry) { in hfi1_comp_vectors_set_up()
508 ret = _dev_comp_vect_mappings_create(dd, entry); in hfi1_comp_vectors_set_up()
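Lines 500-508 show the standard consumer shape: resolve the device's node entry, fail if device affinity was never set up, then hand the entry to _dev_comp_vect_mappings_create() to bind each completion vector to a CPU. A hedged sketch; the stand-in hfi1_devdata struct, the -EINVAL value, and the locking comments are assumptions not visible in the hits:

/* Stand-in for the driver's device struct; only ->node is used here. */
struct hfi1_devdata {
        int node;       /* NUMA node the device is attached to */
};

int _dev_comp_vect_mappings_create(struct hfi1_devdata *dd,
                                   struct hfi1_affinity_node *entry);

static int hfi1_comp_vectors_set_up(struct hfi1_devdata *dd)
{
        int ret;
        struct hfi1_affinity_node *entry;

        /* assumed: list lock held across lookup and use */
        entry = node_affinity_lookup(dd->node);          /* line 503 */
        if (!entry) {                                    /* line 504 */
                ret = -EINVAL;                           /* assumed errno */
                goto unlock;
        }
        ret = _dev_comp_vect_mappings_create(dd, entry); /* line 508 */
unlock:
        /* assumed: drop the list lock */
        return ret;
}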
537 struct hfi1_affinity_node *entry, in _dev_comp_vect_cpu_mask_init() argument
553 if (cpumask_weight(&entry->comp_vect_mask) == 1) { in _dev_comp_vect_cpu_mask_init()
559 cpumask_weight(&entry->comp_vect_mask) / in _dev_comp_vect_cpu_mask_init()
568 cpumask_weight(&entry->comp_vect_mask) % in _dev_comp_vect_cpu_mask_init()
577 curr_cpu = per_cpu_affinity_get(&entry->comp_vect_mask, in _dev_comp_vect_cpu_mask_init()
578 entry->comp_vect_affinity); in _dev_comp_vect_cpu_mask_init()
594 per_cpu_affinity_put_max(&entry->comp_vect_mask, in _dev_comp_vect_cpu_mask_init()
595 entry->comp_vect_affinity); in _dev_comp_vect_cpu_mask_init()
604 struct hfi1_affinity_node *entry) in _dev_comp_vect_cpu_mask_clean_up() argument
615 entry->comp_vect_affinity); in _dev_comp_vect_cpu_mask_clean_up()
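Lines 537-615 distribute the node's comp_vect_mask over the device's completion vectors: cpumask_weight() sizes the pool, the division and modulo at lines 559 and 568 give each vector an even share plus a remainder, and CPUs are claimed and released through the per-CPU u16 counters via per_cpu_affinity_get() and per_cpu_affinity_put_max(). The sketch below shows the least-loaded selection those two helpers imply; their real bodies are not in the hits, so this is an assumption about behavior, and it presumes a non-empty mask:

/*
 * Pick the CPU in @mask with the lowest per-CPU use count and charge it.
 * Assumed behavior of per_cpu_affinity_get(); @mask must be non-empty.
 */
static int per_cpu_affinity_get(const struct cpumask *mask,
                                u16 __percpu *counts)
{
        int cpu, ret_cpu = cpumask_first(mask);
        u16 best = *per_cpu_ptr(counts, ret_cpu);

        for_each_cpu(cpu, mask) {
                u16 c = *per_cpu_ptr(counts, cpu);

                if (c < best) {
                        best = c;
                        ret_cpu = cpu;
                }
        }
        *per_cpu_ptr(counts, ret_cpu) += 1;
        return ret_cpu;
}

/* Inverse: find the most charged CPU and release one reference
 * (assumed behavior of per_cpu_affinity_put_max(), line 594). */
static int per_cpu_affinity_put_max(const struct cpumask *mask,
                                    u16 __percpu *counts)
{
        int cpu, max_cpu = cpumask_first(mask);
        u16 max = *per_cpu_ptr(counts, max_cpu);

        for_each_cpu(cpu, mask) {
                u16 c = *per_cpu_ptr(counts, cpu);

                if (c > max) {
                        max = c;
                        max_cpu = cpu;
                }
        }
        *per_cpu_ptr(counts, max_cpu) -= 1;
        return max_cpu;
}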
637 struct hfi1_affinity_node *entry; in hfi1_dev_affinity_init() local
647 entry = node_affinity_lookup(dd->node); in hfi1_dev_affinity_init()
653 if (!entry) { in hfi1_dev_affinity_init()
654 entry = node_affinity_allocate(dd->node); in hfi1_dev_affinity_init()
655 if (!entry) { in hfi1_dev_affinity_init()
663 init_cpu_mask_set(&entry->def_intr); in hfi1_dev_affinity_init()
664 init_cpu_mask_set(&entry->rcv_intr); in hfi1_dev_affinity_init()
665 cpumask_clear(&entry->comp_vect_mask); in hfi1_dev_affinity_init()
666 cpumask_clear(&entry->general_intr_mask); in hfi1_dev_affinity_init()
668 cpumask_and(&entry->def_intr.mask, &node_affinity.real_cpu_mask, in hfi1_dev_affinity_init()
672 possible = cpumask_weight(&entry->def_intr.mask); in hfi1_dev_affinity_init()
673 curr_cpu = cpumask_first(&entry->def_intr.mask); in hfi1_dev_affinity_init()
677 cpumask_set_cpu(curr_cpu, &entry->rcv_intr.mask); in hfi1_dev_affinity_init()
678 cpumask_set_cpu(curr_cpu, &entry->general_intr_mask); in hfi1_dev_affinity_init()
685 cpumask_clear_cpu(curr_cpu, &entry->def_intr.mask); in hfi1_dev_affinity_init()
686 cpumask_set_cpu(curr_cpu, &entry->general_intr_mask); in hfi1_dev_affinity_init()
688 &entry->def_intr.mask); in hfi1_dev_affinity_init()
699 &entry->def_intr.mask); in hfi1_dev_affinity_init()
701 &entry->rcv_intr.mask); in hfi1_dev_affinity_init()
703 &entry->def_intr.mask); in hfi1_dev_affinity_init()
713 if (cpumask_weight(&entry->def_intr.mask) == 0) in hfi1_dev_affinity_init()
714 cpumask_copy(&entry->def_intr.mask, in hfi1_dev_affinity_init()
715 &entry->general_intr_mask); in hfi1_dev_affinity_init()
719 cpumask_and(&entry->comp_vect_mask, in hfi1_dev_affinity_init()
721 cpumask_andnot(&entry->comp_vect_mask, in hfi1_dev_affinity_init()
722 &entry->comp_vect_mask, in hfi1_dev_affinity_init()
723 &entry->rcv_intr.mask); in hfi1_dev_affinity_init()
724 cpumask_andnot(&entry->comp_vect_mask, in hfi1_dev_affinity_init()
725 &entry->comp_vect_mask, in hfi1_dev_affinity_init()
726 &entry->general_intr_mask); in hfi1_dev_affinity_init()
733 if (cpumask_weight(&entry->comp_vect_mask) == 0) in hfi1_dev_affinity_init()
734 cpumask_copy(&entry->comp_vect_mask, in hfi1_dev_affinity_init()
735 &entry->general_intr_mask); in hfi1_dev_affinity_init()
738 ret = _dev_comp_vect_cpu_mask_init(dd, entry, new_entry); in hfi1_dev_affinity_init()
743 node_affinity_add_tail(entry); in hfi1_dev_affinity_init()
745 dd->affinity_entry = entry; in hfi1_dev_affinity_init()
752 node_affinity_destroy(entry); in hfi1_dev_affinity_init()
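Lines 637-752 are the core of hfi1_dev_affinity_init(): look up the node entry and allocate it on first use (lines 647-655), initialize the mask sets (663-666), then carve the node's usable CPUs (real_cpu_mask ANDed in at line 668) into three disjoint pools: def_intr for SDMA, CPUs pulled out for rcv_intr and the general interrupt (677-686), and comp_vect_mask as the remainder (719-726). The fallbacks at 713-715 and 733-735 reuse the general-interrupt CPU whenever a pool would otherwise come out empty, and line 752 destroys a freshly allocated entry on the _dev_comp_vect_cpu_mask_init() error path. A condensed sketch of just the carving; the real function also walks num_sdma/num_rcv_contexts to size the pools, which is elided here:

/* Assumed input: node_cpu_mask = CPUs on dd->node already ANDed with
 * node_affinity.real_cpu_mask (line 668); entry freshly initialized. */
static void carve_masks(struct hfi1_affinity_node *entry,
                        const struct cpumask *node_cpu_mask)
{
        unsigned int curr_cpu;

        cpumask_copy(&entry->def_intr.mask, node_cpu_mask);

        /* Pull one CPU out for the general interrupt and receive side
         * (condensed from lines 677-686, which may pull several). */
        curr_cpu = cpumask_first(&entry->def_intr.mask);
        cpumask_clear_cpu(curr_cpu, &entry->def_intr.mask);
        cpumask_set_cpu(curr_cpu, &entry->rcv_intr.mask);
        cpumask_set_cpu(curr_cpu, &entry->general_intr_mask);

        /* Fallback (lines 713-715): never leave SDMA without a CPU. */
        if (cpumask_empty(&entry->def_intr.mask))
                cpumask_copy(&entry->def_intr.mask,
                             &entry->general_intr_mask);

        /* Completion vectors get the node CPUs minus both interrupt
         * pools (lines 719-726), with the same fallback (733-735). */
        cpumask_andnot(&entry->comp_vect_mask, node_cpu_mask,
                       &entry->rcv_intr.mask);
        cpumask_andnot(&entry->comp_vect_mask, &entry->comp_vect_mask,
                       &entry->general_intr_mask);
        if (cpumask_empty(&entry->comp_vect_mask))
                cpumask_copy(&entry->comp_vect_mask,
                             &entry->general_intr_mask);
}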
759 struct hfi1_affinity_node *entry; in hfi1_dev_affinity_clean_up() local
764 entry = node_affinity_lookup(dd->node); in hfi1_dev_affinity_clean_up()
765 if (!entry) in hfi1_dev_affinity_clean_up()
772 _dev_comp_vect_cpu_mask_clean_up(dd, entry); in hfi1_dev_affinity_clean_up()
787 struct hfi1_affinity_node *entry; in hfi1_update_sdma_affinity() local
795 entry = node_affinity_lookup(dd->node); in hfi1_update_sdma_affinity()
796 if (!entry) in hfi1_update_sdma_affinity()
812 set = &entry->def_intr; in hfi1_update_sdma_affinity()
881 struct hfi1_affinity_node *entry; in get_irq_affinity() local
891 entry = node_affinity_lookup(dd->node); in get_irq_affinity()
897 set = &entry->def_intr; in get_irq_affinity()
900 cpu = cpumask_first(&entry->general_intr_mask); in get_irq_affinity()
905 cpu = cpumask_first(&entry->general_intr_mask); in get_irq_affinity()
907 set = &entry->rcv_intr; in get_irq_affinity()
912 set = &entry->def_intr; in get_irq_affinity()
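Lines 881-912 pick the CPU pool per interrupt type inside get_irq_affinity(): SDMA engines draw from def_intr (897), the general interrupt is pinned to the first CPU of general_intr_mask (900), and receive contexts use rcv_intr (907) except the control context, which shares the general CPU (905); line 912 falls back to def_intr. A sketch of that dispatch; the enum names and the control-context test are assumptions based on the fragments:

/* Assumed interrupt-source tags mirroring the dispatch above. */
enum irq_type_sketch { IRQ_SDMA, IRQ_GENERAL, IRQ_RCVCTXT, IRQ_OTHER };

static int pick_irq_cpu(struct hfi1_affinity_node *entry,
                        enum irq_type_sketch type, bool is_control_ctxt)
{
        struct cpu_mask_set *set = NULL;
        int cpu = -1;

        switch (type) {
        case IRQ_SDMA:                  /* line 897 */
                set = &entry->def_intr;
                break;
        case IRQ_GENERAL:               /* line 900 */
                cpu = cpumask_first(&entry->general_intr_mask);
                break;
        case IRQ_RCVCTXT:
                if (is_control_ctxt)    /* line 905: shares general CPU */
                        cpu = cpumask_first(&entry->general_intr_mask);
                else                    /* line 907 */
                        set = &entry->rcv_intr;
                break;
        default:                        /* line 912 */
                set = &entry->def_intr;
                break;
        }

        /* When a set was chosen, the driver then picks an unused CPU
         * from set->mask using the used/gen bookkeeping (elided). */
        if (cpu < 0 && set)
                cpu = cpumask_first(&set->mask);
        return cpu;
}

hfi1_put_irq_affinity() (lines 968-989) mirrors the same dispatch to return a CPU to its set when an interrupt is torn down.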
968 struct hfi1_affinity_node *entry; in hfi1_put_irq_affinity() local
971 entry = node_affinity_lookup(dd->node); in hfi1_put_irq_affinity()
975 set = &entry->def_intr; in hfi1_put_irq_affinity()
985 set = &entry->rcv_intr; in hfi1_put_irq_affinity()
989 set = &entry->def_intr; in hfi1_put_irq_affinity()
1041 struct hfi1_affinity_node *entry; in hfi1_get_proc_affinity() local
1115 entry = node_affinity_lookup(node); in hfi1_get_proc_affinity()
1116 if (entry) { in hfi1_get_proc_affinity()
1117 cpumask_copy(intrs_mask, (entry->def_intr.gen ? in hfi1_get_proc_affinity()
1118 &entry->def_intr.mask : in hfi1_get_proc_affinity()
1119 &entry->def_intr.used)); in hfi1_get_proc_affinity()
1120 cpumask_or(intrs_mask, intrs_mask, (entry->rcv_intr.gen ? in hfi1_get_proc_affinity()
1121 &entry->rcv_intr.mask : in hfi1_get_proc_affinity()
1122 &entry->rcv_intr.used)); in hfi1_get_proc_affinity()
1123 cpumask_or(intrs_mask, intrs_mask, &entry->general_intr_mask); in hfi1_get_proc_affinity()
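Finally, lines 1041-1123 are the process-placement side: hfi1_get_proc_affinity() builds an interrupt-CPU mask so user contexts can be steered away from CPUs servicing interrupts. The ternaries at 1117-1122 read as a generation trick: once a set's gen counter is non-zero (the set has wrapped, so every CPU in it has been handed out at least once), the whole mask counts as busy; before that, only the CPUs actually in use do. A sketch of that combination step:

/* Collect every CPU claimed by this node's interrupt handlers. */
static void build_intrs_mask(struct cpumask *intrs_mask,
                             struct hfi1_affinity_node *entry)
{
        cpumask_copy(intrs_mask, entry->def_intr.gen ?
                     &entry->def_intr.mask : &entry->def_intr.used);
        cpumask_or(intrs_mask, intrs_mask, entry->rcv_intr.gen ?
                   &entry->rcv_intr.mask : &entry->rcv_intr.used);
        /* The general-interrupt CPU always counts (line 1123). */
        cpumask_or(intrs_mask, intrs_mask, &entry->general_intr_mask);
}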