Lines matching refs:nodes_allowed
985 static int next_node_allowed(int nid, nodemask_t *nodes_allowed) in next_node_allowed() argument
987 nid = next_node_in(nid, *nodes_allowed); in next_node_allowed()
993 static int get_valid_node_allowed(int nid, nodemask_t *nodes_allowed) in get_valid_node_allowed() argument
995 if (!node_isset(nid, *nodes_allowed)) in get_valid_node_allowed()
996 nid = next_node_allowed(nid, nodes_allowed); in get_valid_node_allowed()
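All of the matches are in the hugetlb pool code (mm/hugetlb.c), where nodes_allowed restricts which NUMA nodes the persistent huge page pool may grow or shrink on. The first two helpers guarantee the caller only ever gets a node that is set in *nodes_allowed: next_node_allowed() advances to the following allowed node, wrapping at the end of the mask, and get_valid_node_allowed() keeps nid if it is already allowed. Filled out from the listed fragments as a sketch; the two statement lines are exactly the ones listed at 987 and 995-996, while the VM_BUG_ON is an assumption about the surrounding code:

/* Advance to the next node set in *nodes_allowed, wrapping at the end. */
static int next_node_allowed(int nid, nodemask_t *nodes_allowed)
{
        nid = next_node_in(nid, *nodes_allowed);
        VM_BUG_ON(nid >= MAX_NUMNODES);

        return nid;
}

/* Return nid if it is allowed, otherwise the next allowed node after it. */
static int get_valid_node_allowed(int nid, nodemask_t *nodes_allowed)
{
        if (!node_isset(nid, *nodes_allowed))
                nid = next_node_allowed(nid, nodes_allowed);
        return nid;
}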
1007 nodemask_t *nodes_allowed) in hstate_next_node_to_alloc() argument
1011 VM_BUG_ON(!nodes_allowed); in hstate_next_node_to_alloc()
1013 nid = get_valid_node_allowed(h->next_nid_to_alloc, nodes_allowed); in hstate_next_node_to_alloc()
1014 h->next_nid_to_alloc = next_node_allowed(nid, nodes_allowed); in hstate_next_node_to_alloc()
1025 static int hstate_next_node_to_free(struct hstate *h, nodemask_t *nodes_allowed) in hstate_next_node_to_free() argument
1029 VM_BUG_ON(!nodes_allowed); in hstate_next_node_to_free()
1031 nid = get_valid_node_allowed(h->next_nid_to_free, nodes_allowed); in hstate_next_node_to_free()
1032 h->next_nid_to_free = next_node_allowed(nid, nodes_allowed); in hstate_next_node_to_free()
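hstate_next_node_to_alloc() and hstate_next_node_to_free() build the round-robin policy on top of those helpers: each call returns the saved per-hstate cursor, re-validated against the current mask (the mask may have changed since the last call), and then advances the cursor to the next allowed node, so repeated allocations or frees interleave across the allowed nodes. A sketch of the alloc side, reconstructed from the listed lines 1011-1014; the free side at 1025-1032 is identical except that it uses h->next_nid_to_free:

/*
 * Return the node to allocate from this time and advance the per-hstate
 * cursor, wrapping at the end of the allowed mask.
 */
static int hstate_next_node_to_alloc(struct hstate *h,
                                        nodemask_t *nodes_allowed)
{
        int nid;

        VM_BUG_ON(!nodes_allowed);

        /* The saved cursor may predate a mask change; re-validate it. */
        nid = get_valid_node_allowed(h->next_nid_to_alloc, nodes_allowed);
        h->next_nid_to_alloc = next_node_allowed(nid, nodes_allowed);

        return nid;
}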
1531 static int alloc_pool_huge_page(struct hstate *h, nodemask_t *nodes_allowed, in alloc_pool_huge_page() argument
1538 for_each_node_mask_to_alloc(h, nr_nodes, node, nodes_allowed) { in alloc_pool_huge_page()
1539 page = alloc_fresh_huge_page(h, gfp_mask, node, nodes_allowed, in alloc_pool_huge_page()
1559 static int free_pool_huge_page(struct hstate *h, nodemask_t *nodes_allowed, in free_pool_huge_page() argument
1565 for_each_node_mask_to_free(h, nr_nodes, node, nodes_allowed) { in free_pool_huge_page()
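alloc_pool_huge_page() and free_pool_huge_page() do not pick a node themselves; they loop with for_each_node_mask_to_alloc()/for_each_node_mask_to_free(), which try each allowed node once, starting at the matching round-robin cursor, until an attempt succeeds or every node has been tried. The macro bodies are not part of this listing; from memory they look roughly like this:

/*
 * Visit each node in *mask once, starting at the round-robin cursor.
 * nr_nodes counts down from the weight of the mask so the loop terminates
 * even when no node can satisfy the request; the "|| 1" keeps node 0 from
 * ending the loop early.
 */
#define for_each_node_mask_to_alloc(hs, nr_nodes, node, mask)          \
        for (nr_nodes = nodes_weight(*mask);                            \
                nr_nodes > 0 &&                                         \
                ((node = hstate_next_node_to_alloc(hs, mask)) || 1);    \
                nr_nodes--)

#define for_each_node_mask_to_free(hs, nr_nodes, node, mask)           \
        for (nr_nodes = nodes_weight(*mask);                            \
                nr_nodes > 0 &&                                         \
                ((node = hstate_next_node_to_free(hs, mask)) || 1);     \
                nr_nodes--)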
2362 nodemask_t *nodes_allowed) in try_to_free_low() argument
2369 for_each_node_mask(i, *nodes_allowed) { in try_to_free_low()
2386 nodemask_t *nodes_allowed) in try_to_free_low() argument
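try_to_free_low() shows up twice (2362 and 2386) because it is only meaningful on CONFIG_HIGHMEM builds: the real version walks each allowed node's free list and releases non-highmem huge pages first when the pool shrinks, while the second match is the empty stub compiled otherwise. A from-memory sketch of the pair; helper names such as update_and_free_page() are what hugetlb.c of this vintage uses, but treat the details as approximate:

#ifdef CONFIG_HIGHMEM
/*
 * When shrinking the pool, prefer to give back lowmem huge pages and keep
 * highmem ones: lowmem is the scarcer resource.  Stop once only 'count'
 * pages remain.
 */
static void try_to_free_low(struct hstate *h, unsigned long count,
                                                nodemask_t *nodes_allowed)
{
        int i;

        for_each_node_mask(i, *nodes_allowed) {
                struct page *page, *next;
                struct list_head *freel = &h->hugepage_freelists[i];

                list_for_each_entry_safe(page, next, freel, lru) {
                        if (count >= h->nr_huge_pages)
                                return;
                        if (PageHighMem(page))
                                continue;
                        list_del(&page->lru);
                        update_and_free_page(h, page);
                        h->free_huge_pages--;
                        h->free_huge_pages_node[page_to_nid(page)]--;
                }
        }
}
#else
/* Without CONFIG_HIGHMEM there is nothing to prefer; this is a no-op. */
static inline void try_to_free_low(struct hstate *h, unsigned long count,
                                                nodemask_t *nodes_allowed)
{
}
#endif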
2396 static int adjust_pool_surplus(struct hstate *h, nodemask_t *nodes_allowed, in adjust_pool_surplus() argument
2404 for_each_node_mask_to_alloc(h, nr_nodes, node, nodes_allowed) { in adjust_pool_surplus()
2409 for_each_node_mask_to_free(h, nr_nodes, node, nodes_allowed) { in adjust_pool_surplus()
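adjust_pool_surplus() converts one page at a time between persistent and surplus accounting, again spread across the allowed nodes: delta == -1 (growing the persistent pool) uses the to_alloc iterator to find a node that still has surplus pages, delta == +1 (shrinking) uses the to_free iterator to find a node whose surplus count can still grow. A sketch built around the listed lines 2404 and 2409; the accounting details are from memory:

/* Move one page's worth of accounting between persistent and surplus. */
static int adjust_pool_surplus(struct hstate *h, nodemask_t *nodes_allowed,
                                int delta)
{
        int nr_nodes, node;

        VM_BUG_ON(delta != -1 && delta != 1);

        if (delta < 0) {
                /* Growing the persistent pool: find a node with surplus pages. */
                for_each_node_mask_to_alloc(h, nr_nodes, node, nodes_allowed) {
                        if (h->surplus_huge_pages_node[node])
                                goto found;
                }
        } else {
                /* Shrinking: find a node that can absorb another surplus page. */
                for_each_node_mask_to_free(h, nr_nodes, node, nodes_allowed) {
                        if (h->surplus_huge_pages_node[node] <
                                        h->nr_huge_pages_node[node])
                                goto found;
                }
        }
        return 0;

found:
        h->surplus_huge_pages += delta;
        h->surplus_huge_pages_node[node] += delta;
        return 1;
}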
2425 nodemask_t *nodes_allowed) in set_max_huge_pages() argument
2490 if (!adjust_pool_surplus(h, nodes_allowed, -1)) in set_max_huge_pages()
2505 ret = alloc_pool_huge_page(h, nodes_allowed, in set_max_huge_pages()
2533 try_to_free_low(h, min_count, nodes_allowed); in set_max_huge_pages()
2535 if (!free_pool_huge_page(h, nodes_allowed, 0)) in set_max_huge_pages()
2540 if (!adjust_pool_surplus(h, nodes_allowed, 1)) in set_max_huge_pages()
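set_max_huge_pages() is the consumer that ties these pieces together. Growing the pool first reabsorbs surplus pages (adjust_pool_surplus(..., -1)) and then allocates fresh ones with alloc_pool_huge_page(); shrinking frees lowmem pages via try_to_free_low(), returns free pages round-robin via free_pool_huge_page(), and converts whatever is still in use into surplus (adjust_pool_surplus(..., +1)). A heavily condensed outline built around the listed call sites 2490-2540; locking, gigantic-page handling and the per-node count adjustment are omitted, and the third alloc_pool_huge_page() argument (the no-retry node mask) is simply passed as NULL here:

static int set_max_huge_pages(struct hstate *h, unsigned long count, int nid,
                              nodemask_t *nodes_allowed)
{
        unsigned long min_count;

        /* Growing: reabsorb surplus pages before allocating fresh ones. */
        while (h->surplus_huge_pages && count > persistent_huge_pages(h)) {
                if (!adjust_pool_surplus(h, nodes_allowed, -1))
                        break;
        }
        while (count > persistent_huge_pages(h)) {
                if (!alloc_pool_huge_page(h, nodes_allowed, NULL))
                        break;
        }

        /*
         * Shrinking: never go below the pages that are reserved or in use,
         * prefer freeing lowmem, then free round-robin, then push the rest
         * into the surplus pool.
         */
        min_count = h->resv_huge_pages + h->nr_huge_pages - h->free_huge_pages;
        min_count = max(count, min_count);
        try_to_free_low(h, min_count, nodes_allowed);
        while (min_count < persistent_huge_pages(h)) {
                if (!free_pool_huge_page(h, nodes_allowed, 0))
                        break;
        }
        while (count < persistent_huge_pages(h)) {
                if (!adjust_pool_surplus(h, nodes_allowed, 1))
                        break;
        }

        h->max_huge_pages = persistent_huge_pages(h);
        return 0;
}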
2599 nodemask_t nodes_allowed, *n_mask; in __nr_hugepages_store_common() local
2609 init_nodemask_of_mempolicy(&nodes_allowed))) in __nr_hugepages_store_common()
2612 n_mask = &nodes_allowed; in __nr_hugepages_store_common()
2618 init_nodemask_of_node(&nodes_allowed, nid); in __nr_hugepages_store_common()
2619 n_mask = &nodes_allowed; in __nr_hugepages_store_common()
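Finally, __nr_hugepages_store_common() is where nodes_allowed gets built before set_max_huge_pages() runs. For the global attribute it either derives the mask from the caller's mempolicy via init_nodemask_of_mempolicy() (the nr_hugepages_mempolicy case) or falls back to all memory nodes; a per-node attribute builds a single-node mask with init_nodemask_of_node(). A sketch around the listed lines 2599-2619, with the gigantic-page check dropped:

static ssize_t __nr_hugepages_store_common(bool obey_mempolicy,
                                           struct hstate *h, int nid,
                                           unsigned long count, size_t len)
{
        int err;
        nodemask_t nodes_allowed, *n_mask;

        if (nid == NUMA_NO_NODE) {
                /* Global attribute: honour the task's mempolicy if asked to. */
                if (!(obey_mempolicy &&
                                init_nodemask_of_mempolicy(&nodes_allowed)))
                        n_mask = &node_states[N_MEMORY];
                else
                        n_mask = &nodes_allowed;
        } else {
                /* Per-node attribute: restrict the mask to that one node. */
                init_nodemask_of_node(&nodes_allowed, nid);
                n_mask = &nodes_allowed;
        }

        err = set_max_huge_pages(h, count, nid, n_mask);

        return err ? err : len;
}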