Lines matching refs: nodes_allowed — a cross-reference listing of every line that uses the nodes_allowed identifier in the Linux kernel's hugetlb pool code (mm/hugetlb.c). Each entry gives the source line number, the matching line, the enclosing function, and notes whether the reference at that line is a function argument or a local variable.
1181 static int next_node_allowed(int nid, nodemask_t *nodes_allowed) in next_node_allowed() argument
1183 nid = next_node_in(nid, *nodes_allowed); in next_node_allowed()
1189 static int get_valid_node_allowed(int nid, nodemask_t *nodes_allowed) in get_valid_node_allowed() argument
1191 if (!node_isset(nid, *nodes_allowed)) in get_valid_node_allowed()
1192 nid = next_node_allowed(nid, nodes_allowed); in get_valid_node_allowed()
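
These two helpers (lines 1181 and 1189 above) implement the wrap-around step of hugetlb's round-robin node selection. A sketch of their likely shape, reconstructed from the snippets above rather than copied verbatim from the kernel source:

static int next_node_allowed(int nid, nodemask_t *nodes_allowed)
{
	/* Advance to the next node set in the mask, wrapping at the end. */
	nid = next_node_in(nid, *nodes_allowed);
	VM_BUG_ON(nid >= MAX_NUMNODES);

	return nid;
}

static int get_valid_node_allowed(int nid, nodemask_t *nodes_allowed)
{
	/* If nid itself is not allowed, fall forward to the next allowed node. */
	if (!node_isset(nid, *nodes_allowed))
		nid = next_node_allowed(nid, nodes_allowed);
	return nid;
}
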
1203 nodemask_t *nodes_allowed) in hstate_next_node_to_alloc() argument
1207 VM_BUG_ON(!nodes_allowed); in hstate_next_node_to_alloc()
1209 nid = get_valid_node_allowed(h->next_nid_to_alloc, nodes_allowed); in hstate_next_node_to_alloc()
1210 h->next_nid_to_alloc = next_node_allowed(nid, nodes_allowed); in hstate_next_node_to_alloc()
1221 static int hstate_next_node_to_free(struct hstate *h, nodemask_t *nodes_allowed) in hstate_next_node_to_free() argument
1225 VM_BUG_ON(!nodes_allowed); in hstate_next_node_to_free()
1227 nid = get_valid_node_allowed(h->next_nid_to_free, nodes_allowed); in hstate_next_node_to_free()
1228 h->next_nid_to_free = next_node_allowed(nid, nodes_allowed); in hstate_next_node_to_free()
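
hstate_next_node_to_alloc() and hstate_next_node_to_free() (lines 1203 and 1221) wrap those helpers around per-hstate cursors so that pool pages are added and removed round-robin across the allowed nodes. Below is a reconstruction of the allocation-side function; the free side is symmetric, using h->next_nid_to_free. The for_each_node_mask_to_alloc() iterator seen in the later references is, as best I can tell, a one-pass loop built on top of it:

/*
 * Return the node to allocate the next persistent pool page from, then
 * advance the saved cursor so the following allocation lands on the
 * next allowed node (wrapping at the end of the mask).
 */
static int hstate_next_node_to_alloc(struct hstate *h,
					nodemask_t *nodes_allowed)
{
	int nid;

	VM_BUG_ON(!nodes_allowed);

	nid = get_valid_node_allowed(h->next_nid_to_alloc, nodes_allowed);
	h->next_nid_to_alloc = next_node_allowed(nid, nodes_allowed);

	return nid;
}

/* One full pass over the allowed nodes, starting at the saved cursor. */
#define for_each_node_mask_to_alloc(hs, nr_nodes, node, mask)		\
	for (nr_nodes = nodes_weight(*mask);				\
		nr_nodes > 0 &&						\
		((node = hstate_next_node_to_alloc(hs, mask)) || 1);	\
		nr_nodes--)
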
1724 static int alloc_pool_huge_page(struct hstate *h, nodemask_t *nodes_allowed, in alloc_pool_huge_page() argument
1731 for_each_node_mask_to_alloc(h, nr_nodes, node, nodes_allowed) { in alloc_pool_huge_page()
1732 page = alloc_fresh_huge_page(h, gfp_mask, node, nodes_allowed, in alloc_pool_huge_page()
1752 static int free_pool_huge_page(struct hstate *h, nodemask_t *nodes_allowed, in free_pool_huge_page() argument
1758 for_each_node_mask_to_free(h, nr_nodes, node, nodes_allowed) { in free_pool_huge_page()
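
alloc_pool_huge_page() and free_pool_huge_page() (lines 1724 and 1752) are the grow/shrink primitives for the persistent pool: each makes one round-robin pass over the allowed nodes and stops at the first node where it succeeds. A sketch of the allocation side follows, assuming the helpers visible or implied in the snippet above (alloc_fresh_huge_page(), htlb_alloc_mask(), the node_alloc_noretry mask) behave as in the kernel tree these line numbers come from; the free side similarly walks for_each_node_mask_to_free() and releases the first free (or surplus, when accounting surplus) page it finds:

static int alloc_pool_huge_page(struct hstate *h, nodemask_t *nodes_allowed,
				nodemask_t *node_alloc_noretry)
{
	struct page *page = NULL;
	int nr_nodes, node;
	gfp_t gfp_mask = htlb_alloc_mask(h) | __GFP_THISNODE;

	/* Try each allowed node in turn, starting at the hstate cursor. */
	for_each_node_mask_to_alloc(h, nr_nodes, node, nodes_allowed) {
		page = alloc_fresh_huge_page(h, gfp_mask, node, nodes_allowed,
					     node_alloc_noretry);
		if (page)
			break;
	}

	if (!page)
		return 0;

	/* Drop the last reference; the page goes onto the hugetlb free lists. */
	put_page(page);
	return 1;
}
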
2588 nodemask_t *nodes_allowed) in try_to_free_low() argument
2595 for_each_node_mask(i, *nodes_allowed) { in try_to_free_low()
2612 nodemask_t *nodes_allowed) in try_to_free_low() argument
2622 static int adjust_pool_surplus(struct hstate *h, nodemask_t *nodes_allowed, in adjust_pool_surplus() argument
2630 for_each_node_mask_to_alloc(h, nr_nodes, node, nodes_allowed) { in adjust_pool_surplus()
2635 for_each_node_mask_to_free(h, nr_nodes, node, nodes_allowed) { in adjust_pool_surplus()
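
try_to_free_low() appears twice (2588 and 2612) because it has a real body only under CONFIG_HIGHMEM, where it frees lowmem pool pages from the allowed nodes, and an empty stub otherwise. adjust_pool_surplus() (2622) converts pages between persistent and surplus accounting one page at a time, and deliberately reuses both round-robin iterators so the adjustment is spread over nodes: the to_alloc order when consuming surplus (delta == -1), the to_free order when creating it (delta == +1). A reconstruction of its likely shape:

/*
 * Move one page between persistent and surplus accounting.
 * Returns 1 if a node was adjusted, 0 if no suitable node was found.
 */
static int adjust_pool_surplus(struct hstate *h, nodemask_t *nodes_allowed,
				int delta)
{
	int nr_nodes, node;

	VM_BUG_ON(delta != -1 && delta != 1);

	if (delta < 0) {
		/* Pool is growing: find a node that still has surplus pages. */
		for_each_node_mask_to_alloc(h, nr_nodes, node, nodes_allowed) {
			if (h->surplus_huge_pages_node[node])
				goto found;
		}
	} else {
		/* Pool is shrinking: find a node with a non-surplus page. */
		for_each_node_mask_to_free(h, nr_nodes, node, nodes_allowed) {
			if (h->surplus_huge_pages_node[node] <
					h->nr_huge_pages_node[node])
				goto found;
		}
	}
	return 0;

found:
	h->surplus_huge_pages += delta;
	h->surplus_huge_pages_node[node] += delta;
	return 1;
}
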
2651 nodemask_t *nodes_allowed) in set_max_huge_pages() argument
2716 if (!adjust_pool_surplus(h, nodes_allowed, -1)) in set_max_huge_pages()
2731 ret = alloc_pool_huge_page(h, nodes_allowed, in set_max_huge_pages()
2759 try_to_free_low(h, min_count, nodes_allowed); in set_max_huge_pages()
2761 if (!free_pool_huge_page(h, nodes_allowed, 0)) in set_max_huge_pages()
2766 if (!adjust_pool_surplus(h, nodes_allowed, 1)) in set_max_huge_pages()
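
set_max_huge_pages() (the references at 2716-2766) is the resize routine that ties the above together: to grow the pool it first absorbs surplus pages and then allocates fresh ones node by node; to shrink it frees lowmem pages via try_to_free_low(), releases free pages with free_pool_huge_page(), and converts whatever cannot be freed into surplus. A heavily condensed sketch of that control flow, with the real function's signal checks, cond_resched(), per-node count adjustment and node_alloc_noretry handling omitted (persistent_huge_pages() is assumed to return nr_huge_pages minus surplus_huge_pages, as in the kernel):

static int set_max_huge_pages(struct hstate *h, unsigned long count, int nid,
			      nodemask_t *nodes_allowed)
{
	unsigned long min_count;

	spin_lock(&hugetlb_lock);

	/* Grow: reuse surplus pages first, then allocate fresh pool pages. */
	while (h->surplus_huge_pages && count > persistent_huge_pages(h)) {
		if (!adjust_pool_surplus(h, nodes_allowed, -1))
			break;
	}
	while (count > persistent_huge_pages(h)) {
		int ret;

		/* Allocation sleeps, so drop the lock around it. */
		spin_unlock(&hugetlb_lock);
		ret = alloc_pool_huge_page(h, nodes_allowed, NULL);
		spin_lock(&hugetlb_lock);
		if (!ret)
			goto out;
	}

	/* Shrink: free what we can, turn the remainder into surplus pages. */
	min_count = max(count, h->resv_huge_pages + h->nr_huge_pages -
			       h->free_huge_pages);
	try_to_free_low(h, min_count, nodes_allowed);
	while (min_count < persistent_huge_pages(h)) {
		if (!free_pool_huge_page(h, nodes_allowed, 0))
			break;
	}
	while (count < persistent_huge_pages(h)) {
		if (!adjust_pool_surplus(h, nodes_allowed, 1))
			break;
	}
out:
	h->max_huge_pages = persistent_huge_pages(h);
	spin_unlock(&hugetlb_lock);
	return 0;
}
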
2825 nodemask_t nodes_allowed, *n_mask; in __nr_hugepages_store_common() local
2835 init_nodemask_of_mempolicy(&nodes_allowed))) in __nr_hugepages_store_common()
2838 n_mask = &nodes_allowed; in __nr_hugepages_store_common()
2844 init_nodemask_of_node(&nodes_allowed, nid); in __nr_hugepages_store_common()
2845 n_mask = &nodes_allowed; in __nr_hugepages_store_common()
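
Finally, __nr_hugepages_store_common() (lines 2825-2845) is where the nodemask handed to set_max_huge_pages() gets built: the global sysfs/procfs attribute either derives it from the writing task's mempolicy or falls back to all memory nodes, while a per-node attribute restricts it to that single node. A fragment-level sketch of that selection logic; obey_mempolicy, nid, count, h and err are the surrounding function's parameters and locals, and NUMA_NO_NODE is assumed to mean "no specific node requested":

	nodemask_t nodes_allowed, *n_mask;

	if (nid == NUMA_NO_NODE) {
		/*
		 * Global attribute: honour the task's mempolicy when asked
		 * to; otherwise allow every node that has memory.
		 */
		if (!(obey_mempolicy &&
		      init_nodemask_of_mempolicy(&nodes_allowed)))
			n_mask = &node_states[N_MEMORY];
		else
			n_mask = &nodes_allowed;
	} else {
		/* Per-node attribute: a mask containing just that node. */
		init_nodemask_of_node(&nodes_allowed, nid);
		n_mask = &nodes_allowed;
	}

	err = set_max_huge_pages(h, count, nid, n_mask);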