Lines matching the identifier masks
47 cpumask_var_t *masks; in alloc_node_to_cpumask() local
50 masks = kcalloc(nr_node_ids, sizeof(cpumask_var_t), GFP_KERNEL); in alloc_node_to_cpumask()
51 if (!masks) in alloc_node_to_cpumask()
55 if (!zalloc_cpumask_var(&masks[node], GFP_KERNEL)) in alloc_node_to_cpumask()
59 return masks; in alloc_node_to_cpumask()
63 free_cpumask_var(masks[node]); in alloc_node_to_cpumask()
64 kfree(masks); in alloc_node_to_cpumask()
68 static void free_node_to_cpumask(cpumask_var_t *masks) in free_node_to_cpumask() argument
73 free_cpumask_var(masks[node]); in free_node_to_cpumask()
74 kfree(masks); in free_node_to_cpumask()
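
Read together, the matches at lines 47-74 look like the allocate/free pair for the per-NUMA-node cpumask table: a kcalloc'd array with nr_node_ids entries, one zeroed cpumask per node, and an error path that unwinds whatever was already allocated. A rough reconstruction is sketched below; the loop bounds and the unwind label are assumptions filled in around the matched lines.

static cpumask_var_t *alloc_node_to_cpumask(void)
{
        cpumask_var_t *masks;
        int node;

        masks = kcalloc(nr_node_ids, sizeof(cpumask_var_t), GFP_KERNEL);
        if (!masks)
                return NULL;

        for (node = 0; node < nr_node_ids; node++) {
                if (!zalloc_cpumask_var(&masks[node], GFP_KERNEL))
                        goto out_unwind;
        }

        return masks;

out_unwind:
        /* free only the cpumasks that were successfully allocated */
        while (--node >= 0)
                free_cpumask_var(masks[node]);
        kfree(masks);
        return NULL;
}

static void free_node_to_cpumask(cpumask_var_t *masks)
{
        int node;

        for (node = 0; node < nr_node_ids; node++)
                free_cpumask_var(masks[node]);
        kfree(masks);
}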
77 static void build_node_to_cpumask(cpumask_var_t *masks) in build_node_to_cpumask() argument
82 cpumask_set_cpu(cpu, masks[cpu_to_node(cpu)]); in build_node_to_cpumask()
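
The single match at line 82 is the core of build_node_to_cpumask(): each CPU is recorded in the mask of the node it belongs to. A minimal sketch, assuming the loop iterates over all possible CPUs:

static void build_node_to_cpumask(cpumask_var_t *masks)
{
        int cpu;

        /* record each CPU in the cpumask of its home NUMA node */
        for_each_possible_cpu(cpu)
                cpumask_set_cpu(cpu, masks[cpu_to_node(cpu)]);
}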
252 struct cpumask *nmsk, struct cpumask *masks) in __group_cpus_evenly() argument
271 /* Ensure that only CPUs which are in both masks are set */ in __group_cpus_evenly()
273 cpumask_or(&masks[curgrp], &masks[curgrp], nmsk); in __group_cpus_evenly()
323 grp_spread_init_one(&masks[curgrp], nmsk, in __group_cpus_evenly()
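
The matches inside __group_cpus_evenly() (lines 252-323) show the per-node step of the spreading algorithm: a node's CPUs are first intersected with the set currently being spread, and the result is either OR'ed wholesale into the current output group or handed to grp_spread_init_one() to be split across several groups. The helper below is purely illustrative (its name and the node_to_cpumask/cpu_mask parameters are assumptions); it only restates the intersect-then-accumulate pattern visible in the fragments.

/*
 * Hypothetical helper: fold the CPUs of node @n that are also in @cpu_mask
 * into output group @curgrp, mirroring the matched lines 271-273.
 */
static void merge_node_into_group(cpumask_var_t *node_to_cpumask,
                                  const struct cpumask *cpu_mask,
                                  struct cpumask *nmsk,
                                  struct cpumask *masks,
                                  unsigned int curgrp, int n)
{
        /* Ensure that only CPUs which are in both masks are set */
        cpumask_and(nmsk, cpu_mask, node_to_cpumask[n]);
        cpumask_or(&masks[curgrp], &masks[curgrp], nmsk);
}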
339 * of initialized masks which can be less than numgrps.
355 struct cpumask *masks = NULL; in group_cpus_evenly() local
370 masks = kcalloc(numgrps, sizeof(*masks), GFP_KERNEL); in group_cpus_evenly()
371 if (!masks) in group_cpus_evenly()
392 npresmsk, nmsk, masks); in group_cpus_evenly()
409 npresmsk, nmsk, masks); in group_cpus_evenly()
422 kfree(masks); in group_cpus_evenly()
426 return masks; in group_cpus_evenly()
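
The matches at lines 339-426 appear to come from the SMP implementation of group_cpus_evenly(): it allocates the flat array of numgrps output masks, runs __group_cpus_evenly() twice (presumably once over present CPUs and once over the remaining possible CPUs, given the two calls sharing npresmsk), frees the array on failure, and otherwise returns it to the caller. A hypothetical caller-side sketch follows; the two-argument prototype with a nummasks out-parameter is an assumption based on the matched comment fragment about "initialized masks which can be less than numgrps" at line 339, so check the header in your tree for the exact signature.

#include <linux/group_cpus.h>
#include <linux/slab.h>

/* Hypothetical caller: ask for @numgrps groups and log their contents. */
static int example_group_cpus(unsigned int numgrps)
{
        struct cpumask *masks;
        unsigned int nummasks, i;

        masks = group_cpus_evenly(numgrps, &nummasks);
        if (!masks)
                return -ENOMEM;

        for (i = 0; i < nummasks; i++)
                pr_info("group %u: %*pbl\n", i, cpumask_pr_args(&masks[i]));

        kfree(masks);   /* the caller owns and frees the flat array */
        return 0;
}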
431 struct cpumask *masks; in group_cpus_evenly() local
436 masks = kcalloc(numgrps, sizeof(*masks), GFP_KERNEL); in group_cpus_evenly()
437 if (!masks) in group_cpus_evenly()
441 cpumask_copy(&masks[0], cpu_possible_mask); in group_cpus_evenly()
443 return masks; in group_cpus_evenly()
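
Finally, the matches at lines 431-443 fit a non-SMP fallback: the array is still sized for numgrps, but every possible CPU lands in the first mask. A rough reconstruction under the same prototype assumption as above; the nummasks handling is likewise an assumption.

struct cpumask *group_cpus_evenly(unsigned int numgrps, unsigned int *nummasks)
{
        struct cpumask *masks;

        masks = kcalloc(numgrps, sizeof(*masks), GFP_KERNEL);
        if (!masks)
                return NULL;

        /* assign all (possible) CPUs to the first group */
        cpumask_copy(&masks[0], cpu_possible_mask);
        *nummasks = 1;  /* assumption: only the first mask is initialized here */
        return masks;
}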