Lines Matching refs:new_mask (all hits below are in kernel/sched/core.c)
2192 __do_set_cpus_allowed(struct task_struct *p, const struct cpumask *new_mask, u32 flags);
2195 const struct cpumask *new_mask,
2526 void set_cpus_allowed_common(struct task_struct *p, const struct cpumask *new_mask, u32 flags) in set_cpus_allowed_common() argument
2529 p->cpus_ptr = new_mask; in set_cpus_allowed_common()
2533 cpumask_copy(&p->cpus_mask, new_mask); in set_cpus_allowed_common()
2534 p->nr_cpus_allowed = cpumask_weight(new_mask); in set_cpus_allowed_common()
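
The matches at 2526-2534 are the bookkeeping shared by every scheduling class: copy the new mask into the task's embedded cpus_mask and cache its weight in nr_cpus_allowed (the assignment to p->cpus_ptr at 2529 belongs to the migrate-disable special case). A simplified sketch, with that special case elided:

    /*
     * Simplified sketch of set_cpus_allowed_common(). In the real function,
     * SCA_MIGRATE_ENABLE/DISABLE callers take an early path that only
     * updates p->cpus_ptr (line 2529); that branch is elided here.
     */
    void set_cpus_allowed_common(struct task_struct *p,
                                 const struct cpumask *new_mask, u32 flags)
    {
            cpumask_copy(&p->cpus_mask, new_mask);          /* line 2533 */
            p->nr_cpus_allowed = cpumask_weight(new_mask);  /* line 2534 */
    }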
2538 __do_set_cpus_allowed(struct task_struct *p, const struct cpumask *new_mask, u32 flags) in __do_set_cpus_allowed() argument
2574 p->sched_class->set_cpus_allowed(p, new_mask, flags); in __do_set_cpus_allowed()
2582 void do_set_cpus_allowed(struct task_struct *p, const struct cpumask *new_mask) in do_set_cpus_allowed() argument
2584 __do_set_cpus_allowed(p, new_mask, 0); in do_set_cpus_allowed()
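
__do_set_cpus_allowed() (2538) wraps the class hook in the usual dequeue/requeue dance, so a queued or running task is taken out of the runqueue structures before its affinity changes; the hook call at 2574 lands in set_cpus_allowed_common() for most classes. do_set_cpus_allowed() (2582-2584) is merely the flags-free entry point. A sketch with the lockdep assertions elided:

    /* Sketch of __do_set_cpus_allowed(); the pi_lock/rq-lock assertions of
     * the real function are elided. */
    static void __do_set_cpus_allowed(struct task_struct *p,
                                      const struct cpumask *new_mask, u32 flags)
    {
            struct rq *rq = task_rq(p);
            bool queued = task_on_rq_queued(p);
            bool running = task_current(rq, p);

            if (queued)
                    dequeue_task(rq, p, DEQUEUE_SAVE | DEQUEUE_NOCLOCK);
            if (running)
                    put_prev_task(rq, p);

            p->sched_class->set_cpus_allowed(p, new_mask, flags); /* 2574 */

            if (queued)
                    enqueue_task(rq, p, ENQUEUE_RESTORE | ENQUEUE_NOCLOCK);
            if (running)
                    set_next_task(rq, p);
    }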
2835 const struct cpumask *new_mask, in __set_cpus_allowed_ptr_locked() argument
2865 if (!kthread && !cpumask_subset(new_mask, cpu_allowed_mask)) { in __set_cpus_allowed_ptr_locked()
2880 if (cpumask_equal(&p->cpus_mask, new_mask)) in __set_cpus_allowed_ptr_locked()
2885 !cpumask_test_cpu(task_cpu(p), new_mask))) { in __set_cpus_allowed_ptr_locked()
2896 dest_cpu = cpumask_any_and_distribute(cpu_valid_mask, new_mask); in __set_cpus_allowed_ptr_locked()
2902 __do_set_cpus_allowed(p, new_mask, flags); in __set_cpus_allowed_ptr_locked()
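
__set_cpus_allowed_ptr_locked() (2835) does the validation and migration under the task's pi_lock and rq lock: user tasks whose new_mask leaves the CPUs they can physically run on are rejected at 2865, an unchanged mask is a no-op at 2880, a migration-disabled task must keep its current CPU (2885), and otherwise a destination CPU is picked with cpumask_any_and_distribute() at 2896 before the mask is committed at 2902. A heavily condensed sketch (the kthread/hotplug special cases and the affine_move_task() migration tail are elided):

    /*
     * Condensed control flow of __set_cpus_allowed_ptr_locked().
     * Called with p's pi_lock and rq lock held; unlocks before return.
     */
    static int __set_cpus_allowed_ptr_locked(struct task_struct *p,
                                             const struct cpumask *new_mask,
                                             u32 flags, struct rq *rq,
                                             struct rq_flags *rf)
    {
            unsigned int dest_cpu;
            int ret = 0;

            /* Nothing to do if the affinity is unchanged (line 2880). */
            if (cpumask_equal(&p->cpus_mask, new_mask))
                    goto out;

            /* Spread tasks across the new mask, not always CPU 0 (2896). */
            dest_cpu = cpumask_any_and_distribute(cpu_active_mask, new_mask);
            if (dest_cpu >= nr_cpu_ids) {
                    ret = -EINVAL;  /* no usable CPU left in new_mask */
                    goto out;
            }

            __do_set_cpus_allowed(p, new_mask, flags);      /* line 2902 */
            /* The real function then calls affine_move_task(), which
             * migrates p to dest_cpu if needed and drops the locks. */
    out:
            task_rq_unlock(rq, p, rf);
            return ret;
    }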
2929 const struct cpumask *new_mask, u32 flags) in __set_cpus_allowed_ptr() argument
2935 return __set_cpus_allowed_ptr_locked(p, new_mask, flags, rq, &rf); in __set_cpus_allowed_ptr()
2938 int set_cpus_allowed_ptr(struct task_struct *p, const struct cpumask *new_mask) in set_cpus_allowed_ptr() argument
2940 return __set_cpus_allowed_ptr(p, new_mask, 0); in set_cpus_allowed_ptr()
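
__set_cpus_allowed_ptr() (2929-2935) takes the locks via task_rq_lock() and defers to the locked worker; set_cpus_allowed_ptr() (2938-2940) is the exported entry point with flags == 0. A hypothetical in-kernel caller (my_worker is not a kernel symbol) pinning a kthread to CPU 0:

    #include <linux/cpumask.h>
    #include <linux/kthread.h>
    #include <linux/printk.h>
    #include <linux/sched.h>

    static int my_worker(void *unused)
    {
            /* Pin this kthread to CPU 0; the wrapper passes flags == 0. */
            int ret = set_cpus_allowed_ptr(current, cpumask_of(0));

            if (ret)
                    pr_warn("my_worker: pinning failed: %d\n", ret);

            while (!kthread_should_stop())
                    schedule_timeout_interruptible(HZ);

            return ret;
    }

For a kthread that has not started running yet, kthread_bind()/kthread_bind_mask() are the usual alternatives.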
2952 struct cpumask *new_mask, in restrict_cpus_allowed_ptr() argument
2978 if (!cpumask_and(new_mask, &p->cpus_mask, subset_mask)) { in restrict_cpus_allowed_ptr()
2992 return __set_cpus_allowed_ptr_locked(p, new_mask, 0, rq, &rf); in restrict_cpus_allowed_ptr()
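
restrict_cpus_allowed_ptr() (2952) only ever narrows affinity: cpumask_and() at 2978 intersects the task's current cpus_mask with subset_mask, an empty intersection fails with -EINVAL, and a non-empty result is committed through the locked setter at 2992. A sketch, eliding the real function's deadline-bandwidth check:

    /*
     * Sketch of restrict_cpus_allowed_ptr(). The real function also refuses
     * to restrict deadline tasks when bandwidth control is enabled (-EPERM);
     * that check is elided here.
     */
    static int restrict_cpus_allowed_ptr(struct task_struct *p,
                                         struct cpumask *new_mask,
                                         const struct cpumask *subset_mask)
    {
            struct rq_flags rf;
            struct rq *rq = task_rq_lock(p, &rf);

            if (!cpumask_and(new_mask, &p->cpus_mask, subset_mask)) {
                    /* Empty intersection: no CPU the task may use (2978). */
                    task_rq_unlock(rq, p, &rf);
                    return -EINVAL;
            }

            /* __set_cpus_allowed_ptr_locked() drops the locks (line 2992). */
            return __set_cpus_allowed_ptr_locked(p, new_mask, 0, rq, &rf);
    }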
3008 cpumask_var_t new_mask; in force_compatible_cpus_allowed_ptr() local
3011 alloc_cpumask_var(&new_mask, GFP_KERNEL); in force_compatible_cpus_allowed_ptr()
3019 if (!cpumask_available(new_mask)) in force_compatible_cpus_allowed_ptr()
3022 if (!restrict_cpus_allowed_ptr(p, new_mask, override_mask)) in force_compatible_cpus_allowed_ptr()
3029 cpuset_cpus_allowed(p, new_mask); in force_compatible_cpus_allowed_ptr()
3030 override_mask = new_mask; in force_compatible_cpus_allowed_ptr()
3042 free_cpumask_var(new_mask); in force_compatible_cpus_allowed_ptr()
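
force_compatible_cpus_allowed_ptr() (3008-3042) is the last-resort override used when a task must be confined to the CPUs it can execute on at all (e.g. after exec of a 32-bit binary on an asymmetric arm64 system). Note that the alloc_cpumask_var() return value at 3011 is deliberately ignored; cpumask_available() at 3019 catches the failure instead. A condensed reconstruction, with the cpus_read_lock() and the ratelimited "Overriding affinity" printk elided:

    /* Condensed force_compatible_cpus_allowed_ptr(). */
    void force_compatible_cpus_allowed_ptr(struct task_struct *p)
    {
            const struct cpumask *override_mask = task_cpu_possible_mask(p);
            cpumask_var_t new_mask;

            alloc_cpumask_var(&new_mask, GFP_KERNEL);       /* line 3011 */

            if (!cpumask_available(new_mask))               /* line 3019 */
                    goto out_set_mask;  /* alloc failed: use override_mask */

            if (!restrict_cpus_allowed_ptr(p, new_mask, override_mask))
                    goto out_free_mask; /* restriction succeeded (3022) */

            /* No valid subset: fall back to the cpuset hierarchy. */
            cpuset_cpus_allowed(p, new_mask);               /* line 3029 */
            override_mask = new_mask;                       /* line 3030 */

    out_set_mask:
            WARN_ON(set_cpus_allowed_ptr(p, override_mask));

    out_free_mask:
            free_cpumask_var(new_mask);                     /* line 3042 */
    }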
3551 const struct cpumask *new_mask, in __set_cpus_allowed_ptr() argument
3554 return set_cpus_allowed_ptr(p, new_mask); in __set_cpus_allowed_ptr()
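
At 3551-3554 the listing crosses into the !CONFIG_SMP build, where migration is meaningless and __set_cpus_allowed_ptr() degenerates into a plain forward. Reconstructed (the static inline qualifiers are an assumption):

    /* The UP stub: one CPU, nothing to migrate (lines 3551-3554). */
    static inline int __set_cpus_allowed_ptr(struct task_struct *p,
                                             const struct cpumask *new_mask,
                                             u32 flags)
    {
            return set_cpus_allowed_ptr(p, new_mask);
    }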
8112 cpumask_var_t cpus_allowed, new_mask; in __sched_setaffinity() local
8117 if (!alloc_cpumask_var(&new_mask, GFP_KERNEL)) { in __sched_setaffinity()
8123 cpumask_and(new_mask, mask, cpus_allowed); in __sched_setaffinity()
8125 retval = dl_task_check_affinity(p, new_mask); in __sched_setaffinity()
8129 retval = __set_cpus_allowed_ptr(p, new_mask, SCA_CHECK | SCA_USER); in __sched_setaffinity()
8134 if (!cpumask_subset(new_mask, cpus_allowed)) { in __sched_setaffinity()
8139 cpumask_copy(new_mask, cpus_allowed); in __sched_setaffinity()
8144 free_cpumask_var(new_mask); in __sched_setaffinity()
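
__sched_setaffinity() (8112-8144) is the syscall-side worker: clamp the requested mask to the task's cpuset (8123), let the deadline class veto the change (8125), apply it with SCA_CHECK | SCA_USER (8129), then re-read the cpuset and retry if it changed concurrently so that new_mask is no longer a subset (8134-8139). A reconstruction consistent with the lines above; the exact label names are assumptions:

    static int __sched_setaffinity(struct task_struct *p,
                                   const struct cpumask *mask)
    {
            cpumask_var_t cpus_allowed, new_mask;
            int retval;

            if (!alloc_cpumask_var(&cpus_allowed, GFP_KERNEL))
                    return -ENOMEM;
            if (!alloc_cpumask_var(&new_mask, GFP_KERNEL)) {   /* 8117 */
                    retval = -ENOMEM;
                    goto out_free_cpus_allowed;
            }

            cpuset_cpus_allowed(p, cpus_allowed);
            cpumask_and(new_mask, mask, cpus_allowed);         /* 8123 */

            retval = dl_task_check_affinity(p, new_mask);      /* 8125 */
            if (retval)
                    goto out_free_new_mask;
    again:
            retval = __set_cpus_allowed_ptr(p, new_mask,
                                            SCA_CHECK | SCA_USER); /* 8129 */
            if (retval)
                    goto out_free_new_mask;

            cpuset_cpus_allowed(p, cpus_allowed);
            if (!cpumask_subset(new_mask, cpus_allowed)) {     /* 8134 */
                    /* Raced with a cpuset update; retry with its mask. */
                    cpumask_copy(new_mask, cpus_allowed);      /* 8139 */
                    goto again;
            }

    out_free_new_mask:
            free_cpumask_var(new_mask);                        /* 8144 */
    out_free_cpus_allowed:
            free_cpumask_var(cpus_allowed);
            return retval;
    }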
8193 struct cpumask *new_mask) in get_user_cpu_mask() argument
8196 cpumask_clear(new_mask); in get_user_cpu_mask()
8200 return copy_from_user(new_mask, user_mask_ptr, len) ? -EFAULT : 0; in get_user_cpu_mask()
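
get_user_cpu_mask() (8193-8200) bridges the variable-sized user buffer and the fixed-size kernel cpumask: a short buffer requires pre-clearing the mask (8196) so the uncopied tail bits stay zero, an oversized len is clamped to cpumask_size(), and copy_from_user() does the transfer (8200). The clamping lines do not mention new_mask and therefore do not appear in the listing; reconstructed:

    static int get_user_cpu_mask(unsigned long __user *user_mask_ptr,
                                 unsigned int len, struct cpumask *new_mask)
    {
            if (len < cpumask_size())
                    cpumask_clear(new_mask);    /* keep tail bits zero: 8196 */
            else if (len > cpumask_size())
                    len = cpumask_size();       /* clamp oversized buffers */

            return copy_from_user(new_mask, user_mask_ptr, len) ? -EFAULT : 0;
    }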
8214 cpumask_var_t new_mask; in SYSCALL_DEFINE3() local
8217 if (!alloc_cpumask_var(&new_mask, GFP_KERNEL)) in SYSCALL_DEFINE3()
8220 retval = get_user_cpu_mask(user_mask_ptr, len, new_mask); in SYSCALL_DEFINE3()
8222 retval = sched_setaffinity(pid, new_mask); in SYSCALL_DEFINE3()
8223 free_cpumask_var(new_mask); in SYSCALL_DEFINE3()
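
The sched_setaffinity syscall body (8214-8223) is just allocate, copy in, apply, free. For completeness, a minimal standalone userspace program (hypothetical, not from the listing) that reaches this path through the glibc sched_setaffinity(2) wrapper:

    /* Pins the calling process to CPU 0 via sched_setaffinity(2), which
     * enters the SYSCALL_DEFINE3 body listed above. */
    #define _GNU_SOURCE
    #include <sched.h>
    #include <stdio.h>

    int main(void)
    {
            cpu_set_t set;

            CPU_ZERO(&set);
            CPU_SET(0, &set);

            if (sched_setaffinity(0 /* self */, sizeof(set), &set) != 0) {
                    perror("sched_setaffinity");
                    return 1;
            }
            printf("pinned to CPU 0\n");
            return 0;
    }

The result can be verified with taskset -pc <pid> or by reading the Cpus_allowed_list field in /proc/self/status.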