Lines matching refs: new_mask

The hits below appear to come from kernel/sched/core.c (a v5.15-era tree, judging by SCA_USER and dl_task_check_affinity). The leading number is the line within that file; the trailing "in foo() argument/local" annotation names the enclosing function and tells whether new_mask is a parameter or a local variable there. Hits showing only a bare parameter, such as 2111, are continuation lines of multi-line prototypes.

2108 __do_set_cpus_allowed(struct task_struct *p, const struct cpumask *new_mask, u32 flags);
2111 const struct cpumask *new_mask,
2439 void set_cpus_allowed_common(struct task_struct *p, const struct cpumask *new_mask, u32 flags) in set_cpus_allowed_common() argument
2442 p->cpus_ptr = new_mask; in set_cpus_allowed_common()
2446 cpumask_copy(&p->cpus_mask, new_mask); in set_cpus_allowed_common()
2447 p->nr_cpus_allowed = cpumask_weight(new_mask); in set_cpus_allowed_common()
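
Hits 2439-2447 are the two halves of set_cpus_allowed_common(): a pointer-only update (2442) and a full copy plus cached weight (2446-2447). A minimal reconstruction follows; the SCA_MIGRATE_* test guarding the early return is an inference, not something the hits show:

	void set_cpus_allowed_common(struct task_struct *p,
				     const struct cpumask *new_mask, u32 flags)
	{
		/* Assumed branch: migrate-disable/enable only swaps the pointer. */
		if (flags & (SCA_MIGRATE_ENABLE | SCA_MIGRATE_DISABLE)) {
			p->cpus_ptr = new_mask;				/* 2442 */
			return;
		}

		/* Normal path: copy the mask and cache its weight. */
		cpumask_copy(&p->cpus_mask, new_mask);			/* 2446 */
		p->nr_cpus_allowed = cpumask_weight(new_mask);		/* 2447 */
	}
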
2451 __do_set_cpus_allowed(struct task_struct *p, const struct cpumask *new_mask, u32 flags) in __do_set_cpus_allowed() argument
2487 p->sched_class->set_cpus_allowed(p, new_mask, flags); in __do_set_cpus_allowed()
2495 void do_set_cpus_allowed(struct task_struct *p, const struct cpumask *new_mask) in do_set_cpus_allowed() argument
2497 __do_set_cpus_allowed(p, new_mask, 0); in do_set_cpus_allowed()
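
Hits 2451-2497 tie the generic code to the scheduling classes: __do_set_cpus_allowed() funnels every affinity change through the per-class hook (2487), and do_set_cpus_allowed() (2495-2497) is the flag-less wrapper. A sketch of that relationship, with the dequeue/enqueue bookkeeping around the hook elided:

	static void
	__do_set_cpus_allowed(struct task_struct *p,
			      const struct cpumask *new_mask, u32 flags)
	{
		/* (dequeue/put_prev bookkeeping elided in this sketch) */
		p->sched_class->set_cpus_allowed(p, new_mask, flags);	/* 2487 */
		/* (enqueue/set_next bookkeeping elided) */
	}

	void do_set_cpus_allowed(struct task_struct *p, const struct cpumask *new_mask)
	{
		__do_set_cpus_allowed(p, new_mask, 0);			/* 2497 */
	}
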
2748 const struct cpumask *new_mask, in __set_cpus_allowed_ptr_locked() argument
2778 if (!kthread && !cpumask_subset(new_mask, cpu_allowed_mask)) { in __set_cpus_allowed_ptr_locked()
2793 if (cpumask_equal(&p->cpus_mask, new_mask)) in __set_cpus_allowed_ptr_locked()
2798 !cpumask_test_cpu(task_cpu(p), new_mask))) { in __set_cpus_allowed_ptr_locked()
2809 dest_cpu = cpumask_any_and_distribute(cpu_valid_mask, new_mask); in __set_cpus_allowed_ptr_locked()
2815 __do_set_cpus_allowed(p, new_mask, flags); in __set_cpus_allowed_ptr_locked()
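
Hits 2748-2815 outline the validation pipeline of __set_cpus_allowed_ptr_locked(): user tasks may not escape cpu_allowed_mask (2778; that symbol looks tree-specific, mainline does a similar subset check against task_cpu_possible_mask()), an unchanged mask short-circuits (2793), a migration-disabled task must not be left outside the new mask (2798), a destination CPU is picked with cpumask_any_and_distribute() (2809), and only then is the mask applied (2815). A condensed sketch; the error values, locking, and migration tail are assumptions:

	static int __set_cpus_allowed_ptr_locked(struct task_struct *p,
						 const struct cpumask *new_mask,
						 u32 flags, struct rq *rq,
						 struct rq_flags *rf)
	{
		const struct cpumask *cpu_valid_mask = cpu_active_mask;	/* assumed */
		bool kthread = p->flags & PF_KTHREAD;
		unsigned int dest_cpu;
		int ret = 0;

		/* 2778: userspace tasks must stay within the allowed CPUs. */
		if (!kthread && !cpumask_subset(new_mask, cpu_allowed_mask)) {
			ret = -EINVAL;					/* assumed */
			goto out;
		}

		/* 2793: unchanged affinity is a no-op. */
		if (cpumask_equal(&p->cpus_mask, new_mask))
			goto out;

		/* 2809: spread repeated identical requests across the mask. */
		dest_cpu = cpumask_any_and_distribute(cpu_valid_mask, new_mask);
		if (dest_cpu >= nr_cpu_ids) {
			ret = -EINVAL;					/* assumed */
			goto out;
		}

		__do_set_cpus_allowed(p, new_mask, flags);		/* 2815 */
		/* (migrating the task to dest_cpu, when needed, is elided) */
	out:
		task_rq_unlock(rq, p, rf);
		return ret;
	}
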
2842 const struct cpumask *new_mask, u32 flags) in __set_cpus_allowed_ptr() argument
2848 return __set_cpus_allowed_ptr_locked(p, new_mask, flags, rq, &rf); in __set_cpus_allowed_ptr()
2851 int set_cpus_allowed_ptr(struct task_struct *p, const struct cpumask *new_mask) in set_cpus_allowed_ptr() argument
2853 return __set_cpus_allowed_ptr(p, new_mask, 0); in set_cpus_allowed_ptr()
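
set_cpus_allowed_ptr() (2851-2853) is the exported kernel-internal entry point the rest of the tree uses. A typical caller might look like the hypothetical helper below, which pins a freshly created kthread to one CPU; pin_worker() is illustrative and not part of the listing:

	#include <linux/cpumask.h>
	#include <linux/sched.h>

	/* Hypothetical helper: restrict @worker to @cpu before first wakeup. */
	static int pin_worker(struct task_struct *worker, int cpu)
	{
		int ret = set_cpus_allowed_ptr(worker, cpumask_of(cpu));

		if (ret)
			pr_warn("cannot pin worker to CPU%d: %d\n", cpu, ret);
		return ret;
	}

For kthreads created with kthread_create(), kthread_bind() is the more common single-CPU idiom; set_cpus_allowed_ptr() is the general form that accepts an arbitrary mask.
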
2865 struct cpumask *new_mask, in restrict_cpus_allowed_ptr() argument
2891 if (!cpumask_and(new_mask, &p->cpus_mask, subset_mask)) { in restrict_cpus_allowed_ptr()
2905 return __set_cpus_allowed_ptr_locked(p, new_mask, 0, rq, &rf); in restrict_cpus_allowed_ptr()
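
restrict_cpus_allowed_ptr() (2865-2905) can only narrow an affinity: the candidate mask is the intersection of the task's current mask with @subset_mask (2891), an empty intersection is an error, and the result is applied through the _locked variant with no flags (2905). A reconstruction, with the locking and error value assumed to mirror the _locked callee:

	static int restrict_cpus_allowed_ptr(struct task_struct *p,
					     struct cpumask *new_mask,
					     const struct cpumask *subset_mask)
	{
		struct rq_flags rf;
		struct rq *rq;
		int err;

		rq = task_rq_lock(p, &rf);			/* assumed */

		/* 2891: intersect; the mask may shrink but never grow. */
		if (!cpumask_and(new_mask, &p->cpus_mask, subset_mask)) {
			err = -EINVAL;				/* assumed */
			goto err_unlock;
		}

		return __set_cpus_allowed_ptr_locked(p, new_mask, 0, rq, &rf);

	err_unlock:
		task_rq_unlock(rq, p, &rf);
		return err;
	}
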
2921 cpumask_var_t new_mask; in force_compatible_cpus_allowed_ptr() local
2924 alloc_cpumask_var(&new_mask, GFP_KERNEL); in force_compatible_cpus_allowed_ptr()
2932 if (!cpumask_available(new_mask)) in force_compatible_cpus_allowed_ptr()
2935 if (!restrict_cpus_allowed_ptr(p, new_mask, override_mask)) in force_compatible_cpus_allowed_ptr()
2942 cpuset_cpus_allowed(p, new_mask); in force_compatible_cpus_allowed_ptr()
2943 override_mask = new_mask; in force_compatible_cpus_allowed_ptr()
2955 free_cpumask_var(new_mask); in force_compatible_cpus_allowed_ptr()
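
The cluster at 2921-2955 shows force_compatible_cpus_allowed_ptr() tolerating allocation failure: the alloc_cpumask_var() return value (2924) is deliberately not checked at the call site, cpumask_available() (2932) is the real test, and when the first restriction attempt fails the function falls back to the task's cpuset mask (2942-2943) before freeing the temporary (2955). A control-flow sketch; the function name, labels, and the forced second attempt are assumptions:

	static void force_compatible_sketch(struct task_struct *p,
					    const struct cpumask *override_mask)
	{
		cpumask_var_t new_mask;

		alloc_cpumask_var(&new_mask, GFP_KERNEL);	/* 2924: may fail */

		if (!cpumask_available(new_mask))		/* 2932 */
			goto out;				/* assumed fallback */

		if (!restrict_cpus_allowed_ptr(p, new_mask, override_mask))
			goto out;				/* 2935: success */

		/* Restriction failed: retry against the task's cpuset. */
		cpuset_cpus_allowed(p, new_mask);		/* 2942 */
		override_mask = new_mask;			/* 2943 */
		/* (the forced second attempt is elided in this sketch) */
	out:
		free_cpumask_var(new_mask);			/* 2955 */
	}
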
3464 const struct cpumask *new_mask, in __set_cpus_allowed_ptr() argument
3467 return set_cpus_allowed_ptr(p, new_mask); in __set_cpus_allowed_ptr()
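
The pair at 3464-3467 is almost certainly the !CONFIG_SMP stub: on uniprocessor builds __set_cpus_allowed_ptr() simply forwards to set_cpus_allowed_ptr() and drops the flags, since there is no other CPU to migrate to.
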
7937 cpumask_var_t cpus_allowed, new_mask; in __sched_setaffinity() local
7942 if (!alloc_cpumask_var(&new_mask, GFP_KERNEL)) { in __sched_setaffinity()
7948 cpumask_and(new_mask, mask, cpus_allowed); in __sched_setaffinity()
7950 retval = dl_task_check_affinity(p, new_mask); in __sched_setaffinity()
7954 retval = __set_cpus_allowed_ptr(p, new_mask, SCA_CHECK | SCA_USER); in __sched_setaffinity()
7959 if (!cpumask_subset(new_mask, cpus_allowed)) { in __sched_setaffinity()
7964 cpumask_copy(new_mask, cpus_allowed); in __sched_setaffinity()
7969 free_cpumask_var(new_mask); in __sched_setaffinity()
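
Hits 7937-7969 trace __sched_setaffinity(), the common backend behind the affinity syscalls: allocate two temporary masks (7942), clamp the request to the cpuset-allowed set (7948), run the SCHED_DEADLINE bandwidth check (7950), apply with SCA_CHECK | SCA_USER (7954), then re-read the cpuset and retry if it changed concurrently (7959-7964). Reconstructed around those hits; the unwind labels and the cpuset_cpus_allowed() calls feeding cpus_allowed are inferred:

	static int __sched_setaffinity(struct task_struct *p, const struct cpumask *mask)
	{
		cpumask_var_t cpus_allowed, new_mask;
		int retval;

		if (!alloc_cpumask_var(&cpus_allowed, GFP_KERNEL))
			return -ENOMEM;
		if (!alloc_cpumask_var(&new_mask, GFP_KERNEL)) {	/* 7942 */
			retval = -ENOMEM;
			goto out_free_cpus_allowed;
		}

		cpuset_cpus_allowed(p, cpus_allowed);			/* inferred */
		cpumask_and(new_mask, mask, cpus_allowed);		/* 7948 */

		retval = dl_task_check_affinity(p, new_mask);		/* 7950 */
		if (retval)
			goto out_free_new_mask;
	again:
		retval = __set_cpus_allowed_ptr(p, new_mask, SCA_CHECK | SCA_USER);
		if (retval)
			goto out_free_new_mask;

		cpuset_cpus_allowed(p, cpus_allowed);			/* inferred */
		if (!cpumask_subset(new_mask, cpus_allowed)) {		/* 7959 */
			/* Raced with a cpuset update: clamp and retry. */
			cpumask_copy(new_mask, cpus_allowed);		/* 7964 */
			goto again;
		}

	out_free_new_mask:
		free_cpumask_var(new_mask);				/* 7969 */
	out_free_cpus_allowed:
		free_cpumask_var(cpus_allowed);
		return retval;
	}
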
8018 struct cpumask *new_mask) in get_user_cpu_mask() argument
8021 cpumask_clear(new_mask); in get_user_cpu_mask()
8025 return copy_from_user(new_mask, user_mask_ptr, len) ? -EFAULT : 0; in get_user_cpu_mask()
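
get_user_cpu_mask() (8018-8025) decodes the userspace bitmap: the kernel-side mask is cleared first so a short user buffer leaves the tail zeroed (8021), the copy length is presumably clamped to cpumask_size(), and a faulting copy becomes -EFAULT (8025). A reconstruction under those assumptions:

	static int get_user_cpu_mask(unsigned long __user *user_mask_ptr,
				     unsigned int len, struct cpumask *new_mask)
	{
		if (len < cpumask_size())
			cpumask_clear(new_mask);	/* 8021: zero the tail */
		else if (len > cpumask_size())
			len = cpumask_size();		/* assumed clamp */

		return copy_from_user(new_mask, user_mask_ptr, len) ? -EFAULT : 0;
	}
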
8039 cpumask_var_t new_mask; in SYSCALL_DEFINE3() local
8042 if (!alloc_cpumask_var(&new_mask, GFP_KERNEL)) in SYSCALL_DEFINE3()
8045 retval = get_user_cpu_mask(user_mask_ptr, len, new_mask); in SYSCALL_DEFINE3()
8047 retval = sched_setaffinity(pid, new_mask); in SYSCALL_DEFINE3()
8048 free_cpumask_var(new_mask); in SYSCALL_DEFINE3()
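
Finally, 8039-8048 are the sched_setaffinity(2) syscall body: allocate a temporary mask (8042), decode the user bitmap (8045), hand it to sched_setaffinity() (8047), and free (8048). From userspace the same path is reached through the glibc wrapper; a minimal caller:

	#define _GNU_SOURCE
	#include <sched.h>
	#include <stdio.h>

	int main(void)
	{
		cpu_set_t set;

		CPU_ZERO(&set);
		CPU_SET(0, &set);	/* allow CPU 0 only */

		/* pid 0 = the calling thread; lands in SYSCALL_DEFINE3 above. */
		if (sched_setaffinity(0, sizeof(set), &set)) {
			perror("sched_setaffinity");
			return 1;
		}
		return 0;
	}
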