
Searched refs: nr_cpus_allowed (Results 1 – 17 of 17) sorted by relevance

/linux/tools/testing/selftests/sched_ext/
dsp_local_on.bpf.c
    46  if (p->nr_cpus_allowed == nr_cpus && !is_migration_disabled(p))  in BPF_STRUCT_OPS()

/linux/kernel/sched/
deadline.c
  2110  if (!task_current(rq, p) && !p->dl.dl_throttled && p->nr_cpus_allowed > 1)  in enqueue_task_dl()
  2194  (curr->nr_cpus_allowed < 2 ||  in select_task_rq_dl()
  2196  p->nr_cpus_allowed > 1;  in select_task_rq_dl()
  2256  if (rq->curr->nr_cpus_allowed == 1 ||  in check_preempt_equal_dl()
  2264  if (p->nr_cpus_allowed != 1 &&  in check_preempt_equal_dl()
  2405  if (on_dl_rq(&p->dl) && p->nr_cpus_allowed > 1)  in put_prev_task_dl()
  2481  if (task->nr_cpus_allowed == 1)  in find_later_rq()
  2568  WARN_ON_ONCE(p->nr_cpus_allowed <= 1);  in pick_next_pushable_dl_task()
  2679  rq->curr->nr_cpus_allowed > 1) {  in push_dl_task()
  2839  p->nr_cpus_allowed > 1 &&  in task_woken_dl()
  [all …]

rt.c
  1446  if (!task_current(rq, p) && p->nr_cpus_allowed > 1)  in enqueue_task_rt()
  1543  (curr->nr_cpus_allowed < 2 || donor->prio <= p->prio);  in select_task_rq_rt()
  1573  if (rq->curr->nr_cpus_allowed == 1 ||  in check_preempt_equal_prio()
  1581  if (p->nr_cpus_allowed != 1 &&  in check_preempt_equal_prio()
  1728  if (on_rt_rq(&p->rt) && p->nr_cpus_allowed > 1)  in put_prev_task_rt()
  1769  if (task->nr_cpus_allowed == 1)  in find_lowest_rq()
  1861  BUG_ON(p->nr_cpus_allowed <= 1);  in pick_next_pushable_task()
  2346  p->nr_cpus_allowed > 1 &&  in task_woken_rt()
  2348  (rq->curr->nr_cpus_allowed < 2 ||  in task_woken_rt()
  2428  if (p->nr_cpus_allowed > 1 && rq->rt.overloaded)  in switched_to_rt()

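The deadline.c and rt.c hits above share one idiom: a task is a candidate for the push/pull balancer only when its affinity mask allows more than one CPU. A minimal sketch of that predicate, using a hypothetical stand-in struct rather than the kernel's real task_struct:

#include <stdbool.h>

/* Hypothetical stand-in for the two task_struct fields these call sites
 * consult; not the kernel's real definition. */
struct task {
	int nr_cpus_allowed;	/* cpumask_weight() of the affinity mask */
	bool migration_disabled;
};

/* A pinned (nr_cpus_allowed == 1) or migration-disabled task can never be
 * pushed to another runqueue; everything else is a push/pull candidate. */
static bool is_push_candidate(const struct task *p)
{
	return p->nr_cpus_allowed > 1 && !p->migration_disabled;
}
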
idle.c
   394  WARN_ON_ONCE(current->nr_cpus_allowed != 1);  in play_idle_precise()

ext_idle.c
   413  return p->nr_cpus_allowed >= num_possible_cpus();  in task_affinity_all()
   928  if (p->nr_cpus_allowed == 1 || is_bpf_migration_disabled(p)) {  in select_cpu_from_kfunc()

sched.h
  2629  if (p->nr_cpus_allowed == 1)  in get_push_task()
  3523  if (p->nr_cpus_allowed != 1)  in is_per_cpu_kthread()
  3628  while ((allowed_max_nr_cid = min_t(int, READ_ONCE(mm->nr_cpus_allowed),  in __mm_cid_try_get()
  3649  while (cid < READ_ONCE(mm->nr_cpus_allowed) && cid < atomic_read(&mm->mm_users)) {  in __mm_cid_try_get()
  3664  if (cid < READ_ONCE(mm->nr_cpus_allowed))  in __mm_cid_try_get()

core.c
  2686  p->nr_cpus_allowed = cpumask_weight(ctx->new_mask);  in set_cpus_allowed_common()
  3571  if (p->nr_cpus_allowed > 1 && !is_migration_disabled(p)) {  in select_task_rq()
 10629  if (dst_cid_is_set && atomic_read(&mm->mm_users) >= READ_ONCE(mm->nr_cpus_allowed))  in sched_mm_cid_migrate_to()

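The core.c hit at 2686 is where the field is maintained: set_cpus_allowed_common() recomputes it as the population count of the new affinity mask. A standalone user-space sketch (an illustration, not kernel code) that derives the same number for the calling thread:

#define _GNU_SOURCE
#include <sched.h>
#include <stdio.h>

int main(void)
{
	cpu_set_t mask;

	if (sched_getaffinity(0, sizeof(mask), &mask))
		return 1;
	/* CPU_COUNT() plays the role the kernel's cpumask_weight() plays
	 * in set_cpus_allowed_common(). */
	printf("nr_cpus_allowed for this thread: %d\n", CPU_COUNT(&mask));
	return 0;
}
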
fair.c
  5093  if (!p || (p->nr_cpus_allowed == 1) ||  in update_misfit_status()
 10918  if (p->nr_cpus_allowed != NR_CPUS) {  in sched_balance_find_dst_group()

/linux/include/linux/
mm_types.h
  1015  unsigned int nr_cpus_allowed;  member
  1438  mm->nr_cpus_allowed = p->nr_cpus_allowed;  in mm_init_cid()
  1475  WRITE_ONCE(mm->nr_cpus_allowed, cpumask_weight(mm_allowed));  in mm_set_cpus_allowed()

sched.h
   921  int nr_cpus_allowed;  member
  1798  (current->nr_cpus_allowed == 1);  in is_percpu_thread()

/linux/tools/sched_ext/
scx_central.bpf.c
   114  if ((p->flags & PF_KTHREAD) && p->nr_cpus_allowed == 1) {  in BPF_STRUCT_OPS()

scx_flatcg.bpf.c
   362  if (p->nr_cpus_allowed != nr_cpus) {  in BPF_STRUCT_OPS()
   373  if (p->nr_cpus_allowed == 1 && (p->flags & PF_KTHREAD)) {  in BPF_STRUCT_OPS()

scx_qmap.bpf.c
   140  if (p->nr_cpus_allowed == 1 ||  in pick_direct_dispatch_cpu()

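The scx_central, scx_flatcg and scx_qmap hits all special-case pinned tasks: with nr_cpus_allowed == 1 there is only one legal CPU, so CPU selection can short-circuit. A toy sched_ext skeleton sketching that shortcut, assuming the helpers from tools/sched_ext's common.bpf.h are available; the "pinned" names are made up for illustration:

#include <scx/common.bpf.h>

char _license[] SEC("license") = "GPL";

s32 BPF_STRUCT_OPS(pinned_select_cpu, struct task_struct *p,
		   s32 prev_cpu, u64 wake_flags)
{
	bool is_idle = false;
	s32 cpu;

	/* A pinned task has exactly one allowed CPU: prev_cpu. */
	if (p->nr_cpus_allowed == 1)
		return prev_cpu;

	cpu = scx_bpf_select_cpu_dfl(p, prev_cpu, wake_flags, &is_idle);
	if (is_idle)
		scx_bpf_dsq_insert(p, SCX_DSQ_LOCAL, SCX_SLICE_DFL, 0);
	return cpu;
}

void BPF_STRUCT_OPS(pinned_enqueue, struct task_struct *p, u64 enq_flags)
{
	scx_bpf_dsq_insert(p, SCX_DSQ_GLOBAL, SCX_SLICE_DFL, enq_flags);
}

SCX_OPS_DEFINE(pinned_ops,
	       .select_cpu	= (void *)pinned_select_cpu,
	       .enqueue		= (void *)pinned_enqueue,
	       .name		= "pinned");
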
/linux/init/
init_task.c
    86  .nr_cpus_allowed= NR_CPUS,

/linux/drivers/infiniband/hw/hfi1/
affinity.c
   991  if (current->nr_cpus_allowed == 1) {  in hfi1_get_proc_affinity()
  1002  } else if (current->nr_cpus_allowed < cpumask_weight(&set->mask)) {  in hfi1_get_proc_affinity()

sdma.c
   839  if (current->nr_cpus_allowed != 1)  in sdma_select_user_engine()

/linux/kernel/trace/
trace_osnoise.c
  2410  if (current->nr_cpus_allowed > 1 || cpu != smp_processor_id()) {  in timerlat_fd_open()

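The trace_osnoise.c check documents a user-space contract: a thread may only open a CPU's timerlat_fd if it is pinned to exactly that CPU, i.e. its nr_cpus_allowed is 1. A minimal sketch of a compliant opener, assuming tracefs is mounted at /sys/kernel/tracing and the timerlat tracer is enabled:

#define _GNU_SOURCE
#include <sched.h>
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>

int main(void)
{
	cpu_set_t set;
	char path[128];
	int cpu = 0, fd;
	char c;

	CPU_ZERO(&set);
	CPU_SET(cpu, &set);
	/* After this call, current->nr_cpus_allowed == 1 in the kernel. */
	if (sched_setaffinity(0, sizeof(set), &set))
		return 1;

	snprintf(path, sizeof(path),
		 "/sys/kernel/tracing/osnoise/per_cpu/cpu%d/timerlat_fd", cpu);
	fd = open(path, O_RDONLY);	/* rejected unless pinned to this CPU */
	if (fd < 0)
		return 1;
	read(fd, &c, 1);		/* blocks until the next timerlat activation */
	close(fd);
	return 0;
}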