/linux/arch/powerpc/kernel/

  watchdog.c
      213  if (cpumask_test_cpu(cpu, &wd_smp_cpus_pending))  in watchdog_smp_panic()
      218  if (!cpumask_test_cpu(c, &wd_smp_cpus_pending))  in watchdog_smp_panic()
      265  if (!cpumask_test_cpu(cpu, &wd_smp_cpus_pending)) {  in wd_smp_clear_cpu_pending()
      266  if (unlikely(cpumask_test_cpu(cpu, &wd_smp_cpus_stuck))) {  in wd_smp_clear_cpu_pending()
      378  if (!cpumask_test_cpu(cpu, &wd_cpus_enabled))  in DEFINE_INTERRUPT_HANDLER_NMI()
      392  if (cpumask_test_cpu(cpu, &wd_smp_cpus_stuck)) {  in DEFINE_INTERRUPT_HANDLER_NMI()
      444  if (!cpumask_test_cpu(cpu, &watchdog_cpumask))  in watchdog_timer_fn()
      460  if (!cpumask_test_cpu(cpu, &watchdog_cpumask))  in arch_touch_nmi_watchdog()
      477  if (cpumask_test_cpu(cpu, &wd_cpus_enabled)) {  in start_watchdog()
      485  if (!cpumask_test_cpu(cp...  in start_watchdog()
      [all ...]
/linux/arch/sparc/kernel/

  sun4m_smp.c
      66   while (!cpumask_test_cpu(cpuid, &smp_commenced_mask))  in sun4m_cpu_pre_online()
      197  if (cpumask_test_cpu(i, &mask)) {  in sun4m_cross_call()
      213  if (!cpumask_test_cpu(i, &mask))  in sun4m_cross_call()
      221  if (!cpumask_test_cpu(i, &mask))  in sun4m_cross_call()

  sun4d_smp.c
      103  while (!cpumask_test_cpu(cpuid, &smp_commenced_mask))  in sun4d_cpu_pre_online()
      321  if (cpumask_test_cpu(i, &mask)) {  in sun4d_cross_call()
      334  if (!cpumask_test_cpu(i, &mask))  in sun4d_cross_call()
      342  if (!cpumask_test_cpu(i, &mask))  in sun4d_cross_call()

  leon_smp.c
      98   while (!cpumask_test_cpu(cpuid, &smp_commenced_mask))  in leon_cpu_pre_online()
      409  if (cpumask_test_cpu(i, &mask)) {  in leon_cross_call()
      423  if (!cpumask_test_cpu(i, &mask))  in leon_cross_call()
      432  if (!cpumask_test_cpu(i, &mask))  in leon_cross_call()
/linux/arch/alpha/kernel/

  sys_titan.c
      88   if (!cpumask_test_cpu(0, &cpm)) dim0 = &dummy;  in titan_update_irq_hw()
      89   if (!cpumask_test_cpu(1, &cpm)) dim1 = &dummy;  in titan_update_irq_hw()
      90   if (!cpumask_test_cpu(2, &cpm)) dim2 = &dummy;  in titan_update_irq_hw()
      91   if (!cpumask_test_cpu(3, &cpm)) dim3 = &dummy;  in titan_update_irq_hw()
      141  if (cpumask_test_cpu(cpu, &affinity))  in titan_cpu_set_irq_affinity()
/linux/kernel/irq/

  cpuhotplug.c
      50   return cpumask_test_cpu(cpu, m);  in irq_needs_fixup()
      201  return cpumask_test_cpu(cpu, hk_mask);  in hk_should_isolate()
      210  !irq_data_get_irq_chip(data) || !cpumask_test_cpu(cpu, affinity))  in irq_restore_affinity_of_irq()
/linux/kernel/time/

  tick-broadcast.c
      284  if (!cpumask_test_cpu(cpu, tick_broadcast_on))  in tick_device_uses_broadcast()
      318  ret = cpumask_test_cpu(cpu, tick_broadcast_mask);  in tick_device_uses_broadcast()
      355  if (cpumask_test_cpu(cpu, mask)) {  in tick_do_broadcast()
      576  return cpumask_test_cpu(smp_processor_id(), tick_broadcast_mask);  in tick_resume_check_broadcast()
      631  return cpumask_test_cpu(smp_processor_id(), tick_broadcast_force_mask);  in tick_check_broadcast_expired()
      672  if (cpumask_test_cpu(smp_processor_id(), tick_broadcast_oneshot_mask)) {  in tick_check_oneshot_broadcast_this_cpu()
      831  WARN_ON_ONCE(cpumask_test_cpu(cpu, tick_broadcast_pending_mask));  in ___tick_broadcast_oneshot_control()
      846  if (cpumask_test_cpu(cpu, tick_broadcast_force_mask)) {  in ___tick_broadcast_oneshot_control()
/linux/lib/

  nmi_backtrace.c
      61   if (cpumask_test_cpu(this_cpu, to_cpumask(backtrace_mask)))  in nmi_trigger_cpumask_backtrace()
      99   if (cpumask_test_cpu(cpu, to_cpumask(backtrace_mask))) {  in nmi_cpu_backtrace()
/linux/arch/powerpc/mm/

  mmu_context.c
      50   if (!cpumask_test_cpu(cpu, mm_cpumask(next))) {  in switch_mm_irqs_off()
      106  VM_WARN_ON_ONCE(!cpumask_test_cpu(cpu, mm_cpumask(prev)));  in switch_mm_irqs_off()
/linux/drivers/perf/

  arm_pmu.c
      343  if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))  in armpmu_add()
      520  !cpumask_test_cpu(event->cpu, &armpmu->supported_cpus))  in armpmu_event_init()
      536  if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))  in armpmu_enable()
      548  if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))  in armpmu_disable()
      562  return !cpumask_test_cpu(cpu, &armpmu->supported_cpus);  in armpmu_filter()
      715  if (!cpumask_test_cpu(cpu, &pmu->supported_cpus))  in arm_perf_starting_cpu()
      734  if (!cpumask_test_cpu(cpu, &pmu->supported_cpus))  in arm_perf_teardown_cpu()
      785  if (!cpumask_test_cpu(smp_processor_id(), &armpmu->supported_cpus))  in cpu_pm_pmu_notify()

  arm_dsu_pmu.c
      237  if (WARN_ON(!cpumask_test_cpu(smp_processor_id(),  in dsu_pmu_read_counter()
      263  if (WARN_ON(!cpumask_test_cpu(smp_processor_id(),  in dsu_pmu_write_counter()
      429  if (WARN_ON_ONCE(!cpumask_test_cpu(smp_processor_id(),  in dsu_pmu_add()
      549  if (!cpumask_test_cpu(event->cpu, &dsu_pmu->associated_cpus)) {  in dsu_pmu_event_init()
      798  if (!cpumask_test_cpu(cpu, &dsu_pmu->associated_cpus))  in dsu_pmu_cpu_online()
/linux/arch/arm/mach-tegra/

  platsmp.c
      93   if (cpumask_test_cpu(cpu, &tegra_cpu_init_mask)) {  in tegra30_boot_secondary()
      135  if (cpumask_test_cpu(cpu, &tegra_cpu_init_mask)) {  in tegra114_boot_secondary()
/linux/arch/arm/include/asm/

  cacheflush.h
      220  if (cpumask_test_cpu(smp_processor_id(), mm_cpumask(mm)))  in vivt_flush_cache_mm()
      229  if (!mm || cpumask_test_cpu(smp_processor_id(), mm_cpumask(mm)))  in vivt_flush_cache_range()
      239  if (!mm || cpumask_test_cpu(smp_processor_id(), mm_cpumask(mm))) {  in vivt_flush_cache_pages()
/linux/include/linux/

  cpumask.h
      637   * cpumask_test_cpu - test for a cpu in a cpumask  (kernel-doc)
      644   bool cpumask_test_cpu(int cpu, const struct cpumask *cpumask)  in cpumask_test_cpu() (definition)
      1205  return cpumask_test_cpu(cpu, cpu_online_mask);  in cpu_online()
      1210  return cpumask_test_cpu(cpu, cpu_enabled_mask);  in cpu_enabled()
      1215  return cpumask_test_cpu(cpu, cpu_possible_mask);  in cpu_possible()
      1220  return cpumask_test_cpu(cpu, cpu_present_mask);  in cpu_present()
      1225  return cpumask_test_cpu(cpu, cpu_active_mask);  in cpu_active()
      1230  return cpumask_test_cpu(cpu, cpu_dying_mask);  in cpu_dying()
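  The cpumask.h hits above are the definition itself: the kernel-doc comment, the inline
  bool cpumask_test_cpu(int cpu, const struct cpumask *cpumask) declaration, and the
  cpu_online()/cpu_enabled()/cpu_possible()/cpu_present()/cpu_active()/cpu_dying() wrappers,
  which are simply this test applied to the corresponding global masks. As a quick
  illustration of the pattern most call sites in this listing follow, here is a minimal
  sketch; the helper name and the supported_cpus mask are invented for illustration and do
  not come from any of the files listed here:

      #include <linux/cpumask.h>
      #include <linux/smp.h>

      /*
       * Hypothetical helper (not taken from the listing above): report whether
       * the CPU we are currently running on is part of a driver-private mask,
       * the same check the armpmu_*() and arm_trbe_*() hits perform.
       * The caller is expected to have preemption disabled so that
       * smp_processor_id() stays stable across the test.
       */
      static bool running_on_supported_cpu(const struct cpumask *supported_cpus)
      {
              int cpu = smp_processor_id();

              /* true if bit 'cpu' is set in *supported_cpus */
              return cpumask_test_cpu(cpu, supported_cpus);
      }
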
  mmu_context.h
      29   # define task_cpu_possible(cpu, p) cpumask_test_cpu((cpu), task_cpu_possible_mask(p))
/linux/arch/arm64/kernel/

  topology.c
      168  if ((cpu >= nr_cpu_ids) || !cpumask_test_cpu(cpu, cpu_present_mask))  in freq_counters_valid()
      267  cpumask_test_cpu(cpu, amu_fie_cpus);  in amu_fie_cpu_supported()
      476  if ((cpu >= nr_cpu_ids) || !cpumask_test_cpu(cpu, cpu_present_mask))  in cpc_ffh_supported()
/linux/drivers/sh/intc/

  chip.c
      25   if (!cpumask_test_cpu(cpu, irq_data_get_affinity_mask(data)))  in _intc_enable()
      53   if (!cpumask_test_cpu(cpu, irq_data_get_affinity_mask(data)))  in intc_disable()
/linux/drivers/cpuidle/

  coupled.c
      429  if (!cpumask_test_cpu(cpu, &cpuidle_coupled_poke_pending))  in cpuidle_coupled_clear_pokes()
      433  while (cpumask_test_cpu(cpu, &cpuidle_coupled_poke_pending))  in cpuidle_coupled_clear_pokes()
      515  !cpumask_test_cpu(dev->cpu, &cpuidle_coupled_poked)) {  in cpuidle_enter_state_coupled()
/linux/kernel/sched/

  ext_idle.c
      98   * NOTE: Use cpumask_intersects() and cpumask_test_cpu() to  in scx_idle_test_and_clear_cpu()
      104  else if (cpumask_test_cpu(cpu, idle_smts))  in scx_idle_test_and_clear_cpu()
      466  is_prev_allowed = cpumask_test_cpu(prev_cpu, allowed);  in scx_select_cpu_dfl()
      551  if (cpumask_test_cpu(cpu, allowed))  in scx_select_cpu_dfl()
      565  cpumask_test_cpu(prev_cpu, idle_cpumask(node)->smt) &&  in scx_select_cpu_dfl()
      902  if (cpumask_test_cpu(prev_cpu, allowed ?: p->cpus_ptr) &&  in select_cpu_from_kfunc()

  cpufreq.c
      72   return cpumask_test_cpu(smp_processor_id(), policy->cpus) ||  in cpufreq_this_cpu_can_update()
/linux/arch/arm/mach-omap2/

  cpuidle44xx.c
      133  if (dev->cpu == 0 && cpumask_test_cpu(1, cpu_online_mask)) {  in omap_enter_idle_coupled()
      191  if (dev->cpu == 0 && cpumask_test_cpu(1, cpu_online_mask)) {  in omap_enter_idle_coupled()
/linux/drivers/hwtracing/coresight/

  coresight-trbe.c
      1110  if (!cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in is_perf_trbe()
      1388  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_probe_coresight()
      1390  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_probe_coresight()
      1419  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus)) {  in arm_trbe_cpu_startup()
      1427  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_cpu_startup()
      1429  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_cpu_startup()
      1442  if (cpumask_test_cpu(cpu, &drvdata->supported_cpus))  in arm_trbe_cpu_teardown()
/linux/kernel/trace/

  ring_buffer.c
      968   if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_wait()
      1018  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_poll_wait()
      2937  !cpumask_test_cpu(cpu_id, buffer->cpumask))  in ring_buffer_resize()
      4675  if (unlikely(!cpumask_test_cpu(cpu, buffer->cpumask)))  in ring_buffer_lock_reserve()
      4822  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_write()
      5007  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_record_disable_cpu()
      5027  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_record_enable_cpu()
      5047  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_oldest_event_ts()
      5078  if (!cpumask_test_cpu(cpu, buffer->cpumask))  in ring_buffer_bytes_cpu()
      5097  if (!cpumask_test_cpu(cp...  in ring_buffer_entries_cpu()
      [all ...]
/linux/drivers/virt/nitro_enclaves/

  ne_misc_dev.c
      253   if (cpumask_test_cpu(0, cpu_pool)) {  in ne_setup_cpu_pool()
      262   if (cpumask_test_cpu(cpu_sibling, cpu_pool)) {  in ne_setup_cpu_pool()
      279   if (!cpumask_test_cpu(cpu_sibling, cpu_pool)) {  in ne_setup_cpu_pool()
      482   if (cpumask_test_cpu(cpu, ne_enclave->vcpu_ids))  in ne_donated_cpu()
      632   if (cpumask_test_cpu(vcpu_id, ne_cpu_pool.avail_threads_per_core[i])) {  in ne_get_vcpu_core_from_cpu_pool()
      670   if (cpumask_test_cpu(vcpu_id, ne_enclave->threads_per_core[i]))  in ne_check_cpu_in_cpu_pool()
      1106  if (!cpumask_test_cpu(cpu, ne_enclave->vcpu_ids)) {  in ne_start_enclave_ioctl()
/linux/arch/mips/kernel/

  crash.c
      43   if (!cpumask_test_cpu(cpu, &cpus_in_crash))  in crash_shutdown_secondary()