for_each_cpu_mask_nr  205 arch/x86/kernel/cpu/cpufreq/acpi-cpufreq.c       for_each_cpu_mask_nr(i, cmd->mask) {
for_each_cpu_mask_nr  455 arch/x86/kernel/cpu/cpufreq/acpi-cpufreq.c       for_each_cpu_mask_nr(i, cmd.mask) {
for_each_cpu_mask_nr  470 arch/x86/kernel/cpu/cpufreq/acpi-cpufreq.c       for_each_cpu_mask_nr(i, cmd.mask) {
for_each_cpu_mask_nr  125 arch/x86/kernel/cpu/cpufreq/p4-clockmod.c        for_each_cpu_mask_nr(i, policy->cpus) {
for_each_cpu_mask_nr  133 arch/x86/kernel/cpu/cpufreq/p4-clockmod.c        for_each_cpu_mask_nr(i, policy->cpus)
for_each_cpu_mask_nr  137 arch/x86/kernel/cpu/cpufreq/p4-clockmod.c        for_each_cpu_mask_nr(i, policy->cpus) {
for_each_cpu_mask_nr  966 arch/x86/kernel/cpu/cpufreq/powernow-k8.c        for_each_cpu_mask_nr(i, *(data->available_cores)) {
for_each_cpu_mask_nr  974 arch/x86/kernel/cpu/cpufreq/powernow-k8.c        for_each_cpu_mask_nr(i, *(data->available_cores)) {
for_each_cpu_mask_nr  997 arch/x86/kernel/cpu/cpufreq/powernow-k8.c        for_each_cpu_mask_nr(i, *(data->available_cores)) {
for_each_cpu_mask_nr 1005 arch/x86/kernel/cpu/cpufreq/powernow-k8.c        for_each_cpu_mask_nr(i, *(data->available_cores)) {
for_each_cpu_mask_nr  510 arch/x86/kernel/cpu/cpufreq/speedstep-centrino.c for_each_cpu_mask_nr(j, *online_policy_cpus) {
for_each_cpu_mask_nr  551 arch/x86/kernel/cpu/cpufreq/speedstep-centrino.c for_each_cpu_mask_nr(k, *online_policy_cpus) {
for_each_cpu_mask_nr  574 arch/x86/kernel/cpu/cpufreq/speedstep-centrino.c for_each_cpu_mask_nr(k, *online_policy_cpus) {
for_each_cpu_mask_nr  588 arch/x86/kernel/cpu/cpufreq/speedstep-centrino.c for_each_cpu_mask_nr(j, *covered_cpus) {
for_each_cpu_mask_nr  597 arch/x86/kernel/cpu/cpufreq/speedstep-centrino.c for_each_cpu_mask_nr(j, *online_policy_cpus) {
for_each_cpu_mask_nr  282 arch/x86/kernel/cpu/cpufreq/speedstep-ich.c      for_each_cpu_mask_nr(i, policy->cpus) {
for_each_cpu_mask_nr  295 arch/x86/kernel/cpu/cpufreq/speedstep-ich.c      for_each_cpu_mask_nr(i, policy->cpus) {
for_each_cpu_mask_nr  516 arch/x86/kernel/cpu/intel_cacheinfo.c            for_each_cpu_mask_nr(sibling, this_leaf->shared_cpu_map) {
for_each_cpu_mask_nr  530 arch/x86/kernel/cpu/mcheck/mce_amd_64.c          for_each_cpu_mask_nr(i, b->cpus) {
for_each_cpu_mask_nr  620 arch/x86/kernel/cpu/mcheck/mce_amd_64.c          for_each_cpu_mask_nr(i, b->cpus) {
for_each_cpu_mask_nr  821 arch/x86/kernel/io_apic_64.c                     for_each_cpu_mask_nr(cpu, mask) {
for_each_cpu_mask_nr  842 arch/x86/kernel/io_apic_64.c                     for_each_cpu_mask_nr(new_cpu, new_mask)
for_each_cpu_mask_nr  852 arch/x86/kernel/io_apic_64.c                     for_each_cpu_mask_nr(new_cpu, new_mask)
for_each_cpu_mask_nr  884 arch/x86/kernel/io_apic_64.c                     for_each_cpu_mask_nr(cpu, mask)
for_each_cpu_mask_nr  451 arch/x86/kernel/smpboot.c                        for_each_cpu_mask_nr(i, cpu_sibling_setup_map) {
for_each_cpu_mask_nr  474 arch/x86/kernel/smpboot.c                        for_each_cpu_mask_nr(i, cpu_sibling_setup_map) {
for_each_cpu_mask_nr 1309 arch/x86/kernel/smpboot.c                        for_each_cpu_mask_nr(sibling, per_cpu(cpu_core_map, cpu)) {
for_each_cpu_mask_nr 1318 arch/x86/kernel/smpboot.c                        for_each_cpu_mask_nr(sibling, per_cpu(cpu_sibling_map, cpu))
for_each_cpu_mask_nr  417 arch/x86/xen/smp.c                               for_each_cpu_mask_nr(cpu, mask)
for_each_cpu_mask_nr  428 arch/x86/xen/smp.c                               for_each_cpu_mask_nr(cpu, mask) {
for_each_cpu_mask_nr  131 include/asm-x86/ipi.h                            for_each_cpu_mask_nr(query_cpu, mask) {
for_each_cpu_mask_nr  526 include/linux/cpumask.h                          #define for_each_possible_cpu(cpu) for_each_cpu_mask_nr((cpu), cpu_possible_map)
for_each_cpu_mask_nr  527 include/linux/cpumask.h                          #define for_each_online_cpu(cpu) for_each_cpu_mask_nr((cpu), cpu_online_map)
for_each_cpu_mask_nr  528 include/linux/cpumask.h                          #define for_each_present_cpu(cpu) for_each_cpu_mask_nr((cpu), cpu_present_map)
for_each_cpu_mask_nr  443 kernel/cpu.c                                     for_each_cpu_mask_nr(cpu, frozen_cpus) {
for_each_cpu_mask_nr  115 kernel/rcuclassic.c                              for_each_cpu_mask_nr(cpu, cpumask)
for_each_cpu_mask_nr  751 kernel/rcupreempt.c                              for_each_cpu_mask_nr(cpu, rcu_cpu_online_map) {
for_each_cpu_mask_nr  769 kernel/rcupreempt.c                              for_each_cpu_mask_nr(cpu, rcu_cpu_online_map)
for_each_cpu_mask_nr  801 kernel/rcupreempt.c                              for_each_cpu_mask_nr(cpu, rcu_cpu_online_map)
for_each_cpu_mask_nr  816 kernel/rcupreempt.c                              for_each_cpu_mask_nr(cpu, rcu_cpu_online_map) {
for_each_cpu_mask_nr  836 kernel/rcupreempt.c                              for_each_cpu_mask_nr(cpu, rcu_cpu_online_map)
for_each_cpu_mask_nr 2071 kernel/sched.c                                   for_each_cpu_mask_nr(i, group->cpumask) {
for_each_cpu_mask_nr 2113 kernel/sched.c                                   for_each_cpu_mask_nr(i, *tmp) {
for_each_cpu_mask_nr 3138 kernel/sched.c                                   for_each_cpu_mask_nr(i, group->cpumask) {
for_each_cpu_mask_nr 3417 kernel/sched.c                                   for_each_cpu_mask_nr(i, group->cpumask) {
for_each_cpu_mask_nr 3959 kernel/sched.c                                   for_each_cpu_mask_nr(balance_cpu, cpus) {
for_each_cpu_mask_nr 6940 kernel/sched.c                                   for_each_cpu_mask_nr(i, *span) {
for_each_cpu_mask_nr 6951 kernel/sched.c                                   for_each_cpu_mask_nr(j, *span) {
for_each_cpu_mask_nr 7151 kernel/sched.c                                   for_each_cpu_mask_nr(j, sg->cpumask) {
for_each_cpu_mask_nr 7176 kernel/sched.c                                   for_each_cpu_mask_nr(cpu, *cpu_map) {
for_each_cpu_mask_nr 7423 kernel/sched.c                                   for_each_cpu_mask_nr(i, *cpu_map) {
for_each_cpu_mask_nr 7490 kernel/sched.c                                   for_each_cpu_mask_nr(i, *cpu_map) {
for_each_cpu_mask_nr 7507 kernel/sched.c                                   for_each_cpu_mask_nr(i, *cpu_map) {
for_each_cpu_mask_nr 7574 kernel/sched.c                                   for_each_cpu_mask_nr(j, *nodemask) {
for_each_cpu_mask_nr 7620 kernel/sched.c                                   for_each_cpu_mask_nr(i, *cpu_map) {
for_each_cpu_mask_nr 7627 kernel/sched.c                                   for_each_cpu_mask_nr(i, *cpu_map) {
for_each_cpu_mask_nr 7634 kernel/sched.c                                   for_each_cpu_mask_nr(i, *cpu_map) {
for_each_cpu_mask_nr 7654 kernel/sched.c                                   for_each_cpu_mask_nr(i, *cpu_map) {
for_each_cpu_mask_nr 7737 kernel/sched.c                                   for_each_cpu_mask_nr(i, *cpu_map)
for_each_cpu_mask_nr  980 kernel/sched_fair.c                              for_each_cpu_mask_nr(i, tmp) {
for_each_cpu_mask_nr  248 kernel/sched_rt.c                                for_each_cpu_mask_nr(i, rd->span) {
for_each_cpu_mask_nr 1180 kernel/sched_rt.c                                for_each_cpu_mask_nr(cpu, this_rq->rd->rto_mask) {
for_each_cpu_mask_nr  304 kernel/taskstats.c                               for_each_cpu_mask_nr(cpu, mask) {
for_each_cpu_mask_nr  323 kernel/taskstats.c                               for_each_cpu_mask_nr(cpu, mask) {
for_each_cpu_mask_nr  403 kernel/time/tick-broadcast.c                     for_each_cpu_mask_nr(cpu, tick_broadcast_oneshot_mask) {
for_each_cpu_mask_nr  500 kernel/time/tick-broadcast.c                     for_each_cpu_mask_nr(cpu, *mask) {
for_each_cpu_mask_nr  418 kernel/workqueue.c                               for_each_cpu_mask_nr(cpu, *cpu_map)
for_each_cpu_mask_nr  549 kernel/workqueue.c                               for_each_cpu_mask_nr(cpu, *cpu_map)
for_each_cpu_mask_nr  909 kernel/workqueue.c                               for_each_cpu_mask_nr(cpu, *cpu_map)
for_each_cpu_mask_nr   37 mm/allocpercpu.c                                 for_each_cpu_mask_nr(cpu, *mask)
for_each_cpu_mask_nr   89 mm/allocpercpu.c                                 for_each_cpu_mask_nr(cpu, *mask)
for_each_cpu_mask_nr   30 mm/vmstat.c                                      for_each_cpu_mask_nr(cpu, *cpumask) {
for_each_cpu_mask_nr 2444 net/core/dev.c                                   for_each_cpu_mask_nr(chan_idx, net_dma.channel_mask) {
for_each_cpu_mask_nr 4608 net/core/dev.c                                   for_each_cpu_mask_nr(chan_idx, net_dma->channel_mask) {
for_each_cpu_mask_nr  500 net/iucv/iucv.c                                  for_each_cpu_mask_nr(cpu, cpumask)
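The listing above is only a raw call-site index. For context, here is a minimal sketch of the usage pattern those call sites share, assuming a kernel of the era that provides for_each_cpu_mask_nr() and the global cpu_online_map (as the include/linux/cpumask.h entries above imply); the function name example_walk_online_cpus() is hypothetical and used purely for illustration.

	#include <linux/kernel.h>
	#include <linux/cpumask.h>

	/* Hypothetical helper, not from the listing: walk every online CPU.
	 * for_each_cpu_mask_nr(cpu, mask) visits each set bit in the given
	 * cpumask_t, with 'cpu' bounded by nr_cpu_ids rather than NR_CPUS. */
	static void example_walk_online_cpus(void)
	{
		int cpu;

		for_each_cpu_mask_nr(cpu, cpu_online_map) {
			/* 'cpu' takes each online CPU id in turn. */
			pr_info("CPU %d is online\n", cpu);
		}
	}

The same shape covers all of the callers listed: the first argument is the iteration variable, the second is a cpumask_t value (hence the *mask dereferences where the caller holds a pointer), and the loop body runs once per set CPU bit.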