cpu_to_node 896 arch/x86/kernel/cpu/common.c if (pda->nodenumber == 0 && cpu_to_node(cpu) != NUMA_NO_NODE)
cpu_to_node 897 arch/x86/kernel/cpu/common.c pda->nodenumber = cpu_to_node(cpu);
cpu_to_node 693 arch/x86/kernel/cpu/intel_cacheinfo.c int node = cpu_to_node(first_cpu(this_leaf->shared_cpu_map));
cpu_to_node 727 arch/x86/kernel/cpu/intel_cacheinfo.c int node = cpu_to_node(first_cpu(this_leaf->shared_cpu_map));
cpu_to_node 418 arch/x86/kernel/genx2apic_uv_x.c nid = cpu_to_node(cpu);
cpu_to_node 254 arch/x86/kernel/setup_percpu.c cpu_clear(cpu, node_to_cpumask_map[cpu_to_node(cpu)]);
cpu_to_node 264 arch/x86/kernel/setup_percpu.c int node = cpu_to_node(cpu);
cpu_to_node 776 arch/x86/kernel/smpboot.c int node = cpu_to_node(cpu);
cpu_to_node 256 arch/x86/kernel/vsyscall_64.c node = cpu_to_node(cpu);
cpu_to_node 1207 arch/x86/kvm/vmx.c int node = cpu_to_node(cpu);
cpu_to_node 34 include/asm-generic/topology.h #ifndef cpu_to_node
cpu_to_node 58 include/asm-x86/topology.h #define early_cpu_to_node(cpu) cpu_to_node(cpu)
cpu_to_node 83 include/asm-x86/topology.h extern int cpu_to_node(int cpu);
cpu_to_node 686 include/linux/mmzone.h #define numa_node_id() (cpu_to_node(raw_smp_processor_id()))
cpu_to_node 342 kernel/profile.c node = cpu_to_node(cpu);
cpu_to_node 535 kernel/profile.c int node = cpu_to_node(cpu);
cpu_to_node 6124 kernel/sched.c mask = node_to_cpumask(cpu_to_node(dead_cpu));
cpu_to_node 7134 kernel/sched.c *nodemask = node_to_cpumask(cpu_to_node(cpu));
cpu_to_node 7427 kernel/sched.c *nodemask = node_to_cpumask(cpu_to_node(i));
cpu_to_node 7446 kernel/sched.c sched_domain_node_span(cpu_to_node(i), &sd->span);
cpu_to_node 8490 kernel/sched.c GFP_KERNEL|__GFP_ZERO, cpu_to_node(i));
cpu_to_node 8495 kernel/sched.c GFP_KERNEL|__GFP_ZERO, cpu_to_node(i));
cpu_to_node 8579 kernel/sched.c GFP_KERNEL|__GFP_ZERO, cpu_to_node(i));
cpu_to_node 8584 kernel/sched.c GFP_KERNEL|__GFP_ZERO, cpu_to_node(i));
cpu_to_node 1743 kernel/sys.c err |= put_user(cpu_to_node(cpu), nodep);
cpu_to_node 306 kernel/taskstats.c cpu_to_node(cpu));
cpu_to_node 1375 kernel/timer.c cpu_to_node(cpu));
cpu_to_node 58 mm/allocpercpu.c int node = cpu_to_node(cpu);
cpu_to_node 2724 mm/page_alloc.c int node = cpu_to_node(cpu);
cpu_to_node 904 mm/slab.c node = next_node(cpu_to_node(cpu), node_online_map);
cpu_to_node 1161 mm/slab.c int node = cpu_to_node(cpu);
cpu_to_node 1226 mm/slab.c int node = cpu_to_node(cpu);
cpu_to_node 3922 mm/slab.c new->new[i] = alloc_arraycache(cpu_to_node(i), limit,
cpu_to_node 3944 mm/slab.c spin_lock_irq(&cachep->nodelists[cpu_to_node(i)]->list_lock);
cpu_to_node 3945 mm/slab.c free_block(cachep, ccold->entry, ccold->avail, cpu_to_node(i));
cpu_to_node 3946 mm/slab.c spin_unlock_irq(&cachep->nodelists[cpu_to_node(i)]->list_lock);
cpu_to_node 1976 mm/slub.c flags, cpu_to_node(cpu));
cpu_to_node 565 net/iucv/iucv.c GFP_KERNEL|GFP_DMA, cpu_to_node(cpu));
cpu_to_node 569 net/iucv/iucv.c GFP_KERNEL|GFP_DMA, cpu_to_node(cpu));
cpu_to_node 1621 net/iucv/iucv.c GFP_KERNEL|GFP_DMA, cpu_to_node(cpu));
cpu_to_node 1629 net/iucv/iucv.c GFP_KERNEL|GFP_DMA, cpu_to_node(cpu));
cpu_to_node 591 net/netfilter/x_tables.c cpu_to_node(cpu));
cpu_to_node 594 net/netfilter/x_tables.c cpu_to_node(cpu));
cpu_to_node 349 net/sunrpc/svc.c pidx = m->to_pool[cpu_to_node(cpu)];