per_cpu_offset  176  arch/x86/kernel/setup_percpu.c  per_cpu_offset(cpu) = ptr - __per_cpu_start;
per_cpu_offset  233  arch/x86/kernel/setup_percpu.c  else if (per_cpu_offset(cpu))
per_cpu_offset  316  arch/x86/kernel/setup_percpu.c  if (!per_cpu_offset(cpu)) {
per_cpu_offset   34  include/asm-generic/percpu.h    #define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
per_cpu_offset   37  include/asm-generic/percpu.h    #define my_cpu_offset per_cpu_offset(smp_processor_id())
per_cpu_offset   57  include/asm-generic/percpu.h    (*SHIFT_PERCPU_PTR(&per_cpu_var(var), per_cpu_offset(cpu)))
per_cpu_offset  641  kernel/lockdep.c                start = (unsigned long) &__per_cpu_start + per_cpu_offset(i);
per_cpu_offset  643  kernel/lockdep.c                + per_cpu_offset(i);
per_cpu_offset  467  kernel/module.c                 memcpy(pcpudest + per_cpu_offset(cpu), from, size);
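Taken together, these hits trace the life cycle of per_cpu_offset: setup_percpu.c records each CPU's offset at boot (per_cpu_offset(cpu) = ptr - __per_cpu_start), asm-generic/percpu.h adds that offset to a per-CPU variable's link-time address to reach a given CPU's private copy (SHIFT_PERCPU_PTR), lockdep.c range-checks addresses against each CPU's shifted window, and module.c replicates a module's per-CPU data into every CPU's area. The user-space sketch below illustrates only that offset arithmetic; NR_CPUS, AREA_SIZE, my_per_cpu_offset, and the simplified per_cpu_ptr() are illustrative stand-ins, not the kernel's real definitions, and the code assumes GNU C (typeof).

/*
 * A minimal user-space sketch, not kernel code: it mimics the offset
 * arithmetic from the listing above.  Each CPU gets a private copy of
 * the "prototype" per-CPU area, and per_cpu_offset(cpu) is simply the
 * distance from the prototype to that copy.
 */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define NR_CPUS   4    /* hypothetical CPU count */
#define AREA_SIZE 64   /* hypothetical size of the per-CPU section */

static long my_per_cpu_offset[NR_CPUS];     /* stand-in for __per_cpu_offset[] */
static _Alignas(int) char proto[AREA_SIZE]; /* stand-in for __per_cpu_start..__per_cpu_end */

/* a "per-CPU variable": its prototype lives inside proto[] */
static int *counter = (int *)proto;

/* simplified SHIFT_PERCPU_PTR(): shift a prototype address into cpu's copy */
#define per_cpu_ptr(ptr, cpu) \
	((typeof(ptr))((char *)(ptr) + my_per_cpu_offset[cpu]))

int main(void)
{
	/* one backing area per CPU, as setup_per_cpu_areas() would allocate */
	char *areas = malloc((size_t)NR_CPUS * AREA_SIZE);
	if (!areas)
		return 1;

	for (int cpu = 0; cpu < NR_CPUS; cpu++) {
		char *ptr = areas + (size_t)cpu * AREA_SIZE;
		memcpy(ptr, proto, AREA_SIZE);  /* like percpu_modcopy() in module.c */
		/* mirrors setup_percpu.c line 176: per_cpu_offset(cpu) = ptr - __per_cpu_start */
		my_per_cpu_offset[cpu] = (long)((intptr_t)ptr - (intptr_t)proto);
	}

	/* each CPU touches only its own copy of 'counter' */
	for (int cpu = 0; cpu < NR_CPUS; cpu++)
		*per_cpu_ptr(counter, cpu) += cpu;

	for (int cpu = 0; cpu < NR_CPUS; cpu++)
		printf("cpu%d counter = %d\n", cpu, *per_cpu_ptr(counter, cpu));

	free(areas);
	return 0;
}

Storing a per-CPU byte offset, rather than a table of per-variable pointers, means reaching any CPU's copy is a single addition to the variable's address, and the common "current CPU" case can be folded into segment-based addressing on x86. It is also what lets lockdep.c validate a pointer merely by checking whether it falls between __per_cpu_start + per_cpu_offset(i) and the end of that CPU's window.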