/linux/arch/sparc/include/asm/percpu_64.h
    15: #define __per_cpu_offset(__cpu) \  [macro]
    17: #define per_cpu_offset(x) (__per_cpu_offset(x))
/linux/arch/s390/mm/init.c
    176: unsigned long __per_cpu_offset[NR_CPUS] __read_mostly;  [variable]
    177: EXPORT_SYMBOL(__per_cpu_offset);
    208: __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];  [in setup_per_cpu_areas()]
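Several hits in this listing repeat the same boot-time pattern: setup_per_cpu_areas() computes one offset per CPU as delta + pcpu_unit_offsets[cpu]. In the generic code, delta is derived from pcpu_base_addr and __per_cpu_start, which these hits do not show. A minimal userspace sketch of that arithmetic only, with every demo_* name a stand-in rather than a kernel symbol:

/*
 * Hedged sketch: models the per-CPU offset arithmetic visible in the
 * setup_per_cpu_areas() hits.  All demo_* names are stand-ins; the real
 * delta comes from pcpu_base_addr and __per_cpu_start, not shown here.
 */
#include <stdio.h>

#define DEMO_NR_CPUS 4

int main(void)
{
	unsigned long demo_base_addr    = 0xffff888100000000UL; /* stand-in for pcpu_base_addr */
	unsigned long demo_static_start = 0xffffffff82000000UL; /* stand-in for __per_cpu_start */
	unsigned long demo_unit_offsets[DEMO_NR_CPUS] = { 0x0, 0x8000, 0x10000, 0x18000 };
	unsigned long demo_per_cpu_offset[DEMO_NR_CPUS];

	unsigned long delta = demo_base_addr - demo_static_start;

	for (int cpu = 0; cpu < DEMO_NR_CPUS; cpu++) {
		/* same shape as: __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu]; */
		demo_per_cpu_offset[cpu] = delta + demo_unit_offsets[cpu];
		printf("cpu%d: offset = %#lx\n", cpu, demo_per_cpu_offset[cpu]);
	}
	return 0;
}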
/linux/drivers/base/arch_numa.c
    144: unsigned long __per_cpu_offset[NR_CPUS] __read_mostly;  [variable]
    145: EXPORT_SYMBOL(__per_cpu_offset);
    188: __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];  [in setup_per_cpu_areas()]
/linux/arch/loongarch/kernel/numa.c
    44: unsigned long __per_cpu_offset[NR_CPUS] __read_mostly;  [variable]
    45: EXPORT_SYMBOL(__per_cpu_offset);
    97: __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];  [in setup_per_cpu_areas()]
/linux/arch/x86/kernel/setup_percpu.c
    32: unsigned long __per_cpu_offset[NR_CPUS] __ro_after_init;  [variable]
    33: EXPORT_SYMBOL(__per_cpu_offset);
/linux/arch/x86/kernel/hw_breakpoint.c
    274: * GSBASE value via __per_cpu_offset or pcpu_unit_offsets.  [in within_cpu_entry()]
    277: if (within_area(addr, end, (unsigned long)__per_cpu_offset,  [in within_cpu_entry()]
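Context for the hw_breakpoint.c hit: within_cpu_entry() refuses data breakpoints that would cover the __per_cpu_offset array, because the paranoid entry path reads the per-CPU GSBASE through it. A hedged sketch of the range-overlap test implied by that call site; demo_within_area() is written from the hit's usage, not copied from the kernel helper, and the truncated second half of the call at line 277 is deliberately not filled in:

/*
 * Hedged sketch: does a watched range [addr, end] overlap an area at
 * [base, base + size)?  The exact semantics of the kernel's within_area()
 * are assumed from the usage shown above, not quoted.
 */
static inline int demo_within_area(unsigned long addr, unsigned long end,
				   unsigned long base, unsigned long size)
{
	return end >= base && addr < base + size;
}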
/linux/arch/x86/kernel/head_64.S
    314: movq __per_cpu_offset(,%rcx,8), %rdx
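For the head_64.S hit, the scaled-index load is just an array lookup. A C-level paraphrase; that %rcx holds the CPU number at this point is an assumption read off the instruction's shape, not shown in the hit:

/*
 * Hedged paraphrase of: movq __per_cpu_offset(,%rcx,8), %rdx
 * The scale of 8 is sizeof(unsigned long) on x86-64, so this is an
 * indexed load from the array.  %rcx holding the CPU number is assumed.
 */
extern unsigned long __per_cpu_offset[];

static unsigned long demo_head64_load(unsigned long cpu /* %rcx */)
{
	return __per_cpu_offset[cpu];	/* value left in %rdx */
}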
/linux/arch/powerpc/kernel/setup_64.c
    831: unsigned long __per_cpu_offset[NR_CPUS] __read_mostly;  [variable]
    832: EXPORT_SYMBOL(__per_cpu_offset);
    879: __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];  [in setup_per_cpu_areas()]
    880: paca_ptrs[cpu]->data_offset = __per_cpu_offset[cpu];  [in setup_per_cpu_areas()]
/linux/arch/arm/include/asm/percpu.h
    43: "2: " LOAD_SYM_ARMV6(%0, __per_cpu_offset) " \n\t"  [in __my_cpu_offset()]
    49: "3: .long __per_cpu_offset \n\t"  [in __my_cpu_offset()]
/linux/arch/arm/include/asm/assembler.h
    300: .L1_\@: ldr_va \rd, __per_cpu_offset
/linux/arch/mips/mm/init.c
    490: unsigned long __per_cpu_offset[NR_CPUS] __read_mostly;  [variable]
    491: EXPORT_SYMBOL(__per_cpu_offset);
    522: __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];  [in setup_per_cpu_areas()]
/linux/arch/parisc/kernel/toc_asm.S
    23: load32 PA(__per_cpu_offset),%r1
/linux/include/asm-generic/percpu.h
    28: * Most arches use the __per_cpu_offset array for those offsets but
    31: #ifndef __per_cpu_offset
    32: extern unsigned long __per_cpu_offset[NR_CPUS];
    34: #define per_cpu_offset(x) (__per_cpu_offset[x])
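The asm-generic hit is the generic fallback: per_cpu_offset(x) is plain array indexing into __per_cpu_offset, and per-CPU accessors add that offset to a variable's link-time address. A hedged userspace model of the pointer arithmetic only; the demo_* names stand in for the kernel's per_cpu_ptr()/SHIFT_PERCPU_PTR machinery rather than reproducing it:

/*
 * Hedged model of the generic scheme: per-CPU offset = array lookup,
 * per-CPU instance = base address + offset.  demo_* names are stand-ins,
 * not kernel APIs.
 */
#include <stdio.h>

#define DEMO_NR_CPUS 2

static unsigned long demo_per_cpu_offset[DEMO_NR_CPUS];	/* models __per_cpu_offset[] */
static int demo_storage[DEMO_NR_CPUS];			/* one copy of the "variable" per CPU */

#define demo_offset(cpu) (demo_per_cpu_offset[cpu])	/* models per_cpu_offset(x) */

/* models "address of static copy + per-CPU offset" -> that CPU's instance */
static int *demo_per_cpu_ptr(int *base, int cpu)
{
	return (int *)((char *)base + demo_offset(cpu));
}

int main(void)
{
	/* pretend CPU n's copy sits n ints after the static copy */
	for (int cpu = 0; cpu < DEMO_NR_CPUS; cpu++)
		demo_per_cpu_offset[cpu] = cpu * sizeof(int);

	for (int cpu = 0; cpu < DEMO_NR_CPUS; cpu++)
		*demo_per_cpu_ptr(&demo_storage[0], cpu) = 100 + cpu;

	for (int cpu = 0; cpu < DEMO_NR_CPUS; cpu++)
		printf("cpu%d copy = %d\n", cpu, demo_storage[cpu]);
	return 0;
}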
/linux/arch/riscv/include/asm/asm.h
    96: la \dst, __per_cpu_offset
/linux/arch/arm64/include/asm/percpu.h
    255: #define __per_cpu_offset  [macro]
/linux/arch/arm64/kernel/head.S
    209: adr_l \tmp1, __per_cpu_offset
/linux/arch/s390/kernel/smp.c
    255: lc->percpu_offset = __per_cpu_offset[cpu];  [in pcpu_prepare_secondary()]
    964: lc->percpu_offset = __per_cpu_offset[0];  [in smp_prepare_boot_cpu()]
/linux/kernel/debug/kdb/kdb_main.c
    2469: /* Most architectures use __per_cpu_offset[cpu], some use  [in kdb_per_cpu()]
    2470:  * __per_cpu_offset(cpu), smp has no __per_cpu_offset.  [in kdb_per_cpu()]
    2472: #ifdef __per_cpu_offset  [in kdb_per_cpu()]
    2473: #define KDB_PCU(cpu) __per_cpu_offset(cpu)  [in kdb_per_cpu()]
    2476: #define KDB_PCU(cpu) __per_cpu_offset[cpu]  [in kdb_per_cpu()]
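The kdb hit shows only how KDB_PCU() is chosen per configuration. What it is for, as far as these hits imply, is adding the per-CPU offset to a symbol's address to reach each CPU's copy; a hedged sketch of that pattern (demo_percpu_instance() is hypothetical, not kdb code):

/*
 * Hedged sketch: reach CPU 'cpu''s copy of a per-CPU symbol by adding the
 * per-CPU offset to the symbol's address, i.e. sym_addr + KDB_PCU(cpu) on
 * arches with the array form.  The surrounding kdb_per_cpu() code is not
 * quoted in the hits above.
 */
extern unsigned long __per_cpu_offset[];

static unsigned long demo_percpu_instance(unsigned long sym_addr, unsigned int cpu)
{
	return sym_addr + __per_cpu_offset[cpu];
}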
/linux/mm/percpu.c
    3284: unsigned long __per_cpu_offset[NR_CPUS] __read_mostly;  [variable]
    3285: EXPORT_SYMBOL(__per_cpu_offset);
    3304: __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];  [in setup_per_cpu_areas()]
/linux/arch/riscv/net/bpf_jit_comp64.c
    1361: /* Load address of __per_cpu_offset array in T2 */  [in bpf_jit_emit_insn()]
    1362: emit_addr(RV_REG_T2, (u64)&__per_cpu_offset, extra_pass, ctx);  [in bpf_jit_emit_insn()]
    1363: /* Get address of __per_cpu_offset[cpu] in T1 */  [in bpf_jit_emit_insn()]
    1365: /* Load __per_cpu_offset[cpu] in T1 */  [in bpf_jit_emit_insn()]
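The bpf_jit_comp64.c comments spell out a three-step sequence the JIT emits. A C-level paraphrase of those steps; how the JIT obtains the CPU index is not visible in these hits, so the cpu parameter below is an assumption:

/*
 * Hedged paraphrase of the three commented JIT steps:
 *   T2 = &__per_cpu_offset          (address of the array)
 *   T1 = &__per_cpu_offset[cpu]     (address of this CPU's slot)
 *   T1 = __per_cpu_offset[cpu]      (the offset itself)
 */
extern unsigned long __per_cpu_offset[];

static unsigned long demo_jit_percpu_offset(unsigned long cpu)
{
	unsigned long *t2 = __per_cpu_offset;	/* step 1: array address in T2 */
	unsigned long *t1 = &t2[cpu];		/* step 2: slot address in T1 */
	return *t1;				/* step 3: load the offset into T1 */
}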