/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_PERCPU_H
#define _ASM_X86_PERCPU_H

/*
 * x86 per-CPU variable access: the per-CPU area is reached through a
 * segment register (GS on 64-bit, FS on 32-bit); on 64-bit the offset
 * is additionally %rip-relative.
 */
#ifdef CONFIG_X86_64
# define __percpu_seg		gs
# define __percpu_rel		(%rip)
#else
# define __percpu_seg		fs
# define __percpu_rel
#endif

#ifdef __ASSEMBLER__

#ifdef CONFIG_SMP
# define __percpu		%__percpu_seg:
#else
# define __percpu
#endif

/* Address a per-CPU variable from assembly code. */
#define PER_CPU_VAR(var)	__percpu(var)__percpu_rel

#else /* !__ASSEMBLER__: */

#include <linux/args.h>
#include <linux/build_bug.h>
#include <linux/stringify.h>
#include <asm/asm.h>

#ifdef CONFIG_SMP

/* Segment prefix emitted literally into inline-asm templates. */
#define __force_percpu_prefix	"%%"__stringify(__percpu_seg)":"

#ifdef CONFIG_CC_HAS_NAMED_AS

#ifdef __CHECKER__
# define __seg_gs		__attribute__((address_space(__seg_gs)))
# define __seg_fs		__attribute__((address_space(__seg_fs)))
#endif

/*
 * With compiler-supported named address spaces the segment override is
 * carried by the pointer type, so no asm prefix string is needed.
 */
#define __percpu_prefix
#define __percpu_seg_override	CONCATENATE(__seg_, __percpu_seg)

#else /* !CONFIG_CC_HAS_NAMED_AS: */

#define __percpu_prefix		__force_percpu_prefix
#define __percpu_seg_override

#endif /* CONFIG_CC_HAS_NAMED_AS */

/*
 * Compared to the generic __my_cpu_offset version, the following
 * saves one instruction and avoids clobbering a temp register.
 */
#define __my_cpu_offset		this_cpu_read(this_cpu_off)

/*
 * arch_raw_cpu_ptr should not be used in 32-bit VDSO for a 64-bit
 * kernel, because games are played with CONFIG_X86_64 there and
 * sizeof(this_cpu_off) becomes 4.
 */
#ifndef BUILD_VDSO32_64
#define arch_raw_cpu_ptr(_ptr)						\
({									\
	unsigned long tcp_ptr__ = raw_cpu_read_long(this_cpu_off);	\
									\
	tcp_ptr__ += (__force unsigned long)(_ptr);			\
	(TYPEOF_UNQUAL(*(_ptr)) __force __kernel *)tcp_ptr__;		\
})
#else
#define arch_raw_cpu_ptr(_ptr)						\
({									\
	BUILD_BUG();							\
	(TYPEOF_UNQUAL(*(_ptr)) __force __kernel *)0;			\
})
#endif

#define PER_CPU_VAR(var)	%__percpu_seg:(var)__percpu_rel

#else /* !CONFIG_SMP: */

/* UP: no segment prefix needed, per-CPU data is plain data. */
#define __force_percpu_prefix
#define __percpu_prefix
#define __percpu_seg_override

#define PER_CPU_VAR(var)	(var)__percpu_rel

#endif /* CONFIG_SMP */
#if defined(CONFIG_USE_X86_SEG_SUPPORT) && defined(USE_TYPEOF_UNQUAL)
/*
 * TYPEOF_UNQUAL strips the segment qualifier, so the accessor can use
 * the variable directly.
 */
# define __my_cpu_type(var)	typeof(var)
# define __my_cpu_ptr(ptr)	(ptr)
# define __my_cpu_var(var)	(var)

# define __percpu_qual		__percpu_seg_override
#else
/* Re-qualify the access with the segment override via a cast. */
# define __my_cpu_type(var)	typeof(var) __percpu_seg_override
# define __my_cpu_ptr(ptr)	(__my_cpu_type(*(ptr))*)(__force uintptr_t)(ptr)
# define __my_cpu_var(var)	(*__my_cpu_ptr(&(var)))
#endif

/* Stringified asm operand with (forced) segment prefix prepended. */
#define __force_percpu_arg(x)	__force_percpu_prefix "%" #x
#define __percpu_arg(x)		__percpu_prefix "%" #x

/*
 * For arch-specific code, we can use direct single-insn ops (they
 * don't give an lvalue though).
 */

/* Map operand size in bytes to a fixed-width integer type. */
#define __pcpu_type_1		u8
#define __pcpu_type_2		u16
#define __pcpu_type_4		u32
#define __pcpu_type_8		u64

#define __pcpu_cast_1(val)	((u8)(((unsigned long) val) & 0xff))
#define __pcpu_cast_2(val)	((u16)(((unsigned long) val) & 0xffff))
#define __pcpu_cast_4(val)	((u32)(((unsigned long) val) & 0xffffffff))
#define __pcpu_cast_8(val)	((u64)(val))

/* Append the operand-size suffix (b/w/l/q) to an instruction mnemonic. */
#define __pcpu_op_1(op)		op "b "
#define __pcpu_op_2(op)		op "w "
#define __pcpu_op_4(op)		op "l "
#define __pcpu_op_8(op)		op "q "

/* Register constraints: byte ops need a byte-addressable register ("q"). */
#define __pcpu_reg_1(mod, x)	mod "q" (x)
#define __pcpu_reg_2(mod, x)	mod "r" (x)
#define __pcpu_reg_4(mod, x)	mod "r" (x)
#define __pcpu_reg_8(mod, x)	mod "r" (x)

/* Register-or-immediate; 64-bit uses "e" (sign-extended 32-bit imm). */
#define __pcpu_reg_imm_1(x)	"qi" (x)
#define __pcpu_reg_imm_2(x)	"ri" (x)
#define __pcpu_reg_imm_4(x)	"ri" (x)
#define __pcpu_reg_imm_8(x)	"re" (x)

#ifdef CONFIG_USE_X86_SEG_SUPPORT

/*
 * With named-address-space support a plain C load/store through the
 * segment-qualified pointer is sufficient; the compiler emits the
 * segment-prefixed instruction.
 */
#define __raw_cpu_read(size, qual, pcp)					\
({									\
	*(qual __my_cpu_type(pcp) *)__my_cpu_ptr(&(pcp));		\
})

#define __raw_cpu_write(size, qual, pcp, val)				\
do {									\
	*(qual __my_cpu_type(pcp) *)__my_cpu_ptr(&(pcp)) = (val);	\
} while (0)
#define __raw_cpu_read_const(pcp)	__raw_cpu_read(, , pcp)

#else /* !CONFIG_USE_X86_SEG_SUPPORT: */

/* Segment-prefixed MOV load; result cast back to the variable's type. */
#define __raw_cpu_read(size, qual, _var)				\
({									\
	__pcpu_type_##size pfo_val__;					\
									\
	asm qual (__pcpu_op_##size("mov")				\
		  __percpu_arg([var]) ", %[val]"			\
	    : [val] __pcpu_reg_##size("=", pfo_val__)			\
	    : [var] "m" (__my_cpu_var(_var)));				\
									\
	(typeof(_var))(unsigned long) pfo_val__;			\
})

#define __raw_cpu_write(size, qual, _var, _val)				\
do {									\
	__pcpu_type_##size pto_val__ = __pcpu_cast_##size(_val);	\
									\
	/* Dead code: type-checks _val against _var at compile time. */	\
	if (0) {							\
		TYPEOF_UNQUAL(_var) pto_tmp__;				\
		pto_tmp__ = (_val);					\
		(void)pto_tmp__;					\
	}								\
	asm qual (__pcpu_op_##size("mov") "%[val], "			\
		  __percpu_arg([var])					\
	    : [var] "=m" (__my_cpu_var(_var))				\
	    : [val] __pcpu_reg_imm_##size(pto_val__));			\
} while (0)

/*
 * The generic per-CPU infrastructure is not suitable for
 * reading const-qualified variables.
 */
#define __raw_cpu_read_const(pcp)	({ BUILD_BUG(); (typeof(pcp))0; })

#endif /* CONFIG_USE_X86_SEG_SUPPORT */

/*
 * Read with an "i" (address-constant) operand: lets the compiler treat
 * the value as stable and cache/rematerialize it.
 */
#define __raw_cpu_read_stable(size, _var)				\
({									\
	__pcpu_type_##size pfo_val__;					\
									\
	asm(__pcpu_op_##size("mov")					\
	    __force_percpu_arg(a[var]) ", %[val]"			\
	    : [val] __pcpu_reg_##size("=", pfo_val__)			\
	    : [var] "i" (&(_var)));					\
									\
	(typeof(_var))(unsigned long) pfo_val__;			\
})

/* Single read-modify-write instruction with no source operand (inc/dec). */
#define percpu_unary_op(size, qual, op, _var)				\
({									\
	asm qual (__pcpu_op_##size(op) __percpu_arg([var])		\
	    : [var] "+m" (__my_cpu_var(_var)));				\
})

/* Read-modify-write instruction with a register/immediate source (add/and/or). */
#define percpu_binary_op(size, qual, op, _var, _val)			\
do {									\
	__pcpu_type_##size pto_val__ = __pcpu_cast_##size(_val);	\
									\
	/* Dead code: type-checks _val against _var at compile time. */	\
	if (0) {							\
		TYPEOF_UNQUAL(_var) pto_tmp__;				\
		pto_tmp__ = (_val);					\
		(void)pto_tmp__;					\
	}								\
	asm qual (__pcpu_op_##size(op) "%[val], " __percpu_arg([var])	\
	    : [var] "+m" (__my_cpu_var(_var))				\
	    : [val] __pcpu_reg_imm_##size(pto_val__));			\
} while (0)

/*
 * Generate a per-CPU add to memory instruction and optimize code
 * if one is added or subtracted.
 */
#define percpu_add_op(size, qual, var, val)				\
do {									\
	/* pao_ID__ is 1 / -1 for constant +1 / -1, else 0. */		\
	const int pao_ID__ =						\
		(__builtin_constant_p(val) &&				\
			((val) == 1 ||					\
			 (val) == (typeof(val))-1)) ? (int)(val) : 0;	\
									\
	/* Dead code: type-checks val against var at compile time. */	\
	if (0) {							\
		TYPEOF_UNQUAL(var) pao_tmp__;				\
		pao_tmp__ = (val);					\
		(void)pao_tmp__;					\
	}								\
	if (pao_ID__ == 1)						\
		percpu_unary_op(size, qual, "inc", var);		\
	else if (pao_ID__ == -1)					\
		percpu_unary_op(size, qual, "dec", var);		\
	else								\
		percpu_binary_op(size, qual, "add", var, val);		\
} while (0)

/*
 * Add return operation
 */
#define percpu_add_return_op(size, qual, _var, _val)			\
({									\
	__pcpu_type_##size paro_tmp__ = __pcpu_cast_##size(_val);	\
									\
	asm qual (__pcpu_op_##size("xadd") "%[tmp], "			\
		  __percpu_arg([var])					\
	    : [tmp] __pcpu_reg_##size("+", paro_tmp__),			\
	      [var] "+m" (__my_cpu_var(_var))				\
	    : : "memory");						\
	/* XADD leaves the old value in [tmp]; return old + delta. */	\
	(typeof(_var))(unsigned long) (paro_tmp__ + _val);		\
})

/*
 * raw_cpu_xchg() can use a load-store since
 * it is not required to be IRQ-safe.
 */
#define raw_percpu_xchg_op(_var, _nval)					\
({									\
	TYPEOF_UNQUAL(_var) pxo_old__ = raw_cpu_read(_var);		\
									\
	raw_cpu_write(_var, _nval);					\
									\
	pxo_old__;							\
})

/*
 * this_cpu_xchg() is implemented using CMPXCHG without a LOCK prefix.
 * XCHG is expensive due to the implied LOCK prefix. The processor
 * cannot prefetch cachelines if XCHG is used.
 */
#define this_percpu_xchg_op(_var, _nval)				\
({									\
	TYPEOF_UNQUAL(_var) pxo_old__ = this_cpu_read(_var);		\
									\
	do { } while (!this_cpu_try_cmpxchg(_var, &pxo_old__, _nval));	\
									\
	pxo_old__;							\
})

/*
 * CMPXCHG has no such implied lock semantics as a result it is much
 * more efficient for CPU-local operations.
 */
#define percpu_cmpxchg_op(size, qual, _var, _oval, _nval)		\
({									\
	__pcpu_type_##size pco_old__ = __pcpu_cast_##size(_oval);	\
	__pcpu_type_##size pco_new__ = __pcpu_cast_##size(_nval);	\
									\
	asm qual (__pcpu_op_##size("cmpxchg") "%[nval], "		\
		  __percpu_arg([var])					\
	    : [oval] "+a" (pco_old__),					\
	      [var] "+m" (__my_cpu_var(_var))				\
	    : [nval] __pcpu_reg_##size(, pco_new__)			\
	    : "memory");						\
									\
	(typeof(_var))(unsigned long) pco_old__;			\
})

/*
 * try_cmpxchg flavor: returns success as bool (from ZF) and writes the
 * observed old value back through _ovalp on failure.
 */
#define percpu_try_cmpxchg_op(size, qual, _var, _ovalp, _nval)		\
({									\
	bool success;							\
	__pcpu_type_##size *pco_oval__ = (__pcpu_type_##size *)(_ovalp); \
	__pcpu_type_##size pco_old__ = *pco_oval__;			\
	__pcpu_type_##size pco_new__ = __pcpu_cast_##size(_nval);	\
									\
	asm qual (__pcpu_op_##size("cmpxchg") "%[nval], "		\
		  __percpu_arg([var])					\
		  CC_SET(z)						\
	    : CC_OUT(z) (success),					\
	      [oval] "+a" (pco_old__),					\
	      [var] "+m" (__my_cpu_var(_var))				\
	    : [nval] __pcpu_reg_##size(, pco_new__)			\
	    : "memory");						\
	if (unlikely(!success))						\
		*pco_oval__ = pco_old__;				\
									\
	likely(success);						\
})

#if defined(CONFIG_X86_32) && !defined(CONFIG_UML)
32561d73e4fSIngo Molnar 3266d12c8d3SPeter Zijlstra #define percpu_cmpxchg64_op(size, qual, _var, _oval, _nval) \ 3276d12c8d3SPeter Zijlstra ({ \ 3286d12c8d3SPeter Zijlstra union { \ 3296d12c8d3SPeter Zijlstra u64 var; \ 3306d12c8d3SPeter Zijlstra struct { \ 3316d12c8d3SPeter Zijlstra u32 low, high; \ 3326d12c8d3SPeter Zijlstra }; \ 3336d12c8d3SPeter Zijlstra } old__, new__; \ 3346d12c8d3SPeter Zijlstra \ 3356d12c8d3SPeter Zijlstra old__.var = _oval; \ 3366d12c8d3SPeter Zijlstra new__.var = _nval; \ 3376d12c8d3SPeter Zijlstra \ 3382d352ec9SUros Bizjak asm_inline qual ( \ 3392d352ec9SUros Bizjak ALTERNATIVE("call this_cpu_cmpxchg8b_emu", \ 3406d12c8d3SPeter Zijlstra "cmpxchg8b " __percpu_arg([var]), X86_FEATURE_CX8) \ 3414087e16bSUros Bizjak : ALT_OUTPUT_SP([var] "+m" (__my_cpu_var(_var)), \ 3422d352ec9SUros Bizjak "+a" (old__.low), "+d" (old__.high)) \ 3432d352ec9SUros Bizjak : "b" (new__.low), "c" (new__.high), \ 3447c097ca5SUros Bizjak "S" (&(_var)) \ 3457c097ca5SUros Bizjak : "memory"); \ 3466d12c8d3SPeter Zijlstra \ 3476d12c8d3SPeter Zijlstra old__.var; \ 3486d12c8d3SPeter Zijlstra }) 3496d12c8d3SPeter Zijlstra 3506d12c8d3SPeter Zijlstra #define raw_cpu_cmpxchg64(pcp, oval, nval) percpu_cmpxchg64_op(8, , pcp, oval, nval) 3516d12c8d3SPeter Zijlstra #define this_cpu_cmpxchg64(pcp, oval, nval) percpu_cmpxchg64_op(8, volatile, pcp, oval, nval) 35254cd971cSUros Bizjak 35354cd971cSUros Bizjak #define percpu_try_cmpxchg64_op(size, qual, _var, _ovalp, _nval) \ 35454cd971cSUros Bizjak ({ \ 35554cd971cSUros Bizjak bool success; \ 35654cd971cSUros Bizjak u64 *_oval = (u64 *)(_ovalp); \ 35754cd971cSUros Bizjak union { \ 35854cd971cSUros Bizjak u64 var; \ 35954cd971cSUros Bizjak struct { \ 36054cd971cSUros Bizjak u32 low, high; \ 36154cd971cSUros Bizjak }; \ 36254cd971cSUros Bizjak } old__, new__; \ 36354cd971cSUros Bizjak \ 36454cd971cSUros Bizjak old__.var = *_oval; \ 36554cd971cSUros Bizjak new__.var = _nval; \ 36654cd971cSUros Bizjak \ 3672d352ec9SUros Bizjak asm_inline 
qual ( \ 3682d352ec9SUros Bizjak ALTERNATIVE("call this_cpu_cmpxchg8b_emu", \ 36954cd971cSUros Bizjak "cmpxchg8b " __percpu_arg([var]), X86_FEATURE_CX8) \ 37054cd971cSUros Bizjak CC_SET(z) \ 3714087e16bSUros Bizjak : ALT_OUTPUT_SP(CC_OUT(z) (success), \ 3729a462b9eSNadav Amit [var] "+m" (__my_cpu_var(_var)), \ 3732d352ec9SUros Bizjak "+a" (old__.low), "+d" (old__.high)) \ 3742d352ec9SUros Bizjak : "b" (new__.low), "c" (new__.high), \ 3757c097ca5SUros Bizjak "S" (&(_var)) \ 3767c097ca5SUros Bizjak : "memory"); \ 37754cd971cSUros Bizjak if (unlikely(!success)) \ 37854cd971cSUros Bizjak *_oval = old__.var; \ 3799130ea06SIngo Molnar \ 38054cd971cSUros Bizjak likely(success); \ 38154cd971cSUros Bizjak }) 38254cd971cSUros Bizjak 38354cd971cSUros Bizjak #define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) percpu_try_cmpxchg64_op(8, , pcp, ovalp, nval) 38454cd971cSUros Bizjak #define this_cpu_try_cmpxchg64(pcp, ovalp, nval) percpu_try_cmpxchg64_op(8, volatile, pcp, ovalp, nval) 38561d73e4fSIngo Molnar 38661d73e4fSIngo Molnar #endif /* defined(CONFIG_X86_32) && !defined(CONFIG_UML) */ 3876d12c8d3SPeter Zijlstra 3886d12c8d3SPeter Zijlstra #ifdef CONFIG_X86_64 3896d12c8d3SPeter Zijlstra #define raw_cpu_cmpxchg64(pcp, oval, nval) percpu_cmpxchg_op(8, , pcp, oval, nval); 3906d12c8d3SPeter Zijlstra #define this_cpu_cmpxchg64(pcp, oval, nval) percpu_cmpxchg_op(8, volatile, pcp, oval, nval); 3916d12c8d3SPeter Zijlstra 39254cd971cSUros Bizjak #define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) percpu_try_cmpxchg_op(8, , pcp, ovalp, nval); 39354cd971cSUros Bizjak #define this_cpu_try_cmpxchg64(pcp, ovalp, nval) percpu_try_cmpxchg_op(8, volatile, pcp, ovalp, nval); 39454cd971cSUros Bizjak 3956d12c8d3SPeter Zijlstra #define percpu_cmpxchg128_op(size, qual, _var, _oval, _nval) \ 3966d12c8d3SPeter Zijlstra ({ \ 3976d12c8d3SPeter Zijlstra union { \ 3986d12c8d3SPeter Zijlstra u128 var; \ 3996d12c8d3SPeter Zijlstra struct { \ 4006d12c8d3SPeter Zijlstra u64 low, high; \ 4016d12c8d3SPeter Zijlstra }; 
\ 4026d12c8d3SPeter Zijlstra } old__, new__; \ 4036d12c8d3SPeter Zijlstra \ 4046d12c8d3SPeter Zijlstra old__.var = _oval; \ 4056d12c8d3SPeter Zijlstra new__.var = _nval; \ 4066d12c8d3SPeter Zijlstra \ 4072d352ec9SUros Bizjak asm_inline qual ( \ 4082d352ec9SUros Bizjak ALTERNATIVE("call this_cpu_cmpxchg16b_emu", \ 4096d12c8d3SPeter Zijlstra "cmpxchg16b " __percpu_arg([var]), X86_FEATURE_CX16) \ 4104087e16bSUros Bizjak : ALT_OUTPUT_SP([var] "+m" (__my_cpu_var(_var)), \ 4112d352ec9SUros Bizjak "+a" (old__.low), "+d" (old__.high)) \ 4122d352ec9SUros Bizjak : "b" (new__.low), "c" (new__.high), \ 4137c097ca5SUros Bizjak "S" (&(_var)) \ 4147c097ca5SUros Bizjak : "memory"); \ 4156d12c8d3SPeter Zijlstra \ 4166d12c8d3SPeter Zijlstra old__.var; \ 4176d12c8d3SPeter Zijlstra }) 4186d12c8d3SPeter Zijlstra 4196d12c8d3SPeter Zijlstra #define raw_cpu_cmpxchg128(pcp, oval, nval) percpu_cmpxchg128_op(16, , pcp, oval, nval) 4206d12c8d3SPeter Zijlstra #define this_cpu_cmpxchg128(pcp, oval, nval) percpu_cmpxchg128_op(16, volatile, pcp, oval, nval) 42154cd971cSUros Bizjak 42254cd971cSUros Bizjak #define percpu_try_cmpxchg128_op(size, qual, _var, _ovalp, _nval) \ 42354cd971cSUros Bizjak ({ \ 42454cd971cSUros Bizjak bool success; \ 42554cd971cSUros Bizjak u128 *_oval = (u128 *)(_ovalp); \ 42654cd971cSUros Bizjak union { \ 42754cd971cSUros Bizjak u128 var; \ 42854cd971cSUros Bizjak struct { \ 42954cd971cSUros Bizjak u64 low, high; \ 43054cd971cSUros Bizjak }; \ 43154cd971cSUros Bizjak } old__, new__; \ 43254cd971cSUros Bizjak \ 43354cd971cSUros Bizjak old__.var = *_oval; \ 43454cd971cSUros Bizjak new__.var = _nval; \ 43554cd971cSUros Bizjak \ 4362d352ec9SUros Bizjak asm_inline qual ( \ 4372d352ec9SUros Bizjak ALTERNATIVE("call this_cpu_cmpxchg16b_emu", \ 43854cd971cSUros Bizjak "cmpxchg16b " __percpu_arg([var]), X86_FEATURE_CX16) \ 43954cd971cSUros Bizjak CC_SET(z) \ 4404087e16bSUros Bizjak : ALT_OUTPUT_SP(CC_OUT(z) (success), \ 4419a462b9eSNadav Amit [var] "+m" (__my_cpu_var(_var)), \ 
4422d352ec9SUros Bizjak "+a" (old__.low), "+d" (old__.high)) \ 4432d352ec9SUros Bizjak : "b" (new__.low), "c" (new__.high), \ 4447c097ca5SUros Bizjak "S" (&(_var)) \ 4457c097ca5SUros Bizjak : "memory"); \ 44654cd971cSUros Bizjak if (unlikely(!success)) \ 44754cd971cSUros Bizjak *_oval = old__.var; \ 4482d352ec9SUros Bizjak \ 44954cd971cSUros Bizjak likely(success); \ 45054cd971cSUros Bizjak }) 45154cd971cSUros Bizjak 45254cd971cSUros Bizjak #define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) percpu_try_cmpxchg128_op(16, , pcp, ovalp, nval) 45354cd971cSUros Bizjak #define this_cpu_try_cmpxchg128(pcp, ovalp, nval) percpu_try_cmpxchg128_op(16, volatile, pcp, ovalp, nval) 45461d73e4fSIngo Molnar 45561d73e4fSIngo Molnar #endif /* CONFIG_X86_64 */ 4566d12c8d3SPeter Zijlstra 457a50ea641SUros Bizjak #define raw_cpu_read_1(pcp) __raw_cpu_read(1, , pcp) 458a50ea641SUros Bizjak #define raw_cpu_read_2(pcp) __raw_cpu_read(2, , pcp) 459a50ea641SUros Bizjak #define raw_cpu_read_4(pcp) __raw_cpu_read(4, , pcp) 460a50ea641SUros Bizjak #define raw_cpu_write_1(pcp, val) __raw_cpu_write(1, , pcp, val) 461a50ea641SUros Bizjak #define raw_cpu_write_2(pcp, val) __raw_cpu_write(2, , pcp, val) 462a50ea641SUros Bizjak #define raw_cpu_write_4(pcp, val) __raw_cpu_write(4, , pcp, val) 463a50ea641SUros Bizjak 464a50ea641SUros Bizjak #define this_cpu_read_1(pcp) __raw_cpu_read(1, volatile, pcp) 465a50ea641SUros Bizjak #define this_cpu_read_2(pcp) __raw_cpu_read(2, volatile, pcp) 466a50ea641SUros Bizjak #define this_cpu_read_4(pcp) __raw_cpu_read(4, volatile, pcp) 467a50ea641SUros Bizjak #define this_cpu_write_1(pcp, val) __raw_cpu_write(1, volatile, pcp, val) 468a50ea641SUros Bizjak #define this_cpu_write_2(pcp, val) __raw_cpu_write(2, volatile, pcp, val) 469a50ea641SUros Bizjak #define this_cpu_write_4(pcp, val) __raw_cpu_write(4, volatile, pcp, val) 470a50ea641SUros Bizjak 47148908919SUros Bizjak #define this_cpu_read_stable_1(pcp) __raw_cpu_read_stable(1, pcp) 47248908919SUros Bizjak #define 
this_cpu_read_stable_2(pcp) __raw_cpu_read_stable(2, pcp) 47348908919SUros Bizjak #define this_cpu_read_stable_4(pcp) __raw_cpu_read_stable(4, pcp) 474b90169b4SUros Bizjak 47533e5614aSBrian Gerst #define raw_cpu_add_1(pcp, val) percpu_add_op(1, , (pcp), val) 47633e5614aSBrian Gerst #define raw_cpu_add_2(pcp, val) percpu_add_op(2, , (pcp), val) 47733e5614aSBrian Gerst #define raw_cpu_add_4(pcp, val) percpu_add_op(4, , (pcp), val) 478455ca134SUros Bizjak #define raw_cpu_and_1(pcp, val) percpu_binary_op(1, , "and", (pcp), val) 479455ca134SUros Bizjak #define raw_cpu_and_2(pcp, val) percpu_binary_op(2, , "and", (pcp), val) 480455ca134SUros Bizjak #define raw_cpu_and_4(pcp, val) percpu_binary_op(4, , "and", (pcp), val) 481455ca134SUros Bizjak #define raw_cpu_or_1(pcp, val) percpu_binary_op(1, , "or", (pcp), val) 482455ca134SUros Bizjak #define raw_cpu_or_2(pcp, val) percpu_binary_op(2, , "or", (pcp), val) 483455ca134SUros Bizjak #define raw_cpu_or_4(pcp, val) percpu_binary_op(4, , "or", (pcp), val) 4842234a6d3SPeter Zijlstra #define raw_cpu_xchg_1(pcp, val) raw_percpu_xchg_op(pcp, val) 4852234a6d3SPeter Zijlstra #define raw_cpu_xchg_2(pcp, val) raw_percpu_xchg_op(pcp, val) 4862234a6d3SPeter Zijlstra #define raw_cpu_xchg_4(pcp, val) raw_percpu_xchg_op(pcp, val) 48730ed1a79SChristoph Lameter 48833e5614aSBrian Gerst #define this_cpu_add_1(pcp, val) percpu_add_op(1, volatile, (pcp), val) 48933e5614aSBrian Gerst #define this_cpu_add_2(pcp, val) percpu_add_op(2, volatile, (pcp), val) 49033e5614aSBrian Gerst #define this_cpu_add_4(pcp, val) percpu_add_op(4, volatile, (pcp), val) 491455ca134SUros Bizjak #define this_cpu_and_1(pcp, val) percpu_binary_op(1, volatile, "and", (pcp), val) 492455ca134SUros Bizjak #define this_cpu_and_2(pcp, val) percpu_binary_op(2, volatile, "and", (pcp), val) 493455ca134SUros Bizjak #define this_cpu_and_4(pcp, val) percpu_binary_op(4, volatile, "and", (pcp), val) 494455ca134SUros Bizjak #define this_cpu_or_1(pcp, val) percpu_binary_op(1, volatile, 
"or", (pcp), val) 495455ca134SUros Bizjak #define this_cpu_or_2(pcp, val) percpu_binary_op(2, volatile, "or", (pcp), val) 496455ca134SUros Bizjak #define this_cpu_or_4(pcp, val) percpu_binary_op(4, volatile, "or", (pcp), val) 49705390846SUros Bizjak #define this_cpu_xchg_1(pcp, nval) this_percpu_xchg_op(pcp, nval) 49805390846SUros Bizjak #define this_cpu_xchg_2(pcp, nval) this_percpu_xchg_op(pcp, nval) 49905390846SUros Bizjak #define this_cpu_xchg_4(pcp, nval) this_percpu_xchg_op(pcp, nval) 50030ed1a79SChristoph Lameter 501bbff583bSBrian Gerst #define raw_cpu_add_return_1(pcp, val) percpu_add_return_op(1, , pcp, val) 502bbff583bSBrian Gerst #define raw_cpu_add_return_2(pcp, val) percpu_add_return_op(2, , pcp, val) 503bbff583bSBrian Gerst #define raw_cpu_add_return_4(pcp, val) percpu_add_return_op(4, , pcp, val) 504ebcd580bSBrian Gerst #define raw_cpu_cmpxchg_1(pcp, oval, nval) percpu_cmpxchg_op(1, , pcp, oval, nval) 505ebcd580bSBrian Gerst #define raw_cpu_cmpxchg_2(pcp, oval, nval) percpu_cmpxchg_op(2, , pcp, oval, nval) 506ebcd580bSBrian Gerst #define raw_cpu_cmpxchg_4(pcp, oval, nval) percpu_cmpxchg_op(4, , pcp, oval, nval) 5075f863897SUros Bizjak #define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) percpu_try_cmpxchg_op(1, , pcp, ovalp, nval) 5085f863897SUros Bizjak #define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) percpu_try_cmpxchg_op(2, , pcp, ovalp, nval) 5095f863897SUros Bizjak #define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) percpu_try_cmpxchg_op(4, , pcp, ovalp, nval) 5107296e08aSChristoph Lameter 511bbff583bSBrian Gerst #define this_cpu_add_return_1(pcp, val) percpu_add_return_op(1, volatile, pcp, val) 512bbff583bSBrian Gerst #define this_cpu_add_return_2(pcp, val) percpu_add_return_op(2, volatile, pcp, val) 513bbff583bSBrian Gerst #define this_cpu_add_return_4(pcp, val) percpu_add_return_op(4, volatile, pcp, val) 514ebcd580bSBrian Gerst #define this_cpu_cmpxchg_1(pcp, oval, nval) percpu_cmpxchg_op(1, volatile, pcp, oval, nval) 515ebcd580bSBrian Gerst #define 
this_cpu_cmpxchg_2(pcp, oval, nval) percpu_cmpxchg_op(2, volatile, pcp, oval, nval) 516ebcd580bSBrian Gerst #define this_cpu_cmpxchg_4(pcp, oval, nval) percpu_cmpxchg_op(4, volatile, pcp, oval, nval) 5175f863897SUros Bizjak #define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) percpu_try_cmpxchg_op(1, volatile, pcp, ovalp, nval) 5185f863897SUros Bizjak #define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) percpu_try_cmpxchg_op(2, volatile, pcp, ovalp, nval) 5195f863897SUros Bizjak #define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) percpu_try_cmpxchg_op(4, volatile, pcp, ovalp, nval) 5207296e08aSChristoph Lameter 52130ed1a79SChristoph Lameter /* 52261d73e4fSIngo Molnar * Per-CPU atomic 64-bit operations are only available under 64-bit kernels. 52361d73e4fSIngo Molnar * 32-bit kernels must fall back to generic operations. 52430ed1a79SChristoph Lameter */ 52530ed1a79SChristoph Lameter #ifdef CONFIG_X86_64 5269130ea06SIngo Molnar 52747c9dbd2SUros Bizjak #define raw_cpu_read_8(pcp) __raw_cpu_read(8, , pcp) 52847c9dbd2SUros Bizjak #define raw_cpu_write_8(pcp, val) __raw_cpu_write(8, , pcp, val) 52947c9dbd2SUros Bizjak 53047c9dbd2SUros Bizjak #define this_cpu_read_8(pcp) __raw_cpu_read(8, volatile, pcp) 53147c9dbd2SUros Bizjak #define this_cpu_write_8(pcp, val) __raw_cpu_write(8, volatile, pcp, val) 53247c9dbd2SUros Bizjak 53348908919SUros Bizjak #define this_cpu_read_stable_8(pcp) __raw_cpu_read_stable(8, pcp) 534b90169b4SUros Bizjak 53533e5614aSBrian Gerst #define raw_cpu_add_8(pcp, val) percpu_add_op(8, , (pcp), val) 536455ca134SUros Bizjak #define raw_cpu_and_8(pcp, val) percpu_binary_op(8, , "and", (pcp), val) 537455ca134SUros Bizjak #define raw_cpu_or_8(pcp, val) percpu_binary_op(8, , "or", (pcp), val) 538bbff583bSBrian Gerst #define raw_cpu_add_return_8(pcp, val) percpu_add_return_op(8, , pcp, val) 5392234a6d3SPeter Zijlstra #define raw_cpu_xchg_8(pcp, nval) raw_percpu_xchg_op(pcp, nval) 540ebcd580bSBrian Gerst #define raw_cpu_cmpxchg_8(pcp, oval, nval) percpu_cmpxchg_op(8, , 
pcp, oval, nval) 5415f863897SUros Bizjak #define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) percpu_try_cmpxchg_op(8, , pcp, ovalp, nval) 54230ed1a79SChristoph Lameter
/*
 * 64-bit this_cpu_*() operations.  Unlike the raw_cpu_*() variants above,
 * which pass an empty qualifier to the percpu op generators, these pass
 * "volatile" -- the this_cpu flavor of the ops (NOTE(review): per
 * Documentation/core-api/this_cpu_ops.rst these are the preemption/IRQ
 * safe accessors; confirm against the op generator definitions).
 */
54333e5614aSBrian Gerst #define this_cpu_add_8(pcp, val) percpu_add_op(8, volatile, (pcp), val) 544455ca134SUros Bizjak #define this_cpu_and_8(pcp, val) percpu_binary_op(8, volatile, "and", (pcp), val) 545455ca134SUros Bizjak #define this_cpu_or_8(pcp, val) percpu_binary_op(8, volatile, "or", (pcp), val) 546bbff583bSBrian Gerst #define this_cpu_add_return_8(pcp, val) percpu_add_return_op(8, volatile, pcp, val) 54705390846SUros Bizjak #define this_cpu_xchg_8(pcp, nval) this_percpu_xchg_op(pcp, nval) 548ebcd580bSBrian Gerst #define this_cpu_cmpxchg_8(pcp, oval, nval) percpu_cmpxchg_op(8, volatile, pcp, oval, nval) 5495f863897SUros Bizjak #define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) percpu_try_cmpxchg_op(8, volatile, pcp, ovalp, nval) 55093cfa544SUros Bizjak
/* raw_cpu_read_long(): read one native machine word (8 bytes on 64-bit). */
55193cfa544SUros Bizjak #define raw_cpu_read_long(pcp) raw_cpu_read_8(pcp) 55261d73e4fSIngo Molnar 55361d73e4fSIngo Molnar #else /* !CONFIG_X86_64: */ 55461d73e4fSIngo Molnar 55561d73e4fSIngo Molnar /* There is no generic 64-bit read stable operation for 32-bit targets. */ 556b90169b4SUros Bizjak #define this_cpu_read_stable_8(pcp) ({ BUILD_BUG(); (typeof(pcp))0; }) 55793cfa544SUros Bizjak 55893cfa544SUros Bizjak #define raw_cpu_read_long(pcp) raw_cpu_read_4(pcp) 55961d73e4fSIngo Molnar 56061d73e4fSIngo Molnar #endif /* CONFIG_X86_64 */ 56130ed1a79SChristoph Lameter 56247c9dbd2SUros Bizjak #define this_cpu_read_const(pcp) __raw_cpu_read_const(pcp) 56347c9dbd2SUros Bizjak 56447c9dbd2SUros Bizjak /* 56561d73e4fSIngo Molnar * this_cpu_read() makes the compiler load the per-CPU variable every time 56661d73e4fSIngo Molnar * it is accessed while this_cpu_read_stable() allows the value to be cached.
56747c9dbd2SUros Bizjak * this_cpu_read_stable() is more efficient and can be used if its value 56861d73e4fSIngo Molnar * is guaranteed to be valid across CPUs. The current users include 569a1e4cc01SBrian Gerst * current_task and cpu_current_top_of_stack, both of which are 57047c9dbd2SUros Bizjak * actually per-thread variables implemented as per-CPU variables and 57147c9dbd2SUros Bizjak * thus stable for the duration of the respective task. 57247c9dbd2SUros Bizjak */ 57347c9dbd2SUros Bizjak #define this_cpu_read_stable(pcp) __pcpu_size_call_return(this_cpu_read_stable_, pcp) 57447c9dbd2SUros Bizjak
/*
 * Test a bit in a per-CPU variable when the bit number is a compile-time
 * constant: index the unsigned long that contains the bit, then mask it
 * out of a plain raw_cpu_read().
 */
575a3f8a3a2SUros Bizjak #define x86_this_cpu_constant_test_bit(_nr, _var) \ 576a3f8a3a2SUros Bizjak ({ \ 577a3f8a3a2SUros Bizjak unsigned long __percpu *addr__ = \ 578a3f8a3a2SUros Bizjak (unsigned long __percpu *)&(_var) + ((_nr) / BITS_PER_LONG); \ 5799130ea06SIngo Molnar \ 580a3f8a3a2SUros Bizjak !!((1UL << ((_nr) % BITS_PER_LONG)) & raw_cpu_read(*addr__)); \ 581a3f8a3a2SUros Bizjak }) 582349c004eSChristoph Lameter
/*
 * Run-time bit number: issue "btl" against the per-CPU variable and pull
 * the result out of the carry flag via the CC_SET()/CC_OUT()
 * condition-code output helpers.
 */
583a3f8a3a2SUros Bizjak #define x86_this_cpu_variable_test_bit(_nr, _var) \ 584a3f8a3a2SUros Bizjak ({ \ 585a3f8a3a2SUros Bizjak bool oldbit; \ 586a3f8a3a2SUros Bizjak \ 587a3f8a3a2SUros Bizjak asm volatile("btl %[nr], " __percpu_arg([var]) \ 588a3f8a3a2SUros Bizjak CC_SET(c) \ 589a3f8a3a2SUros Bizjak : CC_OUT(c) (oldbit) \ 590a3f8a3a2SUros Bizjak : [var] "m" (__my_cpu_var(_var)), \ 591a3f8a3a2SUros Bizjak [nr] "rI" (_nr)); \ 592a3f8a3a2SUros Bizjak oldbit; \ 593a3f8a3a2SUros Bizjak }) 594349c004eSChristoph Lameter
/* Dispatch on __builtin_constant_p() to pick the cheaper constant form. */
595a3f8a3a2SUros Bizjak #define x86_this_cpu_test_bit(_nr, _var) \ 596a3f8a3a2SUros Bizjak (__builtin_constant_p(_nr) \ 597a3f8a3a2SUros Bizjak ? 
x86_this_cpu_constant_test_bit(_nr, _var) \ 598a3f8a3a2SUros Bizjak : x86_this_cpu_variable_test_bit(_nr, _var)) 599349c004eSChristoph Lameter 600349c004eSChristoph Lameter 6016dbde353SIngo Molnar #include <asm-generic/percpu.h> 6026dbde353SIngo Molnar 6036dbde353SIngo Molnar /* We can use this directly for local CPU (faster). */ 60406aa0305SBrian Gerst DECLARE_PER_CPU_CACHE_HOT(unsigned long, this_cpu_off); 6056dbde353SIngo Molnar 60624a295e4SThomas Huth #endif /* !__ASSEMBLER__ */ 60723ca4bbaSMike Travis 60823ca4bbaSMike Travis #ifdef CONFIG_SMP 60923ca4bbaSMike Travis 61023ca4bbaSMike Travis /* 61123ca4bbaSMike Travis * Define the "EARLY_PER_CPU" macros. These are used for some per_cpu 61223ca4bbaSMike Travis * variables that are initialized and accessed before there are per_cpu 61323ca4bbaSMike Travis * areas allocated. 61423ca4bbaSMike Travis */ 61523ca4bbaSMike Travis 61623ca4bbaSMike Travis #define DEFINE_EARLY_PER_CPU(_type, _name, _initvalue) \ 61723ca4bbaSMike Travis DEFINE_PER_CPU(_type, _name) = _initvalue; \ 61823ca4bbaSMike Travis __typeof__(_type) _name##_early_map[NR_CPUS] __initdata = \ 61923ca4bbaSMike Travis { [0 ... NR_CPUS-1] = _initvalue }; \ 620c6a92a25SMarcin Slusarz __typeof__(_type) *_name##_early_ptr __refdata = _name##_early_map 62123ca4bbaSMike Travis 622c35f7741SIdo Yariv #define DEFINE_EARLY_PER_CPU_READ_MOSTLY(_type, _name, _initvalue) \ 623c35f7741SIdo Yariv DEFINE_PER_CPU_READ_MOSTLY(_type, _name) = _initvalue; \ 624c35f7741SIdo Yariv __typeof__(_type) _name##_early_map[NR_CPUS] __initdata = \ 625c35f7741SIdo Yariv { [0 ... 
NR_CPUS-1] = _initvalue }; \ 626c35f7741SIdo Yariv __typeof__(_type) *_name##_early_ptr __refdata = _name##_early_map 627c35f7741SIdo Yariv 62823ca4bbaSMike Travis #define EXPORT_EARLY_PER_CPU_SYMBOL(_name) \ 62923ca4bbaSMike Travis EXPORT_PER_CPU_SYMBOL(_name) 63023ca4bbaSMike Travis 63123ca4bbaSMike Travis #define DECLARE_EARLY_PER_CPU(_type, _name) \ 63223ca4bbaSMike Travis DECLARE_PER_CPU(_type, _name); \ 63323ca4bbaSMike Travis extern __typeof__(_type) *_name##_early_ptr; \ 63423ca4bbaSMike Travis extern __typeof__(_type) _name##_early_map[] 63523ca4bbaSMike Travis 636c35f7741SIdo Yariv #define DECLARE_EARLY_PER_CPU_READ_MOSTLY(_type, _name) \ 637c35f7741SIdo Yariv DECLARE_PER_CPU_READ_MOSTLY(_type, _name); \ 638c35f7741SIdo Yariv extern __typeof__(_type) *_name##_early_ptr; \ 639c35f7741SIdo Yariv extern __typeof__(_type) _name##_early_map[] 640c35f7741SIdo Yariv 64123ca4bbaSMike Travis #define early_per_cpu_ptr(_name) (_name##_early_ptr) 64223ca4bbaSMike Travis #define early_per_cpu_map(_name, _idx) (_name##_early_map[_idx]) 6439130ea06SIngo Molnar
/*
 * early_per_cpu(): access the _name##_early_map[] copy while the
 * _name##_early_ptr is still set, otherwise go through the real
 * per_cpu() variable.
 */
64423ca4bbaSMike Travis #define early_per_cpu(_name, _cpu) \ 645f10fcd47STejun Heo *(early_per_cpu_ptr(_name) ? 
\ 646f10fcd47STejun Heo &early_per_cpu_ptr(_name)[_cpu] : \ 647f10fcd47STejun Heo &per_cpu(_name, _cpu)) 64823ca4bbaSMike Travis
/*
 * UP build: no early map is needed -- early_per_cpu() degenerates to a
 * plain per_cpu() access, early_per_cpu_ptr() is constant NULL, and
 * early_per_cpu_map() is not provided at all.
 */
64961d73e4fSIngo Molnar #else /* !CONFIG_SMP: */ 65023ca4bbaSMike Travis #define DEFINE_EARLY_PER_CPU(_type, _name, _initvalue) \ 65123ca4bbaSMike Travis DEFINE_PER_CPU(_type, _name) = _initvalue 65223ca4bbaSMike Travis 653c35f7741SIdo Yariv #define DEFINE_EARLY_PER_CPU_READ_MOSTLY(_type, _name, _initvalue) \ 654c35f7741SIdo Yariv DEFINE_PER_CPU_READ_MOSTLY(_type, _name) = _initvalue 655c35f7741SIdo Yariv 65623ca4bbaSMike Travis #define EXPORT_EARLY_PER_CPU_SYMBOL(_name) \ 65723ca4bbaSMike Travis EXPORT_PER_CPU_SYMBOL(_name) 65823ca4bbaSMike Travis 65923ca4bbaSMike Travis #define DECLARE_EARLY_PER_CPU(_type, _name) \ 66023ca4bbaSMike Travis DECLARE_PER_CPU(_type, _name) 66123ca4bbaSMike Travis 662c35f7741SIdo Yariv #define DECLARE_EARLY_PER_CPU_READ_MOSTLY(_type, _name) \ 663c35f7741SIdo Yariv DECLARE_PER_CPU_READ_MOSTLY(_type, _name) 664c35f7741SIdo Yariv 66523ca4bbaSMike Travis #define early_per_cpu(_name, _cpu) per_cpu(_name, _cpu) 66623ca4bbaSMike Travis #define early_per_cpu_ptr(_name) NULL 66723ca4bbaSMike Travis /* no early_per_cpu_map() */ 66823ca4bbaSMike Travis 6699130ea06SIngo Molnar #endif /* !CONFIG_SMP */ 67023ca4bbaSMike Travis 6701965aae3SH. Peter Anvin #endif /* _ASM_X86_PERCPU_H */ 672