/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

/*
 * __percpu_qual is the qualifier for the percpu named address space.
 *
 * Most arches use the generic named address space for percpu variables,
 * but some arches define percpu variables in a different named address
 * space (on x86, percpu variables may be declared as relative to the
 * %fs or %gs segment using the __seg_fs or __seg_gs named address
 * space qualifier).
 */
#ifndef __percpu_qual
# define __percpu_qual
#endif
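
/*
 * Illustrative sketch, not part of this header: an arch with compiler
 * named-address-space support can override the qualifier before this
 * file is pulled in, roughly what x86 does when percpu variables live
 * relative to %gs:
 *
 *	#define __percpu_qual	__seg_gs
 */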

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a percpu
 * variable's address to get to its instance for a given processor.
 *
 * Most arches use the __per_cpu_offset array for these offsets, but
 * some arches have their own way of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
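
/*
 * Illustrative sketch (variable name hypothetical): per_cpu_ptr() in
 * <linux/percpu-defs.h> applies this offset to reach a given CPU's
 * instance:
 *
 *	DEFINE_PER_CPU(int, nr_events);
 *	int *p = per_cpu_ptr(&nr_events, 3);
 *
 * which evaluates to &nr_events shifted by per_cpu_offset(3).
 */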

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per-CPU variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
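
/*
 * Note: under CONFIG_DEBUG_PREEMPT, my_cpu_offset goes through
 * smp_processor_id(), which warns when the caller runs in a context
 * where it could migrate to another CPU; __my_cpu_offset uses
 * raw_smp_processor_id() and skips that check.
 */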

/*
 * An arch may define arch_raw_cpu_ptr() to provide a more efficient
 * address translation for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
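
/*
 * Illustrative sketch (variable name hypothetical): with the generic
 * definition above, raw_cpu_ptr(&var) becomes
 * SHIFT_PERCPU_PTR(&var, __my_cpu_offset), i.e. the current CPU's
 * instance with no preemption or context checking:
 *
 *	DEFINE_PER_CPU(struct io_stats, io_stats);
 *	struct io_stats *s = raw_cpu_ptr(&io_stats);
 */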

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})

#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	TYPEOF_UNQUAL(pcp) *__p = raw_cpu_ptr(&(pcp));			\
									\
	*__p += val;							\
	*__p;								\
})

#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	TYPEOF_UNQUAL(pcp) *__p = raw_cpu_ptr(&(pcp));			\
	TYPEOF_UNQUAL(pcp) __ret;					\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})
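
/*
 * Illustrative sketch: with the fallbacks above, an operation such as
 * raw_cpu_add_return(pcp, 1) behaves like:
 *
 *	TYPEOF_UNQUAL(pcp) *__p = raw_cpu_ptr(&(pcp));
 *	*__p += 1;
 *
 * returning the new *__p. None of this is atomic; raw_cpu_*() callers
 * must already be in a context where the variable cannot be accessed
 * concurrently (e.g. preemption disabled).
 */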

#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)		\
({									\
	TYPEOF_UNQUAL(pcp) __val, __old = *(ovalp);			\
	__val = _cmpxchg(pcp, __old, nval);				\
	if (__val != __old)						\
		*(ovalp) = __val;					\
	__val == __old;							\
})
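
/*
 * Note: this builds the try_cmpxchg flavor from a plain cmpxchg. It
 * evaluates to true when the exchange happened; on failure it writes
 * the value actually found back into *ovalp, so the caller can retry
 * without an extra read.
 */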

#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	TYPEOF_UNQUAL(pcp) *__p = raw_cpu_ptr(&(pcp));			\
	TYPEOF_UNQUAL(pcp) __val = *__p, ___old = *(ovalp);		\
	bool __ret;							\
	if (__val == ___old) {						\
		*__p = nval;						\
		__ret = true;						\
	} else {							\
		*(ovalp) = __val;					\
		__ret = false;						\
	}								\
	__ret;								\
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	TYPEOF_UNQUAL(pcp) __old = (oval);				\
	raw_cpu_generic_try_cmpxchg(pcp, &__old, nval);			\
	__old;								\
})
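
/*
 * Illustrative usage sketch (variable name hypothetical): because a
 * failed attempt refreshes the expected value in place, the usual
 * update loop needs no re-read:
 *
 *	DEFINE_PER_CPU(unsigned long, hits);
 *	unsigned long old = raw_cpu_read(hits);
 *
 *	while (!raw_cpu_try_cmpxchg(hits, &old, old + 1))
 *		;
 */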

#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	TYPEOF_UNQUAL(pcp) ___ret;					\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})

#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	TYPEOF_UNQUAL(pcp) ___ret;					\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})

#define this_cpu_generic_read(pcp)					\
({									\
	TYPEOF_UNQUAL(pcp) __ret;					\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
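
/*
 * Note: a native-word-sized, aligned read cannot tear, so disabling
 * preemption around a READ_ONCE() already yields a coherent this_cpu
 * read. Other sizes fall back to disabling interrupts so the copy
 * cannot interleave with an interrupt handler updating the same
 * variable on this CPU.
 */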

#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)

#define this_cpu_generic_add_return(pcp, val)				\
({									\
	TYPEOF_UNQUAL(pcp) __ret;					\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	TYPEOF_UNQUAL(pcp) __ret;					\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	bool __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	TYPEOF_UNQUAL(pcp) __ret;					\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
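
/*
 * Illustrative sketch (counter name hypothetical): the
 * this_cpu_generic_*() fallbacks wrap the raw variants in a local
 * irq-save section, so each read-modify-write is safe against both
 * preemption and interrupts on the local CPU:
 *
 *	DEFINE_PER_CPU(unsigned long, nr_packets);
 *	this_cpu_add(nr_packets, 1);
 *
 * Arches with single-instruction percpu ops override these to avoid
 * the irq save/restore.
 */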

#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_try_cmpxchg_1
#ifdef raw_cpu_cmpxchg_1
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
#else
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_2
#ifdef raw_cpu_cmpxchg_2
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
#else
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_4
#ifdef raw_cpu_cmpxchg_4
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
#else
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_8
#ifdef raw_cpu_cmpxchg_8
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
#else
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef raw_cpu_try_cmpxchg64
#ifdef raw_cpu_cmpxchg64
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg64)
#else
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg128
#ifdef raw_cpu_cmpxchg128
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg128)
#else
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg64
#define raw_cpu_cmpxchg64(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg128
#define raw_cpu_cmpxchg128(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_try_cmpxchg_1
#ifdef this_cpu_cmpxchg_1
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
#else
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_2
#ifdef this_cpu_cmpxchg_2
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
#else
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_4
#ifdef this_cpu_cmpxchg_4
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
#else
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_8
#ifdef this_cpu_cmpxchg_8
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
#else
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef this_cpu_try_cmpxchg64
#ifdef this_cpu_cmpxchg64
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg64)
#else
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg128
#ifdef this_cpu_cmpxchg128
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg128)
#else
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg64
#define this_cpu_cmpxchg64(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg128
#define this_cpu_cmpxchg128(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */