/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level functions for atomic operations
 *
 * Copyright IBM Corp. 1999, 2016
 */

#ifndef __ARCH_S390_ATOMIC_OPS__
#define __ARCH_S390_ATOMIC_OPS__

#include <linux/limits.h>
#include <asm/march.h>
#include <asm/asm.h>

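/*
 * Plain (non-RMW) accessors: __atomic_read()/__atomic_set() and their 64 bit
 * variants are single loads and stores. Constants in the signed 16 bit range
 * are stored with MVHI/MVGHI, so no register has to be loaded with the value
 * first.
 */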
static __always_inline int __atomic_read(const int *ptr)
{
	int val;

	asm volatile(
		"	l	%[val],%[ptr]"
		: [val] "=d" (val) : [ptr] "R" (*ptr));
	return val;
}

static __always_inline void __atomic_set(int *ptr, int val)
{
	if (__builtin_constant_p(val) && val >= S16_MIN && val <= S16_MAX) {
		asm volatile(
			"	mvhi	%[ptr],%[val]"
			: [ptr] "=Q" (*ptr) : [val] "K" (val));
	} else {
		asm volatile(
			"	st	%[val],%[ptr]"
			: [ptr] "=R" (*ptr) : [val] "d" (val));
	}
}

static __always_inline long __atomic64_read(const long *ptr)
{
	long val;

	asm volatile(
		"	lg	%[val],%[ptr]"
		: [val] "=d" (val) : [ptr] "RT" (*ptr));
	return val;
}

static __always_inline void __atomic64_set(long *ptr, long val)
{
	if (__builtin_constant_p(val) && val >= S16_MIN && val <= S16_MAX) {
		asm volatile(
			"	mvghi	%[ptr],%[val]"
			: [ptr] "=Q" (*ptr) : [val] "K" (val));
	} else {
		asm volatile(
			"	stg	%[val],%[ptr]"
			: [ptr] "=RT" (*ptr) : [val] "d" (val));
	}
}

#ifdef MARCH_HAS_Z196_FEATURES

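/*
 * With the z196 interlocked-access facility each operation is a single
 * instruction (LAA and friends) which also returns the old value. The
 * _barrier variants append a serializing "bcr 14,0" after the update.
 */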
#define __ATOMIC_OP(op_name, op_type, op_string, op_barrier)		\
static __always_inline op_type op_name(op_type val, op_type *ptr)	\
{									\
	op_type old;							\
									\
	asm volatile(							\
		op_string "	%[old],%[val],%[ptr]"			\
		op_barrier						\
		: [old] "=d" (old), [ptr] "+QS" (*ptr)			\
		: [val] "d" (val) : "cc", "memory");			\
	return old;							\
}

#define __ATOMIC_OPS(op_name, op_type, op_string)			\
	__ATOMIC_OP(op_name, op_type, op_string, "")			\
	__ATOMIC_OP(op_name##_barrier, op_type, op_string, "\nbcr 14,0")

__ATOMIC_OPS(__atomic_add, int, "laa")
__ATOMIC_OPS(__atomic_and, int, "lan")
__ATOMIC_OPS(__atomic_or,  int, "lao")
__ATOMIC_OPS(__atomic_xor, int, "lax")

__ATOMIC_OPS(__atomic64_add, long, "laag")
__ATOMIC_OPS(__atomic64_and, long, "lang")
__ATOMIC_OPS(__atomic64_or,  long, "laog")
__ATOMIC_OPS(__atomic64_xor, long, "laxg")

#undef __ATOMIC_OPS
#undef __ATOMIC_OP

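/*
 * ASI/AGSI add a small signed immediate directly to storage; there is no
 * old value to return, so these helpers are void.
 */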
#define __ATOMIC_CONST_OP(op_name, op_type, op_string, op_barrier)	\
static __always_inline void op_name(op_type val, op_type *ptr)		\
{									\
	asm volatile(							\
		op_string "	%[ptr],%[val]"				\
		op_barrier						\
		: [ptr] "+QS" (*ptr) : [val] "i" (val) : "cc", "memory");\
}

#define __ATOMIC_CONST_OPS(op_name, op_type, op_string)			\
	__ATOMIC_CONST_OP(op_name, op_type, op_string, "")		\
	__ATOMIC_CONST_OP(op_name##_barrier, op_type, op_string, "\nbcr 14,0")

__ATOMIC_CONST_OPS(__atomic_add_const, int, "asi")
__ATOMIC_CONST_OPS(__atomic64_add_const, long, "agsi")

#undef __ATOMIC_CONST_OPS
#undef __ATOMIC_CONST_OP

#else /* MARCH_HAS_Z196_FEATURES */

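/*
 * Without the interlocked-access facility the read-modify-write operations
 * fall back to a compare-and-swap (CS/CSG) retry loop.
 */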
#define __ATOMIC_OP(op_name, op_string)					\
static __always_inline int op_name(int val, int *ptr)			\
{									\
	int old, new;							\
									\
	asm volatile(							\
		"0:	lr	%[new],%[old]\n"			\
		op_string "	%[new],%[val]\n"			\
		"	cs	%[old],%[new],%[ptr]\n"			\
		"	jl	0b"					\
		: [old] "=d" (old), [new] "=&d" (new), [ptr] "+Q" (*ptr)\
		: [val] "d" (val), "0" (*ptr) : "cc", "memory");	\
	return old;							\
}

#define __ATOMIC_OPS(op_name, op_string)				\
	__ATOMIC_OP(op_name, op_string)					\
	__ATOMIC_OP(op_name##_barrier, op_string)

__ATOMIC_OPS(__atomic_add, "ar")
__ATOMIC_OPS(__atomic_and, "nr")
__ATOMIC_OPS(__atomic_or,  "or")
__ATOMIC_OPS(__atomic_xor, "xr")

#undef __ATOMIC_OPS

#define __ATOMIC64_OP(op_name, op_string)				\
static __always_inline long op_name(long val, long *ptr)		\
{									\
	long old, new;							\
									\
	asm volatile(							\
		"0:	lgr	%[new],%[old]\n"			\
		op_string "	%[new],%[val]\n"			\
		"	csg	%[old],%[new],%[ptr]\n"			\
		"	jl	0b"					\
		: [old] "=d" (old), [new] "=&d" (new), [ptr] "+QS" (*ptr)\
		: [val] "d" (val), "0" (*ptr) : "cc", "memory");	\
	return old;							\
}

#define __ATOMIC64_OPS(op_name, op_string)				\
	__ATOMIC64_OP(op_name, op_string)				\
	__ATOMIC64_OP(op_name##_barrier, op_string)

__ATOMIC64_OPS(__atomic64_add, "agr")
__ATOMIC64_OPS(__atomic64_and, "ngr")
__ATOMIC64_OPS(__atomic64_or,  "ogr")
__ATOMIC64_OPS(__atomic64_xor, "xgr")

#undef __ATOMIC64_OPS

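/*
 * Without ASI/AGSI the *_add_const() variants simply use the regular add
 * helpers and discard the returned old value.
 */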
#define __atomic_add_const(val, ptr)		((void)__atomic_add(val, ptr))
#define __atomic_add_const_barrier(val, ptr)	((void)__atomic_add(val, ptr))
#define __atomic64_add_const(val, ptr)		((void)__atomic64_add(val, ptr))
#define __atomic64_add_const_barrier(val, ptr)	((void)__atomic64_add(val, ptr))

#endif /* MARCH_HAS_Z196_FEATURES */

#if defined(MARCH_HAS_Z196_FEATURES) && defined(__HAVE_ASM_FLAG_OUTPUTS__)

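/*
 * With flag output operands the condition code set by LAAL/LAALG (and by
 * ALSI/ALGSI below) can be used directly: cc 0 or 2 means the result of
 * the addition is zero, which is what the *_and_test() helpers report.
 */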
#define __ATOMIC_TEST_OP(op_name, op_type, op_string, op_barrier)	\
static __always_inline bool op_name(op_type val, op_type *ptr)		\
{									\
	op_type tmp;							\
	int cc;								\
									\
	asm volatile(							\
		op_string "	%[tmp],%[val],%[ptr]"			\
		op_barrier						\
		: "=@cc" (cc), [tmp] "=d" (tmp), [ptr] "+QS" (*ptr)	\
		: [val] "d" (val)					\
		: "memory");						\
	return (cc == 0) || (cc == 2);					\
}

#define __ATOMIC_TEST_OPS(op_name, op_type, op_string)			\
	__ATOMIC_TEST_OP(op_name, op_type, op_string, "")		\
	__ATOMIC_TEST_OP(op_name##_barrier, op_type, op_string, "\nbcr 14,0")

__ATOMIC_TEST_OPS(__atomic_add_and_test, int, "laal")
__ATOMIC_TEST_OPS(__atomic64_add_and_test, long, "laalg")

#undef __ATOMIC_TEST_OPS
#undef __ATOMIC_TEST_OP

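/*
 * ALSI/ALGSI add a signed immediate to storage and set the condition code
 * the same way, so no separate compare is needed for the zero test.
 */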
#define __ATOMIC_CONST_TEST_OP(op_name, op_type, op_string, op_barrier)	\
static __always_inline bool op_name(op_type val, op_type *ptr)		\
{									\
	int cc;								\
									\
	asm volatile(							\
		op_string "	%[ptr],%[val]"				\
		op_barrier						\
		: "=@cc" (cc), [ptr] "+QS" (*ptr)			\
		: [val] "i" (val)					\
		: "memory");						\
	return (cc == 0) || (cc == 2);					\
}

#define __ATOMIC_CONST_TEST_OPS(op_name, op_type, op_string)		\
	__ATOMIC_CONST_TEST_OP(op_name, op_type, op_string, "")	\
	__ATOMIC_CONST_TEST_OP(op_name##_barrier, op_type, op_string, "\nbcr 14,0")

__ATOMIC_CONST_TEST_OPS(__atomic_add_const_and_test, int, "alsi")
__ATOMIC_CONST_TEST_OPS(__atomic64_add_const_and_test, long, "algsi")

#undef __ATOMIC_CONST_TEST_OPS
#undef __ATOMIC_CONST_TEST_OP

#else /* defined(MARCH_HAS_Z196_FEATURES) && defined(__HAVE_ASM_FLAG_OUTPUTS__) */

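/*
 * Without flag output operands test the returned old value instead:
 * if the old value equals -val the new value is zero.
 */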
#define __ATOMIC_TEST_OP(op_name, op_func, op_type)			\
static __always_inline bool op_name(op_type val, op_type *ptr)		\
{									\
	return op_func(val, ptr) == -val;				\
}

__ATOMIC_TEST_OP(__atomic_add_and_test,			__atomic_add,		int)
__ATOMIC_TEST_OP(__atomic_add_and_test_barrier,		__atomic_add_barrier,	int)
__ATOMIC_TEST_OP(__atomic_add_const_and_test,		__atomic_add,		int)
__ATOMIC_TEST_OP(__atomic_add_const_and_test_barrier,	__atomic_add_barrier,	int)
__ATOMIC_TEST_OP(__atomic64_add_and_test,		__atomic64_add,		long)
__ATOMIC_TEST_OP(__atomic64_add_and_test_barrier,	__atomic64_add_barrier, long)
__ATOMIC_TEST_OP(__atomic64_add_const_and_test,		__atomic64_add,		long)
__ATOMIC_TEST_OP(__atomic64_add_const_and_test_barrier,	__atomic64_add_barrier,	long)

#undef __ATOMIC_TEST_OP

#endif /* defined(MARCH_HAS_Z196_FEATURES) && defined(__HAVE_ASM_FLAG_OUTPUTS__) */

#endif /* __ARCH_S390_ATOMIC_OPS__ */