/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/cmpxchg.h>

/* A 64-bit atomic type */

typedef struct {
	s64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }
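
/*
 * Example (illustrative only; the counter name is hypothetical):
 *
 *	static atomic64_t total_bytes = ATOMIC64_INIT(0);
 */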

/*
 * Read an atomic64_t non-atomically.
 *
 * This is intended to be used in cases where a subsequent atomic operation
 * will handle the torn value, and can be used to prime the first iteration
 * of unconditional try_cmpxchg() loops, e.g.:
 *
 *   s64 val = arch_atomic64_read_nonatomic(v);
 *   do { } while (!arch_atomic64_try_cmpxchg(v, &val, val OP i));
 *
 * This is NOT safe to use where the value is not always checked by a
 * subsequent atomic operation, such as in conditional try_cmpxchg() loops
 * that can break before the atomic operation, e.g.:
 *
 *   s64 val = arch_atomic64_read_nonatomic(v);
 *   do {
 *       if (condition(val))
 *           break;
 *   } while (!arch_atomic64_try_cmpxchg(v, &val, val OP i));
 */
static __always_inline s64 arch_atomic64_read_nonatomic(const atomic64_t *v)
{
	/* See comment in arch_atomic_read(). */
	return __READ_ONCE(v->counter);
}

#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

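/*
 * The operations below are implemented by out-of-line helpers. On CPUs
 * with CMPXCHG8B (CONFIG_X86_CX8) the _cx8 variants are called directly;
 * otherwise alternative_call() patches in either the _386 or the _cx8
 * variant at boot, keyed on X86_FEATURE_CX8.
 */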
#ifdef CONFIG_X86_CX8
#define __alternative_atomic64(f, g, out, in, clobbers...) \
	asm volatile("call %c[func]" \
		     : ALT_OUTPUT_SP(out) \
		     : [func] "i" (atomic64_##g##_cx8) \
		       COMMA(in) \
		     : clobbers)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in, clobbers...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT(out), \
			 ASM_INPUT(in), clobbers)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
			   ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

#define alternative_atomic64(f, out, in, clobbers...) \
	__alternative_atomic64(f, f, ASM_OUTPUT(out), ASM_INPUT(in), clobbers)

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT

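/*
 * cmpxchg and try_cmpxchg are thin wrappers around the 32-bit
 * arch_cmpxchg64() machinery rather than the out-of-line helpers
 * declared above.
 */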
static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg64(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return arch_try_cmpxchg64(&v->counter, old, new);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg

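/*
 * Note that the out-of-line helpers use a bespoke calling convention:
 * the atomic64_t pointer and any 64-bit operand live in fixed registers,
 * as spelled out by the asm constraints at each call site.
 */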
static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
{
	s64 o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg,
			     "=&A" (o),
			     ASM_INPUT("S" (v), "b" (low), "c" (high)),
			     "memory");
	return o;
}
#define arch_atomic64_xchg arch_atomic64_xchg

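/* Atomically store @i to *@v. */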
static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set,
			     /* no output */,
			     ASM_INPUT("S" (v), "b" (low), "c" (high)),
			     "eax", "edx", "memory");
}

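/* Atomically load and return *@v. */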
static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
{
	s64 r;
	alternative_atomic64(read, "=&A" (r), "c" (v), "memory");
	return r;
}

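/*
 * Arithmetic returning the new value: the 64-bit argument and result
 * travel in %edx:%eax (the "A" constraint), while @v is passed in %ecx
 * and may be clobbered by the helper, hence the "+" modifiers.
 */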
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT("+A" (i), "+c" (v)),
			     /* no input */,
			     "memory");
	return i;
}
#define arch_atomic64_add_return arch_atomic64_add_return

static __always_inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT("+A" (i), "+c" (v)),
			     /* no input */,
			     "memory");
	return i;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(inc_return,
			     "=&A" (a),
			     "S" (v),
			     "memory", "ecx");
	return a;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return

static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(dec_return,
			     "=&A" (a),
			     "S" (v),
			     "memory", "ecx");
	return a;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return

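/*
 * Plain add/sub (and inc/dec below) have no dedicated cx8 helpers: on
 * CMPXCHG8B-capable CPUs they reuse the *_return variants and discard
 * the result, which is why __alternative_atomic64() takes two names.
 */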
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT("+A" (i), "+c" (v)),
			       /* no input */,
			       "memory");
}

static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT("+A" (i), "+c" (v)),
			       /* no input */,
			       "memory");
}

static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return,
			       /* no output */,
			       "S" (v),
			       "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc

static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return,
			       /* no output */,
			       "S" (v),
			       "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec

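/*
 * Add @a to *@v unless it currently equals @u; returns non-zero if the
 * addition was performed.
 */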
static __always_inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v),
			     "memory");
	return (int)a;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless

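/* Increment *@v unless it is zero; returns non-zero if it incremented. */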
static __always_inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero,
			     "=&a" (r),
			     "S" (v),
			     "ecx", "edx", "memory");
	return r;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero

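/*
 * Decrement *@v only if the result would remain non-negative; returns
 * the old value minus one whether or not the store was performed.
 */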
static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 r;
	alternative_atomic64(dec_if_positive,
			     "=&A" (r),
			     "S" (v),
			     "ecx", "memory");
	return r;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

#undef alternative_atomic64
#undef __alternative_atomic64

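/*
 * The remaining operations are generic try_cmpxchg() loops, primed with
 * the non-atomic read described at the top of this file.
 */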
static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read_nonatomic(v);

	do { } while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
}

static __always_inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read_nonatomic(v);

	do { } while (!arch_atomic64_try_cmpxchg(v, &val, val & i));

	return val;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read_nonatomic(v);

	do { } while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
}

static __always_inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read_nonatomic(v);

	do { } while (!arch_atomic64_try_cmpxchg(v, &val, val | i));

	return val;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static __always_inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read_nonatomic(v);

	do { } while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
}

static __always_inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read_nonatomic(v);

	do { } while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));

	return val;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read_nonatomic(v);

	do { } while (!arch_atomic64_try_cmpxchg(v, &val, val + i));

	return val;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

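/* fetch_sub is simply fetch_add of the negated argument. */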
#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))

#endif /* _ASM_X86_ATOMIC64_32_H */