Definitions matching "v" (x86-64 arch_atomic64_*() operations on atomic64_t):

static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __READ_ONCE((v)->counter);
}

static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__WRITE_ONCE(v->counter, i);
}

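/*
 * Illustrative aside, not part of the original header: on x86-64 an aligned
 * 64-bit load or store is already atomic, so read/set need no LOCK prefix,
 * only the volatile access that __READ_ONCE()/__WRITE_ONCE() provide.  A
 * rough userspace sketch of the same semantics (hypothetical names, using
 * the GCC/Clang __atomic builtins) might look like:
 */
static inline long long sketch_atomic64_read(const long long *p)
{
	return __atomic_load_n(p, __ATOMIC_RELAXED);
}

static inline void sketch_atomic64_set(long long *p, long long i)
{
	__atomic_store_n(p, i, __ATOMIC_RELAXED);
}
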
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

static __always_inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter) : "memory");
}

static __always_inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
}

static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}

static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter) : "memory");
}

static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
}

static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
}

static __always_inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
}

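/*
 * Illustrative aside, not part of the original header: the GEN_UNARY_RMWcc()/
 * GEN_BINARY_RMWcc() helpers test the CPU flag produced by the locked
 * instruction itself ("e" for zero, "s" for sign), so the update and the
 * result check form one atomic operation.  A hedged userspace sketch of the
 * same semantics (hypothetical names, GCC/Clang __atomic builtins):
 */
static inline _Bool sketch_atomic64_dec_and_test(long long *p)
{
	/* the new value and the "is it zero?" test come from one atomic RMW */
	return __atomic_sub_fetch(p, 1, __ATOMIC_SEQ_CST) == 0;
}

static inline _Bool sketch_atomic64_add_negative(long long i, long long *p)
{
	return __atomic_add_fetch(p, i, __ATOMIC_SEQ_CST) < 0;
}
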
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}

#define arch_atomic64_sub_return(i, v) arch_atomic64_add_return(-(i), v)

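/*
 * Illustrative aside, not part of the original header: xadd() returns the
 * value the counter held *before* the addition, which is why add_return()
 * adds i back in while fetch_add() below returns the old value unchanged.
 * A small self-contained demo of that difference (hypothetical name,
 * GCC/Clang __atomic builtins):
 */
#include <assert.h>
static inline void sketch_fetch_add_vs_add_return(void)
{
	long long counter = 5;
	long long old = __atomic_fetch_add(&counter, 3, __ATOMIC_SEQ_CST);

	assert(old == 5 && counter == 8);	/* fetch_add: old value        */
	assert(old + 3 == 8);			/* add_return: old + i == new  */
}
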
static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}

#define arch_atomic64_fetch_sub(i, v) arch_atomic64_fetch_add(-(i), v)

static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}

static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	return arch_try_cmpxchg(&v->counter, old, new);
}

static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}

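/*
 * Illustrative aside, not part of the original header: try_cmpxchg() returns
 * true when the swap happened and, on failure, writes the value it actually
 * found back into *old.  That is why the fetch_{and,or,xor} loops below never
 * re-read the counter explicitly.  A hedged userspace sketch of the same
 * contract (hypothetical name, GCC/Clang __atomic builtins):
 */
static inline _Bool sketch_try_cmpxchg64(long long *p, long long *old, long long new)
{
	/* on failure, *old is updated with the current value of *p */
	return __atomic_compare_exchange_n(p, old, new, 0,
					   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
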
static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i) : "memory");
}

static __always_inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}

static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i) : "memory");
}

static __always_inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}

static __always_inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i) : "memory");
}

static __always_inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}

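/*
 * Illustrative usage sketch, not part of the original header: the same
 * read + try_cmpxchg retry pattern used by the fetch_{and,or,xor} routines
 * above also builds conditional updates.  The helper name below is
 * hypothetical; it only uses the operations defined in this file.
 */
static __always_inline bool example_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 val = arch_atomic64_read(v);

	do {
		if (val == u)
			return false;	/* counter holds the forbidden value */
	} while (!arch_atomic64_try_cmpxchg(v, &val, val + a));

	return true;	/* the addition was performed */
}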