Lines Matching defs:v (search-tool listing of the arch_atomic*() / arch_atomic64*() definitions whose parameter v is the atomic_t/atomic64_t being operated on; the number at the start of each entry is the line number in the source header, and only the matching lines, not the full function bodies, are shown)
18 static __always_inline int arch_atomic_read(const atomic_t *v)
20 return __atomic_read(&v->counter);
24 static __always_inline void arch_atomic_set(atomic_t *v, int i)
26 __atomic_set(&v->counter, i);
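arch_atomic_read() and arch_atomic_set() are plain accessors around v->counter; the __atomic_read()/__atomic_set() helpers themselves are outside this listing. A rough userspace analogue of the semantics only (a relaxed, unordered atomic load and store; this is an assumption about the helpers, not their implementation):

/* Illustrative analogue using compiler builtins, not the kernel helpers. */
typedef struct { int counter; } my_atomic_t;    /* stand-in for atomic_t */

static inline int my_atomic_read(const my_atomic_t *v)
{
        /* atomic, but unordered: just read the counter */
        return __atomic_load_n(&v->counter, __ATOMIC_RELAXED);
}

static inline void my_atomic_set(my_atomic_t *v, int i)
{
        /* atomic, but unordered: just write the counter */
        __atomic_store_n(&v->counter, i, __ATOMIC_RELAXED);
}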
30 static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
32 return __atomic_add_barrier(i, &v->counter) + i;
36 static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
38 return __atomic_add_barrier(i, &v->counter);
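arch_atomic_add_return() and arch_atomic_fetch_add() share one underlying helper: it returns the old value, and add_return simply adds i again to produce the new value. The same identity can be checked in userspace with compiler builtins (an analogue of the relationship only, not of __atomic_add_barrier()):

/* new value == old value + i: add_return can be built from fetch_add. */
#include <assert.h>

typedef struct { int counter; } my_atomic_t;

static inline int my_atomic_fetch_add(int i, my_atomic_t *v)
{
        return __atomic_fetch_add(&v->counter, i, __ATOMIC_SEQ_CST);
}

static inline int my_atomic_add_return(int i, my_atomic_t *v)
{
        /* same primitive, plus i: mirrors "__atomic_add_barrier(...) + i" */
        return my_atomic_fetch_add(i, v) + i;
}

int main(void)
{
        my_atomic_t v = { .counter = 5 };
        assert(my_atomic_add_return(3, &v) == 8);   /* returns the new value */
        assert(v.counter == 8);
        return 0;
}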
42 static __always_inline void arch_atomic_add(int i, atomic_t *v)
44 __atomic_add(i, &v->counter);
48 static __always_inline void arch_atomic_inc(atomic_t *v)
50 __atomic_add_const(1, &v->counter);
54 static __always_inline void arch_atomic_dec(atomic_t *v)
56 __atomic_add_const(-1, &v->counter);
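arch_atomic_add(), arch_atomic_inc() and arch_atomic_dec() return nothing and use the helpers without a _barrier suffix; inc/dec additionally go through an "add constant" helper with +1/-1, presumably so an add-immediate instruction form can be used. Following the usual kernel convention that void atomic ops imply no ordering, a userspace analogue is a relaxed read-modify-write whose result is discarded (the ordering is an assumption, not taken from the arch helpers):

typedef struct { int counter; } my_atomic_t;

static inline void my_atomic_add(int i, my_atomic_t *v)
{
        /* atomic add, result discarded, no ordering implied */
        (void)__atomic_fetch_add(&v->counter, i, __ATOMIC_RELAXED);
}

static inline void my_atomic_inc(my_atomic_t *v)
{
        my_atomic_add(1, v);    /* the listed code uses an add-constant helper here */
}

static inline void my_atomic_dec(my_atomic_t *v)
{
        my_atomic_add(-1, v);
}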
60 static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
62 return __atomic_add_and_test_barrier(-i, &v->counter);
66 static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
68 return __atomic_add_const_and_test_barrier(-1, &v->counter);
72 static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
74 return __atomic_add_const_and_test_barrier(1, &v->counter);
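The *_and_test() operations combine the modification with a zero test: they use the _barrier helper variants and, by the usual kernel contract, return true exactly when the new value is zero (dec_and_test() is the classic "last reference dropped" check). A minimal analogue with compiler builtins; the seq_cst ordering below is an assumption standing in for the _barrier suffix:

#include <stdbool.h>

typedef struct { int counter; } my_atomic_t;

static inline bool my_atomic_sub_and_test(int i, my_atomic_t *v)
{
        /* fully ordered RMW; true iff the counter reached zero */
        return __atomic_sub_fetch(&v->counter, i, __ATOMIC_SEQ_CST) == 0;
}

static inline bool my_atomic_dec_and_test(my_atomic_t *v)
{
        return my_atomic_sub_and_test(1, v);
}

static inline bool my_atomic_inc_and_test(my_atomic_t *v)
{
        return __atomic_add_fetch(&v->counter, 1, __ATOMIC_SEQ_CST) == 0;
}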
83 static __always_inline void arch_atomic_##op(int i, atomic_t *v) \
85 __atomic_##op(i, &v->counter); \
87 static __always_inline int arch_atomic_fetch_##op(int i, atomic_t *v) \
89 return __atomic_##op##_barrier(i, &v->counter); \
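The two ##op templates above are the body of an ATOMIC_OPS()-style macro: for each bitwise op it stamps out a void arch_atomic_<op>() on the plain helper and a fully ordered arch_atomic_fetch_<op>() on the _barrier helper. The instantiation lines are not part of this listing, so treating the ops as and/or/xor is an assumption; the pattern itself looks like this in a self-contained userspace form:

#include <stdio.h>

typedef struct { int counter; } my_atomic_t;

/* One template generates both the void op and the value-returning fetch_op. */
#define MY_ATOMIC_OPS(op)                                               \
static inline void my_atomic_##op(int i, my_atomic_t *v)                \
{                                                                       \
        (void)__atomic_fetch_##op(&v->counter, i, __ATOMIC_RELAXED);    \
}                                                                       \
static inline int my_atomic_fetch_##op(int i, my_atomic_t *v)           \
{                                                                       \
        return __atomic_fetch_##op(&v->counter, i, __ATOMIC_SEQ_CST);   \
}

MY_ATOMIC_OPS(and)
MY_ATOMIC_OPS(or)
MY_ATOMIC_OPS(xor)

int main(void)
{
        my_atomic_t v = { .counter = 0x0f };
        int old = my_atomic_fetch_or(0xf0, &v);
        printf("old=%#x new=%#x\n", old, v.counter);    /* old=0xf new=0xff */
        return 0;
}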
105 static __always_inline int arch_atomic_xchg(atomic_t *v, int new)
107 return arch_xchg(&v->counter, new);
111 static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
113 return arch_cmpxchg(&v->counter, old, new);
117 static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
119 return arch_try_cmpxchg(&v->counter, old, new);
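arch_atomic_xchg(), arch_atomic_cmpxchg() and arch_atomic_try_cmpxchg() simply forward to arch_xchg()/arch_cmpxchg()/arch_try_cmpxchg() on &v->counter. The try_cmpxchg() form is the one worth noting: it returns a bool and, on failure, refreshes *old with the value actually found, which keeps the usual retry loop tidy. A hedged userspace sketch of that contract and its typical use (compiler builtins, not the kernel primitives):

#include <stdbool.h>
#include <stdio.h>

typedef struct { int counter; } my_atomic_t;

static inline bool my_atomic_try_cmpxchg(my_atomic_t *v, int *old, int new_val)
{
        /* true on success; on failure *old is updated with the current value */
        return __atomic_compare_exchange_n(&v->counter, old, new_val, false,
                                           __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

/* Typical pattern: atomically add i, but only while the counter is non-zero. */
static bool add_unless_zero(my_atomic_t *v, int i)
{
        int old = __atomic_load_n(&v->counter, __ATOMIC_RELAXED);

        do {
                if (old == 0)
                        return false;
        } while (!my_atomic_try_cmpxchg(v, &old, old + i));

        return true;
}

int main(void)
{
        my_atomic_t v = { .counter = 2 };
        printf("%d %d\n", add_unless_zero(&v, 3), v.counter);  /* prints: 1 5 */
        return 0;
}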
125 static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
127 return __atomic64_read((long *)&v->counter);
131 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
133 __atomic64_set((long *)&v->counter, i);
137 static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
139 return __atomic64_add_barrier(i, (long *)&v->counter) + i;
143 static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
145 return __atomic64_add_barrier(i, (long *)&v->counter);
149 static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
151 __atomic64_add(i, (long *)&v->counter);
155 static __always_inline void arch_atomic64_inc(atomic64_t *v)
157 __atomic64_add_const(1, (long *)&v->counter);
161 static __always_inline void arch_atomic64_dec(atomic64_t *v)
163 __atomic64_add_const(-1, (long *)&v->counter);
167 static __always_inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
169 return __atomic64_add_and_test_barrier(-i, (long *)&v->counter);
173 static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v)
175 return __atomic64_add_const_and_test_barrier(-1, (long *)&v->counter);
179 static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v)
181 return __atomic64_add_const_and_test_barrier(1, (long *)&v->counter);
185 static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
187 return arch_xchg(&v->counter, new);
191 static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
193 return arch_cmpxchg(&v->counter, old, new);
197 static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
199 return arch_try_cmpxchg(&v->counter, old, new);
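The atomic64_t block mirrors the 32-bit one operation for operation; the only textual difference is the (long *)&v->counter casts, needed because atomic64_t.counter is an s64 while the __atomic64_*() helpers evidently take a long *. On the 64-bit targets this header serves, long and s64 have the same width, so the cast only changes the nominal type.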
204 static __always_inline void arch_atomic64_##op(s64 i, atomic64_t *v) \
206 __atomic64_##op(i, (long *)&v->counter); \
208 static __always_inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v) \
210 return __atomic64_##op##_barrier(i, (long *)&v->counter); \
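Likewise, the final ##op template is the 64-bit counterpart of the ATOMIC_OPS()-style macro above: a void arch_atomic64_<op>() on the plain helper plus a fully ordered arch_atomic64_fetch_<op>() on the _barrier helper. The fetch variant is declared to return long rather than s64; on a 64-bit target (where long is 64 bits wide) the two are interchangeable, so the generated functions still satisfy the s64-based atomic64 API.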