Lines Matching full:s64

12 	s64 __aligned(8) counter;
64 static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n) in arch_atomic64_cmpxchg()
70 static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n) in arch_atomic64_xchg()
72 s64 o; in arch_atomic64_xchg()
82 static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i) in arch_atomic64_set()
91 static __always_inline s64 arch_atomic64_read(const atomic64_t *v) in arch_atomic64_read()
93 s64 r; in arch_atomic64_read()
98 static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v) in arch_atomic64_add_return()
107 static __always_inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v) in arch_atomic64_sub_return()
116 static __always_inline s64 arch_atomic64_inc_return(atomic64_t *v) in arch_atomic64_inc_return()
118 s64 a; in arch_atomic64_inc_return()
125 static __always_inline s64 arch_atomic64_dec_return(atomic64_t *v) in arch_atomic64_dec_return()
127 s64 a; in arch_atomic64_dec_return()
134 static __always_inline s64 arch_atomic64_add(s64 i, atomic64_t *v) in arch_atomic64_add()
142 static __always_inline s64 arch_atomic64_sub(s64 i, atomic64_t *v) in arch_atomic64_sub()
164 static __always_inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) in arch_atomic64_add_unless()
184 static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v) in arch_atomic64_dec_if_positive()
186 s64 r; in arch_atomic64_dec_if_positive()
196 static __always_inline void arch_atomic64_and(s64 i, atomic64_t *v) in arch_atomic64_and()
198 s64 old, c = 0; in arch_atomic64_and()
204 static __always_inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v) in arch_atomic64_fetch_and()
206 s64 old, c = 0; in arch_atomic64_fetch_and()
215 static __always_inline void arch_atomic64_or(s64 i, atomic64_t *v) in arch_atomic64_or()
217 s64 old, c = 0; in arch_atomic64_or()
223 static __always_inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v) in arch_atomic64_fetch_or()
225 s64 old, c = 0; in arch_atomic64_fetch_or()
234 static __always_inline void arch_atomic64_xor(s64 i, atomic64_t *v) in arch_atomic64_xor()
236 s64 old, c = 0; in arch_atomic64_xor()
242 static __always_inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v) in arch_atomic64_fetch_xor()
244 s64 old, c = 0; in arch_atomic64_fetch_xor()
253 static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v) in arch_atomic64_fetch_add()
255 s64 old, c = 0; in arch_atomic64_fetch_add()
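
The matched declarations above appear to come from the 32-bit x86 atomic64 header (likely arch/x86/include/asm/atomic64_32.h): s64 is the 64-bit signed value type carried by atomic64_t, and the repeated "s64 old, c = 0;" locals in the bitwise and fetch variants hint at cmpxchg-loop implementations. As a minimal sketch of how this API is consumed, assuming a kernel build context and using only the generic atomic64_*() wrappers that route to the arch_atomic64_*() helpers listed here; the counter and the saturating-add helper are hypothetical names for illustration, not part of the header:

#include <linux/atomic.h>
#include <linux/types.h>

/* Hypothetical 64-bit event counter; stays atomic even on 32-bit x86,
 * where atomic64_t is emulated through the arch_atomic64_*() helpers. */
static atomic64_t event_count = ATOMIC64_INIT(0);

/* Plain increment: routed through the arch-level atomic64 add/inc helpers. */
static inline void event_record(void)
{
	atomic64_inc(&event_count);
}

/* Consistent 64-bit snapshot: a plain 64-bit load is not atomic on this
 * architecture, so this goes through arch_atomic64_read(). */
static inline s64 event_snapshot(void)
{
	return atomic64_read(&event_count);
}

/* Saturating add built as a cmpxchg loop, the same shape the
 * arch_atomic64_fetch_and/or/xor() helpers use internally
 * (note their "s64 old, c = 0;" locals in the listing). */
static inline s64 event_add_saturating(atomic64_t *v, s64 delta, s64 limit)
{
	s64 old, c = atomic64_read(v);

	for (;;) {
		s64 new = (c + delta > limit) ? limit : c + delta;

		old = atomic64_cmpxchg(v, c, new);
		if (old == c)
			return new;
		c = old;
	}
}

The loop retries whenever another CPU modified the counter between the read and the exchange, which is exactly why the fetch variants in the listing keep both an "old" and a "c" local: one holds the expected value, the other the value cmpxchg actually observed.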