Lines Matching full:s64

2566 static __always_inline s64
2582 static __always_inline s64
2588 s64 ret; in raw_atomic64_read_acquire()
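
The hits up to this point (file lines 2566-2588) sit in raw_atomic64_read() and raw_atomic64_read_acquire(). Throughout this listing, the raw_atomic64_*() forms are the non-instrumented operations intended for noinstr code; ordinary kernel code would normally use the atomic64_*() wrappers. As a minimal sketch of why the acquire variant exists, assuming a kernel build context (the names here are invented for illustration):

    #include <linux/atomic.h>

    static atomic64_t seq = ATOMIC64_INIT(0);

    /* Plain read: atomic, but no ordering against surrounding accesses. */
    static s64 peek_seq(void)
    {
        return raw_atomic64_read(&seq);
    }

    /* Acquire read: later loads/stores cannot be reordered before it,
     * pairing with a release store on the writer side. */
    static s64 read_seq(void)
    {
        return raw_atomic64_read_acquire(&seq);
    }
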
2604 * @i: s64 value to assign
2613 raw_atomic64_set(atomic64_t *v, s64 i) in raw_atomic64_set()
2621 * @i: s64 value to assign
2630 raw_atomic64_set_release(atomic64_t *v, s64 i) in raw_atomic64_set_release()
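
Lines 2604-2630 cover raw_atomic64_set() and raw_atomic64_set_release(). A minimal publish/consume sketch pairing with the acquire read above (payload and function names are illustrative, not from the source):

    static atomic64_t ready = ATOMIC64_INIT(0);
    static u64 payload;

    static void publish(u64 val)
    {
        payload = val;
        /* Release store: a reader that observes 1 via an acquire read
         * is guaranteed to also observe the payload write above. */
        raw_atomic64_set_release(&ready, 1);
    }
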
2646 * @i: s64 value to add
2656 raw_atomic64_add(s64 i, atomic64_t *v) in raw_atomic64_add()
2663 * @i: s64 value to add
2672 static __always_inline s64
2673 raw_atomic64_add_return(s64 i, atomic64_t *v) in raw_atomic64_add_return()
2678 s64 ret; in raw_atomic64_add_return()
2690 * @i: s64 value to add
2699 static __always_inline s64
2700 raw_atomic64_add_return_acquire(s64 i, atomic64_t *v) in raw_atomic64_add_return_acquire()
2705 s64 ret = arch_atomic64_add_return_relaxed(i, v); in raw_atomic64_add_return_acquire()
2717 * @i: s64 value to add
2726 static __always_inline s64
2727 raw_atomic64_add_return_release(s64 i, atomic64_t *v) in raw_atomic64_add_return_release()
2743 * @i: s64 value to add
2752 static __always_inline s64
2753 raw_atomic64_add_return_relaxed(s64 i, atomic64_t *v) in raw_atomic64_add_return_relaxed()
2766 * @i: s64 value to add
2775 static __always_inline s64
2776 raw_atomic64_fetch_add(s64 i, atomic64_t *v) in raw_atomic64_fetch_add()
2781 s64 ret; in raw_atomic64_fetch_add()
2793 * @i: s64 value to add
2802 static __always_inline s64
2803 raw_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_add_acquire()
2808 s64 ret = arch_atomic64_fetch_add_relaxed(i, v); in raw_atomic64_fetch_add_acquire()
2820 * @i: s64 value to add
2829 static __always_inline s64
2830 raw_atomic64_fetch_add_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_add_release()
2846 * @i: s64 value to add
2855 static __always_inline s64
2856 raw_atomic64_fetch_add_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_add_relaxed()
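
The add block (file lines 2646-2856) spans raw_atomic64_add() plus the _return and fetch_ families in all four orderings. The practical difference is what comes back, sketched here with a hypothetical counter:

    static void add_demo(void)
    {
        static atomic64_t ctr = ATOMIC64_INIT(0);
        s64 ret;

        raw_atomic64_add(5, &ctr);                     /* no return, relaxed ordering */
        ret = raw_atomic64_add_return(5, &ctr);        /* new value, full ordering */
        ret = raw_atomic64_fetch_add(5, &ctr);         /* old value, full ordering */
        ret = raw_atomic64_fetch_add_relaxed(5, &ctr); /* old value, no ordering */
    }

Where an architecture lacks a native acquire form, the fallback is the relaxed op followed by an acquire fence, which is what the arch_atomic64_add_return_relaxed(i, v) hit at file line 2705 reflects.
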
2869 * @i: s64 value to subtract
2879 raw_atomic64_sub(s64 i, atomic64_t *v) in raw_atomic64_sub()
2886 * @i: s64 value to subtract
2895 static __always_inline s64
2896 raw_atomic64_sub_return(s64 i, atomic64_t *v) in raw_atomic64_sub_return()
2901 s64 ret; in raw_atomic64_sub_return()
2913 * @i: s64 value to subtract
2922 static __always_inline s64
2923 raw_atomic64_sub_return_acquire(s64 i, atomic64_t *v) in raw_atomic64_sub_return_acquire()
2928 s64 ret = arch_atomic64_sub_return_relaxed(i, v); in raw_atomic64_sub_return_acquire()
2940 * @i: s64 value to subtract
2949 static __always_inline s64
2950 raw_atomic64_sub_return_release(s64 i, atomic64_t *v) in raw_atomic64_sub_return_release()
2966 * @i: s64 value to subtract
2975 static __always_inline s64
2976 raw_atomic64_sub_return_relaxed(s64 i, atomic64_t *v) in raw_atomic64_sub_return_relaxed()
2989 * @i: s64 value to subtract
2998 static __always_inline s64
2999 raw_atomic64_fetch_sub(s64 i, atomic64_t *v) in raw_atomic64_fetch_sub()
3004 s64 ret; in raw_atomic64_fetch_sub()
3016 * @i: s64 value to subtract
3025 static __always_inline s64
3026 raw_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_sub_acquire()
3031 s64 ret = arch_atomic64_fetch_sub_relaxed(i, v); in raw_atomic64_fetch_sub_acquire()
3043 * @i: s64 value to subtract
3052 static __always_inline s64
3053 raw_atomic64_fetch_sub_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_sub_release()
3069 * @i: s64 value to subtract
3078 static __always_inline s64
3079 raw_atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_sub_relaxed()
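
The sub family (2869-3079) mirrors add exactly. For instance, a bounded-budget decrement, assuming a hypothetical budget counter:

    /* Charge @cost against a budget; returns the remaining amount. */
    static s64 charge(atomic64_t *budget, s64 cost)
    {
        return raw_atomic64_sub_return(cost, budget);
    }
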
3120 static __always_inline s64
3126 s64 ret; in raw_atomic64_inc_return()
3146 static __always_inline s64
3152 s64 ret = arch_atomic64_inc_return_relaxed(v); in raw_atomic64_inc_return_acquire()
3172 static __always_inline s64
3197 static __always_inline s64
3219 static __always_inline s64
3225 s64 ret; in raw_atomic64_fetch_inc()
3245 static __always_inline s64
3251 s64 ret = arch_atomic64_fetch_inc_relaxed(v); in raw_atomic64_fetch_inc_acquire()
3271 static __always_inline s64
3296 static __always_inline s64
3338 static __always_inline s64
3344 s64 ret; in raw_atomic64_dec_return()
3364 static __always_inline s64
3370 s64 ret = arch_atomic64_dec_return_relaxed(v); in raw_atomic64_dec_return_acquire()
3390 static __always_inline s64
3415 static __always_inline s64
3437 static __always_inline s64
3443 s64 ret; in raw_atomic64_fetch_dec()
3463 static __always_inline s64
3469 s64 ret = arch_atomic64_fetch_dec_relaxed(v); in raw_atomic64_fetch_dec_acquire()
3489 static __always_inline s64
3514 static __always_inline s64
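
Here the matches are return-type and local-variable lines inside the inc/dec helpers (3120-3514): inc_return/fetch_inc and dec_return/fetch_dec in the usual four orderings. fetch_inc returns the pre-increment value, which is what a simple ID allocator wants; a sketch with invented names:

    static atomic64_t next_id = ATOMIC64_INIT(0);

    /* IDs only need atomicity, not ordering, so relaxed suffices. */
    static u64 alloc_id(void)
    {
        return (u64)raw_atomic64_fetch_inc_relaxed(&next_id);
    }
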
3528 * @i: s64 value
3538 raw_atomic64_and(s64 i, atomic64_t *v) in raw_atomic64_and()
3545 * @i: s64 value
3554 static __always_inline s64
3555 raw_atomic64_fetch_and(s64 i, atomic64_t *v) in raw_atomic64_fetch_and()
3560 s64 ret; in raw_atomic64_fetch_and()
3572 * @i: s64 value
3581 static __always_inline s64
3582 raw_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_and_acquire()
3587 s64 ret = arch_atomic64_fetch_and_relaxed(i, v); in raw_atomic64_fetch_and_acquire()
3599 * @i: s64 value
3608 static __always_inline s64
3609 raw_atomic64_fetch_and_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_and_release()
3625 * @i: s64 value
3634 static __always_inline s64
3635 raw_atomic64_fetch_and_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_and_relaxed()
3648 * @i: s64 value
3658 raw_atomic64_andnot(s64 i, atomic64_t *v) in raw_atomic64_andnot()
3669 * @i: s64 value
3678 static __always_inline s64
3679 raw_atomic64_fetch_andnot(s64 i, atomic64_t *v) in raw_atomic64_fetch_andnot()
3684 s64 ret; in raw_atomic64_fetch_andnot()
3696 * @i: s64 value
3705 static __always_inline s64
3706 raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_andnot_acquire()
3711 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); in raw_atomic64_fetch_andnot_acquire()
3723 * @i: s64 value
3732 static __always_inline s64
3733 raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_andnot_release()
3749 * @i: s64 value
3758 static __always_inline s64
3759 raw_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_andnot_relaxed()
3772 * @i: s64 value
3782 raw_atomic64_or(s64 i, atomic64_t *v) in raw_atomic64_or()
3789 * @i: s64 value
3798 static __always_inline s64
3799 raw_atomic64_fetch_or(s64 i, atomic64_t *v) in raw_atomic64_fetch_or()
3804 s64 ret; in raw_atomic64_fetch_or()
3816 * @i: s64 value
3825 static __always_inline s64
3826 raw_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_or_acquire()
3831 s64 ret = arch_atomic64_fetch_or_relaxed(i, v); in raw_atomic64_fetch_or_acquire()
3843 * @i: s64 value
3852 static __always_inline s64
3853 raw_atomic64_fetch_or_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_or_release()
3869 * @i: s64 value
3878 static __always_inline s64
3879 raw_atomic64_fetch_or_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_or_relaxed()
3892 * @i: s64 value
3902 raw_atomic64_xor(s64 i, atomic64_t *v) in raw_atomic64_xor()
3909 * @i: s64 value
3918 static __always_inline s64
3919 raw_atomic64_fetch_xor(s64 i, atomic64_t *v) in raw_atomic64_fetch_xor()
3924 s64 ret; in raw_atomic64_fetch_xor()
3936 * @i: s64 value
3945 static __always_inline s64
3946 raw_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_xor_acquire()
3951 s64 ret = arch_atomic64_fetch_xor_relaxed(i, v); in raw_atomic64_fetch_xor_acquire()
3963 * @i: s64 value
3972 static __always_inline s64
3973 raw_atomic64_fetch_xor_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_xor_release()
3989 * @i: s64 value
3998 static __always_inline s64
3999 raw_atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_xor_relaxed()
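
The bitwise block (3528-3999) covers and, andnot, or, and xor, each with value-returning fetch_ forms. The old value from fetch_or tells you whether a flag bit was already set, giving a tiny test-and-set; a sketch with an invented flag mask (note the void bitwise ops are relaxed-only, so the release side must use a fetch_ variant):

    #include <linux/bits.h>

    #define FLAG_BUSY BIT(0)

    static atomic64_t flags = ATOMIC64_INIT(0);

    static bool try_claim(void)
    {
        /* Old mask had the bit clear => we are the one who set it. */
        return !(raw_atomic64_fetch_or(FLAG_BUSY, &flags) & FLAG_BUSY);
    }

    static void unclaim(void)
    {
        /* andnot clears the bits in @i rather than and-ing with them. */
        raw_atomic64_fetch_andnot_release(FLAG_BUSY, &flags);
    }
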
4013 * @new: s64 value to assign
4021 static __always_inline s64
4022 raw_atomic64_xchg(atomic64_t *v, s64 new) in raw_atomic64_xchg()
4027 s64 ret; in raw_atomic64_xchg()
4040 * @new: s64 value to assign
4048 static __always_inline s64
4049 raw_atomic64_xchg_acquire(atomic64_t *v, s64 new) in raw_atomic64_xchg_acquire()
4054 s64 ret = arch_atomic64_xchg_relaxed(v, new); in raw_atomic64_xchg_acquire()
4067 * @new: s64 value to assign
4075 static __always_inline s64
4076 raw_atomic64_xchg_release(atomic64_t *v, s64 new) in raw_atomic64_xchg_release()
4093 * @new: s64 value to assign
4101 static __always_inline s64
4102 raw_atomic64_xchg_relaxed(atomic64_t *v, s64 new) in raw_atomic64_xchg_relaxed()
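
xchg (4013-4102) unconditionally swaps in a new value and hands back the old one, e.g. for draining a statistics counter (hypothetical helper):

    /* Read-and-reset in one atomic step; no window where updates are lost. */
    static s64 drain_stat(atomic64_t *stat)
    {
        return raw_atomic64_xchg(stat, 0);
    }
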
4116 * @old: s64 value to compare with
4117 * @new: s64 value to assign
4125 static __always_inline s64
4126 raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) in raw_atomic64_cmpxchg()
4131 s64 ret; in raw_atomic64_cmpxchg()
4144 * @old: s64 value to compare with
4145 * @new: s64 value to assign
4153 static __always_inline s64
4154 raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) in raw_atomic64_cmpxchg_acquire()
4159 s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); in raw_atomic64_cmpxchg_acquire()
4172 * @old: s64 value to compare with
4173 * @new: s64 value to assign
4181 static __always_inline s64
4182 raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) in raw_atomic64_cmpxchg_release()
4199 * @old: s64 value to compare with
4200 * @new: s64 value to assign
4208 static __always_inline s64
4209 raw_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new) in raw_atomic64_cmpxchg_relaxed()
4223 * @old: pointer to s64 value to compare with
4224 * @new: s64 value to assign
4234 raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in raw_atomic64_try_cmpxchg()
4245 s64 r, o = *old; in raw_atomic64_try_cmpxchg()
4256 * @old: pointer to s64 value to compare with
4257 * @new: s64 value to assign
4267 raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) in raw_atomic64_try_cmpxchg_acquire()
4278 s64 r, o = *old; in raw_atomic64_try_cmpxchg_acquire()
4289 * @old: pointer to s64 value to compare with
4290 * @new: s64 value to assign
4300 raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) in raw_atomic64_try_cmpxchg_release()
4310 s64 r, o = *old; in raw_atomic64_try_cmpxchg_release()
4321 * @old: pointer to s64 value to compare with
4322 * @new: s64 value to assign
4332 raw_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) in raw_atomic64_try_cmpxchg_relaxed()
4339 s64 r, o = *old; in raw_atomic64_try_cmpxchg_relaxed()
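
cmpxchg (4116-4209) returns the old value, so callers compare it themselves; try_cmpxchg (4223-4339) instead returns a bool and, on failure, writes the current value back through @old, which is what the "s64 r, o = *old;" hits above implement. That makes retry loops cheaper because no re-read is needed, as in this saturating-add sketch (invented helper, assumes @i is positive):

    #include <linux/limits.h>

    static void add_saturating(atomic64_t *v, s64 i)
    {
        s64 new, old = raw_atomic64_read(v);

        do {
            /* Clamp instead of wrapping past S64_MAX. */
            new = (old > S64_MAX - i) ? S64_MAX : old + i;
        } while (!raw_atomic64_try_cmpxchg(v, &old, new));
    }
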
4349 * @i: s64 value to subtract
4359 raw_atomic64_sub_and_test(s64 i, atomic64_t *v) in raw_atomic64_sub_and_test()
4410 * @i: s64 value to add
4420 raw_atomic64_add_negative(s64 i, atomic64_t *v) in raw_atomic64_add_negative()
4437 * @i: s64 value to add
4447 raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v) in raw_atomic64_add_negative_acquire()
4464 * @i: s64 value to add
4474 raw_atomic64_add_negative_release(s64 i, atomic64_t *v) in raw_atomic64_add_negative_release()
4490 * @i: s64 value to add
4500 raw_atomic64_add_negative_relaxed(s64 i, atomic64_t *v) in raw_atomic64_add_negative_relaxed()
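
sub_and_test (4349-4359) folds the subtraction and the zero test into one fully ordered operation, the classic reference-count drop; add_negative (4410-4500) is the analogous sign test. A sketch with a hypothetical object type:

    struct obj {
        atomic64_t refs;
        /* ... */
    };

    static void free_obj(struct obj *o); /* hypothetical */

    static void put_obj(struct obj *o)
    {
        /* Full ordering ensures prior accesses by other CPUs happen
         * before the free once the count is observed hitting zero. */
        if (raw_atomic64_sub_and_test(1, &o->refs))
            free_obj(o);
    }
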
4514 * @a: s64 value to add
4515 * @u: s64 value to compare with
4523 static __always_inline s64
4524 raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in raw_atomic64_fetch_add_unless()
4529 s64 c = raw_atomic64_read(v); in raw_atomic64_fetch_add_unless()
4543 * @a: s64 value to add
4544 * @u: s64 value to compare with
4553 raw_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) in raw_atomic64_add_unless()
4598 s64 c = raw_atomic64_read(v); in raw_atomic64_inc_unless_negative()
4625 s64 c = raw_atomic64_read(v); in raw_atomic64_dec_unless_positive()
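
The conditional ops (4514-4625) are cmpxchg loops under the hood, seeded by the raw_atomic64_read() calls visible in the matched lines. add_unless adds only while the current value differs from @u, which gives the usual tryget idiom:

    /* Take a reference only while the object is still live (count != 0). */
    static bool tryget(atomic64_t *refs)
    {
        return raw_atomic64_add_unless(refs, 1, 0);
    }
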
4646 static __always_inline s64
4652 s64 dec, c = raw_atomic64_read(v); in raw_atomic64_dec_if_positive()
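
Finally, dec_if_positive (4646-4652) decrements only if the result stays non-negative and returns the old value minus one either way, so a negative return means nothing was taken:

    /* Semaphore-style token grab on an atomic64_t counter. */
    static bool take_token(atomic64_t *tokens)
    {
        return raw_atomic64_dec_if_positive(tokens) >= 0;
    }
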