Lines matching +full:counter +full:- +full:0 (arch/arm/include/asm/atomic.h)
1 /* SPDX-License-Identifier: GPL-2.0-only */
25 #define arch_atomic_read(v) READ_ONCE((v)->counter)
26 #define arch_atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
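These two macros are the whole of atomic read and set on ARM: READ_ONCE()/WRITE_ONCE() stop the compiler from tearing or caching the access, and the file's own comment notes a plain store is safe for atomic_set() because every exception return clears the ldrex/strex exclusive monitor. atomic_t itself is defined in include/linux/types.h, not in the matched file; for reference it is a one-field wrapper:

typedef struct {
        int counter;
} atomic_t;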
42 prefetchw(&v->counter); \
44 "1: ldrex %0, [%3]\n" \
45 " " #asm_op " %0, %0, %4\n" \
46 " strex %1, %0, [%3]\n" \
47 " teq %1, #0\n" \
49 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
50 : "r" (&v->counter), "Ir" (i) \
60 prefetchw(&v->counter); \
63 "1: ldrex %0, [%3]\n" \
64 " " #asm_op " %0, %0, %4\n" \
65 " strex %1, %0, [%3]\n" \
66 " teq %1, #0\n" \
68 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
69 : "r" (&v->counter), "Ir" (i) \
81 prefetchw(&v->counter); \
84 "1: ldrex %0, [%4]\n" \
85 " " #asm_op " %1, %0, %5\n" \
87 " teq %2, #0\n" \
89 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
90 : "r" (&v->counter), "Ir" (i) \
111 prefetchw(&ptr->counter); in arch_atomic_cmpxchg_relaxed()
116 "mov %0, #0\n" in arch_atomic_cmpxchg_relaxed()
118 "strexeq %0, %5, [%3]\n" in arch_atomic_cmpxchg_relaxed()
119 : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) in arch_atomic_cmpxchg_relaxed()
120 : "r" (&ptr->counter), "Ir" (old), "r" (new) in arch_atomic_cmpxchg_relaxed()
134 prefetchw(&v->counter); in arch_atomic_fetch_add_unless()
137 "1: ldrex %0, [%4]\n" in arch_atomic_fetch_add_unless()
138 " teq %0, %5\n" in arch_atomic_fetch_add_unless()
140 " add %1, %0, %6\n" in arch_atomic_fetch_add_unless()
142 " teq %2, #0\n" in arch_atomic_fetch_add_unless()
145 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic_fetch_add_unless()
146 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic_fetch_add_unless()
159 #error SMP not supported on pre-ARMv6 CPUs
168 v->counter c_op i; \
179 v->counter c_op i; \
180 val = v->counter; \
193 val = v->counter; \
194 v->counter c_op i; \
216 ret = v->counter; in arch_atomic_cmpxchg()
218 v->counter = new; in arch_atomic_cmpxchg()
233 ATOMIC_OPS(sub, -=, sub)
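File lines 159-218 are the pre-ARMv6 fallback path: the #error rules out SMP, so atomicity only has to hold against local interrupts, and each operation becomes a plain C read-modify-write bracketed by raw_local_irq_save()/restore(). Note the ordering the matches show: op_return updates then reads (lines 179-180), while fetch_op reads then updates (lines 193-194). A sketch reconstructed around the matched lines (the irq helpers and the likely() test are from the upstream header, not the matches):

#define ATOMIC_OP(op, c_op, asm_op) \
static inline void arch_atomic_##op(int i, atomic_t *v) \
{ \
        unsigned long flags; \
 \
        raw_local_irq_save(flags); \
        v->counter c_op i;              /* line 168 */ \
        raw_local_irq_restore(flags); \
}

#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
static inline int arch_atomic_##op##_return(int i, atomic_t *v) \
{ \
        unsigned long flags; \
        int val; \
 \
        raw_local_irq_save(flags); \
        v->counter c_op i;              /* line 179: update first */ \
        val = v->counter;               /* line 180: then read back */ \
        raw_local_irq_restore(flags); \
 \
        return val; \
}

#define ATOMIC_FETCH_OP(op, c_op, asm_op) \
static inline int arch_atomic_fetch_##op(int i, atomic_t *v) \
{ \
        unsigned long flags; \
        int val; \
 \
        raw_local_irq_save(flags); \
        val = v->counter;               /* line 193: old value ... */ \
        v->counter c_op i;              /* line 194: ... then update */ \
        raw_local_irq_restore(flags); \
 \
        return val; \
}

static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int ret;
        unsigned long flags;

        raw_local_irq_save(flags);
        ret = v->counter;               /* line 216 */
        if (likely(ret == old))
                v->counter = new;       /* line 218 */
        raw_local_irq_restore(flags);

        return ret;
}

Both paths share one instantiation point: ATOMIC_OPS() stamps out all three shapes per operation, which is what line 233 invokes for sub:

#define ATOMIC_OPS(op, c_op, asm_op) \
        ATOMIC_OP(op, c_op, asm_op) \
        ATOMIC_OP_RETURN(op, c_op, asm_op) \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, add)
ATOMIC_OPS(sub, -=, sub)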
254 s64 counter; member of atomic64_t
265 " ldrd %0, %H0, [%1]" in arch_atomic64_read()
267 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
277 : "=Qo" (v->counter) in arch_atomic64_set()
278 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
287 " ldrexd %0, %H0, [%1]" in arch_atomic64_read()
289 : "r" (&v->counter), "Qo" (v->counter) in arch_atomic64_read()
299 prefetchw(&v->counter); in arch_atomic64_set()
301 "1: ldrexd %0, %H0, [%2]\n" in arch_atomic64_set()
302 " strexd %0, %3, %H3, [%2]\n" in arch_atomic64_set()
303 " teq %0, #0\n" in arch_atomic64_set()
305 : "=&r" (tmp), "=Qo" (v->counter) in arch_atomic64_set()
306 : "r" (&v->counter), "r" (i) in arch_atomic64_set()
317 prefetchw(&v->counter); \
319 "1: ldrexd %0, %H0, [%3]\n" \
322 " strexd %1, %0, %H0, [%3]\n" \
323 " teq %1, #0\n" \
325 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
326 : "r" (&v->counter), "r" (i) \
337 prefetchw(&v->counter); \
340 "1: ldrexd %0, %H0, [%3]\n" \
343 " strexd %1, %0, %H0, [%3]\n" \
344 " teq %1, #0\n" \
346 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
347 : "r" (&v->counter), "r" (i) \
360 prefetchw(&v->counter); \
363 "1: ldrexd %0, %H0, [%4]\n" \
367 " teq %2, #0\n" \
369 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
370 : "r" (&v->counter), "r" (i) \
416 prefetchw(&ptr->counter); in arch_atomic64_cmpxchg_relaxed()
421 "mov %0, #0\n" in arch_atomic64_cmpxchg_relaxed()
424 "strexdeq %0, %5, %H5, [%3]" in arch_atomic64_cmpxchg_relaxed()
425 : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) in arch_atomic64_cmpxchg_relaxed()
426 : "r" (&ptr->counter), "r" (old), "r" (new) in arch_atomic64_cmpxchg_relaxed()
439 prefetchw(&ptr->counter); in arch_atomic64_xchg_relaxed()
442 "1: ldrexd %0, %H0, [%3]\n" in arch_atomic64_xchg_relaxed()
444 " teq %1, #0\n" in arch_atomic64_xchg_relaxed()
446 : "=&r" (result), "=&r" (tmp), "+Qo" (ptr->counter) in arch_atomic64_xchg_relaxed()
447 : "r" (&ptr->counter), "r" (new) in arch_atomic64_xchg_relaxed()
460 prefetchw(&v->counter); in arch_atomic64_dec_if_positive()
463 "1: ldrexd %0, %H0, [%3]\n" in arch_atomic64_dec_if_positive()
465 " sbc %R0, %R0, #0\n" in arch_atomic64_dec_if_positive()
466 " teq %R0, #0\n" in arch_atomic64_dec_if_positive()
468 " strexd %1, %0, %H0, [%3]\n" in arch_atomic64_dec_if_positive()
469 " teq %1, #0\n" in arch_atomic64_dec_if_positive()
472 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_dec_if_positive()
473 : "r" (&v->counter) in arch_atomic64_dec_if_positive()
488 prefetchw(&v->counter); in arch_atomic64_fetch_add_unless()
491 "1: ldrexd %0, %H0, [%4]\n" in arch_atomic64_fetch_add_unless()
492 " teq %0, %5\n" in arch_atomic64_fetch_add_unless()
498 " teq %2, #0\n" in arch_atomic64_fetch_add_unless()
501 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter) in arch_atomic64_fetch_add_unless()
502 : "r" (&v->counter), "r" (u), "r" (a) in arch_atomic64_fetch_add_unless()