#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic_set(v, i)	(((v)->counter) = i)

#include <asm/system.h>
#include <linux/kernel.h>

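/*
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the result.  Interrupts are
 * disabled around the update; on this UP-only architecture that is
 * sufficient to make the read-modify-write atomic.
 */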
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	ret = v->counter += i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

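/*
 * atomic_sub_return - subtract integer and return
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns the result.
 */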
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	ret = v->counter -= i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

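/*
 * atomic_inc_return - increment and return
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the new value.
 */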
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	v->counter++;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

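/*
 * atomic_dec_return - decrement and return
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the new value.
 */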
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

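/*
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */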
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned long flags;
	int ret;
	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret == 0;
}

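/*
 * atomic_cmpxchg - compare and exchange
 * @v: pointer of type atomic_t
 * @old: expected old value
 * @new: new value to store
 *
 * Atomically stores @new in @v if its current value equals @old.
 * Returns the value of @v before the operation; the exchange
 * succeeded if and only if the return value equals @old.
 */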
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);
	return ret;
}
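
/*
 * Minimal usage sketch for atomic_cmpxchg() (atomic_double() is a
 * hypothetical helper, not part of this header): the classic
 * compare-and-swap retry loop for a read-modify-write that has no
 * dedicated atomic operation.
 *
 *	static inline void atomic_double(atomic_t *v)
 *	{
 *		int old;
 *		do {
 *			old = atomic_read(v);
 *		} while (atomic_cmpxchg(v, old, old * 2) != old);
 *	}
 */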

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

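/*
 * __atomic_add_unless - add unless the value is @u
 * @v: pointer of type atomic_t
 * @a: amount to add to v...
 * @u: ...unless v is equal to u
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v, so callers can compare the result
 * against @u to tell whether the add happened.
 */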
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);
	return ret;
}

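/*
 * atomic_clear_mask - clear bits in a word
 * @mask: bits to clear
 * @v: pointer to the word
 *
 * Atomically clears the bits in @mask from *@v.  The asm saves the
 * condition code register in r1l, masks interrupts with
 * "orc #0x80,ccr", does the read-modify-write, then restores CCR.
 */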
static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"
	                     "orc #0x80,ccr\n\t"
	                     "mov.l %0,er0\n\t"
	                     "and.l %1,er0\n\t"
	                     "mov.l er0,%0\n\t"
	                     "ldc r1l,ccr"
	                     : "=m" (*v) : "g" (~(mask)) : "er0", "er1");
}

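/*
 * atomic_set_mask - set bits in a word
 * @mask: bits to set
 * @v: pointer to the word
 *
 * Atomically ORs @mask into *@v, using the same CCR save/mask/restore
 * sequence as atomic_clear_mask() above.
 */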
static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"
	                     "orc #0x80,ccr\n\t"
	                     "mov.l %0,er0\n\t"
	                     "or.l %1,er0\n\t"
	                     "mov.l er0,%0\n\t"
	                     "ldc r1l,ccr"
	                     : "=m" (*v) : "g" (mask) : "er0", "er1");
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* __ARCH_H8300_ATOMIC__ */