#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

typedef struct { int counter; } atomic_t;
#define ATOMIC_INIT(i)          { (i) }

#define atomic_read(v)          ((v)->counter)
#define atomic_set(v, i)        (((v)->counter) = i)

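/*
 * Illustrative only; "refcnt" is a hypothetical name, not part of this
 * header:
 *
 *	static atomic_t refcnt = ATOMIC_INIT(1);
 *
 *	atomic_set(&refcnt, 2);
 *	if (atomic_read(&refcnt) == 2)
 *		...
 */
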
#include <asm/system.h>
#include <linux/kernel.h>

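/*
 * The H8/300 is uniprocessor, so these operations only need to be atomic
 * with respect to interrupts: each read-modify-write below simply runs
 * with interrupts masked via local_irq_save()/local_irq_restore().
 */
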
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
        unsigned long flags;
        int ret;

        local_irq_save(flags);
        ret = v->counter += i;
        local_irq_restore(flags);
        return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)       (atomic_add_return((a), (v)) < 0)

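/*
 * Illustrative only; "count" and "wakeup" are hypothetical:
 *
 *	if (atomic_add_negative(-1, &count))
 *		wakeup();
 */
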
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
        unsigned long flags;
        int ret;

        local_irq_save(flags);
        ret = v->counter -= i;
        local_irq_restore(flags);
        return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)

static __inline__ int atomic_inc_return(atomic_t *v)
{
        unsigned long flags;
        int ret;

        local_irq_save(flags);
        v->counter++;
        ret = v->counter;
        local_irq_restore(flags);
        return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ int atomic_dec_return(atomic_t *v)
{
        unsigned long flags;
        int ret;

        local_irq_save(flags);
        --v->counter;
        ret = v->counter;
        local_irq_restore(flags);
        return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

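/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */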
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
        unsigned long flags;
        int ret;

        local_irq_save(flags);
        --v->counter;
        ret = v->counter;
        local_irq_restore(flags);
        return ret == 0;
}

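/**
 * atomic_cmpxchg - compare and exchange
 * @v: pointer of type atomic_t
 * @old: expected value
 * @new: value to store if @v still holds @old
 *
 * Returns the value @v held before the operation; the store happens
 * only when that value equals @old.
 */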
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int ret;
        unsigned long flags;

        local_irq_save(flags);
        ret = v->counter;
        if (likely(ret == old))
                v->counter = new;
        local_irq_restore(flags);
        return ret;
}

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

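/**
 * atomic_add_unless - add to a counter unless it holds a given value
 * @v: pointer of type atomic_t
 * @a: amount to add
 * @u: value that blocks the addition
 *
 * Atomically adds @a to @v as long as @v was not @u.
 * Returns non-zero if the addition was performed.
 */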
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        int ret;
        unsigned long flags;

        local_irq_save(flags);
        ret = v->counter;
        if (ret != u)
                v->counter += a;
        local_irq_restore(flags);
        return ret != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

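/*
 * The mask operations below do their read-modify-write in inline
 * assembly: the condition code register is saved into r1l, interrupts
 * are blocked by setting the I bit ("orc #0x80,ccr"), the AND/OR is
 * performed through er0, and the saved CCR is restored afterwards.
 */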
static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
        __asm__ __volatile__("stc ccr,r1l\n\t"
                             "orc #0x80,ccr\n\t"
                             "mov.l %0,er0\n\t"
                             "and.l %1,er0\n\t"
                             "mov.l er0,%0\n\t"
                             "ldc r1l,ccr"
                             : "=m" (*v) : "g" (~(mask)) : "er0", "er1");
}

static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
        __asm__ __volatile__("stc ccr,r1l\n\t"
                             "orc #0x80,ccr\n\t"
                             "mov.l %0,er0\n\t"
                             "or.l %1,er0\n\t"
                             "mov.l er0,%0\n\t"
                             "ldc r1l,ccr"
                             : "=m" (*v) : "g" (mask) : "er0", "er1");
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()     barrier()
#define smp_mb__after_atomic_dec()      barrier()
#define smp_mb__before_atomic_inc()     barrier()
#define smp_mb__after_atomic_inc()      barrier()

#include <asm-generic/atomic.h>
#endif /* __ARCH_H8300_ATOMIC__ */