author     Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>  2007-05-08 00:34:19 -0700
committer  Linus Torvalds <torvalds@woody.linux-foundation.org>  2007-05-08 11:15:19 -0700
commit     bb2382c3e4395ab595278cc7b92ac3f2eaf23f66 (patch)
tree       9a162bcdff0bc8a19b1ca26ac954a27b0178fb2c /include/asm-generic/atomic.h
parent     e96e69942312314c061eb2fdd947a7a1211d62f8 (diff)
atomic.h: complete atomic_long operations in asm-generic
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'include/asm-generic/atomic.h')
-rw-r--r--  include/asm-generic/atomic.h  133
1 file changed, 133 insertions(+), 0 deletions(-)
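The hunks below fill in the long-width counterparts of the existing atomic_t/atomic64_t helpers, so callers can keep a native-word counter without checking BITS_PER_LONG themselves. A minimal usage sketch follows; it is a hypothetical caller, not part of this patch, and it assumes the pre-existing atomic_long_read/atomic_long_set helpers already in this header.

/* Hypothetical example only: a native-word reference counter built on
 * the atomic_long helpers this commit adds. Not part of the patch. */
#include <asm/atomic.h>

static atomic_long_t nr_users;

static inline long grab_user(void)
{
	/* atomic_long_inc_return() yields the post-increment value */
	return atomic_long_inc_return(&nr_users);
}

static inline int drop_user(void)
{
	/* true only for the caller that drops the last reference */
	return atomic_long_dec_and_test(&nr_users);
}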
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index b7e4a0467cb1..5ae6dce1cba2 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -9,6 +9,7 @@
*/
#include <asm/types.h>
+#include <asm/system.h>
/*
* Support for atomic_long_t
@@ -66,6 +67,72 @@ static inline void atomic_long_sub(long i, atomic_long_t *l)
atomic64_sub(i, v);
}
+static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
+{
+ atomic64_t *v = (atomic64_t *)l;
+
+ return atomic64_sub_and_test(i, v);
+}
+
+static inline int atomic_long_dec_and_test(atomic_long_t *l)
+{
+ atomic64_t *v = (atomic64_t *)l;
+
+ return atomic64_dec_and_test(v);
+}
+
+static inline int atomic_long_inc_and_test(atomic_long_t *l)
+{
+ atomic64_t *v = (atomic64_t *)l;
+
+ return atomic64_inc_and_test(v);
+}
+
+static inline int atomic_long_add_negative(long i, atomic_long_t *l)
+{
+ atomic64_t *v = (atomic64_t *)l;
+
+ return atomic64_add_negative(i, v);
+}
+
+static inline long atomic_long_add_return(long i, atomic_long_t *l)
+{
+ atomic64_t *v = (atomic64_t *)l;
+
+ return (long)atomic64_add_return(i, v);
+}
+
+static inline long atomic_long_sub_return(long i, atomic_long_t *l)
+{
+ atomic64_t *v = (atomic64_t *)l;
+
+ return (long)atomic64_sub_return(i, v);
+}
+
+static inline long atomic_long_inc_return(atomic_long_t *l)
+{
+ atomic64_t *v = (atomic64_t *)l;
+
+ return (long)atomic64_inc_return(v);
+}
+
+static inline long atomic_long_dec_return(atomic_long_t *l)
+{
+ atomic64_t *v = (atomic64_t *)l;
+
+ return (long)atomic64_dec_return(v);
+}
+
+#define atomic_long_add_unless(l, a, u) \
+ atomic64_add_unless((atomic64_t *)(l), (a), (u))
+
+#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))
+
+#define atomic_long_cmpxchg(l, old, new) \
+ (atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
+#define atomic_long_xchg(l, new) \
+ (atomic64_xchg((atomic64_t *)(l), (new)))
+
#else /* BITS_PER_LONG == 64 */
typedef atomic_t atomic_long_t;
@@ -113,6 +180,72 @@ static inline void atomic_long_sub(long i, atomic_long_t *l)
atomic_sub(i, v);
}
+static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
+{
+ atomic_t *v = (atomic_t *)l;
+
+ return atomic_sub_and_test(i, v);
+}
+
+static inline int atomic_long_dec_and_test(atomic_long_t *l)
+{
+ atomic_t *v = (atomic_t *)l;
+
+ return atomic_dec_and_test(v);
+}
+
+static inline int atomic_long_inc_and_test(atomic_long_t *l)
+{
+ atomic_t *v = (atomic_t *)l;
+
+ return atomic_inc_and_test(v);
+}
+
+static inline int atomic_long_add_negative(long i, atomic_long_t *l)
+{
+ atomic_t *v = (atomic_t *)l;
+
+ return atomic_add_negative(i, v);
+}
+
+static inline long atomic_long_add_return(long i, atomic_long_t *l)
+{
+ atomic_t *v = (atomic_t *)l;
+
+ return (long)atomic_add_return(i, v);
+}
+
+static inline long atomic_long_sub_return(long i, atomic_long_t *l)
+{
+ atomic_t *v = (atomic_t *)l;
+
+ return (long)atomic_sub_return(i, v);
+}
+
+static inline long atomic_long_inc_return(atomic_long_t *l)
+{
+ atomic_t *v = (atomic_t *)l;
+
+ return (long)atomic_inc_return(v);
+}
+
+static inline long atomic_long_dec_return(atomic_long_t *l)
+{
+ atomic_t *v = (atomic_t *)l;
+
+ return (long)atomic_dec_return(v);
+}
+
+#define atomic_long_add_unless(l, a, u) \
+ atomic_add_unless((atomic_t *)(l), (a), (u))
+
+#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))
+
+#define atomic_long_cmpxchg(l, old, new) \
+ (atomic_cmpxchg((atomic_t *)(l), (old), (new)))
+#define atomic_long_xchg(l, new) \
+ (atomic_xchg((atomic_t *)(l), (new)))
+
#endif /* BITS_PER_LONG == 64 */
#endif /* _ASM_GENERIC_ATOMIC_H */
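On 64-bit builds atomic_long_t is a typedef of atomic64_t, and of atomic_t otherwise, so the casts in these wrappers only relabel the same object at the width the branch was compiled for. A sketch of the compare-and-swap style helpers added above; the caller, names and values are hypothetical and not part of this patch.

/* Hypothetical example of atomic_long_cmpxchg(): claim a single slot
 * without a lock. SLOT_FREE/SLOT_BUSY are illustrative values. */
#include <asm/atomic.h>

#define SLOT_FREE	0L
#define SLOT_BUSY	1L

static atomic_long_t slot_state;	/* assumed initialised to SLOT_FREE */

static inline int try_claim_slot(void)
{
	/* succeeds only if the slot was still SLOT_FREE when we swapped */
	return atomic_long_cmpxchg(&slot_state, SLOT_FREE, SLOT_BUSY) == SLOT_FREE;
}

static inline void release_slot(void)
{
	atomic_long_set(&slot_state, SLOT_FREE);
}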