Diffstat (limited to 'include/asm-i386/atomic.h')
 include/asm-i386/atomic.h | 84 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 84 insertions(+), 0 deletions(-)
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index 94a7ea264..a50b747cd 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -23,9 +23,33 @@ typedef struct { volatile int counter; } atomic_t;
#define ATOMIC_INIT(i) { (i) }
+/**
+ * atomic_read - read atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically reads the value of @v. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
#define atomic_read(v) ((v)->counter)
+
+/**
+ * atomic_set - set atomic variable
+ * @v: pointer of type atomic_t
+ * @i: required value
+ *
+ * Atomically sets the value of @v to @i. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
#define atomic_set(v,i) (((v)->counter) = (i))
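Since atomic_read and atomic_set compile to plain aligned-word loads and stores, neither needs a LOCK prefix on i386. A minimal usage sketch (the counter name and message are invented for illustration):

#include <linux/kernel.h>
#include <asm/atomic.h>

static atomic_t pkt_count = ATOMIC_INIT(0);     /* hypothetical counter */

void report_and_reset(void)
{
        /* plain volatile load; mind the 24-bit guaranteed range noted above */
        printk("packets so far: %d\n", atomic_read(&pkt_count));
        atomic_set(&pkt_count, 0);      /* plain store, atomic for an aligned int */
}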
+/**
+ * atomic_add - add integer to atomic variable
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v. Note that the guaranteed useful range
+ * of an atomic_t is only 24 bits.
+ */
static __inline__ void atomic_add(int i, atomic_t *v)
{
__asm__ __volatile__(
@@ -34,6 +58,14 @@ static __inline__ void atomic_add(int i, atomic_t *v)
:"ir" (i), "m" (v->counter));
}
+/**
+ * atomic_sub - subtract integer from atomic variable
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
__asm__ __volatile__(
@@ -42,6 +74,16 @@ static __inline__ void atomic_sub(int i, atomic_t *v)
:"ir" (i), "m" (v->counter));
}
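A sketch of how the add/sub pair might maintain a counter updated from several CPUs; the LOCK prefix (emitted on SMP builds) keeps concurrent read-modify-write updates from losing counts. All names here are illustrative, not part of this patch:

#include <asm/atomic.h>

static atomic_t inflight_bytes = ATOMIC_INIT(0);        /* hypothetical */

void submit_io(int len)
{
        atomic_add(len, &inflight_bytes);       /* LOCK "addl" on SMP */
}

void complete_io(int len)
{
        atomic_sub(len, &inflight_bytes);       /* LOCK "subl"; no result returned */
}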
+/**
+ * atomic_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
unsigned char c;
@@ -53,6 +95,13 @@ static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
return c;
}
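Because the subtraction and the zero test come from one locked "subl" (with "sete" capturing its flags), the return value is reliable even under contention: exactly one caller observes the transition to zero. An illustrative sketch with invented names:

#include <asm/atomic.h>

extern void request_more_credits(void);         /* hypothetical helper */

static atomic_t tx_credits = ATOMIC_INIT(16);   /* hypothetical pool */

void consume_credits(int n)
{
        if (atomic_sub_and_test(n, &tx_credits))
                request_more_credits();         /* pool just hit zero */
}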
+/**
+ * atomic_inc - increment atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
static __inline__ void atomic_inc(atomic_t *v)
{
__asm__ __volatile__(
@@ -61,6 +110,13 @@ static __inline__ void atomic_inc(atomic_t *v)
:"m" (v->counter));
}
+/**
+ * atomic_dec - decrement atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
static __inline__ void atomic_dec(atomic_t *v)
{
__asm__ __volatile__(
@@ -69,6 +125,15 @@ static __inline__ void atomic_dec(atomic_t *v)
:"m" (v->counter));
}
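atomic_inc and atomic_dec cover counters stepped by one, each a single LOCK'd "incl"/"decl" with no return value. A brief sketch (names invented):

#include <asm/atomic.h>

static atomic_t nr_open_conns = ATOMIC_INIT(0); /* hypothetical gauge */

void conn_open(void)
{
        atomic_inc(&nr_open_conns);     /* LOCK "incl" on SMP */
}

void conn_close(void)
{
        atomic_dec(&nr_open_conns);     /* LOCK "decl"; result not examined */
}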
+/**
+ * atomic_dec_and_test - decrement by 1 and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
unsigned char c;
@@ -80,6 +145,15 @@ static __inline__ int atomic_dec_and_test(atomic_t *v)
return c != 0;
}
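atomic_dec_and_test is the classic reference-count release: only the CPU whose decrement brings the counter to zero sees a true return, so exactly one path frees the object. A hedged sketch, with the struct and helpers invented for illustration:

#include <asm/atomic.h>
#include <linux/slab.h>

struct session {                /* hypothetical refcounted object */
        atomic_t refcnt;
        /* ... payload ... */
};

void session_get(struct session *s)
{
        atomic_inc(&s->refcnt);
}

void session_put(struct session *s)
{
        if (atomic_dec_and_test(&s->refcnt))
                kfree(s);       /* last reference is gone */
}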
+/**
+ * atomic_inc_and_test - increment by 1 and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
unsigned char c;
@@ -91,6 +165,16 @@ static __inline__ int atomic_inc_and_test(atomic_t *v)
return c != 0;
}
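atomic_inc_and_test is rarer; one recognizable pattern is a counter biased to a negative value so that the final increment, and only that one, lands on zero. A speculative sketch (names invented):

#include <asm/atomic.h>

static atomic_t pending = ATOMIC_INIT(0);       /* hypothetical latch */

void start_batch(int nr_workers)
{
        atomic_set(&pending, -nr_workers);      /* bias below zero */
}

/* Returns true for exactly the last worker to finish. */
int worker_done(void)
{
        return atomic_inc_and_test(&pending);
}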
+/**
+ * atomic_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero. Note that the guaranteed
+ * useful range of an atomic_t is only 24 bits.
+ */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
unsigned char c;
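An illustrative use of atomic_add_negative: charging against a budget and learning, from the sign of the new total, whether the charge overdrew it. The names are invented:

#include <asm/atomic.h>

static atomic_t quota = ATOMIC_INIT(100);       /* hypothetical budget */

/* Charge 'cost' units; nonzero return means the budget went negative. */
int charge(int cost)
{
        return atomic_add_negative(-cost, &quota);
}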