2005-09-22 23:20:04 +04:00
|
|
|
#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#include <linux/types.h>

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }
|
2005-04-17 02:20:36 +04:00
|
|
|
|
2007-08-11 04:15:30 +04:00
|
|
|
static __inline__ int atomic_read(const atomic_t *v)
|
|
|
|
{
|
|
|
|
int t;
|
|
|
|
|
|
|
|
__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));
|
|
|
|
|
|
|
|
return t;
|
|
|
|
}
|
|
|
|
|
|
|
|
static __inline__ void atomic_set(atomic_t *v, int i)
|
|
|
|
{
|
|
|
|
__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
|
|
|
|
}
|
2005-04-17 02:20:36 +04:00
|
|
|
|
|
|
|
/*
 * atomic_add - atomically add @a to @v->counter.
 *
 * Classic lwarx/stwcx. retry loop.  No barriers: a void atomic op
 * provides no ordering guarantees.  PPC405_ERR77 emits the PPC405
 * erratum #77 workaround immediately before the stwcx.
 */
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

/*
 * atomic_add_return - atomically add @a to @v->counter and return the
 * new value.
 *
 * LWSYNC_ON_SMP before and ISYNC_ON_SMP after the loop give the op
 * full-barrier semantics on SMP; the "memory" clobber prevents the
 * compiler from reordering accesses around it.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
|
|
|
|
|
|
|
|
/*
 * atomic_sub - atomically subtract @a from @v->counter.
 * No barriers (void op); see atomic_add for the loop structure.
 */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

/*
 * atomic_sub_return - atomically subtract @a from @v->counter and
 * return the new value.  Full-barrier semantics on SMP (lwsync
 * before, isync after) plus a "memory" clobber.
 */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
|
|
|
|
|
|
|
|
/*
 * atomic_inc - atomically increment @v->counter by 1.
 *
 * addic modifies the carry bit, hence the "xer" clobber.
 * No barriers (void op).
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

/*
 * atomic_inc_return - atomically increment @v->counter and return the
 * new value.  Full-barrier semantics on SMP.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
|
|
|
|
|
|
|
|
/*
 * atomic_dec - atomically decrement @v->counter by 1.
 * addic clobbers the carry bit ("xer").  No barriers (void op).
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

/*
 * atomic_dec_return - atomically decrement @v->counter and return the
 * new value.  Full-barrier semantics on SMP.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/* cmpxchg/xchg on the embedded counter reuse the generic primitives. */
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
|
2005-11-14 03:07:24 +03:00
|
|
|
|
2005-11-14 03:07:25 +03:00
|
|
|
/**
|
|
|
|
* atomic_add_unless - add unless the number is a given value
|
|
|
|
* @v: pointer of type atomic_t
|
|
|
|
* @a: the amount to add to v...
|
|
|
|
* @u: ...unless v is equal to u.
|
|
|
|
*
|
|
|
|
* Atomically adds @a to @v, so long as it was not @u.
|
|
|
|
* Returns non-zero if @v was not @u, and zero otherwise.
|
|
|
|
*/
|
2006-02-20 12:41:40 +03:00
|
|
|
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
|
|
|
|
{
|
|
|
|
int t;
|
|
|
|
|
|
|
|
__asm__ __volatile__ (
|
|
|
|
LWSYNC_ON_SMP
|
|
|
|
"1: lwarx %0,0,%1 # atomic_add_unless\n\
|
|
|
|
cmpw 0,%0,%3 \n\
|
|
|
|
beq- 2f \n\
|
|
|
|
add %0,%2,%0 \n"
|
|
|
|
PPC405_ERR77(0,%2)
|
|
|
|
" stwcx. %0,0,%1 \n\
|
|
|
|
bne- 1b \n"
|
|
|
|
ISYNC_ON_SMP
|
|
|
|
" subf %0,%2,%0 \n\
|
|
|
|
2:"
|
|
|
|
: "=&r" (t)
|
|
|
|
: "r" (&v->counter), "r" (a), "r" (u)
|
|
|
|
: "cc", "memory");
|
|
|
|
|
|
|
|
return t != u;
|
|
|
|
}
|
|
|
|
|
2005-11-14 03:07:25 +03:00
|
|
|
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
|
|
|
|
|
2005-04-17 02:20:36 +04:00
|
|
|
#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
|
|
|
|
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Atomically test *v and decrement if it is greater than 0.
|
2007-01-17 19:50:20 +03:00
|
|
|
* The function returns the old value of *v minus 1, even if
|
|
|
|
* the atomic variable, v, was not decremented.
|
2005-04-17 02:20:36 +04:00
|
|
|
*/
|
|
|
|
static __inline__ int atomic_dec_if_positive(atomic_t *v)
|
|
|
|
{
|
|
|
|
int t;
|
|
|
|
|
|
|
|
__asm__ __volatile__(
|
2006-01-13 07:37:17 +03:00
|
|
|
LWSYNC_ON_SMP
|
2005-04-17 02:20:36 +04:00
|
|
|
"1: lwarx %0,0,%1 # atomic_dec_if_positive\n\
|
2007-01-17 19:50:20 +03:00
|
|
|
cmpwi %0,1\n\
|
|
|
|
addi %0,%0,-1\n\
|
2005-04-17 02:20:36 +04:00
|
|
|
blt- 2f\n"
|
|
|
|
PPC405_ERR77(0,%1)
|
|
|
|
" stwcx. %0,0,%1\n\
|
|
|
|
bne- 1b"
|
2005-09-22 23:20:04 +04:00
|
|
|
ISYNC_ON_SMP
|
2005-04-17 02:20:36 +04:00
|
|
|
"\n\
|
2007-01-17 19:50:20 +03:00
|
|
|
2:" : "=&b" (t)
|
2005-04-17 02:20:36 +04:00
|
|
|
: "r" (&v->counter)
|
|
|
|
: "cc", "memory");
|
|
|
|
|
|
|
|
return t;
|
|
|
|
}
|
|
|
|
|
2005-09-22 23:20:04 +04:00
|
|
|
#define smp_mb__before_atomic_dec() smp_mb()
|
|
|
|
#define smp_mb__after_atomic_dec() smp_mb()
|
|
|
|
#define smp_mb__before_atomic_inc() smp_mb()
|
|
|
|
#define smp_mb__after_atomic_inc() smp_mb()
|
2005-04-17 02:20:36 +04:00
|
|
|
|
2005-11-10 07:51:14 +03:00
|
|
|
#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read the 64-bit counter with a single ld.
 * No memory barriers are implied.
 */
static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

/*
 * atomic64_set - set the 64-bit counter with a single std.
 * No memory barriers are implied.
 */
static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
|
2005-11-10 07:51:14 +03:00
|
|
|
|
|
|
|
/*
 * atomic64_add - atomically add @a to @v->counter (64-bit).
 * ldarx/stdcx. retry loop; no barriers (void op).  The PPC405
 * erratum does not apply to 64-bit parts, so no PPC405_ERR77.
 */
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

/*
 * atomic64_add_return - atomically add @a and return the new value.
 * Full-barrier semantics on SMP (lwsync before, isync after).
 */
static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
|
|
|
|
|
|
|
|
/*
 * atomic64_sub - atomically subtract @a from @v->counter (64-bit).
 * No barriers (void op).
 */
static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

/*
 * atomic64_sub_return - atomically subtract @a and return the new
 * value.  Full-barrier semantics on SMP.
 */
static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
|
|
|
|
|
|
|
|
/*
 * atomic64_inc - atomically increment @v->counter by 1 (64-bit).
 * addic modifies the carry bit, hence the "xer" clobber.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

/*
 * atomic64_inc_return - atomically increment and return the new
 * value.  Full-barrier semantics on SMP.
 */
static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
|
|
|
|
|
|
|
|
/*
 * atomic64_dec - atomically decrement @v->counter by 1 (64-bit).
 * addic clobbers the carry bit ("xer").  No barriers (void op).
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

/*
 * atomic64_dec_return - atomically decrement and return the new
 * value.  Full-barrier semantics on SMP.
 */
static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Atomically test *v and decrement if it is greater than 0.
|
|
|
|
* The function returns the old value of *v minus 1.
|
|
|
|
*/
|
|
|
|
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
|
|
|
|
{
|
|
|
|
long t;
|
|
|
|
|
|
|
|
__asm__ __volatile__(
|
2006-01-13 07:37:17 +03:00
|
|
|
LWSYNC_ON_SMP
|
2005-11-10 07:51:14 +03:00
|
|
|
"1: ldarx %0,0,%1 # atomic64_dec_if_positive\n\
|
|
|
|
addic. %0,%0,-1\n\
|
|
|
|
blt- 2f\n\
|
|
|
|
stdcx. %0,0,%1\n\
|
|
|
|
bne- 1b"
|
|
|
|
ISYNC_ON_SMP
|
|
|
|
"\n\
|
|
|
|
2:" : "=&r" (t)
|
|
|
|
: "r" (&v->counter)
|
2008-11-05 21:39:27 +03:00
|
|
|
: "cc", "xer", "memory");
|
2005-11-10 07:51:14 +03:00
|
|
|
|
|
|
|
return t;
|
|
|
|
}
|
|
|
|
|
2007-05-08 11:34:27 +04:00
|
|
|
#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
|
2007-01-25 19:15:52 +03:00
|
|
|
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
|
|
|
|
|
|
|
|
/**
|
|
|
|
* atomic64_add_unless - add unless the number is a given value
|
|
|
|
* @v: pointer of type atomic64_t
|
|
|
|
* @a: the amount to add to v...
|
|
|
|
* @u: ...unless v is equal to u.
|
|
|
|
*
|
|
|
|
* Atomically adds @a to @v, so long as it was not @u.
|
|
|
|
* Returns non-zero if @v was not @u, and zero otherwise.
|
|
|
|
*/
|
|
|
|
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
|
|
|
|
{
|
|
|
|
long t;
|
|
|
|
|
|
|
|
__asm__ __volatile__ (
|
|
|
|
LWSYNC_ON_SMP
|
|
|
|
"1: ldarx %0,0,%1 # atomic_add_unless\n\
|
|
|
|
cmpd 0,%0,%3 \n\
|
|
|
|
beq- 2f \n\
|
|
|
|
add %0,%2,%0 \n"
|
|
|
|
" stdcx. %0,0,%1 \n\
|
|
|
|
bne- 1b \n"
|
|
|
|
ISYNC_ON_SMP
|
|
|
|
" subf %0,%2,%0 \n\
|
|
|
|
2:"
|
|
|
|
: "=&r" (t)
|
|
|
|
: "r" (&v->counter), "r" (a), "r" (u)
|
|
|
|
: "cc", "memory");
|
|
|
|
|
|
|
|
return t != u;
|
|
|
|
}
|
|
|
|
|
|
|
|
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
|
|
|
|
|
2005-11-10 07:51:14 +03:00
|
|
|
#endif /* __powerpc64__ */

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */
|