arch: Move smp_mb__{before,after}_atomic_{inc,dec}.h into asm/atomic.h

Move the barrier functions that depend on the atomic implementation
into the atomic implementation.
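
For background, these macros order surrounding memory accesses against
the non-value-returning atomics (atomic_inc()/atomic_dec() return void
and imply no memory barrier of their own). A minimal usage sketch, with
made-up names ('obj', 'dead', 'refcount') purely for illustration:

	/*
	 * Sketch only: atomic_dec() implies no barrier, so the
	 * explicit barrier orders the plain store before the decrement.
	 */
	obj->dead = 1;
	smp_mb__before_atomic_dec();
	atomic_dec(&obj->refcount);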

Reviewed-by: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Signed-off-by: Peter Zijlstra <peterz@infradead.org>
Acked-by: Vineet Gupta <vgupta@synopsys.com> [for arch/arc bits]
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Link: http://lkml.kernel.org/r/20131213150640.786183683@infradead.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Peter Zijlstra 2013-11-08 12:01:59 +01:00, committed by Ingo Molnar
Parent: 2e4f5382d1
Commit: 1de7da377b
4 changed files with 10 additions and 10 deletions

arch/arc/include/asm/atomic.h

@@ -190,6 +190,11 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 #endif /* !CONFIG_ARC_HAS_LLSC */
 
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
+
 /**
  * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t

arch/arc/include/asm/barrier.h

@@ -30,11 +30,6 @@
 #define smp_wmb()	barrier()
 #endif
 
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
-
 #define smp_read_barrier_depends()	do { } while (0)
 
 #endif

arch/hexagon/include/asm/atomic.h

@@ -160,8 +160,12 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 #define atomic_sub_and_test(i, v) (atomic_sub_return(i, (v)) == 0)
 #define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
 
 #define atomic_inc_return(v) (atomic_add_return(1, v))
 #define atomic_dec_return(v) (atomic_sub_return(1, v))
 
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
+
 #endif

arch/hexagon/include/asm/barrier.h

@@ -29,10 +29,6 @@
 #define smp_read_barrier_depends()	barrier()
 #define smp_wmb()	barrier()
 #define smp_mb()	barrier()
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
 
 /* Set a value and use a memory barrier. Used by the scheduler somewhere. */
 #define set_mb(var, value) \