MIPS: New macro smp_mb__before_llsc.
[deliverable/linux.git] arch/mips/include/asm/spinlock.h
index 21ef9efbde43f9e61cf726961e81e91c7252d6b5..5f16696eaa003af15d4b5b9e679f882374eac80b 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -138,7 +138,7 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
        int tmp;
 
-       smp_llsc_mb();
+       smp_mb__before_llsc();
 
        if (R10000_LLSC_WAR) {
                __asm__ __volatile__ (
@@ -305,7 +305,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 {
        unsigned int tmp;
 
-       smp_llsc_mb();
+       smp_mb__before_llsc();
 
        if (R10000_LLSC_WAR) {
                __asm__ __volatile__(
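For context, here is a minimal sketch of how the new smp_mb__before_llsc() macro might be defined. The actual definition lives in arch/mips/include/asm/barrier.h and is not part of this diff; the CONFIG_CPU_CAVIUM_OCTEON special case and the fallback to smp_llsc_mb() shown below are assumptions about that definition, not text taken from this page.

```c
/*
 * Assumed sketch of the barrier macro introduced by this commit.
 *
 * On most MIPS CPUs a barrier is still required before an LL/SC
 * sequence, so the macro falls back to the existing smp_llsc_mb().
 * On CPUs whose stores are already ordered ahead of LL/SC (for
 * example Cavium Octeon), it can be relaxed to a cheaper write
 * barrier.  Exact CPU handling here is illustrative only.
 */
#ifdef CONFIG_CPU_CAVIUM_OCTEON
#define smp_mb__before_llsc()	smp_wmb()
#else
#define smp_mb__before_llsc()	smp_llsc_mb()
#endif
```

With such a definition, the unlock paths changed in this diff keep their ordering guarantee on CPUs that need the full LL/SC barrier, while allowing a lighter barrier where the hardware permits it.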