Kernel/arm7 - Fixed SPINLOCK to use the swap opcode
[tpg/acess2.git] / Kernel/arch/arm7/include/lock.h
index 9b289d1..dbcac93 100644
@@ -26,6 +26,18 @@ static inline int CPU_HAS_LOCK(struct sShortSpinlock *Lock)
 
 static inline int SHORTLOCK(struct sShortSpinlock *Lock)
 {
+       #if 0
+       while( __sync_lock_test_and_set( &Lock->Lock, 1 ) == 1 );
+       #elif 0
+       while( Lock->Lock )     ;
+       Lock->Lock = 1;
+       #elif 1
+       int     v = 1;
+       while( v )
+       {
+               __asm__ __volatile__ ("swp %0, %1, [%2]" : "=&r" (v) : "r" (1), "r" (&Lock->Lock) : "memory");  // Atomic swap: 1 -> Lock->Lock, old value -> v
+       }
+       #elif 0
        // Shamelessly copied from linux (/arch/arm/include/asm/spinlock.h) until I can fix stuff
        Uint    tmp;
        __asm__ __volatile__ (
@@ -38,6 +50,7 @@ static inline int SHORTLOCK(struct sShortSpinlock *Lock)
                : "r" (&Lock->Lock), "r" (1)
                : "cc"  // Condition codes clobbered
                );
+       #endif
        return 1;
 }
 

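For reference, the pattern the enabled branch is aiming at can be written as a standalone pair of functions. This is only a minimal sketch: the names swp_lock_t, swp_lock and swp_unlock are illustrative and not part of the Acess2 sources, and it assumes an ARM core where the SWP instruction is still usable (it is deprecated from ARMv6 onwards).

    /* Illustrative only -- not Acess2 kernel API. */
    typedef struct { volatile int Lock; } swp_lock_t;

    static inline void swp_lock(swp_lock_t *l)
    {
        int old = 1;
        do {
            /* Atomically store 1 into l->Lock and read the previous
             * value back into 'old'; 0 means the lock was free and
             * is now ours. */
            __asm__ __volatile__ (
                "swp %0, %1, [%2]"
                : "=&r" (old)
                : "r" (1), "r" (&l->Lock)
                : "memory"
                );
        } while (old);
    }

    static inline void swp_unlock(swp_lock_t *l)
    {
        /* A plain store releases the lock; the empty asm acts as a
         * compiler barrier so protected accesses are not reordered
         * past the release. */
        __asm__ __volatile__ ("" ::: "memory");
        l->Lock = 0;
    }

SWP atomically exchanges a register with a memory word, so writing 1 and spinning until the old value reads back as 0 gives a simple test-and-set lock; the LDREX/STREX sequence in the disabled Linux-derived branch is the replacement on ARMv6 and later, where SWP is deprecated.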