From: John Hodge
Date: Sun, 28 Aug 2011 04:06:24 +0000 (+0800)
Subject: Kernel/arm7 - Fixed SPINLOCK to use the swap opcode
X-Git-Tag: rel0.11~114
X-Git-Url: https://git.ucc.asn.au/?a=commitdiff_plain;h=7ec592a682de215659959788d20b49f0225c56e2;hp=263e8355dbbcee8d38db03753811c0a8246d04e9;p=tpg%2Facess2.git

Kernel/arm7 - Fixed SPINLOCK to use the swap opcode
---

diff --git a/Kernel/arch/arm7/include/lock.h b/Kernel/arch/arm7/include/lock.h
index e270145c..dbcac936 100644
--- a/Kernel/arch/arm7/include/lock.h
+++ b/Kernel/arch/arm7/include/lock.h
@@ -28,12 +28,16 @@ static inline int SHORTLOCK(struct sShortSpinlock *Lock)
 {
 	#if 0
 	while( __sync_lock_test_and_set( &Lock->Lock, 1 ) == 1 );
-	#endif
-	#if 1
+	#elif 0
 	while( Lock->Lock )	;
 	Lock->Lock = 1;
-	#endif
-	#if 0
+	#elif 1
+	 int	v = 1;
+	while( v )
+	{
+		__asm__ __volatile__ ("swp [%0], %1" : "=r" (v) : "r" (&lock));
+	}
+	#elif 0
 	// Shamelessly copied from linux (/arch/arm/include/asm/spinlock.h) until I can fix stuff
 	Uint	tmp;
 	__asm__ __volatile__ (
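
For reference, below is a minimal standalone sketch of the swp-based acquire this commit enables. The ARM swp instruction atomically exchanges a register with a memory word and uses the three-operand form "swp Rd, Rm, [Rn]", so the constraints here differ from the hunk above (which writes "swp [%0], %1" against &lock). Only struct sShortSpinlock and its Lock field come from lock.h; the helper names SHORTLOCK_swp/SHORTREL_swp and the rest are illustrative assumptions, not code from the repository.

// Sketch of a swp-based spinlock for ARMv5-class cores, assuming a
// struct with a single volatile int lock word as in lock.h above.
// swp Rd, Rm, [Rn]  ->  Rd = [Rn]; [Rn] = Rm   (atomic exchange)
struct sShortSpinlock {
	volatile int	Lock;
};

static inline void SHORTLOCK_swp(struct sShortSpinlock *Lock)
{
	 int	v = 1;
	// Keep swapping 1 into the lock word until the old value read back is 0
	while( v )
	{
		__asm__ __volatile__ (
			"swp %0, %1, [%2]"
			: "=&r" (v)                  // old lock value (early-clobber so Rd != Rn)
			: "r" (1), "r" (&Lock->Lock) // value to store, address of the lock word
			: "memory"
			);
	}
}

static inline void SHORTREL_swp(struct sShortSpinlock *Lock)
{
	// A plain store is enough to release on a single-core ARMv5 target
	Lock->Lock = 0;
}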