From 7ec592a682de215659959788d20b49f0225c56e2 Mon Sep 17 00:00:00 2001 From: John Hodge Date: Sun, 28 Aug 2011 12:06:24 +0800 Subject: [PATCH] Kernel/arm7 - Fixed SPINLOCK to use the swap opcode --- Kernel/arch/arm7/include/lock.h | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/Kernel/arch/arm7/include/lock.h b/Kernel/arch/arm7/include/lock.h index e270145c..dbcac936 100644 --- a/Kernel/arch/arm7/include/lock.h +++ b/Kernel/arch/arm7/include/lock.h @@ -28,12 +28,16 @@ static inline int SHORTLOCK(struct sShortSpinlock *Lock) { #if 0 while( __sync_lock_test_and_set( &Lock->Lock, 1 ) == 1 ); - #endif - #if 1 + #elif 0 while( Lock->Lock ) ; Lock->Lock = 1; - #endif - #if 0 + #elif 1 + int v = 1; + while( v ) + { + __asm__ __volatile__ ("swp %0, %1, [%2]" : "=r" (v) : "r" (1), "r" (&Lock->Lock) : "memory"); + } + #elif 0 // Shamelessly copied from linux (/arch/arm/include/asm/spinlock.h) until I can fix stuff Uint tmp; __asm__ __volatile__ ( -- 2.20.1