X-Git-Url: https://git.ucc.asn.au/?a=blobdiff_plain;f=Kernel%2Farch%2Farm7%2Finclude%2Flock.h;h=627726b568f7f009125e6d697616016f2bd6ecd6;hb=17aac974ab83a3521f2b49b8de33ae05a00fbe07;hp=9b289d137fd859f1f34155bc37d8278581dc68bc;hpb=e4ccf568b07857a36382433ed73ea38874843b24;p=tpg%2Facess2.git

diff --git a/Kernel/arch/arm7/include/lock.h b/Kernel/arch/arm7/include/lock.h
index 9b289d13..627726b5 100644
--- a/Kernel/arch/arm7/include/lock.h
+++ b/Kernel/arch/arm7/include/lock.h
@@ -26,18 +26,28 @@ static inline int CPU_HAS_LOCK(struct sShortSpinlock *Lock)
 static inline int SHORTLOCK(struct sShortSpinlock *Lock)
 {
-	// Shamelessly copied from linux (/arch/arm/include/asm/spinlock.h) until I can fix stuff
+	#if 1
+	// Copied from Linux, yes, but I know what it does now :)
 	Uint	tmp;
 	__asm__ __volatile__ (
-	"1:	ldrex	%0, [%1]\n"
-	"	teq	%0, #0\n"
-	"	strexeq %0, %2, [%1]\n"	// Magic? TODO: Look up
-	"	teqeq	%0, #0\n"
-	"	bne	1b"
+	"1:	ldrex	%0, [%1]\n"	// Exclusive load of the lock word
+	"	teq	%0, #0\n"	// Check if it is zero (unlocked)
+	"	strexeq %0, %2, [%1]\n"	// If zero, try to store 1 (the store clears the exclusive monitor)
+	"	teqeq	%0, #0\n"	// If the lock was available, check whether the store succeeded
+	"	bne	1b"	// If the lock was unavailable, or the store failed, retry
 	: "=&r" (tmp)	// Temp
 	: "r" (&Lock->Lock), "r" (1)
 	: "cc"	// Condition codes clobbered
 	);
+	#else
+	 int	v = 1;
+	while( v )	// Spin until the old value read back by SWP is zero (lock was free)
+		__asm__ __volatile__ (
+			"swp %0, %2, [%1]"	// Atomically store 1 into the lock, old value into v
+			: "=r" (v) : "r" (&Lock->Lock), "r" (1)
+			: "cc"
+			);
+	#endif
 	return 1;
 }
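
For reference, the LDREX/STREX loop in the new code amounts to an atomic test-and-set acquire on Lock->Lock. Below is a minimal C sketch of the same behaviour, assuming a GCC-style toolchain where the __sync_lock_test_and_set() and __sync_lock_release() builtins are available; the SHORTLOCK_sketch/SHORTREL_sketch names and the struct layout shown are illustrative stand-ins, not code taken from the repository.

/*
 * Illustrative sketch only, not part of the commit above.
 * __sync_lock_test_and_set() expands to an appropriate atomic swap
 * (LDREX/STREX on ARMv6+, SWP on older cores) for the target CPU.
 */
struct sShortSpinlock {
	volatile int	Lock;	// 0 = free, 1 = held (as used in the diff)
};

static inline int SHORTLOCK_sketch(struct sShortSpinlock *Lock)
{
	// Atomically write 1 and read back the previous value;
	// spin while the previous value was already 1 (lock held elsewhere)
	while( __sync_lock_test_and_set(&Lock->Lock, 1) )
		;
	return 1;
}

static inline void SHORTREL_sketch(struct sShortSpinlock *Lock)
{
	// Release: store 0 back into the lock word (hypothetical helper)
	__sync_lock_release(&Lock->Lock);
}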