Kernel/arm7 - Fixed SPINLOCK to use the swap opcode
[tpg/acess2.git] / Kernel / arch / arm7 / include / lock.h
/*
 * Acess2
 * ARM7 Architecture
 *
 * lock.h - Hardware level spinlocks
 */
#ifndef _LOCK_H_
#define _LOCK_H_

// === CODE ===
struct sShortSpinlock {
	 int	Lock;
};
// --- Spinlocks ---
static inline int IS_LOCKED(struct sShortSpinlock *Lock)
{
	return !!Lock->Lock;
}

static inline int CPU_HAS_LOCK(struct sShortSpinlock *Lock)
{
	// TODO: Handle multiple CPUs
	return !!Lock->Lock;
}
static inline int SHORTLOCK(struct sShortSpinlock *Lock)
{
	#if 0
	while( __sync_lock_test_and_set( &Lock->Lock, 1 ) == 1 );
	#elif 0
	// Non-atomic fallback (racy between the test and the set)
	while( Lock->Lock )	;
	Lock->Lock = 1;
	#elif 1
	 int	v = 1;
	while( v )
	{
		// Atomically swap 1 into the lock word, reading the old value
		// back into v (SWP: Rd = [Rn]; [Rn] = Rm). The loop exits once
		// the old value was 0, i.e. this CPU took a free lock.
		__asm__ __volatile__ ("swp %0, %1, [%2]"
			: "=&r" (v)
			: "r" (1), "r" (&Lock->Lock)
			: "memory");
	}
	#elif 0
	// Shamelessly copied from linux (/arch/arm/include/asm/spinlock.h) until I can fix stuff
	Uint	tmp;
	__asm__ __volatile__ (
	"1:	ldrex	%0, [%1]\n"
	"	teq	%0, #0\n"
	"	strexeq	%0, %2, [%1]\n"	// If still 0, try to store 1; %0 becomes 0 on success
	"	teqeq	%0, #0\n"
	"	bne	1b"	// Lock was held, or the exclusive store failed - retry
		: "=&r" (tmp)	// Temp
		: "r" (&Lock->Lock), "r" (1)
		: "cc"	// Condition codes clobbered
		);
	#endif
	return 1;
}

static inline void SHORTREL(struct sShortSpinlock *Lock)
{
	// Release by clearing the lock word (plain store, no explicit barrier)
	Lock->Lock = 0;
}

#endif
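
For reference, here is a minimal usage sketch of this API, assuming a caller that only needs to guard a shared counter. gExampleLock, giExampleCounter and Example_Increment are invented names for illustration, not part of the kernel; note also that this port's SHORTLOCK does not touch the interrupt state, so code that can race with IRQ handlers must handle that itself.

	#include "lock.h"	// Include path is illustrative

	struct sShortSpinlock	gExampleLock;	// Zero-initialised, so initially unlocked
	 int	giExampleCounter;	// Hypothetical shared data

	void Example_Increment(void)
	{
		SHORTLOCK( &gExampleLock );	// Spins until the SWP returns 0
		giExampleCounter ++;	// Critical section: at most one holder
		SHORTREL( &gExampleLock );	// Clearing the word frees the lock
	}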