Kernel/x86 - Fixed build and run after x86_64 and arm7 changes to API
[tpg/acess2.git] / Kernel / arch / x86 / lib.c
index a3f98be..3a70c75 100644 (file)
@@ -15,6 +15,7 @@
 // === IMPRORTS ===
 #if TRACE_LOCKS
 extern struct sShortSpinlock   glDebug_Lock;
+extern struct sShortSpinlock   glThreadListLock;
 #endif
 extern int     GetCPUNum(void);
 
@@ -94,6 +95,14 @@ void SHORTLOCK(struct sShortSpinlock *Lock)
        }
        #endif
        
+       #if TRACE_LOCKS
+       if( Lock != &glDebug_Lock && Lock != &glThreadListLock )
+       {
+               //Log_Log("LOCK", "%p locked by %p", Lock, __builtin_return_address(0));
+               Debug("%p obtaining %p (Called by %p)", __builtin_return_address(0), Lock, __builtin_return_address(1));
+       }
+       #endif
+       
        // Wait for another CPU to release
        while(v) {
                // CMPXCHG:
@@ -124,10 +133,11 @@ void SHORTLOCK(struct sShortSpinlock *Lock)
        #endif
        
        #if TRACE_LOCKS
-       if( Lock != &glDebug_Lock )
+       if( Lock != &glDebug_Lock && Lock != &glThreadListLock )
        {
                //Log_Log("LOCK", "%p locked by %p", Lock, __builtin_return_address(0));
-               LogF("Lock %p locked by %p\n", Lock, __builtin_return_address(0));
+               //Debug("Lock %p locked by %p\t%p", Lock, __builtin_return_address(0), __builtin_return_address(1));
+               Debug("got it");
        }
        #endif
 }
@@ -145,10 +155,10 @@ void SHORTREL(struct sShortSpinlock *Lock)
        #endif
        
        #if TRACE_LOCKS
-       if( Lock != &glDebug_Lock )
+       if( Lock != &glDebug_Lock && Lock != &glThreadListLock )
        {
                //Log_Log("LOCK", "%p released by %p", Lock, __builtin_return_address(0));
-               LogF("Lock %p released by %p\n", Lock, __builtin_return_address(0));
+               Debug("Lock %p released by %p\t%p", Lock, __builtin_return_address(0), __builtin_return_address(1));
        }
        #endif
        
@@ -333,6 +343,25 @@ void *memcpyd(void *Dest, const void *Src, size_t Num)
        return Dest;
 }
 
+/**
+ * \brief Divide one unsigned 64-bit integer by another.
+ * \param Num  Numerator
+ * \param Div  Denominator (must be non-zero; zero divides fault/are UB)
+ * \param Rem  Optional pointer that receives \a Num % \a Div (may be NULL)
+ * \return \a Num / \a Div
+ */
+Uint64 DivMod64U(Uint64 Num, Uint64 Div, Uint64 *Rem)
+{
+	Uint64	ret;
+	// Fast path: a single hardware DIV handles EDX:EAX / r32 -> EAX.
+	// DIV raises #DE if the quotient does not fit in 32 bits, so the
+	// guard requires both a 32-bit divisor and Num/Div < 0xFFFFFFFF.
+	if( Div < 0x100000000ULL && Num < 0xFFFFFFFFULL * Div ) {
+		Uint32	rem, ret_32;
+		__asm__ __volatile__(
+			"div %4"
+			: "=a" (ret_32), "=d" (rem)
+			: "a" ( (Uint32)(Num & 0xFFFFFFFF) ), "d" ((Uint32)(Num >> 32)), "r" ((Uint32)Div)
+			: "cc"	// DIV clobbers the flags
+			);
+		if(Rem)	*Rem = rem;
+		return ret_32;
+	}
+
+	// Slow path: full 64/64 software division via libgcc helpers
+	ret = __udivdi3(Num, Div);
+	if(Rem)	*Rem = __umoddi3(Num, Div);
+	return ret;
+}
+
+
 /**
  * \fn Uint64 __udivdi3(Uint64 Num, Uint64 Den)
  * \brief Divide two 64-bit integers

UCC git Repository :: git.ucc.asn.au