7 extern int GetCPUNum(void);
8 extern void *Proc_GetCurThread(void);
12 * \brief Determine if a short spinlock is locked
13 * \param Lock Lock pointer
15 int IS_LOCKED(struct sShortSpinlock *Lock)
/**
 * \brief Check if the current CPU has the lock
 * \param Lock	Lock pointer
 * \return Non-zero if the current CPU/thread is the lock's owner
 *
 * Only meaningful when \a STACKED_LOCKS is enabled; a plain spinlock
 * records no owner, so this conservatively returns zero.
 */
int CPU_HAS_LOCK(struct sShortSpinlock *Lock)
{
	#if STACKED_LOCKS == 1
	// Owner is stored as CPU number + 1 (so zero always means "unlocked")
	return Lock->Lock == GetCPUNum() + 1;
	#elif STACKED_LOCKS == 2
	// Owner is stored as the current thread pointer
	return Lock->Lock == Proc_GetCurThread();
	#else
	// No owner information is recorded for non-stacked locks
	return 0;
	#endif
}
36 * \brief Acquire a Short Spinlock
37 * \param Lock Lock pointer
39 * This type of mutex should only be used for very short sections of code,
40 * or in places where a Mutex_* would be overkill, such as appending
41 * an element to linked list (usually two assignement lines in C)
43 * \note This type of lock halts interrupts, so ensure that no timing
44 * functions are called while it is held. As a matter of fact, spend as
45 * little time as possible with this lock held
46 * \note If \a STACKED_LOCKS is set, this type of spinlock can be nested
48 void SHORTLOCK(struct sShortSpinlock *Lock)
54 #if STACKED_LOCKS == 1
55 int cpu = GetCPUNum() + 1;
56 #elif STACKED_LOCKS == 2
57 void *thread = Proc_GetCurThread();
61 // Save interrupt state and clear interrupts
62 __ASM__ ("pushf;\n\tpop %0" : "=r"(IF));
63 IF &= 0x200; // AND out all but the interrupt flag
66 #if STACKED_LOCKS == 1
67 if( Lock->Lock == cpu ) {
71 #elif STACKED_LOCKS == 2
72 if( Lock->Lock == thread ) {
78 // Wait for another CPU to release
81 // If r/m32 == EAX, set ZF and set r/m32 = r32
82 // Else, clear ZF and set EAX = r/m32
83 #if STACKED_LOCKS == 1
84 __ASM__("lock cmpxchgl %2, (%3)"
86 : "a"(0), "r"(cpu), "r"(&Lock->Lock)
88 #elif STACKED_LOCKS == 2
89 __ASM__("lock cmpxchgq %2, (%3)"
91 : "a"(0), "r"(thread), "r"(&Lock->Lock)
94 __ASM__("xchgl %0, (%2)":"=a"(v):"a"(1),"D"(&Lock->Lock));
98 if( v ) __ASM__("sti"); // Re-enable interrupts
102 #if LOCK_DISABLE_INTS
108 * \brief Release a short lock
109 * \param Lock Lock pointer
111 void SHORTREL(struct sShortSpinlock *Lock)
120 #if LOCK_DISABLE_INTS
121 // Lock->IF can change anytime once Lock->Lock is zeroed
134 void outb(Uint16 Port, Uint8 Data)
136 __asm__ __volatile__ ("outb %%al, %%dx"::"d"(Port),"a"(Data));
138 void outw(Uint16 Port, Uint16 Data)
140 __asm__ __volatile__ ("outw %%ax, %%dx"::"d"(Port),"a"(Data));
142 void outd(Uint16 Port, Uint32 Data)
144 __asm__ __volatile__ ("outl %%eax, %%dx"::"d"(Port),"a"(Data));
146 Uint8 inb(Uint16 Port)
149 __asm__ __volatile__ ("inb %%dx, %%al":"=a"(ret):"d"(Port));
152 Uint16 inw(Uint16 Port)
155 __asm__ __volatile__ ("inw %%dx, %%ax":"=a"(ret):"d"(Port));
158 Uint32 ind(Uint16 Port)
161 __asm__ __volatile__ ("inl %%dx, %%eax":"=a"(ret):"d"(Port));
165 // === Endianness ===
166 Uint32 BigEndian32(Uint32 Value)
170 ret |= ((Value >> 16) & 0xFF) << 8;
171 ret |= ((Value >> 8) & 0xFF) << 16;
172 ret |= ((Value >> 0) & 0xFF) << 24;
176 Uint16 BigEndian16(Uint16 Value)
178 return (Value>>8)|(Value<<8);
181 // === Memory Manipulation ===
182 int memcmp(const void *__dest, const void *__src, size_t __count)
184 if( ((tVAddr)__dest & 7) != ((tVAddr)__src & 7) ) {
185 const Uint8 *src = __src, *dst = __dest;
190 src ++; dst ++; __count --;
195 const Uint8 *src = __src;
196 const Uint8 *dst = __dest;
197 const Uint64 *src64, *dst64;
199 while( (tVAddr)src & 7 && __count ) {
202 dst ++; src ++; __count --;
208 while( __count >= 8 )
210 if( *src64 != *dst64 )
214 if(src[0] != dst[0]) return dst[0]-src[0];
215 if(src[1] != dst[1]) return dst[1]-src[1];
216 if(src[2] != dst[2]) return dst[2]-src[2];
217 if(src[3] != dst[3]) return dst[3]-src[3];
218 if(src[4] != dst[4]) return dst[4]-src[4];
219 if(src[5] != dst[5]) return dst[5]-src[5];
220 if(src[6] != dst[6]) return dst[6]-src[6];
221 if(src[7] != dst[7]) return dst[7]-src[7];
222 return -1; // This should never happen
233 if(*dst != *src) return *dst - *src;
241 void *memcpy(void *__dest, const void *__src, size_t __count)
243 if( ((tVAddr)__dest & 7) != ((tVAddr)__src & 7) )
244 __asm__ __volatile__ ("rep movsb" : : "D"(__dest),"S"(__src),"c"(__count));
246 const Uint8 *src = __src;
248 while( (tVAddr)src & 7 && __count ) {
253 __asm__ __volatile__ ("rep movsq" : : "D"(dst),"S"(src),"c"(__count/8));
256 __count = __count & 7;
263 void *memset(void *__dest, int __val, size_t __count)
265 if( __val != 0 || ((tVAddr)__dest & 7) != 0 )
266 __asm__ __volatile__ ("rep stosb" : : "D"(__dest),"a"(__val),"c"(__count));
270 __asm__ __volatile__ ("rep stosq" : : "D"(dst),"a"(0),"c"(__count/8));
272 __count = __count & 7;
279 void *memsetd(void *__dest, Uint32 __val, size_t __count)
281 __asm__ __volatile__ ("rep stosl" : : "D"(__dest),"a"(__val),"c"(__count));