7 extern int GetCPUNum(void);
// NOTE(review): this chunk is a partial extraction — the leading numbers are
// original source line numbers, and several lines (the "/**" doc-comment
// opener, the function body and the closing brace) are missing here.
11 * \brief Determine if a short spinlock is locked
12 * \param Lock Lock pointer
// Returns non-zero when Lock is currently held. The body is not visible in
// this extraction — presumably it tests Lock->Lock; confirm in the full source.
14 int IS_LOCKED(struct sShortSpinlock *Lock)
20 * \brief Check if the current CPU has the lock
21 * \param Lock Lock pointer
// Returns non-zero when the caller is the current owner of Lock. Only the
// STACKED_LOCKS branches are visible; the #else branch, #endif and closing
// brace fall outside this extraction.
23 int CPU_HAS_LOCK(struct sShortSpinlock *Lock)
25 #if STACKED_LOCKS == 1
// Mode 1: Lock->Lock stores (CPU number + 1), so 0 can mean "unlocked".
26 return Lock->Lock == GetCPUNum() + 1;
27 #elif STACKED_LOCKS == 2
// Mode 2: Lock->Lock stores the owning thread pointer instead of a CPU id.
28 return Lock->Lock == Proc_GetCurThread();
35 * \brief Acquire a Short Spinlock
36 * \param Lock Lock pointer
38 * This type of mutex should only be used for very short sections of code,
39 * or in places where a Mutex_* would be overkill, such as appending
40 * an element to linked list (usually two assignement lines in C)
42 * \note This type of lock halts interrupts, so ensure that no timing
43 * functions are called while it is held. As a matter of fact, spend as
44 * little time as possible with this lock held
45 * \note If \a STACKED_LOCKS is set, this type of spinlock can be nested
// Acquire path (fragmentary extraction — many lines are missing between the
// numbered lines below; the numeric prefixes are original line numbers):
// 1) capture the owner token, 2) save/disable interrupts, 3) re-entrancy
// check, 4) spin on a locked compare-and-exchange until ownership is taken.
47 void SHORTLOCK(struct sShortSpinlock *Lock)
53 #if STACKED_LOCKS == 1
// Owner token is (CPU number + 1) so that 0 remains the "unlocked" value.
54 int cpu = GetCPUNum() + 1;
55 #elif STACKED_LOCKS == 2
// Owner token is the current thread pointer.
56 void *thread = Proc_GetCurThread();
60 // Save interrupt state and clear interrupts
// PUSHF/POP captures RFLAGS into RAX ("=a"), then CLI masks interrupts.
61 __ASM__ ("pushf;\n\tpop %%rax\n\tcli" : "=a"(IF));
62 IF &= 0x200; // AND out all but the interrupt flag
65 #if STACKED_LOCKS == 1
// Re-entrancy: if this CPU already owns the lock, take the nested path
// (nesting bookkeeping is on the lines missing from this extraction).
66 if( Lock->Lock == cpu ) {
70 #elif STACKED_LOCKS == 2
71 if( Lock->Lock == thread ) {
77 // Wait for another CPU to release
80 // If r/m32 == EAX, set ZF and set r/m32 = r32
81 // Else, clear ZF and set EAX = r/m32
82 #if STACKED_LOCKS == 1
// Atomic CAS: claim the lock only if it is currently 0 (unlocked).
83 __ASM__("lock cmpxchgl %2, (%3)"
85 : "a"(0), "r"(cpu), "r"(&Lock->Lock)
87 #elif STACKED_LOCKS == 2
// NOTE(review): cmpxchgl is a 32-bit exchange, but `thread` is a pointer
// (64-bit on x86_64) — this looks like it truncates the owner token; the
// full source should probably use cmpxchgq here. Confirm before changing.
88 __ASM__("lock cmpxchgl %2, (%3)"
90 : "a"(0), "r"(thread), "r"(&Lock->Lock)
// Non-stacked build: plain atomic exchange of 1 into the lock word; the
// spin re-tests `v` on lines missing from this extraction.
93 __ASM__("xchgl %0, (%2)":"=a"(v):"a"(1),"D"(&Lock->Lock));
102 * \brief Release a short lock
103 * \param Lock Lock pointer
// Releases a lock taken with SHORTLOCK and, when LOCK_DISABLE_INTS is set,
// restores the interrupt-flag state saved at acquire time. Most of the body
// is missing from this extraction.
105 void SHORTREL(struct sShortSpinlock *Lock)
114 #if LOCK_DISABLE_INTS
// Ordering matters: the saved IF must be read BEFORE Lock->Lock is zeroed,
// because another CPU may grab the lock (and overwrite Lock->IF) immediately.
115 // Lock->IF can change anytime once Lock->Lock is zeroed
128 void outb(Uint16 Port, Uint8 Data)
130 __asm__ __volatile__ ("outb %%al, %%dx"::"d"(Port),"a"(Data));
132 void outw(Uint16 Port, Uint16 Data)
134 __asm__ __volatile__ ("outw %%ax, %%dx"::"d"(Port),"a"(Data));
136 void outd(Uint16 Port, Uint32 Data)
138 __asm__ __volatile__ ("outl %%eax, %%dx"::"d"(Port),"a"(Data));
140 Uint8 inb(Uint16 Port)
143 __asm__ __volatile__ ("inb %%dx, %%al":"=a"(ret):"d"(Port));
146 Uint16 inw(Uint16 Port)
149 __asm__ __volatile__ ("inw %%dx, %%ax":"=a"(ret):"d"(Port));
152 Uint32 ind(Uint16 Port)
155 __asm__ __volatile__ ("inl %%dx, %%eax":"=a"(ret):"d"(Port));
159 // === Endianness ===
160 Uint32 BigEndian32(Uint32 Value)
164 ret |= ((Value >> 16) & 0xFF) << 8;
165 ret |= ((Value >> 8) & 0xFF) << 16;
166 ret |= ((Value >> 0) & 0xFF) << 24;
170 Uint16 BigEndian16(Uint16 Value)
172 return (Value>>8)|(Value<<8);
175 // === Memory Manipulation ===
// Compare two memory regions, byte-for-byte semantics like libc memcmp
// (returns 0 if equal, and dest-minus-src of the first differing byte
// otherwise, i.e. s1 - s2). Fast path: when both pointers share the same
// 8-byte misalignment, align up then compare 64 bits at a time.
// NOTE(review): fragmentary extraction — the numeric prefixes are original
// line numbers and many lines (loop bodies, casts, #endif-style closers)
// are missing below.
176 int memcmp(const void *__dest, const void *__src, size_t __count)
// Different misalignment: fall back to a pure byte-wise compare loop.
178 if( ((tVAddr)__dest & 7) != ((tVAddr)__src & 7) ) {
179 const Uint8 *src = __src, *dst = __dest;
184 src ++; dst ++; __count --;
// Same misalignment: compare bytes until 8-byte aligned, then 64-bit words.
189 const Uint8 *src = __src;
190 const Uint8 *dst = __dest;
191 const Uint64 *src64, *dst64;
193 while( (tVAddr)src & 7 && __count ) {
196 dst ++; src ++; __count --;
// Main 64-bit loop. NOTE(review): accessing byte buffers through Uint64*
// is a strict-aliasing violation in ISO C — tolerated in this kernel,
// presumably built with -fno-strict-aliasing; confirm the build flags.
202 while( __count >= 8 )
204 if( *src64 != *dst64 )
// A 64-bit word differed: locate the first differing byte within it.
208 if(src[0] != dst[0]) return dst[0]-src[0];
209 if(src[1] != dst[1]) return dst[1]-src[1];
210 if(src[2] != dst[2]) return dst[2]-src[2];
211 if(src[3] != dst[3]) return dst[3]-src[3];
212 if(src[4] != dst[4]) return dst[4]-src[4];
213 if(src[5] != dst[5]) return dst[5]-src[5];
214 if(src[6] != dst[6]) return dst[6]-src[6];
215 if(src[7] != dst[7]) return dst[7]-src[7];
// Unreachable: the words differed, so one of the 8 bytes must differ.
216 return -1; // This should never happen
// Tail loop over the remaining (< 8) bytes.
227 if(*dst != *src) return *dst - *src;
// Copy __count bytes from __src to __dest (non-overlapping regions).
// Uses `rep movsb` when alignments differ, otherwise aligns then copies
// 8 bytes at a time with `rep movsq`. Fragmentary extraction: braces,
// the return statement and parts of the loops are missing below.
235 void *memcpy(void *__dest, const void *__src, size_t __count)
237 if( ((tVAddr)__dest & 7) != ((tVAddr)__src & 7) )
// NOTE(review): D/S/c are declared as inputs only, but `rep movsb` mutates
// RDI/RSI/RCX and writes memory — the asm should declare those clobbers
// (or use "+D"/"+S"/"+c" and a "memory" clobber); confirm in full source.
238 __asm__ __volatile__ ("rep movsb" : : "D"(__dest),"S"(__src),"c"(__count));
// Same misalignment: byte-copy until 8-byte aligned, then bulk qword copy.
240 const Uint8 *src = __src;
242 while( (tVAddr)src & 7 && __count ) {
247 __asm__ __volatile__ ("rep movsq" : : "D"(dst),"S"(src),"c"(__count/8));
// Remaining tail bytes (< 8) — copied on lines missing from this extraction.
250 __count = __count & 7;
// Fill __count bytes at __dest with __val. Fast path only for zero-fill of
// 8-byte-aligned buffers (`rep stosq`); everything else uses `rep stosb`.
// Fragmentary extraction: braces, alignment loop and return are missing.
257 void *memset(void *__dest, int __val, size_t __count)
259 if( __val != 0 || ((tVAddr)__dest & 7) != 0 )
// NOTE(review): as with memcpy, `rep stosb` mutates RDI/RCX and writes
// memory but no clobbers are declared — confirm against the full source.
260 __asm__ __volatile__ ("rep stosb" : : "D"(__dest),"a"(__val),"c"(__count));
// Zero-fill aligned case: 8 bytes per iteration, then handle the tail.
264 __asm__ __volatile__ ("rep stosq" : : "D"(dst),"a"(0),"c"(__count/8));
266 __count = __count & 7;
// Fill __count 32-bit dwords (not bytes) at __dest with __val via
// `rep stosl`. Fragmentary extraction: the braces and the trailing
// `return __dest;` (presumably) lie outside the visible lines — confirm.
273 void *memsetd(void *__dest, Uint32 __val, size_t __count)
// NOTE(review): same missing-clobber concern as memcpy/memset above.
275 __asm__ __volatile__ ("rep stosl" : : "D"(__dest),"a"(__val),"c"(__count));