Kernel/x86 - Distinguished tracing of unaligned memcpys from aligned
author: John Hodge <[email protected]>
Fri, 11 Nov 2011 04:59:49 +0000 (12:59 +0800)
committer: John Hodge <[email protected]>
Fri, 11 Nov 2011 04:59:49 +0000 (12:59 +0800)
Kernel/arch/x86/lib.c

index 9dc631d..0b802d1 100644 (file)
@@ -324,10 +324,12 @@ int memcmp(const void *m1, const void *m2, size_t Num)
  */
 void *memcpy(void *Dest, const void *Src, size_t Num)
 {
-//     Debug("\nmemcpy:Num=0x%x by %p", Num, __builtin_return_address(0));
-       if( ((Uint)Dest & 3) || ((Uint)Src & 3) )
+       if( ((Uint)Dest & 3) || ((Uint)Src & 3) ) {
                __asm__ __volatile__ ("rep movsb" :: "D" (Dest), "S" (Src), "c" (Num));
+//             Debug("\nmemcpy:Num=0x%x by %p (UA)", Num, __builtin_return_address(0));
+       }
        else {
+//             Debug("\nmemcpy:Num=0x%x by %p", Num, __builtin_return_address(0));
                __asm__ __volatile__ (
                        "rep movsl;\n\t"
                        "mov %3, %%ecx;\n\t"

UCC git Repository :: git.ucc.asn.au