From 51d1da69cd1c30ff20a2ff1d958fe0cd149c9cbf Mon Sep 17 00:00:00 2001
From: John Hodge
Date: Fri, 11 Nov 2011 12:59:49 +0800
Subject: [PATCH] Kernel/x86 - Distinguished tracing of unaligned memcpys from aligned

---
 Kernel/arch/x86/lib.c | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/Kernel/arch/x86/lib.c b/Kernel/arch/x86/lib.c
index 9dc631d0..0b802d1e 100644
--- a/Kernel/arch/x86/lib.c
+++ b/Kernel/arch/x86/lib.c
@@ -324,10 +324,12 @@ int memcmp(const void *m1, const void *m2, size_t Num)
  */
 void *memcpy(void *Dest, const void *Src, size_t Num)
 {
-//	Debug("\nmemcpy:Num=0x%x by %p", Num, __builtin_return_address(0));
-	if( ((Uint)Dest & 3) || ((Uint)Src & 3) )
+	if( ((Uint)Dest & 3) || ((Uint)Src & 3) ) {
 		__asm__ __volatile__ ("rep movsb" :: "D" (Dest), "S" (Src), "c" (Num));
+//		Debug("\nmemcpy:Num=0x%x by %p (UA)", Num, __builtin_return_address(0));
+	} else {
+//		Debug("\nmemcpy:Num=0x%x by %p", Num, __builtin_return_address(0));
		__asm__ __volatile__ (
			"rep movsl;\n\t"
			"mov %3, %%ecx;\n\t"
-- 
2.20.1
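
Note (not part of the patch): the change above keeps the existing alignment dispatch in memcpy — byte-wise "rep movsb" when either pointer is not 4-byte aligned, word-wise "rep movsl" plus a byte tail otherwise — and merely moves the commented-out Debug() trace into each branch so unaligned copies can be traced separately. The following is a minimal, portable C sketch of that dispatch, not the Acess2 kernel code: it assumes a hypothetical name copy_sketch, uses uintptr_t instead of the kernel's Uint typedef, and replaces the inline assembly with plain loops.

	#include <stddef.h>
	#include <stdint.h>
	
	/* Illustrative sketch of alignment-dispatched copying (hypothetical). */
	static void *copy_sketch(void *Dest, const void *Src, size_t Num)
	{
		if( ((uintptr_t)Dest & 3) || ((uintptr_t)Src & 3) ) {
			/* Unaligned source or destination: byte-wise copy,
			 * the role of "rep movsb" in the patched function. */
			unsigned char *d = Dest;
			const unsigned char *s = Src;
			while( Num-- )
				*d++ = *s++;
		} else {
			/* Both 4-byte aligned: copy 32-bit words, then the
			 * remaining 0-3 tail bytes (what "mov %3, %%ecx"
			 * followed by a byte copy handles in the original). */
			uint32_t *d = Dest;
			const uint32_t *s = Src;
			size_t words = Num / 4;
			size_t tail  = Num & 3;
			while( words-- )
				*d++ = *s++;
			unsigned char *db = (unsigned char *)d;
			const unsigned char *sb = (const unsigned char *)s;
			while( tail-- )
				*db++ = *sb++;
		}
		return Dest;
	}

Splitting the trace per branch, as the patch does, makes it easy to see which callers trigger the slower byte-wise path when the Debug() lines are re-enabled.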