summary | refs | log | tree | commit | diff
path: root/libc/sysdeps/i386/i686/memcpy.S
diff options
context:
space:
mode:
author    joseph <joseph@7b3dc134-2b1b-0410-93df-9e9f96275f8d>  2008-04-10 11:54:18 +0000
committer joseph <joseph@7b3dc134-2b1b-0410-93df-9e9f96275f8d>  2008-04-10 11:54:18 +0000
commit 8f50aae34899188b08cfed79bb519d353510c702 (patch)
tree 33d788dca0d0925d6b4ec8ffe667208ab28767ce /libc/sysdeps/i386/i686/memcpy.S
parent 137b16f340e862d39918b2aef64253a01d3cf4ee (diff)
download eglibc2-8f50aae34899188b08cfed79bb519d353510c702.tar.gz
Merge changes between r5759 and r5854 from /fsf/trunk.
git-svn-id: svn://svn.eglibc.org/trunk@5855 7b3dc134-2b1b-0410-93df-9e9f96275f8d
Diffstat (limited to 'libc/sysdeps/i386/i686/memcpy.S')
-rw-r--r--  libc/sysdeps/i386/i686/memcpy.S | 57
1 files changed, 46 insertions, 11 deletions
diff --git a/libc/sysdeps/i386/i686/memcpy.S b/libc/sysdeps/i386/i686/memcpy.S
index 00e84ec2e..ff5c66e9d 100644
--- a/libc/sysdeps/i386/i686/memcpy.S
+++ b/libc/sysdeps/i386/i686/memcpy.S
@@ -1,7 +1,7 @@
/* Copy memory block and return pointer to beginning of destination block
For Intel 80x86, x>=6.
This file is part of the GNU C Library.
- Copyright (C) 1999, 2000, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 1999, 2000, 2003, 2004, 2008 Free Software Foundation, Inc.
Contributed by Ulrich Drepper <drepper@cygnus.com>, 1999.
The GNU C Library is free software; you can redistribute it and/or
@@ -41,29 +41,64 @@ END (__memcpy_chk)
ENTRY (BP_SYM (memcpy))
ENTER
- movl LEN(%esp), %ecx
movl %edi, %eax
movl DEST(%esp), %edi
movl %esi, %edx
movl SRC(%esp), %esi
- CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %ecx)
- CHECK_BOUNDS_BOTH_WIDE (%esi, SRC(%esp), %ecx)
+ movl %edi, %ecx
+ xorl %esi, %ecx
+ andl $3, %ecx
+ movl LEN(%esp), %ecx
cld
- shrl $1, %ecx
- jnc 1f
+ jne .Lunaligned
+
+ cmpl $3, %ecx
+ jbe .Lunaligned
+
+ testl $3, %esi
+ je 1f
movsb
-1: shrl $1, %ecx
- jnc 2f
- movsw
-2: rep
+ decl %ecx
+ testl $3, %esi
+ je 1f
+ movsb
+ decl %ecx
+ testl $3, %esi
+ je 1f
+ movsb
+ decl %ecx
+1: pushl %eax
+ movl %ecx, %eax
+ shrl $2, %ecx
+ rep
movsl
- movl %eax, %edi
+ movl %eax, %ecx
+ andl $3, %ecx
+ rep
+ movsb
+ popl %eax
+
+.Lend: movl %eax, %edi
movl %edx, %esi
movl DEST(%esp), %eax
RETURN_BOUNDED_POINTER (DEST(%esp))
LEAVE
RET_PTR
+
+ /* When we come here the pointers do not have the same
+ alignment or the length is too short. No need to optimize for
+ aligned memory accesses. */
+.Lunaligned:
+ shrl $1, %ecx
+ jnc 1f
+ movsb
+1: shrl $1, %ecx
+ jnc 2f
+ movsw
+2: rep
+ movsl
+ jmp .Lend
END (BP_SYM (memcpy))
libc_hidden_builtin_def (memcpy)