path: root/sysdeps/i386/i686/memcpy.S
/* Copy memory block and return pointer to beginning of destination block.
   For Intel 80x86, x>=6.
   This file is part of the GNU C Library.
   Copyright (C) 1999-2019 Free Software Foundation, Inc.
   Contributed by Ulrich Drepper <drepper@cygnus.com>, 1999.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <sysdep.h>
#include "asm-syntax.h"

#define PARMS	4		/* no space for saved regs */
#define RTN	PARMS
#define DEST	RTN
#define SRC	DEST+4
#define LEN	SRC+4
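
/* With no registers pushed on entry, only the return address sits below
   the arguments, so PARMS == 4 and the stack looks like:
     4(%esp) DEST,  8(%esp) SRC,  12(%esp) LEN.  */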

	.text
#if defined PIC && IS_IN (libc)
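/* __memcpy_chk (dst, src, len, dstlen): jump to __chk_fail when the
   destination object size dstlen is smaller than len; otherwise fall
   straight through into memcpy below.  */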
ENTRY_CHK (__memcpy_chk)
	movl	12(%esp), %eax
	cmpl	%eax, 16(%esp)
	jb	HIDDEN_JUMPTARGET (__chk_fail)
END_CHK (__memcpy_chk)
#endif
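
/* The copy strategy below, as a rough C-like sketch for exposition
   (the longword copies stand in for rep movsl / rep movsb):

     if (((dest ^ src) & 3) != 0 || len <= 3)
       {
	 (.Lunaligned)
	 if (len & 1) copy one byte;
	 if (len & 2) copy one word;
	 copy len / 4 longwords;
       }
     else
       {
	 copy single bytes until src is 4-byte aligned (at most three);
	 copy len / 4 longwords, then len % 4 trailing bytes;
       }
     return dest;  */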
ENTRY (memcpy)

	/* Stash the caller's %edi and %esi in %eax and %edx rather than
	   pushing them, so the argument offsets stay valid.  */
	movl	%edi, %eax
	movl	DEST(%esp), %edi
	movl	%esi, %edx
	movl	SRC(%esp), %esi

	/* If DEST and SRC are not equally aligned modulo 4, longword
	   copying cannot align both, so take the simple path.  Neither
	   movl nor cld changes the flags set by andl.  */
	movl	%edi, %ecx
	xorl	%esi, %ecx
	andl	$3, %ecx
	movl	LEN(%esp), %ecx
	cld
	jne	.Lunaligned

	/* Lengths of three bytes or less also take the simple path.  */
	cmpl	$3, %ecx
	jbe	.Lunaligned

	/* Copy single bytes until SRC is 4-byte aligned (at most three;
	   LEN is known to be at least four here).  */
	testl	$3, %esi
	je	1f
	movsb
	decl	%ecx
	testl	$3, %esi
	je	1f
	movsb
	decl	%ecx
	testl	$3, %esi
	je	1f
	movsb
	decl	%ecx
	/* Copy the bulk as longwords, then the remaining 0-3 bytes; %eax
	   (holding the caller's %edi) is spilled around this so it can
	   hold the tail count.  */
1:	pushl	%eax
	movl	%ecx, %eax
	shrl	$2, %ecx
	andl	$3, %eax
	rep
	movsl
	movl	%eax, %ecx
	rep
	movsb
	popl	%eax

	/* Restore the caller's %edi and %esi and return DEST.  */
.Lend:	movl	%eax, %edi
	movl	%edx, %esi
	movl	DEST(%esp), %eax

	ret

	/* When we come here the pointers do not have the same
	   alignment or the length is too short.  No need to optimize for
	   aligned memory accesses. */
.Lunaligned:
	/* Shift the low bits of LEN into the carry flag: copy one byte if
	   LEN is odd, one word if bit 1 is set, then the rest as
	   longwords.  */
	shrl	$1, %ecx
	jnc	1f
	movsb
1:	shrl	$1, %ecx
	jnc	2f
	movsw
2:	rep
	movsl
	jmp	.Lend
END (memcpy)
libc_hidden_builtin_def (memcpy)