From 72ce9b700cad1571715f2cfbafebdf785b1340e9 Mon Sep 17 00:00:00 2001
From: Torbjorn Granlund
Date: Wed, 30 May 2012 15:10:26 +0200
Subject: Rename DOS64_ENTRY and DOS64_EXIT.

---
 ChangeLog | 5 +++++
 mpn/x86_64/addmul_2.asm | 4 ++--
 mpn/x86_64/aorrlsh1_n.asm | 4 ++--
 mpn/x86_64/aorrlshC_n.asm | 4 ++--
 mpn/x86_64/aorrlsh_n.asm | 4 ++--
 mpn/x86_64/aors_n.asm | 12 ++++++------
 mpn/x86_64/aorscnd_n.asm | 4 ++--
 mpn/x86_64/atom/aorrlsh1_n.asm | 6 +++---
 mpn/x86_64/atom/aorrlsh2_n.asm | 4 ++--
 mpn/x86_64/atom/lshift.asm | 6 +++---
 mpn/x86_64/atom/lshiftc.asm | 6 +++---
 mpn/x86_64/atom/rsh1aors_n.asm | 4 ++--
 mpn/x86_64/atom/rshift.asm | 6 +++---
 mpn/x86_64/atom/sublsh1_n.asm | 6 +++---
 mpn/x86_64/bdiv_dbm1c.asm | 4 ++--
 mpn/x86_64/bdiv_q_1.asm | 8 ++++----
 mpn/x86_64/bobcat/copyd.asm | 4 ++--
 mpn/x86_64/bobcat/copyi.asm | 4 ++--
 mpn/x86_64/bobcat/mul_basecase.asm | 10 +++++-----
 mpn/x86_64/bobcat/sqr_basecase.asm | 10 +++++-----
 mpn/x86_64/com.asm | 4 ++--
 mpn/x86_64/core2/aors_n.asm | 6 +++---
 mpn/x86_64/core2/aorsmul_1.asm | 6 +++---
 mpn/x86_64/core2/divrem_1.asm | 6 +++---
 mpn/x86_64/core2/gcd_1.asm | 4 ++--
 mpn/x86_64/core2/lshift.asm | 8 ++++----
 mpn/x86_64/core2/lshiftc.asm | 8 ++++----
 mpn/x86_64/core2/rsh1aors_n.asm | 6 +++---
 mpn/x86_64/core2/rshift.asm | 8 ++++----
 mpn/x86_64/core2/sublshC_n.asm | 4 ++--
 mpn/x86_64/coreinhm/aorrlsh_n.asm | 6 +++---
 mpn/x86_64/coreisbr/addmul_2.asm | 4 ++--
 mpn/x86_64/coreisbr/aorrlshC_n.asm | 6 +++---
 mpn/x86_64/coreisbr/aorrlsh_n.asm | 6 +++---
 mpn/x86_64/coreisbr/aors_n.asm | 8 ++++----
 mpn/x86_64/coreisbr/rsh1aors_n.asm | 6 +++---
 mpn/x86_64/div_qr_2n_pi1.asm | 4 ++--
 mpn/x86_64/dive_1.asm | 6 +++---
 mpn/x86_64/divrem_1.asm | 6 +++---
 mpn/x86_64/divrem_2.asm | 4 ++--
 mpn/x86_64/dos64.m4 | 10 +++++-----
 mpn/x86_64/fastsse/com.asm | 4 ++--
 mpn/x86_64/fastsse/copyd-palignr.asm | 8 ++++----
 mpn/x86_64/fastsse/copyd.asm | 4 ++--
 mpn/x86_64/fastsse/copyi-palignr.asm | 8 ++++----
 mpn/x86_64/fastsse/copyi.asm | 6 +++---
 mpn/x86_64/fastsse/lshift-movdqu2.asm | 6 +++---
 mpn/x86_64/fastsse/lshiftc-movdqu2.asm | 6 +++---
 mpn/x86_64/fastsse/rshift-movdqu2.asm | 12 ++++++------
 mpn/x86_64/fat/fat_entry.asm | 4 ++--
 mpn/x86_64/gcd_1.asm | 4 ++--
 mpn/x86_64/invert_limb.asm | 4 ++--
 mpn/x86_64/k10/hamdist.asm | 8 ++++----
 mpn/x86_64/k10/popcount.asm | 4 ++--
 mpn/x86_64/k8/aorrlsh_n.asm | 4 ++--
 mpn/x86_64/logops_n.asm | 12 ++++++------
 mpn/x86_64/lshift.asm | 12 ++++++------
 mpn/x86_64/lshiftc.asm | 4 ++--
 mpn/x86_64/lshsub_n.asm | 4 ++--
 mpn/x86_64/mod_1_1.asm | 8 ++++----
 mpn/x86_64/mod_1_2.asm | 8 ++++----
 mpn/x86_64/mod_1_4.asm | 8 ++++----
 mpn/x86_64/mod_34lsub1.asm | 6 +++---
 mpn/x86_64/mode1o.asm | 6 +++---
 mpn/x86_64/mul_2.asm | 4 ++--
 mpn/x86_64/mul_basecase.asm | 4 ++--
 mpn/x86_64/mullo_basecase.asm | 10 +++++-----
 mpn/x86_64/mulmid_basecase.asm | 4 ++--
 mpn/x86_64/nano/dive_1.asm | 6 +++---
 mpn/x86_64/pentium4/aors_n.asm | 6 +++---
 mpn/x86_64/pentium4/aorslshC_n.asm | 4 ++--
 mpn/x86_64/pentium4/lshift.asm | 4 ++--
 mpn/x86_64/pentium4/lshiftc.asm | 4 ++--
 mpn/x86_64/pentium4/mod_34lsub1.asm | 6 +++---
 mpn/x86_64/pentium4/rsh1aors_n.asm | 6 +++---
 mpn/x86_64/pentium4/rshift.asm | 4 ++--
 mpn/x86_64/popham.asm | 6 +++---
 mpn/x86_64/redc_1.asm | 4 ++--
 mpn/x86_64/rsh1aors_n.asm | 6 +++---
 mpn/x86_64/rshift.asm | 4 ++--
 mpn/x86_64/sqr_basecase.asm | 12 ++++++------
 mpn/x86_64/sublsh1_n.asm | 4 ++--
 mpn/x86_64/tabselect.asm | 4 ++--
 mpn/x86_64/x86_64-defs.m4 | 4 ++--
 84 files changed, 251 insertions(+), 246 deletions(-)

diff --git a/ChangeLog b/ChangeLog
index
cc3845008..005ada450 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,8 @@ +2012-05-30 Torbjorn Granlund + + * mpn/x86_64 (FUNC_ENTRY): New name for DOS64_ENTRY. + * mpn/x86_64 (FUNC_EXIT): New name for DOS64_EXIT. + 2012-05-29 Marco Bodrato * mpz/remove.c: Optimise branches. diff --git a/mpn/x86_64/addmul_2.asm b/mpn/x86_64/addmul_2.asm index f25f7dd76..f85760e08 100644 --- a/mpn/x86_64/addmul_2.asm +++ b/mpn/x86_64/addmul_2.asm @@ -57,7 +57,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_addmul_2) - DOS64_ENTRY(4) + FUNC_ENTRY(4) mov n_param, n push %rbx push %rbp @@ -168,6 +168,6 @@ L(end): xor R32(w1), R32(w1) pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/aorrlsh1_n.asm b/mpn/x86_64/aorrlsh1_n.asm index 54ac040b0..62efd7f15 100644 --- a/mpn/x86_64/aorrlsh1_n.asm +++ b/mpn/x86_64/aorrlsh1_n.asm @@ -62,7 +62,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbp mov (vp), %r8 @@ -152,6 +152,6 @@ ifdef(`OPERATION_rsblsh1_n',` movslq R32(%rbp), %rax') pop %rbp - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/aorrlshC_n.asm b/mpn/x86_64/aorrlshC_n.asm index 18c2d0c0c..d2f9bf701 100644 --- a/mpn/x86_64/aorrlshC_n.asm +++ b/mpn/x86_64/aorrlshC_n.asm @@ -44,7 +44,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %r12 push %r13 push %r14 @@ -144,6 +144,6 @@ ifelse(ADDSUB,add,` pop %r14 pop %r13 pop %r12 - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/aorrlsh_n.asm b/mpn/x86_64/aorrlsh_n.asm index 146248472..8b8699f83 100644 --- a/mpn/x86_64/aorrlsh_n.asm +++ b/mpn/x86_64/aorrlsh_n.asm @@ -63,7 +63,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') push %r12 push %r13 @@ -160,6 +160,6 @@ L(end): add R32(%rbx), R32(%rbx) pop %r14 pop %r13 pop %r12 - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/aors_n.asm b/mpn/x86_64/aors_n.asm index ec7ac97e6..34d68b6d4 100644 --- a/mpn/x86_64/aors_n.asm +++ b/mpn/x86_64/aors_n.asm @@ -58,7 +58,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') mov R32(n), R32(%rax) shr $2, n @@ -74,7 +74,7 @@ IFDOS(` mov 56(%rsp), %r8 ') EPILOGUE() ALIGN(16) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) mov R32(n), R32(%rax) shr $2, n and $3, R32(%rax) @@ -91,7 +91,7 @@ L(lt4): dec R32(%rax) ADCSBB (vp), %r8 mov %r8, (rp) adc R32(%rax), R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret L(2): dec R32(%rax) @@ -102,7 +102,7 @@ L(2): dec R32(%rax) mov %r8, (rp) mov %r9, 8(rp) adc R32(%rax), R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret L(3): mov 16(up), %r10 @@ -113,7 +113,7 @@ L(3): mov 16(up), %r10 mov %r9, 8(rp) mov %r10, 16(rp) setc R8(%rax) - DOS64_EXIT() + FUNC_EXIT() ret ALIGN(16) @@ -151,6 +151,6 @@ L(end): lea 32(up), up dec R32(%rax) jnz L(lt4) adc R32(%rax), R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/aorscnd_n.asm b/mpn/x86_64/aorscnd_n.asm index eee15e563..6e49d1646 100644 --- a/mpn/x86_64/aorscnd_n.asm +++ b/mpn/x86_64/aorscnd_n.asm @@ -68,7 +68,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') push %rbx push %rbp @@ -167,6 +167,6 @@ L(end): neg R32(%rax) pop %r12 pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/atom/aorrlsh1_n.asm b/mpn/x86_64/atom/aorrlsh1_n.asm index a1c3963ad..ea436fc59 100644 --- a/mpn/x86_64/atom/aorrlsh1_n.asm +++ 
b/mpn/x86_64/atom/aorrlsh1_n.asm @@ -64,7 +64,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbp xor R32(%rbp), R32(%rbp) L(ent): mov R32(n), R32(%rax) @@ -213,11 +213,11 @@ ifdef(`OPERATION_rsblsh1_n',` movslq R32(%rbp), %rax') pop %rbp - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') push %rbp neg %r8 C set CF diff --git a/mpn/x86_64/atom/aorrlsh2_n.asm b/mpn/x86_64/atom/aorrlsh2_n.asm index 77989bbde..85b5b195c 100644 --- a/mpn/x86_64/atom/aorrlsh2_n.asm +++ b/mpn/x86_64/atom/aorrlsh2_n.asm @@ -63,7 +63,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %rbp @@ -175,6 +175,6 @@ ifdef(`OPERATION_rsblsh2_n',` pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/atom/lshift.asm b/mpn/x86_64/atom/lshift.asm index 912192f55..8406ed3fd 100644 --- a/mpn/x86_64/atom/lshift.asm +++ b/mpn/x86_64/atom/lshift.asm @@ -48,7 +48,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_lshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) lea -8(up,n,8), up lea -8(rp,n,8), rp shr R32(n) @@ -62,7 +62,7 @@ PROLOGUE(mpn_lshift) test n, n jnz L(gt1) mov %r11, (rp) - DOS64_EXIT() + FUNC_EXIT() ret L(gt1): mov -8(up), %r8 @@ -107,6 +107,6 @@ L(end): shl R8(%rcx), %r10 shl R8(%rcx), %r11 mov %r9, -8(rp) mov %r11, -16(rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/atom/lshiftc.asm b/mpn/x86_64/atom/lshiftc.asm index 6b20aff2c..53e2f607a 100644 --- a/mpn/x86_64/atom/lshiftc.asm +++ b/mpn/x86_64/atom/lshiftc.asm @@ -48,7 +48,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_lshiftc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) lea -8(up,n,8), up lea -8(rp,n,8), rp shr R32(n) @@ -63,7 +63,7 @@ PROLOGUE(mpn_lshiftc) jnz L(gt1) not %r11 mov %r11, (rp) - DOS64_EXIT() + FUNC_EXIT() ret L(gt1): mov -8(up), %r8 @@ -111,6 +111,6 @@ L(end): shl R8(%rcx), %r10 not %r11 mov %r9, -8(rp) mov %r11, -16(rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/atom/rsh1aors_n.asm b/mpn/x86_64/atom/rsh1aors_n.asm index f07249f6b..f1dbce546 100644 --- a/mpn/x86_64/atom/rsh1aors_n.asm +++ b/mpn/x86_64/atom/rsh1aors_n.asm @@ -61,7 +61,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %rbp push %r12 @@ -271,6 +271,6 @@ L(cj1): pop %r15 pop %r12 pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/atom/rshift.asm b/mpn/x86_64/atom/rshift.asm index 42b9d062e..c049df6de 100644 --- a/mpn/x86_64/atom/rshift.asm +++ b/mpn/x86_64/atom/rshift.asm @@ -48,7 +48,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_rshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) shr R32(n) mov (up), %rax jnc L(evn) @@ -60,7 +60,7 @@ PROLOGUE(mpn_rshift) test n, n jnz L(gt1) mov %r11, (rp) - DOS64_EXIT() + FUNC_EXIT() ret L(gt1): mov 8(up), %r8 @@ -105,6 +105,6 @@ L(end): shr R8(cnt), %r10 shr R8(cnt), %r11 mov %r9, 8(rp) mov %r11, 16(rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/atom/sublsh1_n.asm b/mpn/x86_64/atom/sublsh1_n.asm index ae267b929..67ca7488c 100644 --- a/mpn/x86_64/atom/sublsh1_n.asm +++ b/mpn/x86_64/atom/sublsh1_n.asm @@ -49,7 +49,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_sublsh1_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbp push %r15 xor R32(%rbp), R32(%rbp) @@ -214,11 +214,11 @@ L(rtn): pop %r15 pop %rbp - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() PROLOGUE(mpn_sublsh1_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') push %rbp push %r15 diff --git 
a/mpn/x86_64/bdiv_dbm1c.asm b/mpn/x86_64/bdiv_dbm1c.asm index 732298482..ddcc340fd 100644 --- a/mpn/x86_64/bdiv_dbm1c.asm +++ b/mpn/x86_64/bdiv_dbm1c.asm @@ -48,7 +48,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_bdiv_dbm1c) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') mov (up), %rax mov n_param, n @@ -90,6 +90,6 @@ L(lo1): sub %rax, %r8 jnz L(top) mov %r8, %rax - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/bdiv_q_1.asm b/mpn/x86_64/bdiv_q_1.asm index 7325af3c9..b3d10f418 100644 --- a/mpn/x86_64/bdiv_q_1.asm +++ b/mpn/x86_64/bdiv_q_1.asm @@ -48,7 +48,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_bdiv_q_1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx mov %rcx, %rax @@ -95,7 +95,7 @@ L(evn): bsf %rax, %rcx EPILOGUE() PROLOGUE(mpn_pi1_bdiv_q_1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') IFDOS(` mov 64(%rsp), %r9 ') push %rbx @@ -151,13 +151,13 @@ L(ent): imul %r8, %rax imul %r8, %rax mov %rax, (%rdi) pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret L(one): shr R8(%rcx), %rax imul %r8, %rax mov %rax, (%rdi) pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/bobcat/copyd.asm b/mpn/x86_64/bobcat/copyd.asm index c6253a861..11cc0ee36 100644 --- a/mpn/x86_64/bobcat/copyd.asm +++ b/mpn/x86_64/bobcat/copyd.asm @@ -47,7 +47,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_copyd) - DOS64_ENTRY(3) + FUNC_ENTRY(3) sub $4, n jl L(end) ALIGN(16) @@ -75,6 +75,6 @@ L(end): cmp $-4, R32(n) mov 8(up,n,8), %r8 mov %r8, 8(rp,n,8) -L(ret): DOS64_EXIT() +L(ret): FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/bobcat/copyi.asm b/mpn/x86_64/bobcat/copyi.asm index 2433c7d55..e361e5092 100644 --- a/mpn/x86_64/bobcat/copyi.asm +++ b/mpn/x86_64/bobcat/copyi.asm @@ -47,7 +47,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_copyi) - DOS64_ENTRY(3) + FUNC_ENTRY(3) lea -32(up,n,8), up lea -32(rp,n,8), rp neg n @@ -78,6 +78,6 @@ L(end): cmp $4, R32(n) mov 16(up,n,8), %r8 mov %r8, 16(rp,n,8) -L(ret): DOS64_EXIT() +L(ret): FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/bobcat/mul_basecase.asm b/mpn/x86_64/bobcat/mul_basecase.asm index 1aad1669f..74870123c 100644 --- a/mpn/x86_64/bobcat/mul_basecase.asm +++ b/mpn/x86_64/bobcat/mul_basecase.asm @@ -78,7 +78,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_mul_basecase) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') mov (up), %rax @@ -91,7 +91,7 @@ IFDOS(` mov 56(%rsp), %r8d ') mul v0 C u0 x v0 mov %rax, (rp) mov %rdx, 8(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(u2): mul v0 C u0 x v0 @@ -106,7 +106,7 @@ L(u2): mul v0 C u0 x v0 jnz L(u2v2) mov w0, 8(rp) mov w1, 16(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(u2v2):mov 8(vp), v0 @@ -124,7 +124,7 @@ L(u2v2):mov 8(vp), v0 adc $0, %rdx mov %rax, 16(rp) mov %rdx, 24(rp) - DOS64_EXIT() + FUNC_EXIT() ret @@ -471,6 +471,6 @@ L(ret): pop %r13 pop %r12 pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/bobcat/sqr_basecase.asm b/mpn/x86_64/bobcat/sqr_basecase.asm index 87a6e5175..248ec7017 100644 --- a/mpn/x86_64/bobcat/sqr_basecase.asm +++ b/mpn/x86_64/bobcat/sqr_basecase.asm @@ -75,7 +75,7 @@ ASM_START() TEXT ALIGN(64) PROLOGUE(mpn_sqr_basecase) - DOS64_ENTRY(3) + FUNC_ENTRY(3) mov (up), %rax @@ -85,7 +85,7 @@ PROLOGUE(mpn_sqr_basecase) mul %rax mov %rax, (rp) mov %rdx, 8(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(ge2): mov (up), v0 @@ -114,7 +114,7 @@ L(ge2): mov (up), v0 adc $0, %rdx mov %rax, 16(rp) mov %rdx, 24(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(g2): cmp $3, R32(un_param) @@ -159,7 +159,7 @@ L(g2): cmp $3, R32(un_param) adc w2, 
24(rp) adc w3, 32(rp) adc v0, 40(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(g3): push %rbx @@ -550,6 +550,6 @@ L(esd): add %rbx, w0 pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/com.asm b/mpn/x86_64/com.asm index 76929b1be..cfffb695e 100644 --- a/mpn/x86_64/com.asm +++ b/mpn/x86_64/com.asm @@ -41,7 +41,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_com) - DOS64_ENTRY(3) + FUNC_ENTRY(3) movq (up), %r8 movl R32(%rdx), R32(%rax) leaq (up,n,8), up @@ -79,6 +79,6 @@ L(e10): movq 24(up,n,8), %r9 movq %r9, 24(rp,n,8) addq $4, n jnc L(oop) -L(ret): DOS64_EXIT() +L(ret): FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/core2/aors_n.asm b/mpn/x86_64/core2/aors_n.asm index 80eaebed3..450999818 100644 --- a/mpn/x86_64/core2/aors_n.asm +++ b/mpn/x86_64/core2/aors_n.asm @@ -55,13 +55,13 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') jmp L(start) EPILOGUE() PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) xor %r8, %r8 L(start): mov (up), %r10 @@ -100,7 +100,7 @@ L(end): ADCSBB %r11, %r10 mov %r10, 8(rp) mov R32(%rcx), R32(%rax) C clear eax, ecx contains 0 adc R32(%rax), R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret ALIGN(16) diff --git a/mpn/x86_64/core2/aorsmul_1.asm b/mpn/x86_64/core2/aorsmul_1.asm index 9d7894797..4f889cfe9 100644 --- a/mpn/x86_64/core2/aorsmul_1.asm +++ b/mpn/x86_64/core2/aorsmul_1.asm @@ -64,7 +64,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func_1c) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %rbp lea (%rdx), %rbx @@ -83,7 +83,7 @@ EPILOGUE() ALIGN(16) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %rbp lea (%rdx), %rbx @@ -163,6 +163,6 @@ L(n1): mov 8(rp), %r10 adc %rdx, %rax pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/core2/divrem_1.asm b/mpn/x86_64/core2/divrem_1.asm index 8058b2fb0..500509c57 100644 --- a/mpn/x86_64/core2/divrem_1.asm +++ b/mpn/x86_64/core2/divrem_1.asm @@ -67,7 +67,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_preinv_divrem_1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') IFDOS(` mov 64(%rsp), %r9 ') xor R32(%rax), R32(%rax) @@ -90,7 +90,7 @@ EPILOGUE() ALIGN(16) PROLOGUE(mpn_divrem_1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') xor R32(%rax), R32(%rax) push %r13 @@ -222,6 +222,6 @@ L(ret): pop %rbx pop %rbp pop %r12 pop %r13 - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/core2/gcd_1.asm b/mpn/x86_64/core2/gcd_1.asm index d3f39a0b6..211655523 100644 --- a/mpn/x86_64/core2/gcd_1.asm +++ b/mpn/x86_64/core2/gcd_1.asm @@ -69,7 +69,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_gcd_1) - DOS64_ENTRY(3) + FUNC_ENTRY(3) mov (up), %rax C U low limb or v0, %rax bsf %rax, %rax C min(ctz(u0),ctz(v0)) @@ -129,6 +129,6 @@ L(mid): shr R8(%rcx), %rax C 1,7 1,6 2,8 2,8 2,8 L(end): pop %rcx mov %rdx, %rax shl R8(%rcx), %rax - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/core2/lshift.asm b/mpn/x86_64/core2/lshift.asm index 45a275fd6..f11f48e6c 100644 --- a/mpn/x86_64/core2/lshift.asm +++ b/mpn/x86_64/core2/lshift.asm @@ -44,7 +44,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_lshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) lea -8(rp,n,8), rp lea -8(up,n,8), up @@ -76,7 +76,7 @@ L(b01): mov (up), %r9 jmp L(01) L(le1): shl R8(cnt), %r9 mov %r9, (rp) - DOS64_EXIT() + FUNC_EXIT() ret L(nb01):C n = 2, 6, 10, ... 
@@ -95,7 +95,7 @@ L(le2): shld R8(cnt), %r9, %r8 mov %r8, (rp) shl R8(cnt), %r9 mov %r9, -8(rp) - DOS64_EXIT() + FUNC_EXIT() ret ALIGN(16) C performance critical! @@ -133,6 +133,6 @@ L(end): shld R8(cnt), %r8, %r11 mov %r8, -8(rp) shl R8(cnt), %r9 mov %r9, -16(rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/core2/lshiftc.asm b/mpn/x86_64/core2/lshiftc.asm index a58b93c76..f8ffaf7f2 100644 --- a/mpn/x86_64/core2/lshiftc.asm +++ b/mpn/x86_64/core2/lshiftc.asm @@ -44,7 +44,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_lshiftc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) lea -8(rp,n,8), rp lea -8(up,n,8), up @@ -77,7 +77,7 @@ L(b01): mov (up), %r9 L(le1): shl R8(cnt), %r9 not %r9 mov %r9, (rp) - DOS64_EXIT() + FUNC_EXIT() ret L(nb01):C n = 2, 6, 10, ... @@ -98,7 +98,7 @@ L(le2): shld R8(cnt), %r9, %r8 shl R8(cnt), %r9 not %r9 mov %r9, -8(rp) - DOS64_EXIT() + FUNC_EXIT() ret ALIGN(16) C performance critical! @@ -143,6 +143,6 @@ L(end): shld R8(cnt), %r8, %r11 shl R8(cnt), %r9 not %r9 mov %r9, -16(rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/core2/rsh1aors_n.asm b/mpn/x86_64/core2/rsh1aors_n.asm index 48792de90..bbac0f0bc 100644 --- a/mpn/x86_64/core2/rsh1aors_n.asm +++ b/mpn/x86_64/core2/rsh1aors_n.asm @@ -58,7 +58,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') push %rbx push %rbp @@ -71,7 +71,7 @@ EPILOGUE() ALIGN(16) PROLOGUE(func_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %rbp @@ -153,6 +153,6 @@ L(end): shrd $1, %rbx, %rbp mov %rbp, -8(rp) pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/core2/rshift.asm b/mpn/x86_64/core2/rshift.asm index 7d2a79601..c40754db0 100644 --- a/mpn/x86_64/core2/rshift.asm +++ b/mpn/x86_64/core2/rshift.asm @@ -44,7 +44,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_rshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) mov R32(%rdx), R32(%rax) and $3, R32(%rax) jne L(nb00) @@ -74,7 +74,7 @@ L(b01): mov (up), %r9 jmp L(01) L(le1): shr R8(cnt), %r9 mov %r9, (rp) - DOS64_EXIT() + FUNC_EXIT() ret L(nb01):C n = 2, 6, 10, ... 
@@ -93,7 +93,7 @@ L(le2): shrd R8(cnt), %r9, %r8 mov %r8, (rp) shr R8(cnt), %r9 mov %r9, 8(rp) - DOS64_EXIT() + FUNC_EXIT() ret ALIGN(16) @@ -131,6 +131,6 @@ L(end): shrd R8(cnt), %r8, %r11 mov %r8, 8(rp) shr R8(cnt), %r9 mov %r9, 16(rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/core2/sublshC_n.asm b/mpn/x86_64/core2/sublshC_n.asm index 7c4545f5a..5bb3e466a 100644 --- a/mpn/x86_64/core2/sublshC_n.asm +++ b/mpn/x86_64/core2/sublshC_n.asm @@ -40,7 +40,7 @@ ASM_START() TEXT ALIGN(8) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %r12 @@ -142,6 +142,6 @@ L(end): shr $RSH, %r11 pop %rbx sub R32(%r11), R32(%rax) neg R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/coreinhm/aorrlsh_n.asm b/mpn/x86_64/coreinhm/aorrlsh_n.asm index e339603ea..4351a842f 100644 --- a/mpn/x86_64/coreinhm/aorrlsh_n.asm +++ b/mpn/x86_64/coreinhm/aorrlsh_n.asm @@ -69,7 +69,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(func_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') C cnt push %rbx xor R32(%rbx), R32(%rbx) C clear CF save register @@ -175,11 +175,11 @@ L(wd1): shrd %cl, %r8, %r11 IFRSB( neg %rax) pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') C cnt IFDOS(` mov 64(%rsp), %r9 ') C cy push %rbx diff --git a/mpn/x86_64/coreisbr/addmul_2.asm b/mpn/x86_64/coreisbr/addmul_2.asm index 7ece9a75e..8173e3daa 100644 --- a/mpn/x86_64/coreisbr/addmul_2.asm +++ b/mpn/x86_64/coreisbr/addmul_2.asm @@ -54,7 +54,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_addmul_2) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %r12 push %r13 @@ -200,7 +200,7 @@ L(L2): mov 24(up), %rax pop %r13 pop %r12 pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() ASM_END() diff --git a/mpn/x86_64/coreisbr/aorrlshC_n.asm b/mpn/x86_64/coreisbr/aorrlshC_n.asm index 9811d6431..f0f0db441 100644 --- a/mpn/x86_64/coreisbr/aorrlshC_n.asm +++ b/mpn/x86_64/coreisbr/aorrlshC_n.asm @@ -47,7 +47,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') push %rbp mov cy, %rax @@ -66,7 +66,7 @@ EPILOGUE() ALIGN(16) PROLOGUE(func_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbp xor R32(%rbp), R32(%rbp) C limb carry mov (vp), %r8 @@ -157,6 +157,6 @@ L(end): shr $RSH, %rbp ADCSBB $0, %rbp mov %rbp, %rax pop %rbp - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/coreisbr/aorrlsh_n.asm b/mpn/x86_64/coreisbr/aorrlsh_n.asm index 2cdd2d72a..a8b5b5dcb 100644 --- a/mpn/x86_64/coreisbr/aorrlsh_n.asm +++ b/mpn/x86_64/coreisbr/aorrlsh_n.asm @@ -69,7 +69,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(func_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') C cnt push %rbx xor R32(%rbx), R32(%rbx) C clear CF save register @@ -190,11 +190,11 @@ L(wd1): mov %r10, 24(rp) IFRSB( neg %rax) pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') C cnt IFDOS(` mov 64(%rsp), %r9 ') C cy push %rbx diff --git a/mpn/x86_64/coreisbr/aors_n.asm b/mpn/x86_64/coreisbr/aors_n.asm index d0efa5c5a..1262606e5 100644 --- a/mpn/x86_64/coreisbr/aors_n.asm +++ b/mpn/x86_64/coreisbr/aors_n.asm @@ -56,7 +56,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) xor %r8, %r8 L(ent): mov R32(n), R32(%rax) shr $2, n @@ -74,7 +74,7 @@ L(b1): mov (up), %r10 mov %r10, (rp) mov R32(n), R32(%rax) C zero rax adc R32(%rax), R32(%rax) - DOS64_EXIT() + 
FUNC_EXIT() ret L(gt1): neg R32(%r8) ADCSBB (vp), %r10 @@ -146,11 +146,11 @@ L(e1): ADCSBB 16(vp), %r10 mov %r10, 48(rp) mov R32(n), R32(%rax) C zero rax adc R32(%rax), R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') jmp L(ent) EPILOGUE() diff --git a/mpn/x86_64/coreisbr/rsh1aors_n.asm b/mpn/x86_64/coreisbr/rsh1aors_n.asm index 074f05837..daad0ccca 100644 --- a/mpn/x86_64/coreisbr/rsh1aors_n.asm +++ b/mpn/x86_64/coreisbr/rsh1aors_n.asm @@ -58,7 +58,7 @@ ASM_START() ALIGN(16) PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') push %rbx push %rbp @@ -72,7 +72,7 @@ EPILOGUE() ALIGN(16) PROLOGUE(func_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %rbp @@ -178,6 +178,6 @@ L(end): shrd $1, %rbx, %rbp mov %rbp, (rp) pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/div_qr_2n_pi1.asm b/mpn/x86_64/div_qr_2n_pi1.asm index 6028a49d3..d4b2a373c 100644 --- a/mpn/x86_64/div_qr_2n_pi1.asm +++ b/mpn/x86_64/div_qr_2n_pi1.asm @@ -52,7 +52,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_div_qr_2n_pi1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') IFDOS(` mov 64(%rsp), %r9 ') IFDOS(`define(`di_param', `72(%rsp)')') @@ -131,7 +131,7 @@ L(end): pop %r13 pop %r14 pop %r15 - DOS64_EXIT() + FUNC_EXIT() ret L(fix): C Unlikely update. u2 >= d1 diff --git a/mpn/x86_64/dive_1.asm b/mpn/x86_64/dive_1.asm index ea7c17e40..e9eed6119 100644 --- a/mpn/x86_64/dive_1.asm +++ b/mpn/x86_64/dive_1.asm @@ -46,7 +46,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_divexact_1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx mov %rcx, %rax @@ -135,14 +135,14 @@ L(ent): imul %r10, %rax C 6 imul %r10, %rax mov %rax, (%rdi) pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret L(one): shr R8(%rcx), %rax imul %r10, %rax mov %rax, (%rdi) pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/divrem_1.asm b/mpn/x86_64/divrem_1.asm index 24fcde07a..ce2fcb3a2 100644 --- a/mpn/x86_64/divrem_1.asm +++ b/mpn/x86_64/divrem_1.asm @@ -67,7 +67,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_preinv_divrem_1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') IFDOS(` mov 64(%rsp), %r9 ') xor R32(%rax), R32(%rax) @@ -93,7 +93,7 @@ EPILOGUE() ALIGN(16) PROLOGUE(mpn_divrem_1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') xor R32(%rax), R32(%rax) push %r13 @@ -292,6 +292,6 @@ L(ret): pop %rbx pop %rbp pop %r12 pop %r13 - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/divrem_2.asm b/mpn/x86_64/divrem_2.asm index 459644426..15914804f 100644 --- a/mpn/x86_64/divrem_2.asm +++ b/mpn/x86_64/divrem_2.asm @@ -44,7 +44,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_divrem_2) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') push %r15 push %r14 @@ -155,7 +155,7 @@ L(end): mov %r10, 8(%r12) pop %r14 mov %r15, %rax pop %r15 - DOS64_EXIT() + FUNC_EXIT() ret L(fix): seta %dl diff --git a/mpn/x86_64/dos64.m4 b/mpn/x86_64/dos64.m4 index c67db3eaa..a2d94d870 100644 --- a/mpn/x86_64/dos64.m4 +++ b/mpn/x86_64/dos64.m4 @@ -25,13 +25,13 @@ define(`LEA',` define(`JUMPTABSECT', `RODATA') -dnl Usage: DOS64_ENTRY(nregparmas) -dnl Usage: DOS64_EXIT() +dnl Usage: FUNC_ENTRY(nregparmas) +dnl Usage: FUNC_EXIT() -dnl DOS64_ENTRY and DOS64_EXIT provide an easy path for adoption of standard +dnl FUNC_ENTRY and FUNC_EXIT provide an easy path for adoption of standard dnl ABI assembly to the DOS64 ABI. 
-define(`DOS64_ENTRY', +define(`FUNC_ENTRY', `push %rdi push %rsi mov %rcx, %rdi @@ -43,7 +43,7 @@ ifelse(eval($1>=4),1,`dnl mov %r9, %rcx ')')')') -define(`DOS64_EXIT', +define(`FUNC_EXIT', `pop %rsi pop %rdi') diff --git a/mpn/x86_64/fastsse/com.asm b/mpn/x86_64/fastsse/com.asm index 004afa647..775b1c98e 100644 --- a/mpn/x86_64/fastsse/com.asm +++ b/mpn/x86_64/fastsse/com.asm @@ -56,7 +56,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_com) - DOS64_ENTRY(3) + FUNC_ENTRY(3) test n, n jz L(don) @@ -145,6 +145,6 @@ L(sma): add $14, n not %rax mov %rax, (rp) 1: -L(don): DOS64_EXIT() +L(don): FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/fastsse/copyd-palignr.asm b/mpn/x86_64/fastsse/copyd-palignr.asm index 3b69fea2c..0b058ad0b 100644 --- a/mpn/x86_64/fastsse/copyd-palignr.asm +++ b/mpn/x86_64/fastsse/copyd-palignr.asm @@ -55,7 +55,7 @@ ASM_START() TEXT ALIGN(64) PROLOGUE(mpn_copyd) - DOS64_ENTRY(3) + FUNC_ENTRY(3) lea -8(up,n,8), up lea -8(rp,n,8), rp @@ -115,7 +115,7 @@ L(am): sub $8, n mov (up), %r8 mov %r8, (rp) -1: DOS64_EXIT() +1: FUNC_EXIT() ret L(uent):sub $16, n @@ -193,7 +193,7 @@ L(uend):bt $3, R32(n) mov (up), %r8 mov %r8, (rp) -1: DOS64_EXIT() +1: FUNC_EXIT() ret C Basecase code. Needed for good small operands speed, not for @@ -230,6 +230,6 @@ L(end): bt $0, R32(n) mov -8(up), %r9 mov %r8, (rp) mov %r9, -8(rp) -1: DOS64_EXIT() +1: FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/fastsse/copyd.asm b/mpn/x86_64/fastsse/copyd.asm index 29cc76d06..c5fd7b306 100644 --- a/mpn/x86_64/fastsse/copyd.asm +++ b/mpn/x86_64/fastsse/copyd.asm @@ -54,7 +54,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_copyd) - DOS64_ENTRY(3) + FUNC_ENTRY(3) test n, n jz L(don) @@ -129,6 +129,6 @@ L(sma): test $8, R8(n) mov 8(up), %r8 mov %r8, 8(rp) 1: -L(don): DOS64_EXIT() +L(don): FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/fastsse/copyi-palignr.asm b/mpn/x86_64/fastsse/copyi-palignr.asm index 74165483d..d968b5a26 100644 --- a/mpn/x86_64/fastsse/copyi-palignr.asm +++ b/mpn/x86_64/fastsse/copyi-palignr.asm @@ -57,7 +57,7 @@ ASM_START() TEXT ALIGN(64) PROLOGUE(mpn_copyi) - DOS64_ENTRY(3) + FUNC_ENTRY(3) cmp $COPYI_SSE_THRESHOLD, n jbe L(bc) @@ -111,7 +111,7 @@ L(am): sub $8, n mov (up), %r8 mov %r8, (rp) -1: DOS64_EXIT() +1: FUNC_EXIT() ret L(uent): @@ -209,7 +209,7 @@ L(uend):bt $3, R32(n) mov (up), %r8 mov %r8, (rp) -1: DOS64_EXIT() +1: FUNC_EXIT() ret C Basecase code. Needed for good small operands speed, not for @@ -247,6 +247,6 @@ L(end): bt $0, R32(n) mov 8(up), %r9 mov %r8, 8(rp) mov %r9, 16(rp) -1: DOS64_EXIT() +1: FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/fastsse/copyi.asm b/mpn/x86_64/fastsse/copyi.asm index 0d408c3af..60c5f9ad2 100644 --- a/mpn/x86_64/fastsse/copyi.asm +++ b/mpn/x86_64/fastsse/copyi.asm @@ -56,7 +56,7 @@ ASM_START() TEXT ALIGN(64) PROLOGUE(mpn_copyi) - DOS64_ENTRY(3) + FUNC_ENTRY(3) cmp $3, n jc L(bc) @@ -126,7 +126,7 @@ L(end): bt $0, n mov (up), %r8 mov %r8, (rp) 1: - DOS64_EXIT() + FUNC_EXIT() ret C Basecase code. 
Needed for good small operands speed, not for @@ -148,6 +148,6 @@ L(bc): sub $2, n jnc L(ret) mov (up), %rax mov %rax, (rp) -L(ret): DOS64_EXIT() +L(ret): FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/fastsse/lshift-movdqu2.asm b/mpn/x86_64/fastsse/lshift-movdqu2.asm index 94da168ae..f2b0e8655 100644 --- a/mpn/x86_64/fastsse/lshift-movdqu2.asm +++ b/mpn/x86_64/fastsse/lshift-movdqu2.asm @@ -56,7 +56,7 @@ ASM_START() TEXT ALIGN(64) PROLOGUE(mpn_lshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) movd R32(%rcx), %xmm4 mov $64, R32(%rax) sub R32(%rcx), R32(%rax) @@ -140,7 +140,7 @@ L(end): bt $0, R32(n) psrlq %xmm5, %xmm0 por %xmm1, %xmm0 movdqa %xmm0, (rp) - DOS64_EXIT() + FUNC_EXIT() ret C Basecase @@ -166,6 +166,6 @@ L(bc): dec R32(n) L(end8):movq (ap), %xmm0 psllq %xmm4, %xmm0 movq %xmm0, (rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/fastsse/lshiftc-movdqu2.asm b/mpn/x86_64/fastsse/lshiftc-movdqu2.asm index 550a0f6a5..d05171cf0 100644 --- a/mpn/x86_64/fastsse/lshiftc-movdqu2.asm +++ b/mpn/x86_64/fastsse/lshiftc-movdqu2.asm @@ -56,7 +56,7 @@ ASM_START() TEXT ALIGN(64) PROLOGUE(mpn_lshiftc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) movd R32(%rcx), %xmm4 mov $64, R32(%rax) sub R32(%rcx), R32(%rax) @@ -148,7 +148,7 @@ L(end): bt $0, R32(n) por %xmm1, %xmm0 pxor %xmm3, %xmm0 movdqa %xmm0, (rp) - DOS64_EXIT() + FUNC_EXIT() ret C Basecase @@ -177,6 +177,6 @@ L(end8):movq (ap), %xmm0 psllq %xmm4, %xmm0 pxor %xmm3, %xmm0 movq %xmm0, (rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/fastsse/rshift-movdqu2.asm b/mpn/x86_64/fastsse/rshift-movdqu2.asm index a7851bf32..eb178315d 100644 --- a/mpn/x86_64/fastsse/rshift-movdqu2.asm +++ b/mpn/x86_64/fastsse/rshift-movdqu2.asm @@ -56,7 +56,7 @@ ASM_START() TEXT ALIGN(64) PROLOGUE(mpn_rshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) movd R32(%rcx), %xmm4 mov $64, R32(%rax) sub R32(%rcx), R32(%rax) @@ -143,13 +143,13 @@ L(end): bt $0, R32(n) psllq %xmm5, %xmm0 por %xmm1, %xmm0 movdqa %xmm0, -16(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(e1): movq -8(ap), %xmm0 psrlq %xmm4, %xmm0 movq %xmm0, -8(rp) - DOS64_EXIT() + FUNC_EXIT() ret C Basecase @@ -159,7 +159,7 @@ L(bc): dec R32(n) movq (ap), %xmm0 psrlq %xmm4, %xmm0 movq %xmm0, (rp) - DOS64_EXIT() + FUNC_EXIT() ret 1: movq (ap), %xmm1 @@ -173,7 +173,7 @@ L(bc): dec R32(n) movq 8(ap), %xmm0 psrlq %xmm4, %xmm0 movq %xmm0, 8(rp) - DOS64_EXIT() + FUNC_EXIT() ret 1: movq 8(ap), %xmm1 @@ -185,6 +185,6 @@ L(bc): dec R32(n) movq 16(ap), %xmm0 psrlq %xmm4, %xmm0 movq %xmm0, 16(rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/fat/fat_entry.asm b/mpn/x86_64/fat/fat_entry.asm index 3a407cfd4..fc4fd5daa 100644 --- a/mpn/x86_64/fat/fat_entry.asm +++ b/mpn/x86_64/fat/fat_entry.asm @@ -177,7 +177,7 @@ define(`rp', `%rdi') define(`idx', `%rsi') PROLOGUE(__gmpn_cpuid) - DOS64_ENTRY(2) + FUNC_ENTRY(2) mov %rbx, %r8 mov R32(idx), R32(%rax) cpuid @@ -185,6 +185,6 @@ PROLOGUE(__gmpn_cpuid) mov %edx, 4(rp) mov %ecx, 8(rp) mov %r8, %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/gcd_1.asm b/mpn/x86_64/gcd_1.asm index ec7e0a017..a1fc3d93a 100644 --- a/mpn/x86_64/gcd_1.asm +++ b/mpn/x86_64/gcd_1.asm @@ -68,7 +68,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_gcd_1) - DOS64_ENTRY(3) + FUNC_ENTRY(3) mov (up), %rax C U low limb mov $-1, R32(%rcx) or v0, %rax C x | y @@ -143,7 +143,7 @@ L(mid): and $MASK, R32(%rcx) C 0 L(end): pop %rcx mov %rdx, %rax shl R8(%rcx), %rax - DOS64_EXIT() + FUNC_EXIT() ret L(shift_alot): diff --git a/mpn/x86_64/invert_limb.asm 
b/mpn/x86_64/invert_limb.asm index d0fe95e1b..1bea6f292 100644 --- a/mpn/x86_64/invert_limb.asm +++ b/mpn/x86_64/invert_limb.asm @@ -42,7 +42,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_invert_limb) C Kn C2 Ci - DOS64_ENTRY(1) + FUNC_ENTRY(1) mov %rdi, %rax C 0 0 0 shr $55, %rax C 1 1 1 ifdef(`PIC',` @@ -98,7 +98,7 @@ ifdef(`DARWIN',` adc %rdi, %rdx sub %rdx, %rax - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() ASM_END() diff --git a/mpn/x86_64/k10/hamdist.asm b/mpn/x86_64/k10/hamdist.asm index 7fd7e03d3..4032fb59c 100644 --- a/mpn/x86_64/k10/hamdist.asm +++ b/mpn/x86_64/k10/hamdist.asm @@ -45,7 +45,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_hamdist) - DOS64_ENTRY(3) + FUNC_ENTRY(3) mov (ap), %r8 xor (bp), %r8 @@ -60,7 +60,7 @@ L(1): .byte 0xf3,0x49,0x0f,0xb8,0xc0 C popcnt %r8, %rax xor R32(%r10), R32(%r10) add $1, n js L(top) - DOS64_EXIT() + FUNC_EXIT() ret ALIGN(16) @@ -71,7 +71,7 @@ L(2): mov 8(ap,n,8), %r9 add $2, n js L(top) lea (%r10, %rax), %rax - DOS64_EXIT() + FUNC_EXIT() ret ALIGN(16) @@ -87,6 +87,6 @@ L(top): mov (ap,n,8), %r8 js L(top) lea (%r10, %rax), %rax - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/k10/popcount.asm b/mpn/x86_64/k10/popcount.asm index a556628b3..ef05319ff 100644 --- a/mpn/x86_64/k10/popcount.asm +++ b/mpn/x86_64/k10/popcount.asm @@ -46,7 +46,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_popcount) - DOS64_ENTRY(2) + FUNC_ENTRY(2) ifelse(1,1,` lea (up,n,8), up @@ -122,6 +122,6 @@ C 1 = n mod 8 add $8, n js L(top) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/k8/aorrlsh_n.asm b/mpn/x86_64/k8/aorrlsh_n.asm index 48399217e..3d9c0ae22 100644 --- a/mpn/x86_64/k8/aorrlsh_n.asm +++ b/mpn/x86_64/k8/aorrlsh_n.asm @@ -62,7 +62,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') push %r12 push %rbp @@ -201,6 +201,6 @@ L(cj1): mov %r9, 8(rp,n,8) pop %rbx pop %rbp pop %r12 - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/logops_n.asm b/mpn/x86_64/logops_n.asm index f765e4a13..5d8a0ec76 100644 --- a/mpn/x86_64/logops_n.asm +++ b/mpn/x86_64/logops_n.asm @@ -81,7 +81,7 @@ ifdef(`VARIANT_1',` TEXT ALIGN(32) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) movq (vp), %r8 movl R32(%rcx), R32(%rax) leaq (vp,n,8), vp @@ -120,7 +120,7 @@ L(e10): movq 24(vp,n,8), %r9 movq %r9, 24(rp,n,8) addq $4, n jnc L(oop) -L(ret): DOS64_EXIT() +L(ret): FUNC_EXIT() ret EPILOGUE() ') @@ -129,7 +129,7 @@ ifdef(`VARIANT_2',` TEXT ALIGN(32) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) movq (vp), %r8 notq %r8 movl R32(%rcx), R32(%rax) @@ -173,7 +173,7 @@ L(e10): movq 24(vp,n,8), %r9 movq %r9, 24(rp,n,8) addq $4, n jnc L(oop) -L(ret): DOS64_EXIT() +L(ret): FUNC_EXIT() ret EPILOGUE() ') @@ -182,7 +182,7 @@ ifdef(`VARIANT_3',` TEXT ALIGN(32) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) movq (vp), %r8 movl R32(%rcx), R32(%rax) leaq (vp,n,8), vp @@ -227,7 +227,7 @@ L(e10): movq 24(vp,n,8), %r9 movq %r9, 24(rp,n,8) addq $4, n jnc L(oop) -L(ret): DOS64_EXIT() +L(ret): FUNC_EXIT() ret EPILOGUE() ') diff --git a/mpn/x86_64/lshift.asm b/mpn/x86_64/lshift.asm index 830dc0bfd..2c4daf47d 100644 --- a/mpn/x86_64/lshift.asm +++ b/mpn/x86_64/lshift.asm @@ -43,7 +43,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_lshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) cmp $1, R8(%rcx) jne L(gen) @@ -87,7 +87,7 @@ L(t1): mov (up), %r8 dec R32(%rax) jne L(n00) adc R32(%rax), R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret L(e1): test R32(%rax), R32(%rax) C clear cy L(n00): mov (up), %r8 @@ -96,7 +96,7 @@ L(n00): mov 
(up), %r8 adc %r8, %r8 mov %r8, (rp) L(ret): adc R32(%rax), R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret L(n01): dec R32(%rax) mov 8(up), %r9 @@ -106,7 +106,7 @@ L(n01): dec R32(%rax) mov %r8, (rp) mov %r9, 8(rp) adc R32(%rax), R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret L(n10): mov 16(up), %r10 adc %r8, %r8 @@ -116,7 +116,7 @@ L(n10): mov 16(up), %r10 mov %r9, 8(rp) mov %r10, 16(rp) adc $-1, R32(%rax) - DOS64_EXIT() + FUNC_EXIT() ret L(gen): neg R32(%rcx) C put rsh count in cl @@ -230,6 +230,6 @@ L(end): L(ast): mov (up), %r10 shl R8(%rcx), %r10 mov %r10, (rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/lshiftc.asm b/mpn/x86_64/lshiftc.asm index bc2afcdbb..c3eeee60e 100644 --- a/mpn/x86_64/lshiftc.asm +++ b/mpn/x86_64/lshiftc.asm @@ -43,7 +43,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_lshiftc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) neg R32(%rcx) C put rsh count in cl mov -8(up,n,8), %rax shr R8(%rcx), %rax C function return value @@ -166,6 +166,6 @@ L(ast): mov (up), %r10 shl R8(%rcx), %r10 not %r10 mov %r10, (rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/lshsub_n.asm b/mpn/x86_64/lshsub_n.asm index c04a4e4ac..ab97dcb1c 100644 --- a/mpn/x86_64/lshsub_n.asm +++ b/mpn/x86_64/lshsub_n.asm @@ -51,7 +51,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_lshsub_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') push %r12 @@ -156,6 +156,6 @@ L(end): pop %r13 pop %r12 - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/mod_1_1.asm b/mpn/x86_64/mod_1_1.asm index 1c3968b1d..fbd3ba808 100644 --- a/mpn/x86_64/mod_1_1.asm +++ b/mpn/x86_64/mod_1_1.asm @@ -74,7 +74,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_mod_1_1p) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbp push %rbx mov %rdx, b @@ -167,7 +167,7 @@ L(ok): shr R8(%rcx), %rax pop %rbx pop %rbp - DOS64_EXIT() + FUNC_EXIT() ret L(fix): sub b, %rax jmp L(ok) @@ -175,7 +175,7 @@ EPILOGUE() ALIGN(16) PROLOGUE(mpn_mod_1_1p_cps) - DOS64_ENTRY(2) + FUNC_ENTRY(2) push %rbp bsr %rsi, %rcx push %rbx @@ -218,7 +218,7 @@ L(z): pop %r12 pop %rbx pop %rbp - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() ASM_END() diff --git a/mpn/x86_64/mod_1_2.asm b/mpn/x86_64/mod_1_2.asm index 83d6372cf..f753b87e9 100644 --- a/mpn/x86_64/mod_1_2.asm +++ b/mpn/x86_64/mod_1_2.asm @@ -38,7 +38,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_mod_1s_2p) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %r14 test $1, R8(%rsi) mov %rdx, %r14 @@ -149,7 +149,7 @@ L(1): xor R32(%rcx), R32(%rcx) pop %r12 pop %r13 pop %r14 - DOS64_EXIT() + FUNC_EXIT() ret L(one): mov (%rdi), %r8 @@ -160,7 +160,7 @@ EPILOGUE() ALIGN(16) PROLOGUE(mpn_mod_1s_2p_cps) - DOS64_ENTRY(2) + FUNC_ENTRY(2) push %rbp bsr %rsi, %rcx push %rbx @@ -222,6 +222,6 @@ ifdef(`SHLD_SLOW',` pop %r12 pop %rbx pop %rbp - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/mod_1_4.asm b/mpn/x86_64/mod_1_4.asm index eedfa3016..165be8307 100644 --- a/mpn/x86_64/mod_1_4.asm +++ b/mpn/x86_64/mod_1_4.asm @@ -37,7 +37,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_mod_1s_4p) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %r15 push %r14 push %r13 @@ -161,13 +161,13 @@ L(end): mov 8(%r14), R32(%rsi) pop %r13 pop %r14 pop %r15 - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() ALIGN(16) PROLOGUE(mpn_mod_1s_4p_cps) - DOS64_ENTRY(2) + FUNC_ENTRY(2) push %rbp bsr %rsi, %rcx push %rbx @@ -253,6 +253,6 @@ ifdef(`SHLD_SLOW',` pop %r12 pop %rbx pop %rbp - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/mod_34lsub1.asm b/mpn/x86_64/mod_34lsub1.asm index 89386c964..44a19b843 100644 --- 
a/mpn/x86_64/mod_34lsub1.asm +++ b/mpn/x86_64/mod_34lsub1.asm @@ -49,7 +49,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_mod_34lsub1) - DOS64_ENTRY(2) + FUNC_ENTRY(2) mov $0x0000FFFFFFFFFFFF, %r11 @@ -73,7 +73,7 @@ PROLOGUE(mpn_mod_34lsub1) shl $16, %rdx C src[1] low add %rdx, %rax -L(one): DOS64_EXIT() +L(one): FUNC_EXIT() ret @@ -184,6 +184,6 @@ L(0): add %r9, %rax add %rdx, %rax C apply 2mod3 high add %rdi, %rax C apply 2mod3 low - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/mode1o.asm b/mpn/x86_64/mode1o.asm index 9467901eb..f0f07a4d7 100644 --- a/mpn/x86_64/mode1o.asm +++ b/mpn/x86_64/mode1o.asm @@ -70,12 +70,12 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_modexact_1_odd) - DOS64_ENTRY(3) + FUNC_ENTRY(3) mov $0, R32(%rcx) IFDOS(` jmp L(ent) ') PROLOGUE(mpn_modexact_1c_odd) - DOS64_ENTRY(4) + FUNC_ENTRY(4) L(ent): C rdi src C rsi size @@ -165,7 +165,7 @@ L(one): mul %r8 C climb = high (q * d) lea (%rcx,%rdx), %rax C climb+cbit - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE(mpn_modexact_1c_odd) diff --git a/mpn/x86_64/mul_2.asm b/mpn/x86_64/mul_2.asm index b23674abf..da4bcd9e2 100644 --- a/mpn/x86_64/mul_2.asm +++ b/mpn/x86_64/mul_2.asm @@ -60,7 +60,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_mul_2) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %rbp @@ -176,6 +176,6 @@ L(m22): mul v1 pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/mul_basecase.asm b/mpn/x86_64/mul_basecase.asm index d6ad0426e..6b94d78a8 100644 --- a/mpn/x86_64/mul_basecase.asm +++ b/mpn/x86_64/mul_basecase.asm @@ -66,7 +66,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_mul_basecase) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') push %rbx push %rbp @@ -452,7 +452,7 @@ L(ret): pop %r15 pop %r12 pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/mullo_basecase.asm b/mpn/x86_64/mullo_basecase.asm index b9d910536..2c2f832a2 100644 --- a/mpn/x86_64/mullo_basecase.asm +++ b/mpn/x86_64/mullo_basecase.asm @@ -58,7 +58,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_mullo_basecase) - DOS64_ENTRY(4) + FUNC_ENTRY(4) cmp $4, n jge L(gen) mov (up), %rax C u0 @@ -85,7 +85,7 @@ C .quad L(3m4) C 11 L(1): imul %r8, %rax mov %rax, (rp) - DOS64_EXIT() + FUNC_EXIT() ret L(2): mov 8(vp_param), %r11 @@ -96,7 +96,7 @@ L(2): mov 8(vp_param), %r11 lea (%r11, %rdx), %rax add %r8, %rax mov %rax, 8(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(3): mov 8(vp_param), %r9 C v1 @@ -121,7 +121,7 @@ L(3): mov 8(vp_param), %r9 C v1 add %rax, %r9 mov %rcx, 8(rp) mov %r9, 16(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(0m4): @@ -415,6 +415,6 @@ L(ret): pop %r15 pop %r13 pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/mulmid_basecase.asm b/mpn/x86_64/mulmid_basecase.asm index 83450ff52..08a7bc7de 100644 --- a/mpn/x86_64/mulmid_basecase.asm +++ b/mpn/x86_64/mulmid_basecase.asm @@ -57,7 +57,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_mulmid_basecase) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8d ') push %rbx push %rbp @@ -543,6 +543,6 @@ L(ret): pop %r15 pop %r12 pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/nano/dive_1.asm b/mpn/x86_64/nano/dive_1.asm index 6cdcf18c1..3593f762e 100644 --- a/mpn/x86_64/nano/dive_1.asm +++ b/mpn/x86_64/nano/dive_1.asm @@ -45,7 +45,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_divexact_1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx mov %rcx, %rax @@ -144,13 +144,13 @@ L(com): mul %r11 C carry limb in rdx imul %r10, %r9 mov %r9, (%rdi) pop %rbx - 
DOS64_EXIT() + FUNC_EXIT() ret L(one): shr R8(%rcx), %rax imul %r10, %rax mov %rax, (%rdi) pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/pentium4/aors_n.asm b/mpn/x86_64/pentium4/aors_n.asm index 4a8bbdeb2..32a61581d 100644 --- a/mpn/x86_64/pentium4/aors_n.asm +++ b/mpn/x86_64/pentium4/aors_n.asm @@ -55,12 +55,12 @@ MULFUNC_PROLOGUE(mpn_add_n mpn_add_nc mpn_sub_n mpn_sub_nc) ASM_START() TEXT PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) xor %r8, %r8 IFDOS(` jmp L(ent) ') EPILOGUE() PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') L(ent): push %rbx push %r12 @@ -180,6 +180,6 @@ L(1): mov %r11, 8(rp) L(ret): mov R32(%rbx), R32(%rax) pop %r12 pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/pentium4/aorslshC_n.asm b/mpn/x86_64/pentium4/aorslshC_n.asm index e914d25e1..f24eb0b7a 100644 --- a/mpn/x86_64/pentium4/aorslshC_n.asm +++ b/mpn/x86_64/pentium4/aorslshC_n.asm @@ -45,7 +45,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx push %r12 push %rbp @@ -185,7 +185,7 @@ L(1): mov %r11, 8(rp) pop %r12 pop %rbx emms - DOS64_EXIT() + FUNC_EXIT() ret L(c3): mov $1, R8(%rax) jmp L(rc3) diff --git a/mpn/x86_64/pentium4/lshift.asm b/mpn/x86_64/pentium4/lshift.asm index 4f9c0c6c3..1eeb70e4e 100644 --- a/mpn/x86_64/pentium4/lshift.asm +++ b/mpn/x86_64/pentium4/lshift.asm @@ -42,7 +42,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_lshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) mov -8(up,n,8), %rax movd R32(%rcx), %mm4 neg R32(%rcx) C put rsh count in cl @@ -150,6 +150,6 @@ L(ast): movq (up), %mm2 psllq %mm4, %mm2 movq %mm2, (rp) emms - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/pentium4/lshiftc.asm b/mpn/x86_64/pentium4/lshiftc.asm index 86255fe7c..94d2cd372 100644 --- a/mpn/x86_64/pentium4/lshiftc.asm +++ b/mpn/x86_64/pentium4/lshiftc.asm @@ -42,7 +42,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_lshiftc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) mov -8(up,n,8), %rax pcmpeqd %mm6, %mm6 C 0xffff...fff movd R32(%rcx), %mm4 @@ -162,6 +162,6 @@ L(ast): movq (up), %mm2 pxor %mm6, %mm2 movq %mm2, (rp) emms - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/pentium4/mod_34lsub1.asm b/mpn/x86_64/pentium4/mod_34lsub1.asm index 7123400da..441433565 100644 --- a/mpn/x86_64/pentium4/mod_34lsub1.asm +++ b/mpn/x86_64/pentium4/mod_34lsub1.asm @@ -50,7 +50,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_mod_34lsub1) - DOS64_ENTRY(2) + FUNC_ENTRY(2) mov $0x0000FFFFFFFFFFFF, %r11 @@ -75,7 +75,7 @@ PROLOGUE(mpn_mod_34lsub1) shl $16, %rdx C src[1] low add %rdx, %rax -L(1): DOS64_EXIT() +L(1): FUNC_EXIT() ret @@ -151,6 +151,6 @@ L(combine): add %rdx, %rax C apply 2mod3 high add %rdi, %rax C apply 2mod3 low - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/pentium4/rsh1aors_n.asm b/mpn/x86_64/pentium4/rsh1aors_n.asm index a3e82f252..489e36ea2 100644 --- a/mpn/x86_64/pentium4/rsh1aors_n.asm +++ b/mpn/x86_64/pentium4/rsh1aors_n.asm @@ -62,12 +62,12 @@ MULFUNC_PROLOGUE(mpn_rsh1add_n mpn_rsh1add_nc mpn_rsh1sub_n mpn_rsh1sub_nc) ASM_START() TEXT PROLOGUE(func) - DOS64_ENTRY(4) + FUNC_ENTRY(4) xor %r8, %r8 IFDOS(` jmp L(ent) ') EPILOGUE() PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') L(ent): push %rbx push %r12 @@ -316,7 +316,7 @@ L(cj1): or %r14, %rbx pop %r13 pop %r12 pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret L(c3): mov $1, R8(%rax) jmp L(rc3) diff --git a/mpn/x86_64/pentium4/rshift.asm b/mpn/x86_64/pentium4/rshift.asm index 
37f5b43ba..0228e8db3 100644 --- a/mpn/x86_64/pentium4/rshift.asm +++ b/mpn/x86_64/pentium4/rshift.asm @@ -42,7 +42,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_rshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) mov (up), %rax movd R32(%rcx), %mm4 neg R32(%rcx) C put lsh count in cl @@ -153,6 +153,6 @@ L(ast): movq (up), %mm2 psrlq %mm4, %mm2 movq %mm2, (rp) emms - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/popham.asm b/mpn/x86_64/popham.asm index 158036a90..4b69ddbb2 100644 --- a/mpn/x86_64/popham.asm +++ b/mpn/x86_64/popham.asm @@ -70,8 +70,8 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(func) - POP(` DOS64_ENTRY(2) ') - HAM(` DOS64_ENTRY(3) ') + POP(` FUNC_ENTRY(2) ') + HAM(` FUNC_ENTRY(3) ') push %r12 push %r13 HAM(` push %r14 ') @@ -161,6 +161,6 @@ L(end): HAM(` pop %r14 ') pop %r13 pop %r12 - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/redc_1.asm b/mpn/x86_64/redc_1.asm index 268cdfebd..b6404d42b 100644 --- a/mpn/x86_64/redc_1.asm +++ b/mpn/x86_64/redc_1.asm @@ -58,7 +58,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_redc_1) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') push %rbp push %rbx @@ -322,6 +322,6 @@ IFDOS(` mov rp, %rcx ') C rcx = rp pop %r12 pop %rbx pop %rbp - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/rsh1aors_n.asm b/mpn/x86_64/rsh1aors_n.asm index c52d01cae..9b56ed81a 100644 --- a/mpn/x86_64/rsh1aors_n.asm +++ b/mpn/x86_64/rsh1aors_n.asm @@ -59,7 +59,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(func_nc) - DOS64_ENTRY(4) + FUNC_ENTRY(4) IFDOS(` mov 56(%rsp), %r8 ') push %rbx @@ -72,7 +72,7 @@ EPILOGUE() ALIGN(16) PROLOGUE(func_n) - DOS64_ENTRY(4) + FUNC_ENTRY(4) push %rbx xor R32(%rax), R32(%rax) @@ -173,6 +173,6 @@ L(top): add %rbx, %rbx C rotate carry limb, restore acy L(end): mov %rbx, (rp) pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/rshift.asm b/mpn/x86_64/rshift.asm index 9a19eecda..0941b460e 100644 --- a/mpn/x86_64/rshift.asm +++ b/mpn/x86_64/rshift.asm @@ -43,7 +43,7 @@ ASM_START() TEXT ALIGN(32) PROLOGUE(mpn_rshift) - DOS64_ENTRY(4) + FUNC_ENTRY(4) neg R32(%rcx) C put rsh count in cl mov (up), %rax shl R8(%rcx), %rax C function return value @@ -160,6 +160,6 @@ L(end): L(ast): mov (up), %r10 shr R8(%rcx), %r10 mov %r10, (rp) - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/sqr_basecase.asm b/mpn/x86_64/sqr_basecase.asm index 202712264..4130cba16 100644 --- a/mpn/x86_64/sqr_basecase.asm +++ b/mpn/x86_64/sqr_basecase.asm @@ -82,7 +82,7 @@ ASM_START() TEXT ALIGN(16) PROLOGUE(mpn_sqr_basecase) - DOS64_ENTRY(3) + FUNC_ENTRY(3) mov R32(n_param), R32(%rcx) mov R32(n_param), R32(n) C free original n register (rdx) @@ -119,7 +119,7 @@ L(1): mov (up), %rax add $40, %rsp mov %rax, (rp) mov %rdx, 8(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(2): mov (up), %rax @@ -145,7 +145,7 @@ L(2): mov (up), %rax mov %r10, 16(rp) adc %r8, %r11 mov %r11, 24(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(3): mov (up), %rax @@ -190,7 +190,7 @@ L(3): mov (up), %rax adc %r10, 24(rp) adc %rdx, 32(rp) adc %r11, 40(rp) - DOS64_EXIT() + FUNC_EXIT() ret L(4): mov (up), %rax @@ -260,7 +260,7 @@ L(4): mov (up), %rax adc %rcx, 40(rp) adc %rdx, 48(rp) adc %rax, 56(rp) - DOS64_EXIT() + FUNC_EXIT() ret @@ -785,6 +785,6 @@ L(d1): mov %r11, 24(rp,j,8) pop %r12 pop %rbp pop %rbx - DOS64_EXIT() + FUNC_EXIT() ret EPILOGUE() diff --git a/mpn/x86_64/sublsh1_n.asm b/mpn/x86_64/sublsh1_n.asm index 9b4617a5e..9ef62ba43 100644 --- a/mpn/x86_64/sublsh1_n.asm +++ b/mpn/x86_64/sublsh1_n.asm @@ -48,7 +48,7 @@ ASM_START() 
 	TEXT
 	ALIGN(16)
 PROLOGUE(mpn_sublsh1_n)
-	DOS64_ENTRY(4)
+	FUNC_ENTRY(4)
 	push	%rbx
 	push	%rbp
 
@@ -144,6 +144,6 @@ L(end):	add	R32(%rbp), R32(%rax)
 
 	pop	%rbp
 	pop	%rbx
-	DOS64_EXIT()
+	FUNC_EXIT()
 	ret
 EPILOGUE()
diff --git a/mpn/x86_64/tabselect.asm b/mpn/x86_64/tabselect.asm
index 9ecf6f44d..0b4c0d6e5 100644
--- a/mpn/x86_64/tabselect.asm
+++ b/mpn/x86_64/tabselect.asm
@@ -57,7 +57,7 @@ ASM_START()
 	TEXT
 	ALIGN(16)
 PROLOGUE(mpn_tabselect)
-	DOS64_ENTRY(4)
+	FUNC_ENTRY(4)
 IFDOS(`	mov	56(%rsp), %r8d	')
 	push	%rbx
 	push	%rbp
@@ -110,6 +110,6 @@ L(outer_end):
 	pop	%r12
 	pop	%rbp
 	pop	%rbx
-	DOS64_EXIT()
+	FUNC_EXIT()
 	ret
 EPILOGUE()
diff --git a/mpn/x86_64/x86_64-defs.m4 b/mpn/x86_64/x86_64-defs.m4
index 24035716b..5180ad5d8 100644
--- a/mpn/x86_64/x86_64-defs.m4
+++ b/mpn/x86_64/x86_64-defs.m4
@@ -191,8 +191,8 @@ define(`JUMPTABSECT', `.section .data.rel.ro.local,"aw",@progbits')
 
 dnl These macros are defined just for DOS64, where they provide calling
 dnl sequence glue code.
 
-define(`DOS64_ENTRY',`')
-define(`DOS64_EXIT',`')
+define(`FUNC_ENTRY',`')
+define(`FUNC_EXIT',`')
 
 dnl Target ABI macros.
--
cgit v1.2.1
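
As an illustration of what the renamed macros do, here is a minimal sketch in the style of the routines patched above; it is not part of this patch, and mpn_example_lowlimb is a made-up routine. Under the standard System V ABI, FUNC_ENTRY and FUNC_EXIT expand to nothing; under DOS64 they save %rdi/%rsi and move the Windows argument registers %rcx, %rdx, %r8 (and %r9 for a fourth argument) into the System V ones, so each routine body is written once against the standard register convention.

	ASM_START()
	TEXT
	ALIGN(16)
PROLOGUE(mpn_example_lowlimb)	C hypothetical: return up[0] for arguments (rp, up, n)
	FUNC_ENTRY(3)		C no-op on ELF/Darwin; on DOS64, save %rdi/%rsi and map %rcx,%rdx,%r8 onto them
	mov	(%rsi), %rax	C after FUNC_ENTRY the arguments sit in the System V registers
	FUNC_EXIT()		C no-op on ELF/Darwin; on DOS64, restore %rsi and %rdi
	ret
EPILOGUE()

A three-operand routine of this shape therefore sees rp in %rdi, up in %rsi and n in %rdx on both ABIs once FUNC_ENTRY(3) has run, which is why the rename touches only the macro names and not the function bodies.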