summaryrefslogtreecommitdiff
path: root/sysdeps/powerpc/powerpc64/setjmp-common.S
diff options
context:
space:
mode:
authorUlrich Drepper <drepper@redhat.com>2004-02-14 05:16:57 +0000
committerUlrich Drepper <drepper@redhat.com>2004-02-14 05:16:57 +0000
commit5c76ff279fa8fd1425b86a39fe75507660cc0b5c (patch)
tree420bed38e7d88b534f57f82497cc0dd77a26b936 /sysdeps/powerpc/powerpc64/setjmp-common.S
parent9f8765bc46dbb4991dc20905c5d10d867956d489 (diff)
downloadglibc-5c76ff279fa8fd1425b86a39fe75507660cc0b5c.tar.gz
Update.
2004-02-13 Steven Munroe <sjmunroe@us.ibm.com> * sysdeps/powerpc/powerpc64/__longjmp-common.S: New file. * sysdeps/powerpc/powerpc64/__longjmp.S [NOT_IN_libc](__longjmp): Non-versioned __longjmp for rtld-__longjmp. [!NOT_IN_libc](__vmx__longjmp): Add VMX reg support and define as default version of __longjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_3, GLIBC_2_3_4)] (__novmx__longjmp): Original version of __longjmp. * sysdeps/powerpc/powerpc64/bsd-_setjmp.S [NOT_IN_libc](_setjmp): Non-versioned _setjmp for rtld-_setjmp. [!NOT_IN_libc](__vmx_setjmp): Branch to __vmx__sigsetjmp and define as default version of _setjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_3, GLIBC_2_3_4)] (__novmx_setjmp): Original version of _setjmp. * sysdeps/powerpc/powerpc64/bsd-setjmp.S (__vmxsetjmp): Branch to __vmx__sigsetjmp and define as default version of setjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_3, GLIBC_2_3_4)] (__novmxsetjmp): Original version of setjmp. * sysdeps/powerpc/powerpc64/setjmp-common.S: New file. * sysdeps/powerpc/powerpc64/setjmp.S [NOT_IN_libc](__setjmp): Non-versioned __sigsetjmp for rtld-setjmp. [!NOT_IN_libc](__vmx__sigsetjmp): Add VMX reg support and define as default version of __sigsetjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_3, GLIBC_2_3_4)] (__novmx__sigsetjmp): Original version of __sigsetjmp. * sysdeps/powerpc/powerpc32/__longjmp-common.S: New File * sysdeps/powerpc/powerpc32/__longjmp.S [NOT_IN_libc](__longjmp): Non-versioned __longjmp for rtld-__longjmp. [!NOT_IN_libc](__vmx__longjmp): Add VMX reg support and define as default version of __longjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)] (__novmx__longjmp): Original version of __longjmp. * sysdeps/powerpc/powerpc32/bsd-_setjmp.S [NOT_IN_libc](_setjmp): Non-versioned _setjmp for rtld-_setjmp. [!NOT_IN_libc](__vmx_setjmp): Branch to __vmx__sigsetjmp and define as default version of _setjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)] (__novmx_setjmp): Original version of _setjmp. 
* sysdeps/powerpc/powerpc32/bsd-setjmp.S (__vmxsetjmp): Branch to __vmx__sigsetjmp and define as default version of setjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)] (__novmxsetjmp): Original version of setjmp. * sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S: New file. * sysdeps/powerpc/powerpc32/fpu/__longjmp.S [NOT_IN_libc](__longjmp): Non-versioned __longjmp for rtld-__longjmp. [!NOT_IN_libc](__vmx__longjmp): Add VMX reg support and define as default version of __longjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)] (__novmx__longjmp): Original version of __longjmp. * sysdeps/powerpc/powerpc32/fpu/setjmp-common.S: New file. * sysdeps/powerpc/powerpc32/fpu/setjmp.S [NOT_IN_libc](__setjmp): Non-versioned __sigsetjmp for rtld-setjmp. [!NOT_IN_libc](__vmx__sigsetjmp): Add VMX reg support and define as default version of __sigsetjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)] (__novmx__sigsetjmp): Original version of __sigsetjmp. * sysdeps/powerpc/powerpc32/setjmp-common.S: New file. * sysdeps/powerpc/powerpc32/setjmp.S [NOT_IN_libc](__setjmp): Non-versioned __sigsetjmp for rtld-setjmp. [!NOT_IN_libc](__vmx__sigsetjmp): Add VMX reg support and define as default version of __sigsetjmp. [SHARED && SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)] (__novmx__sigsetjmp): Original version of __sigsetjmp. * sysdeps/powerpc/Dist: New File. * sysdeps/powerpc/Makefile (sysdep_routines): Add novmx-longjmp.c and novmx-sigjmp.c * sysdeps/powerpc/Versions (libc): To GLIBC_2.3_4 add _longjmp, __sigsetjmp, _setjmp, longjmp, and setjmp. To GLIBC_PRIVATE add __novmx__libc_longjmp, __novmx__libc_siglongjmp, __vmx__libc_longjmp, and __vmx__libc_siglongjmp. * sysdeps/powerpc/bits/setjmp.h: Define JB_VRSAVE, JB_VRS, and adjust JB_SIZE to add VMX regs to __jmp_buf. * sysdeps/powerpc/longjmp.c: New file. * sysdeps/powerpc/novmxsetjmp.h: New file. * sysdeps/powerpc/novmx-longjmp.c: New file. * sysdeps/powerpc/novmx-sigjmp.c: New file. 
* sysdeps/powerpc/sigjmp.c: New file.
Diffstat (limited to 'sysdeps/powerpc/powerpc64/setjmp-common.S')
-rw-r--r--sysdeps/powerpc/powerpc64/setjmp-common.S171
1 file changed, 171 insertions, 0 deletions
diff --git a/sysdeps/powerpc/powerpc64/setjmp-common.S b/sysdeps/powerpc/powerpc64/setjmp-common.S
new file mode 100644
index 0000000000..541b5d4276
--- /dev/null
+++ b/sysdeps/powerpc/powerpc64/setjmp-common.S
@@ -0,0 +1,171 @@
+/* setjmp for PowerPC64.
+ Copyright (C) 1995-2003, 2004 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with the GNU C Library; if not, write to the Free
+ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+ 02111-1307 USA. */
+
+#include <sysdep.h>
+#define _ASM
+#define _SETJMP_H
+#ifdef __NO_VMX__
+#include <novmxsetjmp.h>
+#else
+#include <bits/setjmp.h>
+#endif
+#include <bp-sym.h>
+#include <bp-asm.h>
+
+#ifndef __NO_VMX__
+/* TOC entry used below to reach the hardware-capability word so we can
+   test for AltiVec/VMX at run time: inside the shared libc it is read
+   out of _rtld_global (at RTLD_GLOBAL_DL_HWCAP_OFFSET); in the static
+   case it is the plain extern _dl_hwcap variable.  */
+ .section ".toc","aw"
+.LC__dl_hwcap:
+#ifdef SHARED
+ .tc _rtld_global[TC],_rtld_global
+#else
+ .tc _dl_hwcap[TC],_dl_hwcap
+#endif
+ .section ".text"
+#endif
+
+/* int __sigsetjmp (jmp_buf env, int savemask)
+   Store the callee-saved state -- r1 (stack pointer), r2 (TOC pointer),
+   r14-r31, fp14-fp31, LR and CR -- into ENV, then, when the processor
+   has AltiVec, also VRSAVE and the callee-saved vector registers
+   v20-v31, and finally tail-branch to __sigjmp_save to deal with the
+   signal mask.  r3 = env, r4 = savemask; neither is written here, so
+   both arrive intact at __sigjmp_save.  The GPR and FPR stores are
+   interleaved -- presumably for store-pipeline scheduling; the order
+   itself is not semantically significant.  */
+ENTRY (BP_SYM (__sigsetjmp))
+ CHECK_BOUNDS_BOTH_WIDE_LIT (r3, r8, r9, JB_SIZE)
+ std r1,(JB_GPR1*8)(3)  /* Caller's stack pointer.  */
+ mflr r0
+ std r2,(JB_GPR2*8)(3)  /* TOC pointer.  */
+ std r14,((JB_GPRS+0)*8)(3)
+ stfd fp14,((JB_FPRS+0)*8)(3)
+ std r0,(JB_LR*8)(3)  /* Link register (return address).  */
+ std r15,((JB_GPRS+1)*8)(3)
+ stfd fp15,((JB_FPRS+1)*8)(3)
+ mfcr r0
+ std r16,((JB_GPRS+2)*8)(3)
+ stfd fp16,((JB_FPRS+2)*8)(3)
+ std r0,(JB_CR*8)(3)  /* Condition register.  */
+ std r17,((JB_GPRS+3)*8)(3)
+ stfd fp17,((JB_FPRS+3)*8)(3)
+ std r18,((JB_GPRS+4)*8)(3)
+ stfd fp18,((JB_FPRS+4)*8)(3)
+ std r19,((JB_GPRS+5)*8)(3)
+ stfd fp19,((JB_FPRS+5)*8)(3)
+ std r20,((JB_GPRS+6)*8)(3)
+ stfd fp20,((JB_FPRS+6)*8)(3)
+ std r21,((JB_GPRS+7)*8)(3)
+ stfd fp21,((JB_FPRS+7)*8)(3)
+ std r22,((JB_GPRS+8)*8)(3)
+ stfd fp22,((JB_FPRS+8)*8)(3)
+ std r23,((JB_GPRS+9)*8)(3)
+ stfd fp23,((JB_FPRS+9)*8)(3)
+ std r24,((JB_GPRS+10)*8)(3)
+ stfd fp24,((JB_FPRS+10)*8)(3)
+ std r25,((JB_GPRS+11)*8)(3)
+ stfd fp25,((JB_FPRS+11)*8)(3)
+ std r26,((JB_GPRS+12)*8)(3)
+ stfd fp26,((JB_FPRS+12)*8)(3)
+ std r27,((JB_GPRS+13)*8)(3)
+ stfd fp27,((JB_FPRS+13)*8)(3)
+ std r28,((JB_GPRS+14)*8)(3)
+ stfd fp28,((JB_FPRS+14)*8)(3)
+ std r29,((JB_GPRS+15)*8)(3)
+ stfd fp29,((JB_FPRS+15)*8)(3)
+ std r30,((JB_GPRS+16)*8)(3)
+ stfd fp30,((JB_FPRS+16)*8)(3)
+ std r31,((JB_GPRS+17)*8)(3)
+ stfd fp31,((JB_FPRS+17)*8)(3)
+#ifndef __NO_VMX__
+ /* Save the VMX state only when the hardware has AltiVec; fetch the
+    hwcap word through the TOC entry set up above.  */
+ ld r5,.LC__dl_hwcap@toc(r2)
+#ifdef SHARED
+ /* Load _rtld_global._dl_hwcap. */
+ ld r5,RTLD_GLOBAL_DL_HWCAP_OFFSET(r5)
+#else
+ ld r5,0(r5) /* Load extern _dl_hwcap. */
+#endif
+ andis. r5,r5,(PPC_FEATURE_HAS_ALTIVEC >> 16)  /* Bit lives in the high halfword.  */
+ beq no_vmx
+ la r5,((JB_VRS)*8)(3)  /* r5 = address of the vector save area.  */
+ andi. r6,r5,0xf  /* Is the save area 16-byte aligned?  */
+ mfspr r0,VRSAVE
+ stw r0,((JB_VRSAVE)*8)(3)
+ addi r6,r5,16  /* Second store pointer; r5/r6 leapfrog in 32-byte strides.  */
+ beq+ aligned_save_vmx
+ /* Unaligned save area: build a byte-select mask from the misalignment
+    so each 16-byte stvx only overwrites bytes belonging to the
+    jmp_buf.  */
+ lvsr v0,0,r5
+ vspltisb v1,-1 /* set v1 to all 1's */
+ vspltisb v2,0 /* set v2 to all 0's */
+ vperm v3,v2,v1,v0 /* v3 contains shift mask with num all 1 bytes
+ on left = misalignment */
+
+
+ /* Special case for v20 we need to preserve what is in save area
+ below v20 before obliterating it */
+ lvx v5,0,r5
+ vperm v20,v20,v20,v0
+ vsel v5,v5,v20,v3
+ vsel v20,v20,v2,v3
+ stvx v5,0,r5
+
+/* Rotate SAVEVR by the misalignment and store one 16-byte block that
+   merges the spill-over tail of PREV_SAVEVR (already rotated) with the
+   leading bytes of SAVEVR; ADDGPR is bumped by 32 so the two store
+   pointers keep leapfrogging.  */
+#define save_2vmx_partial(savevr,prev_savevr,hivr,shiftvr,maskvr,savegpr,addgpr) \
+ addi addgpr,addgpr,32; \
+ vperm savevr,savevr,savevr,shiftvr; \
+ vsel hivr,prev_savevr,savevr,maskvr; \
+ stvx hivr,0,savegpr;
+
+ save_2vmx_partial(v21,v20,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v22,v21,v5,v0,v3,r5,r6)
+ save_2vmx_partial(v23,v22,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v24,v23,v5,v0,v3,r5,r6)
+ save_2vmx_partial(v25,v24,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v26,v25,v5,v0,v3,r5,r6)
+ save_2vmx_partial(v27,v26,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v28,v27,v5,v0,v3,r5,r6)
+ save_2vmx_partial(v29,v28,v5,v0,v3,r6,r5)
+ save_2vmx_partial(v30,v29,v5,v0,v3,r5,r6)
+
+ /* Special case for v31 we need to preserve what is in save area
+ above v31 before obliterating it */
+ addi r5,r5,32
+ vperm v31,v31,v31,v0
+ lvx v4,0,r5
+ vsel v5,v30,v31,v3
+ stvx v5,0,r6
+ vsel v4,v31,v4,v3
+ stvx v4,0,r5
+ b no_vmx
+
+aligned_save_vmx:
+ /* Aligned case: plain 16-byte stores of v20-v31, the two pointers
+    advancing in 32-byte strides.  (The bare operands 20-31 here denote
+    vector registers v20-v31.)  */
+ stvx 20,0,r5
+ addi r5,r5,32
+ stvx 21,0,r6
+ addi r6,r6,32
+ stvx 22,0,r5
+ addi r5,r5,32
+ stvx 23,0,r6
+ addi r6,r6,32
+ stvx 24,0,r5
+ addi r5,r5,32
+ stvx 25,0,r6
+ addi r6,r6,32
+ stvx 26,0,r5
+ addi r5,r5,32
+ stvx 27,0,r6
+ addi r6,r6,32
+ stvx 28,0,r5
+ addi r5,r5,32
+ stvx 29,0,r6
+ addi r6,r6,32
+ stvx 30,0,r5
+ stvx 31,0,r6
+no_vmx:
+#endif
+ b JUMPTARGET (BP_SYM (__sigjmp_save))  /* Tail call; r3/r4 still hold env/savemask.  */
+END (BP_SYM (__sigsetjmp))