summary | refs | log | tree | commit | diff
path: root/cipher/sha1-avx2-bmi2-amd64.S
diff options
context:
space:
mode:
author: Jussi Kivilinna <jussi.kivilinna@iki.fi> 2019-04-15 19:46:53 +0300
committer: Jussi Kivilinna <jussi.kivilinna@iki.fi> 2019-04-16 23:03:36 +0300
commit: d11ae95d05dc39ec6b825d1109afadd964589880 (patch)
tree: 6a36256a9a816cd8e49fb6be5fcb4a9b6f9d007d /cipher/sha1-avx2-bmi2-amd64.S
parent: 0903b215ef5a18332b740a24e6e2bfbed9e1d97b (diff)
download: libgcrypt-d11ae95d05dc39ec6b825d1109afadd964589880.tar.gz
Add CFI unwind assembly directives for AMD64 assembly
* configure.ac (gcry_cv_gcc_asm_cfi_directives): New. * cipher/asm-common-amd64.h (ADD_RIP, CFI_STARTPROC, CFI_ENDPROC) (CFI_REMEMBER_STATE, CFI_RESTORE_STATE, CFI_ADJUST_CFA_OFFSET) (CFI_REL_OFFSET, CFI_DEF_CFA_REGISTER, CFI_REGISTER, CFI_RESTORE) (CFI_PUSH, CFI_POP, CFI_POP_TMP_REG, CFI_LEAVE, DW_REGNO) (DW_SLEB128_7BIT, DW_SLEB128_28BIT, CFI_CFA_ON_STACK) (CFI_REG_ON_STACK): New. (ENTER_SYSV_FUNCPARAMS_0_4, EXIT_SYSV_FUNC): Add CFI directives. * cipher/arcfour-amd64.S: Add CFI directives. * cipher/blake2b-amd64-avx2.S: Add CFI directives. * cipher/blake2s-amd64-avx.S: Add CFI directives. * cipher/blowfish-amd64.S: Add CFI directives. * cipher/camellia-aesni-avx-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/camellia-aesni-avx2-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/cast5-amd64.S: Add CFI directives. * cipher/chacha20-amd64-avx2.S: Add CFI directives. * cipher/chacha20-amd64-ssse3.S: Add CFI directives. * cipher/des-amd64.S: Add CFI directives. * cipher/rijndael-amd64.S: Add CFI directives. * cipher/rijndael-ssse3-amd64-asm.S: Add CFI directives. * cipher/salsa20-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/serpent-avx2-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/serpent-sse2-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/sha1-avx-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/sha1-avx-bmi2-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/sha1-avx2-bmi2-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/sha1-ssse3-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/sha256-avx-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/sha256-avx2-bmi2-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/sha256-ssse3-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/sha512-avx-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. 
* cipher/sha512-avx2-bmi2-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/sha512-ssse3-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/twofish-amd64.S: Add CFI directives. * cipher/twofish-avx2-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * cipher/whirlpool-sse2-amd64.S: Add CFI directives; Use 'asm-common-amd64.h'. * mpi/amd64/func_abi.h: Include 'config.h'. (CFI_STARTPROC, CFI_ENDPROC, CFI_ADJUST_CFA_OFFSET, CFI_REL_OFFSET) (CFI_RESTORE, CFI_PUSH, CFI_POP): New. (FUNC_ENTRY, FUNC_EXIT): Add CFI directives. -- This commit adds CFI directives that add DWARF unwinding information for debugger to backtrace when executing code from AMD64 assembly files. Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
Diffstat (limited to 'cipher/sha1-avx2-bmi2-amd64.S')
-rw-r--r--  cipher/sha1-avx2-bmi2-amd64.S  | 34
1 file changed, 17 insertions(+), 17 deletions(-)
diff --git a/cipher/sha1-avx2-bmi2-amd64.S b/cipher/sha1-avx2-bmi2-amd64.S
index 2a2f21a5..93863230 100644
--- a/cipher/sha1-avx2-bmi2-amd64.S
+++ b/cipher/sha1-avx2-bmi2-amd64.S
@@ -34,18 +34,7 @@
defined(HAVE_GCC_INLINE_ASM_BMI2) && defined(HAVE_GCC_INLINE_ASM_AVX) && \
defined(HAVE_GCC_INLINE_ASM_AVX2) && defined(USE_SHA1)
-#ifdef __PIC__
-# define RIP (%rip)
-#else
-# define RIP
-#endif
-
-
-#ifdef HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS
-# define ELF(...) __VA_ARGS__
-#else
-# define ELF(...) /*_*/
-#endif
+#include "asm-common-amd64.h"
/* Context structure */
@@ -228,6 +217,7 @@ _gcry_sha1_transform_amd64_avx2_bmi2:
* %rsi: data (64*nblks bytes)
* %rdx: nblks (multiple of 2, larger than 0)
*/
+ CFI_STARTPROC();
vzeroupper;
@@ -235,10 +225,14 @@ _gcry_sha1_transform_amd64_avx2_bmi2:
movq %rdi, RSTATE;
movq %rsi, RDATA;
pushq %rbx;
+ CFI_PUSH(%rbx);
pushq %rbp;
+ CFI_PUSH(%rbp);
pushq %r12;
+ CFI_PUSH(%r12);
movq %rsp, ROLDSTACK;
+ CFI_DEF_CFA_REGISTER(ROLDSTACK);
subq $(WK_STACK_WORDS*4), %rsp;
andq $(~63), %rsp;
@@ -251,11 +245,11 @@ _gcry_sha1_transform_amd64_avx2_bmi2:
movl state_h4(RSTATE), e;
xorl ne, ne;
- vbroadcasti128 .Lbswap_shufb_ctl RIP, BSWAP_REG;
- vpbroadcastd .LK1 RIP, K1;
- vpbroadcastd .LK2 RIP, K2;
- vpbroadcastd .LK3 RIP, K3;
- vpbroadcastd .LK4 RIP, K4;
+ vbroadcasti128 .Lbswap_shufb_ctl rRIP, BSWAP_REG;
+ vpbroadcastd .LK1 rRIP, K1;
+ vpbroadcastd .LK2 rRIP, K2;
+ vpbroadcastd .LK3 rRIP, K3;
+ vpbroadcastd .LK4 rRIP, K4;
/* Precalc 0-31 for block 1 & 2. */
W_PRECALC_00_15_0(0, W0, Wtmp0);
@@ -557,15 +551,21 @@ _gcry_sha1_transform_amd64_avx2_bmi2:
movl e, state_h4(RSTATE);
movq ROLDSTACK, %rsp;
+ CFI_REGISTER(ROLDSTACK, %rsp);
+ CFI_DEF_CFA_REGISTER(%rsp);
popq %r12;
+ CFI_POP(%r12);
popq %rbp;
+ CFI_POP(%rbp);
popq %rbx;
+ CFI_POP(%rbx);
/* stack already burned */
xorl %eax, %eax;
ret;
+ CFI_ENDPROC();
ELF(.size _gcry_sha1_transform_amd64_avx2_bmi2,
.-_gcry_sha1_transform_amd64_avx2_bmi2;)