summaryrefslogtreecommitdiff
path: root/cipher
diff options
context:
space:
mode:
authorJussi Kivilinna <jussi.kivilinna@iki.fi>2023-01-17 20:27:12 +0200
committerJussi Kivilinna <jussi.kivilinna@iki.fi>2023-01-19 18:57:32 +0200
commit208b1f3a7bd8709889aa566ff030bcff57ce1cfd (patch)
tree2654a70c0ea3ed54d7cd946422cc8dd964fdb6fe /cipher
parent9d62c54de2b0cd3b1849a27f8998e1f0d43f1583 (diff)
downloadlibgcrypt-208b1f3a7bd8709889aa566ff030bcff57ce1cfd.tar.gz
amd64-asm: move constant data to read-only section for cipher algos
* cipher/camellia-aesni-avx-amd64.S: Move constant data to read-only section. * cipher/camellia-aesni-avx2-amd64.h: Likewise. * cipher/camellia-gfni-avx512-amd64.S: Likewise. * cipher/chacha20-amd64-avx2.S: Likewise. * cipher/chacha20-amd64-avx512.S: Likewise. * cipher/chacha20-amd64-ssse3.S: Likewise. * cipher/des-amd64.S: Likewise. * cipher/rijndael-ssse3-amd64-asm.S: Likewise. * cipher/rijndael-vaes-avx2-amd64.S: Likewise. * cipher/serpent-avx2-amd64.S: Likewise. * cipher/sm4-aesni-avx-amd64.S: Likewise. * cipher/sm4-aesni-avx2-amd64.S: Likewise. * cipher/sm4-gfni-avx2-amd64.S: Likewise. * cipher/sm4-gfni-avx512-amd64.S: Likewise. * cipher/twofish-avx2-amd64.S: Likewise. -- Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
Diffstat (limited to 'cipher')
-rw-r--r--cipher/camellia-aesni-avx-amd64.S11
-rw-r--r--cipher/camellia-aesni-avx2-amd64.h5
-rw-r--r--cipher/camellia-gfni-avx512-amd64.S4
-rw-r--r--cipher/chacha20-amd64-avx2.S9
-rw-r--r--cipher/chacha20-amd64-avx512.S6
-rw-r--r--cipher/chacha20-amd64-ssse3.S9
-rw-r--r--cipher/des-amd64.S5
-rw-r--r--cipher/rijndael-ssse3-amd64-asm.S5
-rw-r--r--cipher/rijndael-vaes-avx2-amd64.S2
-rw-r--r--cipher/serpent-avx2-amd64.S6
-rw-r--r--cipher/sm4-aesni-avx-amd64.S7
-rw-r--r--cipher/sm4-aesni-avx2-amd64.S7
-rw-r--r--cipher/sm4-gfni-avx2-amd64.S7
-rw-r--r--cipher/sm4-gfni-avx512-amd64.S4
-rw-r--r--cipher/twofish-avx2-amd64.S5
15 files changed, 74 insertions, 18 deletions
diff --git a/cipher/camellia-aesni-avx-amd64.S b/cipher/camellia-aesni-avx-amd64.S
index e15e445b..1f241e03 100644
--- a/cipher/camellia-aesni-avx-amd64.S
+++ b/cipher/camellia-aesni-avx-amd64.S
@@ -619,7 +619,10 @@
vmovdqu y6, 14 * 16(rio); \
vmovdqu y7, 15 * 16(rio);
-.text
+SECTION_RODATA
+
+ELF(.type _camellia_aesni_avx_data,@object;)
+_camellia_aesni_avx_data:
.align 16
#define SHUFB_BYTES(idx) \
@@ -763,6 +766,7 @@
.L0f0f0f0f:
.long 0x0f0f0f0f
+.text
.align 16
ELF(.type __camellia_enc_blk16,@function;)
@@ -1720,6 +1724,10 @@ ELF(.size _gcry_camellia_aesni_avx_ocb_auth,.-_gcry_camellia_aesni_avx_ocb_auth;
vpsllq $(64-(nror)), out, out; \
vpaddd t0, out, out;
+SECTION_RODATA
+
+ELF(.type _camellia_aesni_avx_keysetup_data,@object;)
+_camellia_aesni_avx_keysetup_data:
.align 16
.Linv_shift_row_and_unpcklbw:
@@ -1752,6 +1760,7 @@ ELF(.size _gcry_camellia_aesni_avx_ocb_auth,.-_gcry_camellia_aesni_avx_ocb_auth;
.Lsigma6:
.long 0xB3E6C1FD, 0xB05688C2;
+.text
.align 16
ELF(.type __camellia_avx_setup128,@function;)
diff --git a/cipher/camellia-aesni-avx2-amd64.h b/cipher/camellia-aesni-avx2-amd64.h
index b97cc2e3..c92a0559 100644
--- a/cipher/camellia-aesni-avx2-amd64.h
+++ b/cipher/camellia-aesni-avx2-amd64.h
@@ -784,7 +784,8 @@
vmovdqu y6, 14 * 32(rio); \
vmovdqu y7, 15 * 32(rio);
-.text
+SECTION_RODATA
+
.align 32
#define SHUFB_BYTES(idx) \
@@ -997,6 +998,8 @@ ELF(.type FUNC_NAME(_constants),@object;)
ELF(.size FUNC_NAME(_constants),.-FUNC_NAME(_constants);)
+.text
+
.align 16
ELF(.type FUNC_NAME(enc_blk32),@function;)
diff --git a/cipher/camellia-gfni-avx512-amd64.S b/cipher/camellia-gfni-avx512-amd64.S
index 66949d43..64fef8b6 100644
--- a/cipher/camellia-gfni-avx512-amd64.S
+++ b/cipher/camellia-gfni-avx512-amd64.S
@@ -584,7 +584,7 @@
vmovdqu64 y6, 14 * 64(rio); \
vmovdqu64 y7, 15 * 64(rio);
-.text
+SECTION_RODATA
#define SHUFB_BYTES(idx) \
0 + (idx), 4 + (idx), 8 + (idx), 12 + (idx)
@@ -691,6 +691,8 @@ ELF(.type _gcry_camellia_gfni_avx512__constants,@object;)
ELF(.size _gcry_camellia_gfni_avx512__constants,.-_gcry_camellia_gfni_avx512__constants;)
+.text
+
.align 16
ELF(.type __camellia_gfni_avx512_enc_blk64,@function;)
diff --git a/cipher/chacha20-amd64-avx2.S b/cipher/chacha20-amd64-avx2.S
index 407d651f..54e2ffab 100644
--- a/cipher/chacha20-amd64-avx2.S
+++ b/cipher/chacha20-amd64-avx2.S
@@ -33,8 +33,6 @@
(defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \
defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS))
-.text
-
#include "asm-common-amd64.h"
#include "asm-poly1305-amd64.h"
@@ -157,8 +155,11 @@
PLUS(c1,d1); PLUS(c2,d2); XOR(b1,c1); XOR(b2,c2); \
ROTATE2(b1, b2, 7, tmp1);
+SECTION_RODATA
+
+ELF(.type _chacha20_avx2_data,@object;)
.align 32
-chacha20_data:
+_chacha20_avx2_data:
.Lshuf_rol16:
.byte 2,3,0,1,6,7,4,5,10,11,8,9,14,15,12,13
.Lshuf_rol8:
@@ -168,6 +169,8 @@ chacha20_data:
.Lunsigned_cmp:
.long 0x80000000
+.text
+
.align 16
.globl _gcry_chacha20_amd64_avx2_blocks8
ELF(.type _gcry_chacha20_amd64_avx2_blocks8,@function;)
diff --git a/cipher/chacha20-amd64-avx512.S b/cipher/chacha20-amd64-avx512.S
index 4b183528..2d140815 100644
--- a/cipher/chacha20-amd64-avx512.S
+++ b/cipher/chacha20-amd64-avx512.S
@@ -33,8 +33,6 @@
(defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \
defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS))
-.text
-
#include "asm-common-amd64.h"
/* register macros */
@@ -269,6 +267,8 @@
ROTATE(x1, 7); ROTATE(y1, 7); \
WORD_SHUF(x1, shuf_x1); WORD_SHUF(y1, shuf_x1);
+SECTION_RODATA
+
.align 64
ELF(.type _gcry_chacha20_amd64_avx512_data,@object;)
_gcry_chacha20_amd64_avx512_data:
@@ -286,6 +286,8 @@ _gcry_chacha20_amd64_avx512_data:
.byte 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
ELF(.size _gcry_chacha20_amd64_avx512_data,.-_gcry_chacha20_amd64_avx512_data)
+.text
+
.align 16
.globl _gcry_chacha20_amd64_avx512_blocks
ELF(.type _gcry_chacha20_amd64_avx512_blocks,@function;)
diff --git a/cipher/chacha20-amd64-ssse3.S b/cipher/chacha20-amd64-ssse3.S
index 452d42e5..1ce5a8e6 100644
--- a/cipher/chacha20-amd64-ssse3.S
+++ b/cipher/chacha20-amd64-ssse3.S
@@ -33,8 +33,6 @@
(defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \
defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS))
-.text
-
#include "asm-common-amd64.h"
#include "asm-poly1305-amd64.h"
@@ -151,7 +149,10 @@
PLUS(c1,d1); PLUS(c2,d2); XOR(b1,c1); XOR(b2,c2); \
ROTATE2(b1, b2, 7, tmp1, tmp2);
-chacha20_data:
+SECTION_RODATA
+
+ELF(.type _chacha20_ssse3_data,@object;)
+_chacha20_ssse3_data:
.align 16
.Lshuf_rol16:
.byte 2,3,0,1,6,7,4,5,10,11,8,9,14,15,12,13
@@ -164,6 +165,8 @@ chacha20_data:
.Lunsigned_cmp:
.long 0x80000000,0x80000000,0x80000000,0x80000000
+.text
+
.align 16
.globl _gcry_chacha20_amd64_ssse3_blocks4
ELF(.type _gcry_chacha20_amd64_ssse3_blocks4,@function;)
diff --git a/cipher/des-amd64.S b/cipher/des-amd64.S
index 51e40258..e4236a92 100644
--- a/cipher/des-amd64.S
+++ b/cipher/des-amd64.S
@@ -841,7 +841,12 @@ _gcry_3des_amd64_cfb_dec:
CFI_ENDPROC();
ELF(.size _gcry_3des_amd64_cfb_dec,.-_gcry_3des_amd64_cfb_dec;)
+
+SECTION_RODATA
+ELF(.type _des_amd64_data,@object;)
+
.align 16
+_des_amd64_data:
.L_s1:
.quad 0x0010100001010400, 0x0000000000000000
.quad 0x0000100000010000, 0x0010100001010404
diff --git a/cipher/rijndael-ssse3-amd64-asm.S b/cipher/rijndael-ssse3-amd64-asm.S
index 0c5c8f46..5153cb28 100644
--- a/cipher/rijndael-ssse3-amd64-asm.S
+++ b/cipher/rijndael-ssse3-amd64-asm.S
@@ -691,8 +691,11 @@ ELF(.size _gcry_aes_ssse3_schedule_core,.-_gcry_aes_ssse3_schedule_core)
## ##
########################################################
+SECTION_RODATA
+
.align 16
-ELF(.type _aes_consts,@object)
+ELF(.type _aes_ssse3_consts,@object)
+_aes_ssse3_consts:
.Laes_consts:
_aes_consts:
# s0F
diff --git a/cipher/rijndael-vaes-avx2-amd64.S b/cipher/rijndael-vaes-avx2-amd64.S
index 13fe7ab0..aceccb96 100644
--- a/cipher/rijndael-vaes-avx2-amd64.S
+++ b/cipher/rijndael-vaes-avx2-amd64.S
@@ -3313,6 +3313,8 @@ ELF(.size _gcry_vaes_avx2_ecb_crypt_amd64,.-_gcry_vaes_avx2_ecb_crypt_amd64)
/**********************************************************************
constants
**********************************************************************/
+SECTION_RODATA
+
ELF(.type _gcry_vaes_consts,@object)
_gcry_vaes_consts:
.align 32
diff --git a/cipher/serpent-avx2-amd64.S b/cipher/serpent-avx2-amd64.S
index 26a21a36..e25e7d3b 100644
--- a/cipher/serpent-avx2-amd64.S
+++ b/cipher/serpent-avx2-amd64.S
@@ -1200,9 +1200,13 @@ _gcry_serpent_avx2_ocb_auth:
CFI_ENDPROC();
ELF(.size _gcry_serpent_avx2_ocb_auth,.-_gcry_serpent_avx2_ocb_auth;)
-.align 16
+
+SECTION_RODATA
+ELF(.type _serpent_avx2_consts,@object)
+_serpent_avx2_consts:
/* For CTR-mode IV byteswap */
+.align 16
.Lbswap128_mask:
.byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
diff --git a/cipher/sm4-aesni-avx-amd64.S b/cipher/sm4-aesni-avx-amd64.S
index 88f6e5c5..c09b205d 100644
--- a/cipher/sm4-aesni-avx-amd64.S
+++ b/cipher/sm4-aesni-avx-amd64.S
@@ -97,9 +97,12 @@
4-way && 8-way SM4 with AES-NI and AVX
**********************************************************************/
-.text
+SECTION_RODATA
.align 16
+ELF(.type _sm4_aesni_avx_consts,@object)
+_sm4_aesni_avx_consts:
+
/*
* Following four affine transform look-up tables are from work by
* Markku-Juhani O. Saarinen, at https://github.com/mjosaarinen/sm4ni
@@ -152,6 +155,8 @@
.L0f0f0f0f:
.long 0x0f0f0f0f
+.text
+
.align 16
.globl _gcry_sm4_aesni_avx_expand_key
ELF(.type _gcry_sm4_aesni_avx_expand_key,@function;)
diff --git a/cipher/sm4-aesni-avx2-amd64.S b/cipher/sm4-aesni-avx2-amd64.S
index 514a0b4e..acd37cff 100644
--- a/cipher/sm4-aesni-avx2-amd64.S
+++ b/cipher/sm4-aesni-avx2-amd64.S
@@ -118,9 +118,12 @@
16-way SM4 with AES-NI and AVX
**********************************************************************/
-.text
+SECTION_RODATA
.align 16
+ELF(.type _sm4_aesni_avx2_consts,@object)
+_sm4_aesni_avx2_consts:
+
/*
* Following four affine transform look-up tables are from work by
* Markku-Juhani O. Saarinen, at https://github.com/mjosaarinen/sm4ni
@@ -173,6 +176,8 @@
.L0f0f0f0f:
.long 0x0f0f0f0f
+.text
+
.align 16
ELF(.type __sm4_crypt_blk16,@function;)
__sm4_crypt_blk16:
diff --git a/cipher/sm4-gfni-avx2-amd64.S b/cipher/sm4-gfni-avx2-amd64.S
index e21bd93b..2fbaffd5 100644
--- a/cipher/sm4-gfni-avx2-amd64.S
+++ b/cipher/sm4-gfni-avx2-amd64.S
@@ -87,9 +87,12 @@
#define RB2x %xmm14
#define RB3x %xmm15
-.text
+SECTION_RODATA
.align 32
+ELF(.type _sm4_gfni_avx2_consts,@object)
+_sm4_gfni_avx2_consts:
+
/* Affine transform, SM4 field to AES field */
.Lpre_affine_s:
.byte 0x52, 0xbc, 0x2d, 0x02, 0x9e, 0x25, 0xac, 0x34
@@ -133,6 +136,8 @@
.Lbswap32_mask:
.byte 3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12
+.text
+
.align 16
.globl _gcry_sm4_gfni_avx2_expand_key
ELF(.type _gcry_sm4_gfni_avx2_expand_key,@function;)
diff --git a/cipher/sm4-gfni-avx512-amd64.S b/cipher/sm4-gfni-avx512-amd64.S
index 0f9899d4..b095f85d 100644
--- a/cipher/sm4-gfni-avx512-amd64.S
+++ b/cipher/sm4-gfni-avx512-amd64.S
@@ -103,7 +103,7 @@
#define RB2z %zmm14
#define RB3z %zmm15
-.text
+SECTION_RODATA
.align 32
/* Affine transform, SM4 field to AES field */
@@ -146,6 +146,8 @@
.quad 2, 0
.quad 3, 0
+.text
+
.align 16
.globl _gcry_sm4_gfni_avx512_expand_key
ELF(.type _gcry_sm4_gfni_avx512_expand_key,@function;)
diff --git a/cipher/twofish-avx2-amd64.S b/cipher/twofish-avx2-amd64.S
index 6c6729c0..8a6aae19 100644
--- a/cipher/twofish-avx2-amd64.S
+++ b/cipher/twofish-avx2-amd64.S
@@ -1082,10 +1082,13 @@ _gcry_twofish_avx2_ocb_auth:
CFI_ENDPROC();
ELF(.size _gcry_twofish_avx2_ocb_auth,.-_gcry_twofish_avx2_ocb_auth;)
+SECTION_RODATA
+
.align 16
/* For CTR-mode IV byteswap */
- _gcry_twofish_bswap128_mask:
+ELF(.type _gcry_twofish_bswap128_mask,@object)
+_gcry_twofish_bswap128_mask:
.Lbswap128_mask:
.byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
ELF(.size _gcry_twofish_bswap128_mask,.-_gcry_twofish_bswap128_mask;)