diff options
author | Jussi Kivilinna <jussi.kivilinna@iki.fi> | 2022-01-08 20:56:19 +0200 |
---|---|---|
committer | Jussi Kivilinna <jussi.kivilinna@iki.fi> | 2022-01-11 20:10:12 +0200 |
commit | 11ade08efbfbc36dbf3571f1026946269950bc40 (patch) | |
tree | 73c1739f302ca2cbac73c244b8dbd533ab516a8f /cipher/sm4-aesni-avx-amd64.S | |
parent | ff2a647d36677f6ad9edbe992a6c0ab0f7cf9510 (diff) | |
download | libgcrypt-11ade08efbfbc36dbf3571f1026946269950bc40.tar.gz |
Add straight-line speculation hardening for amd64 and i386 assembly
* cipher/asm-common-amd64.h (ret_spec_stop): New.
* cipher/arcfour-amd64.S: Use 'ret_spec_stop' for 'ret' instruction.
* cipher/blake2b-amd64-avx2.S: Likewise.
* cipher/blake2s-amd64-avx.S: Likewise.
* cipher/blowfish-amd64.S: Likewise.
* cipher/camellia-aesni-avx-amd64.S: Likewise.
* cipher/camellia-aesni-avx2-amd64.h: Likewise.
* cipher/cast5-amd64.S: Likewise.
* cipher/chacha20-amd64-avx2.S: Likewise.
* cipher/chacha20-amd64-ssse3.S: Likewise.
* cipher/des-amd64.S: Likewise.
* cipher/rijndael-aarch64.S: Likewise.
* cipher/rijndael-amd64.S: Likewise.
* cipher/rijndael-ssse3-amd64-asm.S: Likewise.
* cipher/rijndael-vaes-avx2-amd64.S: Likewise.
* cipher/salsa20-amd64.S: Likewise.
* cipher/serpent-avx2-amd64.S: Likewise.
* cipher/serpent-sse2-amd64.S: Likewise.
* cipher/sha1-avx-amd64.S: Likewise.
* cipher/sha1-avx-bmi2-amd64.S: Likewise.
* cipher/sha1-avx2-bmi2-amd64.S: Likewise.
* cipher/sha1-ssse3-amd64.S: Likewise.
* cipher/sha256-avx-amd64.S: Likewise.
* cipher/sha256-avx2-bmi2-amd64.S: Likewise.
* cipher/sha256-ssse3-amd64.S: Likewise.
* cipher/sha512-avx-amd64.S: Likewise.
* cipher/sha512-avx2-bmi2-amd64.S: Likewise.
* cipher/sha512-ssse3-amd64.S: Likewise.
* cipher/sm3-avx-bmi2-amd64.S: Likewise.
* cipher/sm4-aesni-avx-amd64.S: Likewise.
* cipher/sm4-aesni-avx2-amd64.S: Likewise.
* cipher/twofish-amd64.S: Likewise.
* cipher/twofish-avx2-amd64.S: Likewise.
* cipher/whirlpool-sse2-amd64.S: Likewise.
* mpi/amd64/func_abi.h (CFI_*): Remove, include from "asm-common-amd64.h"
instead.
(FUNC_EXIT): Use 'ret_spec_stop' for 'ret' instruction.
* mpi/asm-common-amd64.h: New.
* mpi/i386/mpih-add1.S: Use 'ret_spec_stop' for 'ret' instruction.
* mpi/i386/mpih-lshift.S: Likewise.
* mpi/i386/mpih-mul1.S: Likewise.
* mpi/i386/mpih-mul2.S: Likewise.
* mpi/i386/mpih-mul3.S: Likewise.
* mpi/i386/mpih-rshift.S: Likewise.
* mpi/i386/mpih-sub1.S: Likewise.
* mpi/i386/syntax.h (ret_spec_stop): New.
--
Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
Diffstat (limited to 'cipher/sm4-aesni-avx-amd64.S')
-rw-r--r-- | cipher/sm4-aesni-avx-amd64.S | 20 |
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/cipher/sm4-aesni-avx-amd64.S b/cipher/sm4-aesni-avx-amd64.S
index 3610b98c..7a99e070 100644
--- a/cipher/sm4-aesni-avx-amd64.S
+++ b/cipher/sm4-aesni-avx-amd64.S
@@ -240,7 +240,7 @@ _gcry_sm4_aesni_avx_expand_key:
 #undef ROUND
 
 	vzeroall;
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size _gcry_sm4_aesni_avx_expand_key,.-_gcry_sm4_aesni_avx_expand_key;)
 
@@ -345,7 +345,7 @@ sm4_aesni_avx_crypt_blk1_4:
 .Lblk4_store_output_done:
 	vzeroall;
 	xorl %eax, %eax;
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size sm4_aesni_avx_crypt_blk1_4,.-sm4_aesni_avx_crypt_blk1_4;)
 
@@ -454,7 +454,7 @@ __sm4_crypt_blk8:
 	vpshufb RTMP2, RB2, RB2;
 	vpshufb RTMP2, RB3, RB3;
 
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size __sm4_crypt_blk8,.-__sm4_crypt_blk8;)
 
@@ -508,7 +508,7 @@ _gcry_sm4_aesni_avx_crypt_blk1_8:
 .Lblk8_store_output_done:
 	vzeroall;
 	xorl %eax, %eax;
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size _gcry_sm4_aesni_avx_crypt_blk1_8,.-_gcry_sm4_aesni_avx_crypt_blk1_8;)
 
@@ -582,7 +582,7 @@ _gcry_sm4_aesni_avx_ctr_enc:
 
 	vzeroall;
 
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size _gcry_sm4_aesni_avx_ctr_enc,.-_gcry_sm4_aesni_avx_ctr_enc;)
 
@@ -631,7 +631,7 @@ _gcry_sm4_aesni_avx_cbc_dec:
 
 	vzeroall;
 
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size _gcry_sm4_aesni_avx_cbc_dec,.-_gcry_sm4_aesni_avx_cbc_dec;)
 
@@ -683,7 +683,7 @@ _gcry_sm4_aesni_avx_cfb_dec:
 
 	vzeroall;
 
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size _gcry_sm4_aesni_avx_cfb_dec,.-_gcry_sm4_aesni_avx_cfb_dec;)
 
@@ -782,7 +782,7 @@ _gcry_sm4_aesni_avx_ocb_enc:
 
 	vzeroall;
 
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size _gcry_sm4_aesni_avx_ocb_enc,.-_gcry_sm4_aesni_avx_ocb_enc;)
 
@@ -891,7 +891,7 @@ _gcry_sm4_aesni_avx_ocb_dec:
 
 	vzeroall;
 
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size _gcry_sm4_aesni_avx_ocb_dec,.-_gcry_sm4_aesni_avx_ocb_dec;)
 
@@ -979,7 +979,7 @@ _gcry_sm4_aesni_avx_ocb_auth:
 
 	vzeroall;
 
-	ret;
+	ret_spec_stop;
 	CFI_ENDPROC();
 ELF(.size _gcry_sm4_aesni_avx_ocb_auth,.-_gcry_sm4_aesni_avx_ocb_auth;)