author     Jussi Kivilinna <jussi.kivilinna@iki.fi>  2021-01-20 17:02:57 +0200
committer  Jussi Kivilinna <jussi.kivilinna@iki.fi>  2021-01-20 22:06:11 +0200
commit     00df9f27181d77166ceb55f319329400bf2e6a48 (patch)
tree       4a1c204e30d183888c2ea4b18eef757a01239848 /cipher/sha512-ssse3-i386.c
parent     81354e911bfa3e135d3e07f6a8d9e98033cd921a (diff)
Split inline assembly blocks with many memory operands
* cipher/rijndael-aesni.c (aesni_ocb_checksum, aesni_ocb_enc)
(aesni_ocb_dec, _gcry_aes_aesni_ocb_auth): Split assembly blocks with
more than 4 memory operands to smaller blocks.
* cipher/sha512-ssse3-i386.c (W2): Split big assembly block to three
smaller blocks.
--
On i386, with -O0, assembly blocks with many memory operands cause
compiler error such as:

  rijndael-aesni.c:2815:7: error: 'asm' operand has impossible constraints

Fix is to split assembly blocks so that number of operands per block is
reduced.

GnuPG-bug-id: 5257
Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
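For illustration, here is a minimal sketch of the same technique, not taken
from the patch: the function names (sum4_split, main) and the four-vector sum
are made up, and an i386 target with SSE2 enabled is assumed (e.g.
gcc -m32 -msse2 -O0). The intermediate value stays in xmm0 while one asm
statement ends and the next begins, so each statement carries only a couple of
memory operands.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical example: sum four 2x64-bit vectors from 'w' into 'out',
 * split across two asm blocks so neither block has many memory operands.  */
void
sum4_split (uint64_t out[2], const uint64_t w[8])
{
  /* Block 1: only two input memory operands; partial sum left in xmm0.  */
  asm volatile ("movdqu %[w0], %%xmm0\n\t"
                "movdqu %[w2], %%xmm1\n\t"
                "paddq  %%xmm1, %%xmm0\n\t"
                :
                : [w0] "m" (w[0]), [w2] "m" (w[2])
                : "memory", "xmm0", "xmm1");

  /* Block 2: remaining operands plus the output; xmm0 still holds the
   * partial sum because no compiler-generated SSE code runs in between.  */
  asm volatile ("movdqu %[w4], %%xmm1\n\t"
                "paddq  %%xmm1, %%xmm0\n\t"
                "movdqu %[w6], %%xmm1\n\t"
                "paddq  %%xmm1, %%xmm0\n\t"
                "movdqu %%xmm0, %[out]\n\t"
                : [out] "=m" (out[0])
                : [w4] "m" (w[4]), [w6] "m" (w[6])
                : "memory", "xmm1");
}

int
main (void)
{
  uint64_t w[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
  uint64_t out[2];

  sum4_split (out, w);
  printf ("%llu %llu\n", (unsigned long long) out[0],
          (unsigned long long) out[1]); /* expect: 16 20 */
  return 0;
}

Like the split W2 macro in the diff below, this relies on adjacent asm
statements with no intervening code, so the xmm registers carry the
intermediate state from one block to the next.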
Diffstat (limited to 'cipher/sha512-ssse3-i386.c')
-rw-r--r--  cipher/sha512-ssse3-i386.c | 18 +++++++++++-------
1 file changed, 11 insertions(+), 7 deletions(-)
diff --git a/cipher/sha512-ssse3-i386.c b/cipher/sha512-ssse3-i386.c
index 4b12cee4..0fc98d8e 100644
--- a/cipher/sha512-ssse3-i386.c
+++ b/cipher/sha512-ssse3-i386.c
@@ -228,7 +228,11 @@ static const unsigned char bshuf_mask[16] __attribute__ ((aligned (16))) =
asm volatile ("movdqu %[w_t_m_2], %%xmm2;\n\t" \
"movdqa %%xmm2, %%xmm0;\n\t" \
"movdqu %[w_t_m_15], %%xmm5;\n\t" \
- "movdqa %%xmm5, %%xmm3;\n\t" \
+ : \
+ : [w_t_m_2] "m" (w[(i)-2]), \
+ [w_t_m_15] "m" (w[(i)-15]) \
+ : "memory" ); \
+ asm volatile ("movdqa %%xmm5, %%xmm3;\n\t" \
"psrlq $(61-19), %%xmm0;\n\t" \
"psrlq $(8-7), %%xmm3;\n\t" \
"pxor %%xmm2, %%xmm0;\n\t" \
@@ -251,17 +255,17 @@ static const unsigned char bshuf_mask[16] __attribute__ ((aligned (16))) =
"movdqu %[w_t_m_16], %%xmm2;\n\t" \
"pxor %%xmm4, %%xmm3;\n\t" \
"movdqu %[w_t_m_7], %%xmm1;\n\t" \
- "paddq %%xmm3, %%xmm0;\n\t" \
+ : \
+ : [w_t_m_7] "m" (w[(i)-7]), \
+ [w_t_m_16] "m" (w[(i)-16]) \
+ : "memory" ); \
+ asm volatile ("paddq %%xmm3, %%xmm0;\n\t" \
"paddq %%xmm2, %%xmm0;\n\t" \
"paddq %%xmm1, %%xmm0;\n\t" \
"movdqu %%xmm0, %[w_t_m_0];\n\t" \
"paddq %[k], %%xmm0;\n\t" \
: [w_t_m_0] "=m" (w[(i)-0]) \
- : [k] "m" (K[i]), \
- [w_t_m_2] "m" (w[(i)-2]), \
- [w_t_m_7] "m" (w[(i)-7]), \
- [w_t_m_15] "m" (w[(i)-15]), \
- [w_t_m_16] "m" (w[(i)-16]) \
+ : [k] "m" (K[i]) \
: "memory" )
unsigned int ASM_FUNC_ATTR