author    Jussi Kivilinna <jussi.kivilinna@iki.fi>  2018-07-21 11:56:46 +0300
committer Jussi Kivilinna <jussi.kivilinna@iki.fi>  2018-07-21 11:56:46 +0300
commit    86e5e06a97ae13b8bbf6923ecc76e02b9c429b46 (patch)
tree      cedf1ba84f981293efe492dd2cff3991cb6013ed /cipher/cipher-gcm.c
parent    9660c3fafd732b1857bb2697c6f43aed077b9ad6 (diff)
download  libgcrypt-86e5e06a97ae13b8bbf6923ecc76e02b9c429b46.tar.gz
Add size optimized cipher block copy and xor functions
* cipher/bufhelp.h (buf_get_he32, buf_put_he32, buf_get_he64)
(buf_put_he64): New.
* cipher/cipher-internal.h (cipher_block_cpy, cipher_block_xor)
(cipher_block_xor_1, cipher_block_xor_2dst, cipher_block_xor_n_copy_2)
(cipher_block_xor_n_copy): New.
* cipher/cipher-gcm-intel-pclmul.c (_gcry_ghash_setup_intel_pclmul): Use
assembly for swapping endianness instead of buf_get_be64 and buf_cpy.
* cipher/blowfish.c: Use new cipher_block_* functions for cipher block
sized buf_cpy/xor* operations.
* cipher/camellia-glue.c: Ditto.
* cipher/cast5.c: Ditto.
* cipher/cipher-aeswrap.c: Ditto.
* cipher/cipher-cbc.c: Ditto.
* cipher/cipher-ccm.c: Ditto.
* cipher/cipher-cfb.c: Ditto.
* cipher/cipher-cmac.c: Ditto.
* cipher/cipher-ctr.c: Ditto.
* cipher/cipher-eax.c: Ditto.
* cipher/cipher-gcm.c: Ditto.
* cipher/cipher-ocb.c: Ditto.
* cipher/cipher-ofb.c: Ditto.
* cipher/cipher-xts.c: Ditto.
* cipher/des.c: Ditto.
* cipher/rijndael.c: Ditto.
* cipher/serpent.c: Ditto.
* cipher/twofish.c: Ditto.
--

This commit adds size-optimized functions for copying and xoring
cipher block sized buffers. These functions also allow GCC to use
inline auto-vectorization for block cipher copying and xoring on
higher optimization levels.

Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
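For readers outside the tree: the sketch below illustrates the idea only, not
the actual libgcrypt code (the names get_he64, put_he64, block_cpy and
block_xor are placeholders, not the cipher-internal.h API). Copying and
xoring a block through fixed-size host-endian word loads and stores lets the
compiler keep the whole block in registers and auto-vectorize at higher
optimization levels:

    /* Sketch only: fixed-size copy/xor over one cipher block, assuming
       blocksize is 8 or 16 as for the ciphers touched by this commit.  */
    #include <stdint.h>
    #include <string.h>

    /* Host-endian 64-bit load/store; memcpy sidesteps alignment traps
       and compiles to a single move on common targets.  */
    static inline uint64_t get_he64 (const void *p)
    { uint64_t v; memcpy (&v, p, 8); return v; }

    static inline void put_he64 (void *p, uint64_t v)
    { memcpy (p, &v, 8); }

    /* Copy one cipher block.  */
    static inline void
    block_cpy (void *dst, const void *src, size_t blocksize)
    {
      if (blocksize == 8)
        put_he64 (dst, get_he64 (src));
      else /* blocksize == 16 */
        {
          put_he64 ((char *)dst + 0, get_he64 ((const char *)src + 0));
          put_he64 ((char *)dst + 8, get_he64 ((const char *)src + 8));
        }
    }

    /* dst = src1 ^ src2 over one cipher block.  */
    static inline void
    block_xor (void *dst, const void *src1, const void *src2,
               size_t blocksize)
    {
      if (blocksize == 8)
        put_he64 (dst, get_he64 (src1) ^ get_he64 (src2));
      else /* blocksize == 16 */
        {
          put_he64 ((char *)dst + 0,
                    get_he64 ((const char *)src1 + 0)
                    ^ get_he64 ((const char *)src2 + 0));
          put_he64 ((char *)dst + 8,
                    get_he64 ((const char *)src1 + 8)
                    ^ get_he64 ((const char *)src2 + 8));
        }
    }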
Diffstat (limited to 'cipher/cipher-gcm.c')
-rw-r--r--  cipher/cipher-gcm.c  14
1 file changed, 7 insertions, 7 deletions
diff --git a/cipher/cipher-gcm.c b/cipher/cipher-gcm.c
index 6169d142..32ec9fa0 100644
--- a/cipher/cipher-gcm.c
+++ b/cipher/cipher-gcm.c
@@ -150,7 +150,7 @@ do_ghash (unsigned char *result, const unsigned char *buf, const u64 *gcmM)
   u32 A;
   int i;
 
-  buf_xor (V, result, buf, 16);
+  cipher_block_xor (V, result, buf, 16);
 
   V[0] = be_bswap64 (V[0]);
   V[1] = be_bswap64 (V[1]);
@@ -259,7 +259,7 @@ do_ghash (unsigned char *result, const unsigned char *buf, const u32 *gcmM)
   u32 T[3];
   int i;
 
-  buf_xor (V, result, buf, 16); /* V is big-endian */
+  cipher_block_xor (V, result, buf, 16); /* V is big-endian */
 
   /* First round can be manually tweaked based on fact that 'tmp' is zero. */
   i = 15;
@@ -342,7 +342,7 @@ do_ghash (unsigned char *hsub, unsigned char *result, const unsigned char *buf)
 #else
   unsigned long T[4];
 
-  buf_xor (V, result, buf, 16);
+  cipher_block_xor (V, result, buf, 16);
   for (i = 0; i < 4; i++)
     {
       V[i] = (V[i] & 0x00ff00ff) << 8 | (V[i] & 0xff00ff00) >> 8;
@@ -358,7 +358,7 @@ do_ghash (unsigned char *hsub, unsigned char *result, const unsigned char *buf)
       for (j = 0x80; j; j >>= 1)
         {
           if (hsub[i] & j)
-            buf_xor (p, p, V, 16);
+            cipher_block_xor (p, p, V, 16);
           if (bshift (V))
             V[0] ^= 0xe1000000;
         }
@@ -598,7 +598,7 @@ gcm_ctr_encrypt (gcry_cipher_hd_t c, byte *outbuf, size_t outbuflen,
             }
 
           fix_ctr = 1;
-          buf_cpy(ctr_copy, c->u_ctr.ctr, GCRY_GCM_BLOCK_LEN);
+          cipher_block_cpy(ctr_copy, c->u_ctr.ctr, GCRY_GCM_BLOCK_LEN);
         }
     }
@@ -928,8 +928,8 @@ _gcry_cipher_gcm_tag (gcry_cipher_hd_t c,
       /* Add bitlengths to tag. */
       do_ghash_buf(c, c->u_mode.gcm.u_tag.tag, (byte*)bitlengths,
                    GCRY_GCM_BLOCK_LEN, 1);
-      buf_xor (c->u_mode.gcm.u_tag.tag, c->u_mode.gcm.tagiv,
-               c->u_mode.gcm.u_tag.tag, GCRY_GCM_BLOCK_LEN);
+      cipher_block_xor (c->u_mode.gcm.u_tag.tag, c->u_mode.gcm.tagiv,
+                        c->u_mode.gcm.u_tag.tag, GCRY_GCM_BLOCK_LEN);
       c->marks.tag = 1;
 
       wipememory (bitlengths, sizeof (bitlengths));
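Mapped onto the sketch above, this last hunk is a plain 16-byte block xor
(hypothetical names again, standing in for the cipher_block_xor call):

    /* tag = tagiv ^ tag over one 16-byte GCM block, using block_xor
       from the sketch above.  */
    unsigned char tag[16], tagiv[16];
    /* ... tag and tagiv filled in by the GCM computation ... */
    block_xor (tag, tagiv, tag, 16);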