author    Jussi Kivilinna <jussi.kivilinna@iki.fi>   2018-06-19 18:34:33 +0300
committer Jussi Kivilinna <jussi.kivilinna@iki.fi>   2018-06-19 19:29:25 +0300
commit    f5168091c1930e948af8f25da11cad5dfa62c7ba (patch)
tree      25076591dff28f703299337a43009b308acd937e /cipher/cipher-cmac.c
parent    a69021535b472556651eb2bab65666206c56c24b (diff)
download  libgcrypt-f5168091c1930e948af8f25da11cad5dfa62c7ba.tar.gz
Avoid division by spec->blocksize in cipher mode handlers
* cipher/cipher-internal.h (_gcry_blocksize_shift): New.
* cipher/cipher-cbc.c (_gcry_cipher_cbc_encrypt)
(_gcry_cipher_cbc_decrypt): Use bit-level operations instead of
division to get number of blocks and check input length against
blocksize.
* cipher/cipher-cfb.c (_gcry_cipher_cfb_encrypt)
(_gcry_cipher_cfb_decrypt): Ditto.
* cipher/cipher-cmac.c (_gcry_cmac_write): Ditto.
* cipher/cipher-ctr.c (_gcry_cipher_ctr_crypt): Ditto.
* cipher/cipher-ofb.c (_gcry_cipher_ofb_encrypt)
(_gcry_cipher_ofb_decrypt): Ditto.
--

Integer division was causing 10 to 20 cycles per call overhead for
cipher modes on x86-64.

Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
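[Editor's note: the new helper lives in cipher/cipher-internal.h and is not
part of the hunk shown below. As a rough sketch of the idea only, using
hypothetical stand-in types (the real handle and spec structs are internal
to libgcrypt) and relying on the removed check further down, which restricted
blocksize to 8 or 16 bytes, such a helper could look like this:

    /* Hypothetical stand-ins for the internal handle/spec types; the field
     * name mirrors the c->spec->blocksize usage visible in the diff.  */
    struct hypothetical_spec   { unsigned int blocksize; };
    struct hypothetical_handle { const struct hypothetical_spec *spec; };

    /* Sketch of a blocksize-shift helper: with only 64-bit (8-byte) and
     * 128-bit (16-byte) blocks in use, log2(blocksize) is 3 or 4 and can
     * be obtained without an integer division.  */
    static inline unsigned int
    blocksize_shift_sketch (const struct hypothetical_handle *c)
    {
      return c->spec->blocksize == 8 ? 3 : 4;
    }

The mode handlers then reconstruct blocksize as 1 << shift and replace every
division or multiplication by blocksize with a shift, which is what the hunks
below do for _gcry_cmac_write.]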
Diffstat (limited to 'cipher/cipher-cmac.c')
-rw-r--r--  cipher/cipher-cmac.c | 16
1 file changed, 6 insertions(+), 10 deletions(-)
diff --git a/cipher/cipher-cmac.c b/cipher/cipher-cmac.c
index 30567b7f..321ab9ea 100644
--- a/cipher/cipher-cmac.c
+++ b/cipher/cipher-cmac.c
@@ -38,7 +38,8 @@ _gcry_cmac_write (gcry_cipher_hd_t c, gcry_cmac_context_t *ctx,
const byte * inbuf, size_t inlen)
{
gcry_cipher_encrypt_t enc_fn = c->spec->encrypt;
- const unsigned int blocksize = c->spec->blocksize;
+ size_t blocksize_shift = _gcry_blocksize_shift(c);
+ size_t blocksize = 1 << blocksize_shift;
byte outbuf[MAX_BLOCKSIZE];
unsigned int burn = 0;
unsigned int nblocks;
@@ -46,11 +47,6 @@ _gcry_cmac_write (gcry_cipher_hd_t c, gcry_cmac_context_t *ctx,
if (ctx->tag)
return GPG_ERR_INV_STATE;
-  /* Tell compiler that we require a cipher with a 64bit or 128 bit block
-   * length, to allow better optimization of this function. */
-  if (blocksize > 16 || blocksize < 8 || blocksize & (8 - 1))
-    return GPG_ERR_INV_CIPHER_MODE;
-
if (!inbuf)
return GPG_ERR_INV_ARG;
@@ -78,12 +74,12 @@ _gcry_cmac_write (gcry_cipher_hd_t c, gcry_cmac_context_t *ctx,
if (c->bulk.cbc_enc && inlen > blocksize)
{
- nblocks = inlen / blocksize;
- nblocks -= (nblocks * blocksize == inlen);
+ nblocks = inlen >> blocksize_shift;
+ nblocks -= ((nblocks << blocksize_shift) == inlen);
c->bulk.cbc_enc (&c->context.c, ctx->u_iv.iv, outbuf, inbuf, nblocks, 1);
- inbuf += nblocks * blocksize;
- inlen -= nblocks * blocksize;
+ inbuf += nblocks << blocksize_shift;
+ inlen -= nblocks << blocksize_shift;
wipememory (outbuf, sizeof (outbuf));
}
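[Editor's note: outside the library, the shift-based block count in the last
hunk can be checked against the old division-based version with a small
standalone program. Everything below is illustrative only; it assumes nothing
beyond blocksize being a power of two (8 or 16), as the removed check above
required.

    #include <stdio.h>
    #include <stddef.h>

    int
    main (void)
    {
      static const size_t shifts[] = { 3, 4 };   /* 8-byte and 16-byte blocks */
      size_t i, inlen;

      for (i = 0; i < 2; i++)
        {
          size_t blocksize_shift = shifts[i];
          size_t blocksize = (size_t)1 << blocksize_shift;

          for (inlen = blocksize + 1; inlen < 4 * blocksize; inlen++)
            {
              /* Old: integer division, holding the last full block back
               * for CMAC's final processing.  */
              size_t nblocks_div = inlen / blocksize;
              nblocks_div -= (nblocks_div * blocksize == inlen);

              /* New: shift, valid because blocksize is a power of two.  */
              size_t nblocks_shift = inlen >> blocksize_shift;
              nblocks_shift -= ((nblocks_shift << blocksize_shift) == inlen);

              if (nblocks_div != nblocks_shift)
                {
                  printf ("mismatch at blocksize=%zu inlen=%zu\n",
                          blocksize, inlen);
                  return 1;
                }
            }
        }

      printf ("shift-based block count matches division for all tested sizes\n");
      return 0;
    }

The two computations agree for every tested length, so the change is purely a
cost optimization: shifts avoid the 10 to 20 cycle per-call overhead of the
integer division noted in the commit message.]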