author    Jussi Kivilinna <jussi.kivilinna@iki.fi>  2017-01-28 11:26:02 +0200
committer Jussi Kivilinna <jussi.kivilinna@iki.fi>  2017-01-28 11:26:02 +0200
commit    4f31d816dcc1e95dc647651e92acbdfed53f5c14
tree      488686464badc1fa4065f6f9c9467b54ce8fed43  /cipher/cipher-xts.c
parent    55cf1b5588705cab5f45e2817c4aa1d204dc0042
download  libgcrypt-4f31d816dcc1e95dc647651e92acbdfed53f5c14.tar.gz
cipher-xts: fix pointer casting to wrong alignment and aliasing
* cipher/cipher-xts.c (xts_gfmul_byA, xts_inc128): Use buf_get_le64
and buf_put_le64 for accessing data; change parameter pointers to
'unsigned char *' type.
(_gcry_cipher_xts_crypt): Do not cast buffer pointers to 'u64 *' for
helper functions.
--
Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
Diffstat (limited to 'cipher/cipher-xts.c')
-rw-r--r--  cipher/cipher-xts.c  26
1 file changed, 13 insertions(+), 13 deletions(-)
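
The problem being fixed: c->u_ctr.ctr and c->u_iv.iv are plain unsigned char
arrays, so casting them to 'u64 *' and dereferencing assumes an 8-byte
alignment that is not guaranteed and accesses the bytes through an
incompatible effective type, which is undefined behaviour under C's
strict-aliasing rules and can fault on strict-alignment CPUs. The
buf_get_le64/buf_put_le64 helpers avoid both issues by moving the data byte
by byte. A minimal sketch of that access pattern, assuming nothing about
libgcrypt's actual bufhelp.h implementation (the get_le64/put_le64 names
below are illustrative):

#include <stdint.h>

/* Read a 64-bit little-endian value from a possibly unaligned buffer.  */
static inline uint64_t get_le64 (const unsigned char *p)
{
  return  (uint64_t)p[0]       | (uint64_t)p[1] << 8
        | (uint64_t)p[2] << 16 | (uint64_t)p[3] << 24
        | (uint64_t)p[4] << 32 | (uint64_t)p[5] << 40
        | (uint64_t)p[6] << 48 | (uint64_t)p[7] << 56;
}

/* Write a 64-bit value to a possibly unaligned buffer, little endian.  */
static inline void put_le64 (unsigned char *p, uint64_t v)
{
  int i;
  for (i = 0; i < 8; i++)
    p[i] = (unsigned char)(v >> (8 * i));
}

Compilers generally recognize this byte-by-byte pattern and emit a single
load or store on targets where unaligned access is cheap, so the rewrite
need not cost performance on common platforms.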
diff --git a/cipher/cipher-xts.c b/cipher/cipher-xts.c
index 7a7181b8..4da89e55 100644
--- a/cipher/cipher-xts.c
+++ b/cipher/cipher-xts.c
@@ -29,29 +29,29 @@
#include "./cipher-internal.h"
-static inline void xts_gfmul_byA (u64 *out, const u64 *in)
+static inline void xts_gfmul_byA (unsigned char *out, const unsigned char *in)
{
- u64 hi = le_bswap64 (in[1]);
- u64 lo = le_bswap64 (in[0]);
+ u64 hi = buf_get_le64 (in + 8);
+ u64 lo = buf_get_le64 (in + 0);
u64 carry = -(hi >> 63) & 0x87;
hi = (hi << 1) + (lo >> 63);
lo = (lo << 1) ^ carry;
- out[1] = le_bswap64 (hi);
- out[0] = le_bswap64 (lo);
+ buf_put_le64 (out + 8, hi);
+ buf_put_le64 (out + 0, lo);
}
-static inline void xts_inc128 (u64 *seqno)
+static inline void xts_inc128 (unsigned char *seqno)
{
- u64 lo = le_bswap64 (seqno[0]);
- u64 hi = le_bswap64 (seqno[1]);
+ u64 lo = buf_get_le64 (seqno + 0);
+ u64 hi = buf_get_le64 (seqno + 8);
hi += !(++lo);
- seqno[0] = le_bswap64 (lo);
- seqno[1] = le_bswap64 (hi);
+ buf_put_le64 (seqno + 0, lo);
+ buf_put_le64 (seqno + 8, hi);
}
@@ -117,7 +117,7 @@ _gcry_cipher_xts_crypt (gcry_cipher_hd_t c,
nblocks--;
/* Generate next tweak. */
- xts_gfmul_byA ((u64 *)c->u_ctr.ctr, (u64 *)c->u_ctr.ctr);
+ xts_gfmul_byA (c->u_ctr.ctr, c->u_ctr.ctr);
}
/* Handle remaining data with ciphertext stealing. */
@@ -129,7 +129,7 @@ _gcry_cipher_xts_crypt (gcry_cipher_hd_t c,
gcry_assert (inbuflen < GCRY_XTS_BLOCK_LEN * 2);
/* Generate last tweak. */
- xts_gfmul_byA (tmp.x64, (u64 *)c->u_ctr.ctr);
+ xts_gfmul_byA (tmp.x1, c->u_ctr.ctr);
/* Decrypt last block first. */
buf_xor (outbuf, inbuf, tmp.x64, GCRY_XTS_BLOCK_LEN);
@@ -158,7 +158,7 @@ _gcry_cipher_xts_crypt (gcry_cipher_hd_t c,
}
/* Auto-increment data-unit sequence number */
- xts_inc128 ((u64 *)c->u_iv.iv);
+ xts_inc128 (c->u_iv.iv);
wipememory (&tmp, sizeof(tmp));
wipememory (c->u_ctr.ctr, sizeof(c->u_ctr.ctr));
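
For context (not part of the patch): xts_gfmul_byA multiplies the 128-bit
tweak by the primitive element alpha of GF(2^128) as specified for XTS,
i.e. a one-bit left shift with reduction by x^128 + x^7 + x^2 + x + 1; the
reduced part x^7 + x^2 + x + 1 is the byte 0x87, which is where that
constant comes from. A byte-wise reference for the same doubling, as a
self-contained sanity check (xts_double and the test values below are
illustrative, not libgcrypt code):

#include <stdio.h>

/* Multiply a 16-byte little-endian tweak by alpha: shift left one bit
   and, if a bit fell off the top, xor the reduction byte 0x87 into the
   lowest byte.  */
static void xts_double (unsigned char t[16])
{
  unsigned int carry = 0, i;

  for (i = 0; i < 16; i++)
    {
      unsigned int b = t[i];
      t[i] = (unsigned char)((b << 1) | carry);
      carry = b >> 7;
    }
  if (carry)
    t[0] ^= 0x87;
}

int main (void)
{
  unsigned char t[16] = { 0 };

  t[15] = 0x80;                /* only the top bit of the tweak is set */
  xts_double (t);
  printf ("%02x\n", t[0]);     /* prints 87: the reduction kicked in */
  return 0;
}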