author     Niels Möller <nisse@lysator.liu.se>  2018-01-08 21:11:46 +0100
committer  Niels Möller <nisse@lysator.liu.se>  2018-01-09 07:31:54 +0100
commit     27cb643877539156a948dff4431a76ec2644e610 (patch)
tree       733e0741e84f9c4a8d142b30c6aa58b8092bd3dc
parent     54b2d297afa86a84fc3dc23e0529fb5120ef5a99 (diff)
download   nettle-27cb643877539156a948dff4431a76ec2644e610.tar.gz
New helper function ctr_fill.
-rw-r--r--  ChangeLog   2
-rw-r--r--  ctr.c      52
2 files changed, 27 insertions, 27 deletions
diff --git a/ChangeLog b/ChangeLog
index aa9608d7..d7076285 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -9,6 +9,8 @@
2018-01-08 Niels Möller <nisse@lysator.liu.se>
+ * ctr.c (ctr_fill): New function. Use in ctr_crypt.
+
* ctr.c (ctr_crypt): For in-place operation, increase max buffer
size from 4 blocks to 512 bytes, similarly to CBC and CFB.
Improves in-place aes128 CTR performance by 25% on x86_64.
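
As background for the ChangeLog entries above, a minimal sketch of how ctr_crypt is typically driven through nettle's public aes128 interface; the function name aes128_ctr_inplace and its key/nonce/buffer arguments are illustrative, not part of this commit:

#include <stdint.h>
#include <string.h>
#include <nettle/aes.h>
#include <nettle/ctr.h>

/* Encrypt (or, since CTR is its own inverse, decrypt) a buffer in
   place. key is AES128_KEY_SIZE (16) bytes, nonce is AES_BLOCK_SIZE
   (16) bytes. Passing dst == src to ctr_crypt selects the in-place
   path bounded by the CTR_BUFFER_LIMIT stack buffer. */
static void
aes128_ctr_inplace (const uint8_t *key, const uint8_t *nonce,
                    uint8_t *buf, size_t length)
{
  struct aes128_ctx ctx;
  uint8_t ctr[AES_BLOCK_SIZE];

  aes128_set_encrypt_key (&ctx, key);
  memcpy (ctr, nonce, AES_BLOCK_SIZE);

  ctr_crypt (&ctx, (nettle_cipher_func *) aes128_encrypt,
             AES_BLOCK_SIZE, ctr, length, buf, buf);
}
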
diff --git a/ctr.c b/ctr.c
index 42048833..f4164466 100644
--- a/ctr.c
+++ b/ctr.c
@@ -48,6 +48,20 @@
/* Don't allocate any more space than this on the stack */
#define CTR_BUFFER_LIMIT 512
+#define MIN(a,b) (((a) < (b)) ? (a) : (b))
+
+static size_t
+ctr_fill (size_t block_size, uint8_t *ctr, size_t length, uint8_t *buffer)
+{
+ size_t i;
+ for (i = 0; i + block_size <= length; i += block_size)
+ {
+ memcpy (buffer + i, ctr, block_size);
+ INCREMENT(block_size, ctr);
+ }
+ return i;
+}
+
void
ctr_crypt(const void *ctx, nettle_cipher_func *f,
size_t block_size, uint8_t *ctr,
@@ -64,28 +78,19 @@ ctr_crypt(const void *ctx, nettle_cipher_func *f,
}
else
{
- size_t left;
- uint8_t *p;
+ size_t filled = ctr_fill (block_size, ctr, length, dst);
- for (p = dst, left = length;
- left >= block_size;
- left -= block_size, p += block_size)
- {
- memcpy (p, ctr, block_size);
- INCREMENT(block_size, ctr);
- }
-
- f(ctx, length - left, dst, dst);
- memxor(dst, src, length - left);
+ f(ctx, filled, dst, dst);
+ memxor(dst, src, filled);
- if (left)
+ if (filled < length)
{
TMP_DECL(buffer, uint8_t, NETTLE_MAX_CIPHER_BLOCK_SIZE);
TMP_ALLOC(buffer, block_size);
f(ctx, block_size, buffer, ctr);
INCREMENT(block_size, ctr);
- memxor3(dst + length - left, src + length - left, buffer, left);
+ memxor3(dst + filled, src + filled, buffer, length - filled);
}
}
}
@@ -107,19 +112,12 @@ ctr_crypt(const void *ctx, nettle_cipher_func *f,
while (length >= block_size)
{
- size_t i;
- for (i = 0;
- i + block_size <= buffer_size && i + block_size <= length;
- i += block_size)
- {
- memcpy (buffer + i, ctr, block_size);
- INCREMENT(block_size, ctr);
- }
- assert (i > 0);
- f(ctx, i, buffer, buffer);
- memxor(dst, buffer, i);
- length -= i;
- dst += i;
+ size_t filled = ctr_fill (block_size, ctr, MIN(buffer_size, length), buffer);
+ assert (filled > 0);
+ f(ctx, filled, buffer, buffer);
+ memxor(dst, buffer, filled);
+ length -= filled;
+ dst += filled;
}
/* Final, possibly partial, block. */
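
ctr_fill steps the counter with nettle's internal INCREMENT macro, which treats the block as a single big-endian integer. A self-contained sketch of that behaviour, under a hypothetical function name:

#include <stddef.h>
#include <stdint.h>

/* Add one to a block_size-byte counter, big-endian: the carry
   propagates from the last byte towards the first. */
static void
ctr_increment_sketch (size_t block_size, uint8_t *ctr)
{
  size_t i = block_size;
  while (i > 0 && ++ctr[--i] == 0)
    ; /* this byte wrapped to zero; carry into the next byte up */
}

So each block that ctr_fill copies into the buffer is one counter step ahead of the previous one, which is exactly the keystream input CTR mode needs.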