path: root/crypto/skcipher.c
author		Ard Biesheuvel <ard.biesheuvel@linaro.org>	2016-12-29 14:09:08 +0000
committer	Herbert Xu <herbert@gondor.apana.org.au>	2016-12-30 19:52:47 +0800
commit		c821f6ab2e47946f35ee2f30781c5185e5d07f65 (patch)
tree		b46ed60d6257d3302fb6fb15ce4d3d86f1e136fb /crypto/skcipher.c
parent		d79b5d0bbf2e3aded13b0542160bde48e95d3d44 (diff)
download	linux-rt-c821f6ab2e47946f35ee2f30781c5185e5d07f65.tar.gz
crypto: skcipher - introduce walksize attribute for SIMD algos
In some cases, SIMD algorithms can only perform optimally when allowed to operate on multiple input blocks in parallel. This is especially true for bit slicing algorithms, which typically take the same amount of time processing a single block or 8 blocks in parallel. However, other SIMD algorithms may benefit as well from bigger strides.

So add a walksize attribute to the skcipher algorithm definition, and wire it up to the skcipher walk API.

To avoid confusion between the skcipher and AEAD attributes, rename the skcipher_walk chunksize attribute to 'stride', and set it from the walksize (in the skcipher case) or from the chunksize (in the AEAD case).

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
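As an illustration of the driver-facing side (not part of this commit's diff, which only touches crypto/skcipher.c): a hypothetical bit-sliced AES driver could advertise its preferred 8-block stride through the new walksize field of struct skcipher_alg. All bs_* identifiers below are invented for the sketch; only .walksize, and its fallback to .chunksize at registration time, come from this series.

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#define BS_PARALLEL_BLOCKS	8	/* blocks processed per SIMD pass (hypothetical) */

/* hypothetical per-tfm context holding the expanded key */
struct bs_aes_ctx {
	struct crypto_aes_ctx	key;
};

static struct skcipher_alg bs_aes_ecb_alg = {
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "ecb-aes-bitsliced",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct bs_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	/* ask the walk layer to hand out 8 blocks at a time where possible */
	.walksize		= BS_PARALLEL_BLOCKS * AES_BLOCK_SIZE,
	.setkey			= bs_aes_setkey,	/* elided in this sketch */
	.encrypt		= bs_aes_ecb_encrypt,	/* sketched after the diff below */
	.decrypt		= bs_aes_ecb_decrypt,	/* elided in this sketch */
};

Leaving .walksize unset keeps the old behaviour: skcipher_prepare_alg() (last hunk below) defaults it to the chunksize, which itself defaults to the block size.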
Diffstat (limited to 'crypto/skcipher.c')
-rw-r--r--	crypto/skcipher.c	20
1 file changed, 12 insertions(+), 8 deletions(-)
diff --git a/crypto/skcipher.c b/crypto/skcipher.c
index 0e1e6c35188e..6ee6a1521e0b 100644
--- a/crypto/skcipher.c
+++ b/crypto/skcipher.c
@@ -185,12 +185,12 @@ void skcipher_walk_complete(struct skcipher_walk *walk, int err)
 		data = p->data;
 		if (!data) {
 			data = PTR_ALIGN(&p->buffer[0], walk->alignmask + 1);
-			data = skcipher_get_spot(data, walk->chunksize);
+			data = skcipher_get_spot(data, walk->stride);
 		}
 
 		scatterwalk_copychunks(data, &p->dst, p->len, 1);
 
-		if (offset_in_page(p->data) + p->len + walk->chunksize >
+		if (offset_in_page(p->data) + p->len + walk->stride >
 		    PAGE_SIZE)
 			free_page((unsigned long)p->data);
 
@@ -299,7 +299,7 @@ static int skcipher_next_copy(struct skcipher_walk *walk)
 	p->len = walk->nbytes;
 	skcipher_queue_write(walk, p);
 
-	if (offset_in_page(walk->page) + walk->nbytes + walk->chunksize >
+	if (offset_in_page(walk->page) + walk->nbytes + walk->stride >
 	    PAGE_SIZE)
 		walk->page = NULL;
 	else
@@ -344,7 +344,7 @@ static int skcipher_walk_next(struct skcipher_walk *walk)
 			 SKCIPHER_WALK_DIFF);
 
 	n = walk->total;
-	bsize = min(walk->chunksize, max(n, walk->blocksize));
+	bsize = min(walk->stride, max(n, walk->blocksize));
 	n = scatterwalk_clamp(&walk->in, n);
 	n = scatterwalk_clamp(&walk->out, n);
 
@@ -393,7 +393,7 @@ static int skcipher_copy_iv(struct skcipher_walk *walk)
 	unsigned a = crypto_tfm_ctx_alignment() - 1;
 	unsigned alignmask = walk->alignmask;
 	unsigned ivsize = walk->ivsize;
-	unsigned bs = walk->chunksize;
+	unsigned bs = walk->stride;
 	unsigned aligned_bs;
 	unsigned size;
 	u8 *iv;
@@ -463,7 +463,7 @@ static int skcipher_walk_skcipher(struct skcipher_walk *walk,
 		       SKCIPHER_WALK_SLEEP : 0;
 
 	walk->blocksize = crypto_skcipher_blocksize(tfm);
-	walk->chunksize = crypto_skcipher_chunksize(tfm);
+	walk->stride = crypto_skcipher_walksize(tfm);
 	walk->ivsize = crypto_skcipher_ivsize(tfm);
 	walk->alignmask = crypto_skcipher_alignmask(tfm);
 
@@ -525,7 +525,7 @@ static int skcipher_walk_aead_common(struct skcipher_walk *walk,
 		walk->flags &= ~SKCIPHER_WALK_SLEEP;
 
 	walk->blocksize = crypto_aead_blocksize(tfm);
-	walk->chunksize = crypto_aead_chunksize(tfm);
+	walk->stride = crypto_aead_chunksize(tfm);
 	walk->ivsize = crypto_aead_ivsize(tfm);
 	walk->alignmask = crypto_aead_alignmask(tfm);
 
@@ -821,6 +821,7 @@ static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg)
seq_printf(m, "max keysize : %u\n", skcipher->max_keysize);
seq_printf(m, "ivsize : %u\n", skcipher->ivsize);
seq_printf(m, "chunksize : %u\n", skcipher->chunksize);
+ seq_printf(m, "walksize : %u\n", skcipher->walksize);
}
#ifdef CONFIG_NET
@@ -893,11 +894,14 @@ static int skcipher_prepare_alg(struct skcipher_alg *alg)
 {
 	struct crypto_alg *base = &alg->base;
 
-	if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8)
+	if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8 ||
+	    alg->walksize > PAGE_SIZE / 8)
 		return -EINVAL;
 
 	if (!alg->chunksize)
 		alg->chunksize = base->cra_blocksize;
+	if (!alg->walksize)
+		alg->walksize = alg->chunksize;
 
 	base->cra_type = &crypto_skcipher_type2;
 	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
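For completeness, a hypothetical encrypt path for the sketch above, showing how a driver consumes the larger stride: with walksize set to 8 blocks, the walk hands out at least 8 blocks per step (bouncing fragmented input when needed) until the final chunk, so the driver can round down to whole 8-block batches everywhere except at the tail. bs_aes_ecb_encrypt_blocks() is again a made-up SIMD helper, not a kernel API.

static int bs_aes_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct bs_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* keep whole 8-block batches unless this is the final chunk */
		if (walk.nbytes < walk.total)
			blocks = round_down(blocks, BS_PARALLEL_BLOCKS);

		/* hypothetical SIMD helper: encrypts 'blocks' ECB blocks */
		bs_aes_ecb_encrypt_blocks(&ctx->key, walk.dst.virt.addr,
					  walk.src.virt.addr, blocks);

		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}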