author     Fangming.Fang <fangming.fang@arm.com>      2019-05-31 10:15:10 +0000
committer  Pauli <paul.dale@oracle.com>               2019-12-19 12:36:07 +1000
commit     31b59078c8245a4ee7f7fa4e6ea98bba7f9a29a5 (patch)
tree       e490a3b2bcf796c9c6e98fe86bcfb3d351a8a1b5 /providers
parent     51a7c4b5f2a0b2d0f6bc0c87ec2ee44b9697dc78 (diff)
download   openssl-new-31b59078c8245a4ee7f7fa4e6ea98bba7f9a29a5.tar.gz
Optimize AES-GCM implementation on aarch64
Compared to the current implementation, this change improves performance further by tuning the loop-unrolling factor in the interleaved implementation as well as by enabling a higher level of parallelism.

Performance (A72):

new
type          16 bytes    64 bytes    256 bytes    1024 bytes   8192 bytes   16384 bytes
aes-128-gcm  113065.51k  375743.00k   848359.51k  1517865.98k  1964040.19k  1986663.77k
aes-192-gcm  110679.32k  364470.63k   799322.88k  1428084.05k  1826917.03k  1848967.17k
aes-256-gcm  104919.86k  352939.29k   759477.76k  1330683.56k  1663175.34k  1670430.72k

old
type          16 bytes    64 bytes    256 bytes    1024 bytes   8192 bytes   16384 bytes
aes-128-gcm  115595.32k  382348.65k   855891.29k  1236452.35k  1425670.14k  1429793.45k
aes-192-gcm  112227.02k  369543.47k   810046.55k  1147948.37k  1286288.73k  1296941.06k
aes-256-gcm  111543.90k  361902.36k   769543.59k  1070693.03k  1208576.68k  1207511.72k

Change-Id: I28a2dca85c001a63a2a942e80c7c64f7a4fdfcf7

Reviewed-by: Bernd Edlinger <bernd.edlinger@hotmail.de>
Reviewed-by: Paul Dale <paul.dale@oracle.com>
(Merged from https://github.com/openssl/openssl/pull/9818)
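For context, the throughput columns above are the block sizes reported by `openssl speed -evp aes-128-gcm`. The minimal sketch below (not part of this patch; key, IV and buffer contents are arbitrary demo values) performs one AES-128-GCM encryption through the EVP API, which on OpenSSL 3.0 is the path that ends up in the provider GCM code changed here.

/*
 * Minimal sketch (not part of this patch): one AES-128-GCM encryption
 * through the EVP API. Build with: cc demo.c -lcrypto
 */
#include <stdio.h>
#include <openssl/evp.h>

int main(void)
{
    unsigned char key[16] = {0};     /* demo key, all zero                  */
    unsigned char iv[12] = {0};      /* GCM default IV length is 12 bytes   */
    unsigned char pt[1024] = {0};    /* one of the speed block sizes        */
    unsigned char ct[1024], tag[16];
    int outl = 0, tmplen = 0, ok = 0;
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();

    if (ctx != NULL
        && EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, key, iv)
        && EVP_EncryptUpdate(ctx, ct, &outl, pt, (int)sizeof(pt))
        && EVP_EncryptFinal_ex(ctx, ct + outl, &tmplen)
        && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_GET_TAG, 16, tag))
        ok = 1;
    printf("%s, %d bytes of ciphertext\n", ok ? "ok" : "failed", outl + tmplen);
    EVP_CIPHER_CTX_free(ctx);
    return ok ? 0 : 1;
}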
Diffstat (limited to 'providers')
-rw-r--r--   providers/implementations/ciphers/cipher_aes_gcm_hw.c           2
-rw-r--r--   providers/implementations/ciphers/cipher_aes_gcm_hw_armv8.inc  83
-rw-r--r--   providers/implementations/ciphers/ciphercommon_gcm_hw.c        19
3 files changed, 96 insertions, 8 deletions
diff --git a/providers/implementations/ciphers/cipher_aes_gcm_hw.c b/providers/implementations/ciphers/cipher_aes_gcm_hw.c
index 0373917a18..08ee34ef1e 100644
--- a/providers/implementations/ciphers/cipher_aes_gcm_hw.c
+++ b/providers/implementations/ciphers/cipher_aes_gcm_hw.c
@@ -68,6 +68,8 @@ static const PROV_GCM_HW aes_gcm = {
# include "cipher_aes_gcm_hw_aesni.inc"
#elif defined(SPARC_AES_CAPABLE)
# include "cipher_aes_gcm_hw_t4.inc"
+#elif defined(AES_PMULL_CAPABLE) && defined(AES_GCM_ASM)
+# include "cipher_aes_gcm_hw_armv8.inc"
#else
const PROV_GCM_HW *PROV_AES_HW_gcm(size_t keybits)
{
diff --git a/providers/implementations/ciphers/cipher_aes_gcm_hw_armv8.inc b/providers/implementations/ciphers/cipher_aes_gcm_hw_armv8.inc
new file mode 100644
index 0000000000..4e8cc9c54e
--- /dev/null
+++ b/providers/implementations/ciphers/cipher_aes_gcm_hw_armv8.inc
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2019 The OpenSSL Project Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License 2.0 (the "License"). You may not use
+ * this file except in compliance with the License. You can obtain a copy
+ * in the file LICENSE in the source distribution or at
+ * https://www.openssl.org/source/license.html
+ */
+
+/*
+ * Crypto extension support for AES GCM.
+ * This file is included by cipher_aes_gcm_hw.c
+ */
+
+size_t armv8_aes_gcm_encrypt(const unsigned char *in, unsigned char *out, size_t len,
+ const void *key, unsigned char ivec[16], u64 *Xi)
+{
+ size_t align_bytes = 0;
+ align_bytes = len - len % 16;
+
+ AES_KEY *aes_key = (AES_KEY *)key;
+
+ switch(aes_key->rounds) {
+ case 10:
+ aes_gcm_enc_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
+ break;
+ case 12:
+ aes_gcm_enc_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
+ break;
+ case 14:
+ aes_gcm_enc_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
+ break;
+ }
+ return align_bytes;
+}
+
+size_t armv8_aes_gcm_decrypt(const unsigned char *in, unsigned char *out, size_t len,
+ const void *key, unsigned char ivec[16], u64 *Xi)
+{
+ size_t align_bytes = 0;
+ align_bytes = len - len % 16;
+
+ AES_KEY *aes_key = (AES_KEY *)key;
+
+ switch(aes_key->rounds) {
+ case 10:
+ aes_gcm_dec_128_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
+ break;
+ case 12:
+ aes_gcm_dec_192_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
+ break;
+ case 14:
+ aes_gcm_dec_256_kernel(in, align_bytes * 8, out, (uint64_t *)Xi, ivec, key);
+ break;
+ }
+ return align_bytes;
+}
+
+static int armv8_aes_gcm_initkey(PROV_GCM_CTX *ctx, const unsigned char *key,
+ size_t keylen)
+{
+ PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
+ AES_KEY *ks = &actx->ks.ks;
+
+ GCM_HW_SET_KEY_CTR_FN(ks, aes_v8_set_encrypt_key, aes_v8_encrypt,
+ aes_v8_ctr32_encrypt_blocks);
+ return 1;
+}
+
+
+static const PROV_GCM_HW armv8_aes_gcm = {
+ armv8_aes_gcm_initkey,
+ gcm_setiv,
+ gcm_aad_update,
+ gcm_cipher_update,
+ gcm_cipher_final,
+ gcm_one_shot
+};
+
+const PROV_GCM_HW *PROV_AES_HW_gcm(size_t keybits)
+{
+ return AES_PMULL_CAPABLE ? &armv8_aes_gcm : &aes_gcm;
+}
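The switch on aes_key->rounds in the new kernels works because the AES round count is fixed by the key size (Nr = Nk + 6, with Nk the key length in 32-bit words), so 10, 12 and 14 rounds correspond to 128-, 192- and 256-bit keys; each kernel is handed the byte count rounded down to a whole number of 16-byte blocks, scaled to bits (align_bytes * 8), and the number of bytes actually processed is returned so the caller can finish the tail. A standalone helper, written only to spell out that mapping (not part of the patch):

/* Illustration only: the AES round count determines the key size, which is
 * why the dispatch above can pick a kernel from aes_key->rounds alone. */
static int aes_rounds_to_keybits(int rounds)
{
    switch (rounds) {
    case 10: return 128;    /* Nk = 4 words */
    case 12: return 192;    /* Nk = 6 words */
    case 14: return 256;    /* Nk = 8 words */
    default: return 0;      /* not a valid AES round count */
    }
}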
diff --git a/providers/implementations/ciphers/ciphercommon_gcm_hw.c b/providers/implementations/ciphers/ciphercommon_gcm_hw.c
index 60c7ac5d8f..1114c36b3f 100644
--- a/providers/implementations/ciphers/ciphercommon_gcm_hw.c
+++ b/providers/implementations/ciphers/ciphercommon_gcm_hw.c
@@ -30,14 +30,16 @@ int gcm_cipher_update(PROV_GCM_CTX *ctx, const unsigned char *in,
#if defined(AES_GCM_ASM)
size_t bulk = 0;
- if (len >= 32 && AES_GCM_ASM(ctx)) {
+ if (len >= AES_GCM_ENC_BYTES && AES_GCM_ASM(ctx)) {
size_t res = (16 - ctx->gcm.mres) % 16;
if (CRYPTO_gcm128_encrypt(&ctx->gcm, in, out, res))
return 0;
- bulk = aesni_gcm_encrypt(in + res, out + res, len - res,
- ctx->gcm.key,
- ctx->gcm.Yi.c, ctx->gcm.Xi.u);
+
+ bulk = AES_gcm_encrypt(in + res, out + res, len - res,
+ ctx->gcm.key,
+ ctx->gcm.Yi.c, ctx->gcm.Xi.u);
+
ctx->gcm.len.u[1] += bulk;
bulk += res;
}
@@ -57,15 +59,16 @@ int gcm_cipher_update(PROV_GCM_CTX *ctx, const unsigned char *in,
#if defined(AES_GCM_ASM)
size_t bulk = 0;
- if (len >= 16 && AES_GCM_ASM(ctx)) {
+ if (len >= AES_GCM_DEC_BYTES && AES_GCM_ASM(ctx)) {
size_t res = (16 - ctx->gcm.mres) % 16;
if (CRYPTO_gcm128_decrypt(&ctx->gcm, in, out, res))
return -1;
- bulk = aesni_gcm_decrypt(in + res, out + res, len - res,
- ctx->gcm.key,
- ctx->gcm.Yi.c, ctx->gcm.Xi.u);
+ bulk = AES_gcm_decrypt(in + res, out + res, len - res,
+ ctx->gcm.key,
+ ctx->gcm.Yi.c, ctx->gcm.Xi.u);
+
ctx->gcm.len.u[1] += bulk;
bulk += res;
}
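Both hunks follow the same three-step pattern: finish any partially filled 16-byte GCM block through the generic CRYPTO_gcm128_* routine, hand the block-aligned middle to the platform bulk kernel behind AES_gcm_encrypt/AES_gcm_decrypt (with AES_GCM_ENC_BYTES/AES_GCM_DEC_BYTES presumably set per platform elsewhere in this file as the minimum length worth dispatching), and leave the unaligned tail to the generic path. A self-contained sketch of just that split arithmetic, with hypothetical names:

#include <stddef.h>

/* Hypothetical helper (not OpenSSL code): given the number of bytes already
 * buffered in the current GCM block (mres, 0..15) and the update length,
 * compute how much goes to the generic path first (head), how much a
 * 16-byte-block bulk kernel may take (aligned), and what tail remains. */
static void gcm_bulk_split(size_t mres, size_t len,
                           size_t *head, size_t *aligned, size_t *tail)
{
    *head = (16 - mres) % 16;                /* bytes completing the open block */
    if (*head > len)
        *head = len;
    *aligned = (len - *head) & ~(size_t)15;  /* whole blocks for the kernel */
    *tail = len - *head - *aligned;          /* remainder for the generic path */
}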