diff options
author | Jussi Kivilinna <jussi.kivilinna@iki.fi> | 2020-11-07 11:45:47 +0200 |
---|---|---|
committer | Jussi Kivilinna <jussi.kivilinna@iki.fi> | 2020-12-18 20:24:07 +0200 |
commit | 5aeb091f911398217b2e9facb9bdeb05c63d7844 (patch) | |
tree | 53dafb0c75ed82d5984c157f50c1ccf2696434c5 /cipher/rijndael-s390x.c | |
parent | f4e63e92dc0b79633f48b11d292dd7bdf2752ede (diff) | |
download | libgcrypt-5aeb091f911398217b2e9facb9bdeb05c63d7844.tar.gz |
Add bulk AES-GCM acceleration for s390x/zSeries
* cipher/Makefile.am: Add 'asm-inline-s390x.h'.
* cipher/asm-inline-s390x.h: New.
* cipher/cipher-gcm.c [GCM_USE_S390X_CRYPTO] (ghash_s390x_kimd): New.
(setupM) [GCM_USE_S390X_CRYPTO]: Add setup for s390x GHASH function.
* cipher/cipher-internal.h (GCM_USE_S390X_CRYPTO): New.
* cipher/rijndael-s390x.c (u128_t, km_functions_e): Move to
'asm-inline-s390x.h'.
(aes_s390x_gcm_crypt): New.
(_gcry_aes_s390x_setup_acceleration): Use 'km_function_to_mask'; Add
setup for GCM bulk function.
--
This patch adds zSeries acceleration for GHASH and AES-GCM.
Benchmarks (z15, 5.2Ghz):
Before:
AES | nanosecs/byte mebibytes/sec cycles/byte
GCM enc | 2.64 ns/B 361.6 MiB/s 13.71 c/B
GCM dec | 2.64 ns/B 361.3 MiB/s 13.72 c/B
GCM auth | 2.58 ns/B 370.1 MiB/s 13.40 c/B
After:
AES | nanosecs/byte mebibytes/sec cycles/byte
GCM enc | 0.059 ns/B 16066 MiB/s 0.309 c/B
GCM dec | 0.059 ns/B 16114 MiB/s 0.308 c/B
GCM auth | 0.057 ns/B 16747 MiB/s 0.296 c/B
Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
Diffstat (limited to 'cipher/rijndael-s390x.c')
-rw-r--r-- | cipher/rijndael-s390x.c | 86 |
1 files changed, 57 insertions, 29 deletions
diff --git a/cipher/rijndael-s390x.c b/cipher/rijndael-s390x.c index 7b40b8a8..5ab019f9 100644 --- a/cipher/rijndael-s390x.c +++ b/cipher/rijndael-s390x.c @@ -25,25 +25,7 @@ #ifdef USE_S390X_CRYPTO -typedef unsigned int u128_t __attribute__ ((mode (TI))); - -enum km_functions_e -{ - KM_FUNCTION_AES_128 = 18, - KM_FUNCTION_AES_192 = 19, - KM_FUNCTION_AES_256 = 20, - KM_FUNCTION_XTS_AES_128 = 50, - KM_FUNCTION_XTS_AES_256 = 52, - - KM_ENCRYPT = 0x00, - KM_DECRYPT = 0x80, - - KMF_LCFB_16 = 16 << 24, - - KMA_LPC = 1 << 8, - KMA_LAAD = 1 << 9, - KMA_HS = 1 << 10, -}; +#include "asm-inline-s390x.h" #define ALWAYS_INLINE inline __attribute__((always_inline)) #define NO_INLINE __attribute__((noinline)) @@ -453,6 +435,45 @@ static void aes_s390x_ctr128_enc(void *context, unsigned char *ctr, wipememory (&params, sizeof(params)); } +static size_t aes_s390x_gcm_crypt(gcry_cipher_hd_t c, void *outbuf_arg, + const void *inbuf_arg, size_t nblocks, + int encrypt) +{ + RIJNDAEL_context *ctx = (void *)&c->context.c; + byte *out = outbuf_arg; + const byte *in = inbuf_arg; + byte *ctr = c->u_ctr.ctr; + unsigned int function; + struct aes_s390x_gcm_params_s params; + + function = ctx->kma_func | (encrypt ? KM_ENCRYPT : KM_DECRYPT) + | KMA_HS | KMA_LAAD; + + /* Prepare parameter block. */ + memset (&params.reserved, 0, sizeof(params.reserved)); + buf_put_be32 (&params.counter_value, buf_get_be32(ctr + 12) - 1); + memcpy (&params.tag, c->u_mode.gcm.u_tag.tag, 16); + memcpy (&params.hash_subkey, c->u_mode.gcm.u_ghash_key.key, 16); + params.total_aad_length = 0; + params.total_cipher_length = 0; + memcpy (&params.initial_counter_value, ctr, 12); + params.initial_counter_value[3] = params.counter_value; + memcpy (&params.key, ctx->keyschenc, 32); + + /* Update counter (CTR32). */ + buf_put_be32(ctr + 12, buf_get_be32(ctr + 12) + nblocks); + + /* Perform KMA-GCM. */ + kma_execute (function, &params, out, in, nblocks * BLOCKSIZE, NULL, 0); + + /* Update tag. 
*/ + memcpy (c->u_mode.gcm.u_tag.tag, &params.tag, 16); + + wipememory (&params, sizeof(params)); + + return 0; +} + static void aes_s390x_xts_crypt(void *context, unsigned char *tweak, void *outbuf_arg, const void *inbuf_arg, size_t nblocks, int encrypt) @@ -1014,20 +1035,20 @@ int _gcry_aes_s390x_setup_acceleration(RIJNDAEL_context *ctx, case 16: func = KM_FUNCTION_AES_128; func_xts = KM_FUNCTION_XTS_AES_128; - func_mask = (u128_t)1 << (127 - KM_FUNCTION_AES_128); - func_xts_mask = (u128_t)1 << (127 - KM_FUNCTION_XTS_AES_128); + func_mask = km_function_to_mask(KM_FUNCTION_AES_128); + func_xts_mask = km_function_to_mask(KM_FUNCTION_XTS_AES_128); break; case 24: func = KM_FUNCTION_AES_192; func_xts = 0; - func_mask = (u128_t)1 << (127 - KM_FUNCTION_AES_192); - func_xts_mask = 0; + func_mask = km_function_to_mask(KM_FUNCTION_AES_192); + func_xts_mask = 0; /* XTS-AES192 not available. */ break; case 32: func = KM_FUNCTION_AES_256; func_xts = KM_FUNCTION_XTS_AES_256; - func_mask = (u128_t)1 << (127 - KM_FUNCTION_AES_256); - func_xts_mask = (u128_t)1 << (127 - KM_FUNCTION_AES_256); + func_mask = km_function_to_mask(KM_FUNCTION_AES_256); + func_xts_mask = km_function_to_mask(KM_FUNCTION_AES_256); break; } @@ -1079,6 +1100,11 @@ int _gcry_aes_s390x_setup_acceleration(RIJNDAEL_context *ctx, bulk_ops->cfb_dec = aes_s390x_cfb128_dec; } + if (ctx->km_func_xts) + { + bulk_ops->xts_crypt = aes_s390x_xts_crypt; + } + if (ctx->kmc_func) { if(ctx->kmac_func) @@ -1103,11 +1129,13 @@ int _gcry_aes_s390x_setup_acceleration(RIJNDAEL_context *ctx, if (ctx->kma_func) { bulk_ops->ctr_enc = aes_s390x_ctr128_enc; - } - if (ctx->km_func_xts) - { - bulk_ops->xts_crypt = aes_s390x_xts_crypt; + if (kimd_query () & km_function_to_mask (KIMD_FUNCTION_GHASH)) + { + /* KIMD based GHASH implementation is required with AES-GCM + * acceleration. */ + bulk_ops->gcm_crypt = aes_s390x_gcm_crypt; + } } return 1; |