summary refs log tree commit diff
path: root/lib/accelerated/x86/aes-x86.c
diff options
context:
space:
mode:
Diffstat (limited to 'lib/accelerated/x86/aes-x86.c')
-rw-r--r--  lib/accelerated/x86/aes-x86.c | 272
1 file changed, 132 insertions, 140 deletions
diff --git a/lib/accelerated/x86/aes-x86.c b/lib/accelerated/x86/aes-x86.c
index f3738bcde5..59e2b13280 100644
--- a/lib/accelerated/x86/aes-x86.c
+++ b/lib/accelerated/x86/aes-x86.c
@@ -32,192 +32,184 @@
#include <aes-x86.h>
#include <x86.h>
-struct aes_ctx
-{
- AES_KEY expanded_key;
- uint8_t iv[16];
- int enc;
+struct aes_ctx {
+ AES_KEY expanded_key;
+ uint8_t iv[16];
+ int enc;
};
static int
-aes_cipher_init (gnutls_cipher_algorithm_t algorithm, void **_ctx, int enc)
-{
- /* we use key size to distinguish */
- if (algorithm != GNUTLS_CIPHER_AES_128_CBC
- && algorithm != GNUTLS_CIPHER_AES_192_CBC
- && algorithm != GNUTLS_CIPHER_AES_256_CBC)
- return GNUTLS_E_INVALID_REQUEST;
-
- *_ctx = gnutls_calloc (1, sizeof (struct aes_ctx));
- if (*_ctx == NULL)
- {
- gnutls_assert ();
- return GNUTLS_E_MEMORY_ERROR;
- }
-
- ((struct aes_ctx*)(*_ctx))->enc = enc;
-
- return 0;
-}
-
-static int
-aes_cipher_setkey (void *_ctx, const void *userkey, size_t keysize)
+aes_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx, int enc)
{
- struct aes_ctx *ctx = _ctx;
- int ret;
+ /* we use key size to distinguish */
+ if (algorithm != GNUTLS_CIPHER_AES_128_CBC
+ && algorithm != GNUTLS_CIPHER_AES_192_CBC
+ && algorithm != GNUTLS_CIPHER_AES_256_CBC)
+ return GNUTLS_E_INVALID_REQUEST;
- if (ctx->enc)
- ret = aesni_set_encrypt_key (userkey, keysize * 8, ALIGN16(&ctx->expanded_key));
- else
- ret = aesni_set_decrypt_key (userkey, keysize * 8, ALIGN16(&ctx->expanded_key));
+ *_ctx = gnutls_calloc(1, sizeof(struct aes_ctx));
+ if (*_ctx == NULL) {
+ gnutls_assert();
+ return GNUTLS_E_MEMORY_ERROR;
+ }
- if (ret != 0)
- return gnutls_assert_val (GNUTLS_E_ENCRYPTION_FAILED);
+ ((struct aes_ctx *) (*_ctx))->enc = enc;
- return 0;
+ return 0;
}
static int
-aes_setiv (void *_ctx, const void *iv, size_t iv_size)
+aes_cipher_setkey(void *_ctx, const void *userkey, size_t keysize)
{
- struct aes_ctx *ctx = _ctx;
+ struct aes_ctx *ctx = _ctx;
+ int ret;
+
+ if (ctx->enc)
+ ret =
+ aesni_set_encrypt_key(userkey, keysize * 8,
+ ALIGN16(&ctx->expanded_key));
+ else
+ ret =
+ aesni_set_decrypt_key(userkey, keysize * 8,
+ ALIGN16(&ctx->expanded_key));
+
+ if (ret != 0)
+ return gnutls_assert_val(GNUTLS_E_ENCRYPTION_FAILED);
+
+ return 0;
+}
+
+static int aes_setiv(void *_ctx, const void *iv, size_t iv_size)
+{
+ struct aes_ctx *ctx = _ctx;
- memcpy (ctx->iv, iv, 16);
- return 0;
+ memcpy(ctx->iv, iv, 16);
+ return 0;
}
static int
-aes_encrypt (void *_ctx, const void *src, size_t src_size,
- void *dst, size_t dst_size)
+aes_encrypt(void *_ctx, const void *src, size_t src_size,
+ void *dst, size_t dst_size)
{
- struct aes_ctx *ctx = _ctx;
+ struct aes_ctx *ctx = _ctx;
- aesni_cbc_encrypt (src, dst, src_size, ALIGN16(&ctx->expanded_key), ctx->iv, 1);
- return 0;
+ aesni_cbc_encrypt(src, dst, src_size, ALIGN16(&ctx->expanded_key),
+ ctx->iv, 1);
+ return 0;
}
static int
-aes_decrypt (void *_ctx, const void *src, size_t src_size,
- void *dst, size_t dst_size)
+aes_decrypt(void *_ctx, const void *src, size_t src_size,
+ void *dst, size_t dst_size)
{
- struct aes_ctx *ctx = _ctx;
+ struct aes_ctx *ctx = _ctx;
- aesni_cbc_encrypt (src, dst, src_size, ALIGN16(&ctx->expanded_key), ctx->iv, 0);
+ aesni_cbc_encrypt(src, dst, src_size, ALIGN16(&ctx->expanded_key),
+ ctx->iv, 0);
- return 0;
+ return 0;
}
-static void
-aes_deinit (void *_ctx)
+static void aes_deinit(void *_ctx)
{
- gnutls_free (_ctx);
+ gnutls_free(_ctx);
}
static const gnutls_crypto_cipher_st cipher_struct = {
- .init = aes_cipher_init,
- .setkey = aes_cipher_setkey,
- .setiv = aes_setiv,
- .encrypt = aes_encrypt,
- .decrypt = aes_decrypt,
- .deinit = aes_deinit,
+ .init = aes_cipher_init,
+ .setkey = aes_cipher_setkey,
+ .setiv = aes_setiv,
+ .encrypt = aes_encrypt,
+ .decrypt = aes_decrypt,
+ .deinit = aes_deinit,
};
-static unsigned
-check_optimized_aes (void)
+static unsigned check_optimized_aes(void)
{
- unsigned int a, b, c, d;
- gnutls_cpuid (1, &a, &b, &c, &d);
+ unsigned int a, b, c, d;
+ gnutls_cpuid(1, &a, &b, &c, &d);
- return (c & 0x2000000);
+ return (c & 0x2000000);
}
#ifdef ASM_X86_64
-static unsigned
-check_pclmul (void)
+static unsigned check_pclmul(void)
{
- unsigned int a, b, c, d;
- gnutls_cpuid (1, &a, &b, &c, &d);
+ unsigned int a, b, c, d;
+ gnutls_cpuid(1, &a, &b, &c, &d);
- return (c & 0x2);
+ return (c & 0x2);
}
#endif
-static unsigned
-check_intel_or_amd (void)
+static unsigned check_intel_or_amd(void)
{
- unsigned int a, b, c, d;
- gnutls_cpuid (0, &a, &b, &c, &d);
-
- if ((memcmp (&b, "Genu", 4) == 0 &&
- memcmp (&d, "ineI", 4) == 0 &&
- memcmp (&c, "ntel", 4) == 0) ||
- (memcmp (&b, "Auth", 4) == 0 &&
- memcmp (&d, "enti", 4) == 0 && memcmp (&c, "cAMD", 4) == 0))
- {
- return 1;
- }
-
- return 0;
+ unsigned int a, b, c, d;
+ gnutls_cpuid(0, &a, &b, &c, &d);
+
+ if ((memcmp(&b, "Genu", 4) == 0 &&
+ memcmp(&d, "ineI", 4) == 0 &&
+ memcmp(&c, "ntel", 4) == 0) ||
+ (memcmp(&b, "Auth", 4) == 0 &&
+ memcmp(&d, "enti", 4) == 0 && memcmp(&c, "cAMD", 4) == 0)) {
+ return 1;
+ }
+
+ return 0;
}
-void
-register_x86_crypto (void)
+void register_x86_crypto(void)
{
- int ret;
-
- if (check_intel_or_amd () == 0)
- return;
-
- if (check_optimized_aes ())
- {
- _gnutls_debug_log ("Intel AES accelerator was detected\n");
- ret =
- gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_128_CBC, 80,
- &cipher_struct);
- if (ret < 0)
- {
- gnutls_assert ();
- }
-
- ret =
- gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_192_CBC, 80,
- &cipher_struct);
- if (ret < 0)
- {
- gnutls_assert ();
- }
-
- ret =
- gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_256_CBC, 80,
- &cipher_struct);
- if (ret < 0)
- {
- gnutls_assert ();
- }
-
+ int ret;
+
+ if (check_intel_or_amd() == 0)
+ return;
+
+ if (check_optimized_aes()) {
+ _gnutls_debug_log("Intel AES accelerator was detected\n");
+ ret =
+ gnutls_crypto_single_cipher_register
+ (GNUTLS_CIPHER_AES_128_CBC, 80, &cipher_struct);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+ ret =
+ gnutls_crypto_single_cipher_register
+ (GNUTLS_CIPHER_AES_192_CBC, 80, &cipher_struct);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+ ret =
+ gnutls_crypto_single_cipher_register
+ (GNUTLS_CIPHER_AES_256_CBC, 80, &cipher_struct);
+ if (ret < 0) {
+ gnutls_assert();
+ }
#ifdef ASM_X86_64
- if (check_pclmul ())
- {
- /* register GCM ciphers */
- _gnutls_debug_log ("Intel GCM accelerator was detected\n");
- ret =
- gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_128_GCM,
- 80, &aes_gcm_struct);
- if (ret < 0)
- {
- gnutls_assert ();
- }
-
- ret =
- gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_256_GCM,
- 80, &aes_gcm_struct);
- if (ret < 0)
- {
- gnutls_assert ();
- }
- }
+ if (check_pclmul()) {
+ /* register GCM ciphers */
+ _gnutls_debug_log
+ ("Intel GCM accelerator was detected\n");
+ ret =
+ gnutls_crypto_single_cipher_register
+ (GNUTLS_CIPHER_AES_128_GCM, 80,
+ &aes_gcm_struct);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+ ret =
+ gnutls_crypto_single_cipher_register
+ (GNUTLS_CIPHER_AES_256_GCM, 80,
+ &aes_gcm_struct);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+ }
#endif
- }
+ }
- return;
+ return;
}