path: root/cipher/rijndael-ppc-common.h
author     Jussi Kivilinna <jussi.kivilinna@iki.fi>   2020-02-02 19:52:08 +0200
committer  Jussi Kivilinna <jussi.kivilinna@iki.fi>   2020-02-02 19:52:08 +0200
commit     89776d45c824032409f581e5fd1db6bf149df57f (patch)
tree       0e9908f0995967bbe9f2cbe48a4e9c3281620421 /cipher/rijndael-ppc-common.h
parent     114bbc45e9717f9ad9641f64d8df8690db8da434 (diff)
download   libgcrypt-89776d45c824032409f581e5fd1db6bf149df57f.tar.gz
rijndael-aes: use zero offset vector load/store when possible
* cipher/rijndael-ppc-common.h (asm_aligned_ld, asm_aligned_st): Use
zero offset instruction variant when input offset is constant zero.
* cipher/rijndael-ppc.c (asm_load_be_noswap)
(asm_store_be_noswap): Likewise.
--
Signed-off-by: Jussi Kivilinna <jussi.kivilinna@iki.fi>
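The point of the change: in the POWER lvx/stvx instructions an RA operand of 0 is read as the literal value zero rather than the contents of r0, so a constant-zero offset can use the two-operand form and drop the r0 clobber that the indexed form uses to keep the offset out of r0. A minimal self-contained sketch of the pattern, with illustrative names (aligned_ld_sketch, first_key_sketch) rather than the libgcrypt definitions, assuming GCC with AltiVec enabled:

#include <stdint.h>
#include <altivec.h>

typedef vector unsigned char block;

static inline block
aligned_ld_sketch (unsigned long offset, const void *ptr)
{
  block vec;
#if defined(__GNUC__) && __GNUC__ >= 4
  if (__builtin_constant_p (offset) && offset == 0)
    /* "0" in the RA slot is the literal zero, not r0: one register
       less and no r0 clobber needed.  */
    __asm__ volatile ("lvx %0,0,%1\n\t"
                      : "=v" (vec)
                      : "r" ((uintptr_t)ptr)
                      : "memory");
  else
#endif
    /* Indexed form: r0 is clobbered so the offset operand is never
       allocated to r0, where the hardware would read it as zero.  */
    __asm__ volatile ("lvx %0,%1,%2\n\t"
                      : "=v" (vec)
                      : "r" (offset), "r" ((uintptr_t)ptr)
                      : "memory", "r0");
  return vec;
}

/* With a compile-time zero offset the branch is folded away and only
   the zero-offset lvx is emitted; a runtime offset keeps the indexed
   form.  */
static inline block
first_key_sketch (const void *keysched)
{
  return aligned_ld_sketch (0, keysched);
}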
Diffstat (limited to 'cipher/rijndael-ppc-common.h')
-rw-r--r--   cipher/rijndael-ppc-common.h   32
1 file changed, 24 insertions(+), 8 deletions(-)
diff --git a/cipher/rijndael-ppc-common.h b/cipher/rijndael-ppc-common.h
index 165dd9f7..bbbeaac0 100644
--- a/cipher/rijndael-ppc-common.h
+++ b/cipher/rijndael-ppc-common.h
@@ -188,20 +188,36 @@ static ASM_FUNC_ATTR_INLINE block
asm_aligned_ld(unsigned long offset, const void *ptr)
{
block vec;
- __asm__ volatile ("lvx %0,%1,%2\n\t"
- : "=v" (vec)
- : "r" (offset), "r" ((uintptr_t)ptr)
- : "memory", "r0");
+#if __GNUC__ >= 4
+ if (__builtin_constant_p (offset) && offset == 0)
+ __asm__ volatile ("lvx %0,0,%1\n\t"
+ : "=v" (vec)
+ : "r" ((uintptr_t)ptr)
+ : "memory");
+ else
+#endif
+ __asm__ volatile ("lvx %0,%1,%2\n\t"
+ : "=v" (vec)
+ : "r" (offset), "r" ((uintptr_t)ptr)
+ : "memory", "r0");
return vec;
}

static ASM_FUNC_ATTR_INLINE void
asm_aligned_st(block vec, unsigned long offset, void *ptr)
{
- __asm__ volatile ("stvx %0,%1,%2\n\t"
- :
- : "v" (vec), "r" (offset), "r" ((uintptr_t)ptr)
- : "memory", "r0");
+#if __GNUC__ >= 4
+ if (__builtin_constant_p (offset) && offset == 0)
+ __asm__ volatile ("stvx %0,0,%1\n\t"
+ :
+ : "v" (vec), "r" ((uintptr_t)ptr)
+ : "memory");
+ else
+#endif
+ __asm__ volatile ("stvx %0,%1,%2\n\t"
+ :
+ : "v" (vec), "r" (offset), "r" ((uintptr_t)ptr)
+ : "memory", "r0");
}

static ASM_FUNC_ATTR_INLINE block