author     Nikos Mavrogiannopoulos <nmav@gnutls.org>    2013-12-14 11:57:02 +0100
committer  Nikos Mavrogiannopoulos <nmav@gnutls.org>    2013-12-14 12:13:40 +0100
commit     691ab9fa8de99f602ad77f38c0323b00d297fd1b (patch)
tree       2761151c43f7c7ca12790cb1168f882a6807d8a9
parent     ae89ad7815e7a022d68baa1288b517f3e0a111c5 (diff)
download   gnutls-691ab9fa8de99f602ad77f38c0323b00d297fd1b.tar.gz
Added Appro's SSSE3 SHA implementations
-rw-r--r--  cfg.mk | 175
-rw-r--r--  devel/perlasm/openssl-cpuid-x86.pl | 477
-rw-r--r--  devel/perlasm/sha1-ssse3-x86.pl | 1266
-rwxr-xr-x  devel/perlasm/sha1-ssse3-x86_64.pl | 1815
-rw-r--r--  devel/perlasm/sha256-ssse3-x86.pl | 1125
-rw-r--r--  devel/perlasm/sha512-ssse3-x86.pl | 910
-rwxr-xr-x  devel/perlasm/sha512-ssse3-x86_64.pl | 2152
-rw-r--r--  lib/accelerated/x86/Makefile.am | 19
-rw-r--r--  lib/accelerated/x86/aes-padlock.h | 3
-rw-r--r--  lib/accelerated/x86/aes-x86.c | 101
-rw-r--r--  lib/accelerated/x86/coff/aesni-x86.s (renamed from lib/accelerated/x86/coff/appro-aes-x86-coff.s) | 2
-rw-r--r--  lib/accelerated/x86/coff/aesni-x86_64.s (renamed from lib/accelerated/x86/coff/appro-aes-x86-64-coff.s) | 2
-rw-r--r--  lib/accelerated/x86/coff/cpuid-x86.s (renamed from lib/accelerated/x86/coff/cpuid-x86-coff.s) | 2
-rw-r--r--  lib/accelerated/x86/coff/cpuid-x86_64.s (renamed from lib/accelerated/x86/coff/cpuid-x86-64-coff.s) | 2
-rw-r--r--  lib/accelerated/x86/coff/e_padlock-x86.s (renamed from lib/accelerated/x86/coff/padlock-x86-coff.s) | 2
-rw-r--r--  lib/accelerated/x86/coff/e_padlock-x86_64.s (renamed from lib/accelerated/x86/coff/padlock-x86-64-coff.s) | 2
-rw-r--r--  lib/accelerated/x86/coff/ghash-x86_64.s (renamed from lib/accelerated/x86/coff/appro-aes-gcm-x86-64-coff.s) | 2
-rw-r--r--  lib/accelerated/x86/coff/openssl-cpuid-x86.s | 396
-rw-r--r--  lib/accelerated/x86/coff/openssl-cpuid-x86_64.s | 361
-rw-r--r--  lib/accelerated/x86/coff/sha1-ssse3-x86.s | 1420
-rw-r--r--  lib/accelerated/x86/coff/sha1-ssse3-x86_64.s | 2693
-rw-r--r--  lib/accelerated/x86/coff/sha256-avx-x86_64.s | 2645
-rw-r--r--  lib/accelerated/x86/coff/sha256-ssse3-x86.s | 3402
-rw-r--r--  lib/accelerated/x86/coff/sha512-ssse3-x86.s | 605
-rw-r--r--  lib/accelerated/x86/coff/sha512-ssse3-x86_64.s | 3025
-rw-r--r--  lib/accelerated/x86/elf/aesni-x86.s (renamed from lib/accelerated/x86/elf/appro-aes-x86.s) | 0
-rw-r--r--  lib/accelerated/x86/elf/aesni-x86_64.s (renamed from lib/accelerated/x86/elf/appro-aes-x86-64.s) | 0
-rw-r--r--  lib/accelerated/x86/elf/cpuid-x86_64.s (renamed from lib/accelerated/x86/elf/cpuid-x86-64.s) | 0
-rw-r--r--  lib/accelerated/x86/elf/e_padlock-x86.s (renamed from lib/accelerated/x86/elf/padlock-x86.s) | 0
-rw-r--r--  lib/accelerated/x86/elf/e_padlock-x86_64.s (renamed from lib/accelerated/x86/elf/padlock-x86-64.s) | 0
-rw-r--r--  lib/accelerated/x86/elf/ghash-x86_64.s (renamed from lib/accelerated/x86/elf/appro-aes-gcm-x86-64.s) | 0
-rw-r--r--  lib/accelerated/x86/elf/sha1-ssse3-x86.s | 1421
-rw-r--r--  lib/accelerated/x86/elf/sha1-ssse3-x86_64.s | 2515
-rw-r--r--  lib/accelerated/x86/elf/sha256-avx-x86_64.s | 2614
-rw-r--r--  lib/accelerated/x86/elf/sha256-ssse3-x86.s | 3403
-rw-r--r--  lib/accelerated/x86/elf/sha512-ssse3-x86.s | 606
-rw-r--r--  lib/accelerated/x86/elf/sha512-ssse3-x86_64.s | 2881
-rw-r--r--  lib/accelerated/x86/files.mk | 6
-rw-r--r--  lib/accelerated/x86/hmac-x86.c | 300
-rw-r--r--  lib/accelerated/x86/macosx/aesni-x86.s (renamed from lib/accelerated/x86/macosx/appro-aes-x86-macosx.s) | 4
-rw-r--r--  lib/accelerated/x86/macosx/aesni-x86_64.s (renamed from lib/accelerated/x86/macosx/appro-aes-x86-64-macosx.s) | 4
-rw-r--r--  lib/accelerated/x86/macosx/cpuid-x86-64-macosx.s | 75
-rw-r--r--  lib/accelerated/x86/macosx/cpuid-x86-macosx.s | 87
-rw-r--r--  lib/accelerated/x86/macosx/cpuid-x86.s | 70
-rw-r--r--  lib/accelerated/x86/macosx/cpuid-x86_64.s | 58
-rw-r--r--  lib/accelerated/x86/macosx/e_padlock-x86.s (renamed from lib/accelerated/x86/macosx/padlock-x86-macosx.s) | 4
-rw-r--r--  lib/accelerated/x86/macosx/e_padlock-x86_64.s (renamed from lib/accelerated/x86/macosx/padlock-x86-64-macosx.s) | 4
-rw-r--r--  lib/accelerated/x86/macosx/ghash-x86_64.s (renamed from lib/accelerated/x86/macosx/appro-aes-gcm-x86-64-macosx.s) | 4
-rw-r--r--  lib/accelerated/x86/macosx/openssl-cpuid-x86.s | 399
-rw-r--r--  lib/accelerated/x86/macosx/openssl-cpuid-x86_64.s | 365
-rw-r--r--  lib/accelerated/x86/macosx/sha1-ssse3-x86.s | 1419
-rw-r--r--  lib/accelerated/x86/macosx/sha1-ssse3-x86_64.s | 2515
-rw-r--r--  lib/accelerated/x86/macosx/sha256-avx-x86_64.s | 2614
-rw-r--r--  lib/accelerated/x86/macosx/sha256-ssse3-x86.s | 3405
-rw-r--r--  lib/accelerated/x86/macosx/sha512-ssse3-x86.s | 604
-rw-r--r--  lib/accelerated/x86/macosx/sha512-ssse3-x86_64.s | 2881
-rw-r--r--  lib/accelerated/x86/sha-padlock.h | 3
-rw-r--r--  lib/accelerated/x86/sha-x86.c | 365
-rw-r--r--  lib/accelerated/x86/sha-x86.h | 22
59 files changed, 50954 insertions, 298 deletions
diff --git a/cfg.mk b/cfg.mk
index ceebda93b6..8959d6e23b 100644
--- a/cfg.mk
+++ b/cfg.mk
@@ -163,143 +163,102 @@ upload-web:
cvs commit -m "Update." manual/ reference/ \
doxygen/ devel/ cyclo/
-ASM_SOURCES:= \
- lib/accelerated/x86/elf/cpuid-x86-64.s \
- lib/accelerated/x86/elf/cpuid-x86.s \
- lib/accelerated/x86/elf/appro-aes-gcm-x86-64.s \
- lib/accelerated/x86/elf/appro-aes-x86-64.s \
- lib/accelerated/x86/elf/appro-aes-x86.s \
- lib/accelerated/x86/elf/padlock-x86-64.s \
- lib/accelerated/x86/elf/padlock-x86.s \
- lib/accelerated/x86/coff/cpuid-x86-coff.s \
- lib/accelerated/x86/coff/cpuid-x86-64-coff.s \
- lib/accelerated/x86/coff/appro-aes-gcm-x86-64-coff.s \
- lib/accelerated/x86/coff/appro-aes-x86-64-coff.s \
- lib/accelerated/x86/coff/appro-aes-x86-coff.s \
- lib/accelerated/x86/coff/padlock-x86-64-coff.s \
- lib/accelerated/x86/coff/padlock-x86-coff.s \
- lib/accelerated/x86/macosx/cpuid-x86-64-macosx.s \
- lib/accelerated/x86/macosx/cpuid-x86-macosx.s \
- lib/accelerated/x86/macosx/appro-aes-gcm-x86-64-macosx.s \
- lib/accelerated/x86/macosx/appro-aes-x86-64-macosx.s \
- lib/accelerated/x86/macosx/appro-aes-x86-macosx.s \
- lib/accelerated/x86/macosx/padlock-x86-64-macosx.s \
- lib/accelerated/x86/macosx/padlock-x86-macosx.s
-
-asm-sources: $(ASM_SOURCES)
+ASM_SOURCES_XXX := \
+ lib/accelerated/x86/XXX/cpuid-x86_64.s \
+ lib/accelerated/x86/XXX/cpuid-x86.s \
+ lib/accelerated/x86/XXX/ghash-x86_64.s \
+ lib/accelerated/x86/XXX/aesni-x86_64.s \
+ lib/accelerated/x86/XXX/aesni-x86.s \
+ lib/accelerated/x86/XXX/e_padlock-x86_64.s \
+ lib/accelerated/x86/XXX/e_padlock-x86.s \
+ lib/accelerated/x86/XXX/sha1-ssse3-x86.s \
+ lib/accelerated/x86/XXX/sha1-ssse3-x86_64.s \
+ lib/accelerated/x86/XXX/sha256-ssse3-x86.s \
+ lib/accelerated/x86/XXX/sha512-ssse3-x86.s \
+ lib/accelerated/x86/XXX/sha512-ssse3-x86_64.s \
+
+ASM_SOURCES_ELF := $(subst XXX,elf,$(ASM_SOURCES_XXX))
+ASM_SOURCES_COFF := $(subst XXX,coff,$(ASM_SOURCES_XXX))
+ASM_SOURCES_MACOSX := $(subst XXX,macosx,$(ASM_SOURCES_XXX))
+
+asm-sources: $(ASM_SOURCES_ELF) $(ASM_SOURCES_COFF) $(ASM_SOURCES_MACOSX) lib/accelerated/x86/files.mk
asm-sources-clean:
- rm -f $(ASM_SOURCES)
-
-lib/accelerated/x86/elf/cpuid-x86-64.s: devel/perlasm/cpuid-x86_64.pl
+ rm -f $(ASM_SOURCES_ELF) $(ASM_SOURCES_COFF) $(ASM_SOURCES_MACOSX) lib/accelerated/x86/files.mk
+
+X86_FILES=XXX/aesni-x86.s XXX/cpuid-x86.s XXX/e_padlock-x86.s XXX/sha1-ssse3-x86.s \
+ XXX/sha256-ssse3-x86.s XXX/sha512-ssse3-x86.s
+
+X86_64_FILES=XXX/aesni-x86_64.s XXX/cpuid-x86_64.s XXX/e_padlock-x86_64.s XXX/ghash-x86_64.s \
+ XXX/sha1-ssse3-x86_64.s XXX/sha512-ssse3-x86_64.s
+
+X86_FILES_ELF := $(subst XXX,elf,$(X86_FILES))
+X86_FILES_COFF := $(subst XXX,coff,$(X86_FILES))
+X86_FILES_MACOSX := $(subst XXX,macosx,$(X86_FILES))
+X86_64_FILES_ELF := $(subst XXX,elf,$(X86_64_FILES))
+X86_64_FILES_COFF := $(subst XXX,coff,$(X86_64_FILES))
+X86_64_FILES_MACOSX := $(subst XXX,macosx,$(X86_64_FILES))
+
+lib/accelerated/x86/files.mk:
+ echo X86_FILES_ELF=$(X86_FILES_ELF) > $@.tmp
+ echo X86_FILES_COFF=$(X86_FILES_COFF) >> $@.tmp
+ echo X86_FILES_MACOSX=$(X86_FILES_MACOSX) >> $@.tmp
+ echo X86_64_FILES_ELF=$(X86_64_FILES_ELF) >> $@.tmp
+ echo X86_64_FILES_COFF=$(X86_64_FILES_COFF) >> $@.tmp
+ echo X86_64_FILES_MACOSX=$(X86_64_FILES_MACOSX) >> $@.tmp
+ mv $@.tmp $@
+
+# CPUID is handled differently (other license)
+lib/accelerated/x86/elf/cpuid-%.s: devel/perlasm/cpuid-%.pl $(objects)
cat devel/perlasm/license-gnutls.txt > $@
perl $< elf >> $@
echo "" >> $@
echo ".section .note.GNU-stack,\"\",%progbits" >> $@
-
-lib/accelerated/x86/elf/cpuid-x86.s: devel/perlasm/cpuid-x86.pl
+lib/accelerated/x86/coff/cpuid-x86.s: devel/perlasm/cpuid-x86.pl $(objects)
cat devel/perlasm/license-gnutls.txt > $@
- perl $< elf >> $@
+ perl $< coff >> $@
echo "" >> $@
echo ".section .note.GNU-stack,\"\",%progbits" >> $@
-lib/accelerated/x86/elf/appro-aes-gcm-x86-64.s: devel/perlasm/ghash-x86_64.pl
- cat devel/perlasm/license.txt > $@
- perl $< elf >> $@
+lib/accelerated/x86/coff/cpuid-x86_64.s: devel/perlasm/cpuid-x86_64.pl $(objects)
+ cat devel/perlasm/license-gnutls.txt > $@
+ perl $< mingw64 >> $@
echo "" >> $@
echo ".section .note.GNU-stack,\"\",%progbits" >> $@
-lib/accelerated/x86/elf/appro-aes-x86-64.s: devel/perlasm/aesni-x86_64.pl
- cat devel/perlasm/license.txt > $@
- perl $< elf >> $@
+lib/accelerated/x86/macosx/cpuid-%.s: devel/perlasm/cpuid-%.pl $(objects)
+ cat devel/perlasm/license-gnutls.txt > $@
+ perl $< macosx >> $@
echo "" >> $@
echo ".section .note.GNU-stack,\"\",%progbits" >> $@
-lib/accelerated/x86/elf/appro-aes-x86.s: devel/perlasm/aesni-x86.pl
- cat devel/perlasm/license.txt > $@
- perl $< elf >> $@
- echo "" >> $@
- echo ".section .note.GNU-stack,\"\",%progbits" >> $@
-lib/accelerated/x86/elf/padlock-x86-64.s: devel/perlasm/e_padlock-x86_64.pl
+# Appro's code
+lib/accelerated/x86/elf/%.s: devel/perlasm/%.pl $(objects)
cat devel/perlasm/license.txt > $@
perl $< elf >> $@
echo "" >> $@
echo ".section .note.GNU-stack,\"\",%progbits" >> $@
+ sed -i 's/OPENSSL_ia32cap_P/_gnutls_x86_cpuid_s/g' $@
-lib/accelerated/x86/elf/padlock-x86.s: devel/perlasm/e_padlock-x86.pl
+lib/accelerated/x86/coff/%-x86.s: devel/perlasm/%-x86.pl $(objects)
cat devel/perlasm/license.txt > $@
- perl $< elf >> $@
+ perl $< coff >> $@
echo "" >> $@
echo ".section .note.GNU-stack,\"\",%progbits" >> $@
+ sed -i 's/OPENSSL_ia32cap_P/_gnutls_x86_cpuid_s/g' $@
-lib/accelerated/x86/coff/appro-aes-gcm-x86-64-coff.s: devel/perlasm/ghash-x86_64.pl
- cat devel/perlasm/license.txt > $@
- perl $< mingw64 >> $@
-
-lib/accelerated/x86/coff/appro-aes-x86-64-coff.s: devel/perlasm/aesni-x86_64.pl
- cat devel/perlasm/license.txt > $@
- perl $< mingw64 >> $@
-
-lib/accelerated/x86/coff/appro-aes-x86-coff.s: devel/perlasm/aesni-x86.pl
- cat devel/perlasm/license.txt > $@
- perl $< coff >> $@
-
-lib/accelerated/x86/coff/padlock-x86-64-coff.s: devel/perlasm/e_padlock-x86_64.pl
+lib/accelerated/x86/coff/%-x86_64.s: devel/perlasm/%-x86_64.pl $(objects)
cat devel/perlasm/license.txt > $@
perl $< mingw64 >> $@
+ echo "" >> $@
+ echo ".section .note.GNU-stack,\"\",%progbits" >> $@
+ sed -i 's/OPENSSL_ia32cap_P/_gnutls_x86_cpuid_s/g' $@
-lib/accelerated/x86/coff/padlock-x86-coff.s: devel/perlasm/e_padlock-x86.pl
+lib/accelerated/x86/macosx/%.s: devel/perlasm/%.pl $(objects)
cat devel/perlasm/license.txt > $@
- perl $< coff >> $@
-
-lib/accelerated/x86/coff/cpuid-x86-64-coff.s: devel/perlasm/cpuid-x86_64.pl
- cat devel/perlasm/license-gnutls.txt > $@
- perl $< mingw64 >> $@
-
-lib/accelerated/x86/coff/cpuid-x86-coff.s: devel/perlasm/cpuid-x86.pl
- cat devel/perlasm/license-gnutls.txt > $@
- perl $< coff >> $@
-
-lib/accelerated/x86/macosx/appro-aes-gcm-x86-64-macosx.s: devel/perlasm/ghash-x86_64.pl
- echo '/*' > $@
- cat devel/perlasm/license.txt >> $@
- echo '*/' >> $@
- perl $< macosx >> $@
-
-lib/accelerated/x86/macosx/appro-aes-x86-64-macosx.s: devel/perlasm/aesni-x86_64.pl
- echo '/*' > $@
- cat devel/perlasm/license.txt >> $@
- echo '*/' >> $@
- perl $< macosx >> $@
-
-lib/accelerated/x86/macosx/appro-aes-x86-macosx.s: devel/perlasm/aesni-x86.pl
- echo '/*' > $@
- cat devel/perlasm/license.txt >> $@
- echo '*/' >> $@
- perl $< macosx >> $@
-
-lib/accelerated/x86/macosx/padlock-x86-64-macosx.s: devel/perlasm/e_padlock-x86_64.pl
- echo '/*' > $@
- cat devel/perlasm/license.txt >> $@
- echo '*/' >> $@
- perl $< macosx >> $@
-
-lib/accelerated/x86/macosx/padlock-x86-macosx.s: devel/perlasm/e_padlock-x86.pl
- echo '/*' > $@
- cat devel/perlasm/license.txt >> $@
- echo '*/' >> $@
- perl $< macosx >> $@
-
-lib/accelerated/x86/macosx/cpuid-x86-64-macosx.s: devel/perlasm/cpuid-x86_64.pl
- echo '/*' > $@
- cat devel/perlasm/license.txt >> $@
- echo '*/' >> $@
- perl $< macosx >> $@
-
-lib/accelerated/x86/macosx/cpuid-x86-macosx.s: devel/perlasm/cpuid-x86.pl
- echo '/*' > $@
- cat devel/perlasm/license.txt >> $@
- echo '*/' >> $@
perl $< macosx >> $@
+ echo "" >> $@
+ echo ".section .note.GNU-stack,\"\",%progbits" >> $@
+ sed -i 's/OPENSSL_ia32cap_P/_gnutls_x86_cpuid_s/g' $@
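Each rule above follows the same recipe: write out the appropriate license text, run the perlasm generator for the desired flavour (elf, coff/mingw64 or macosx), append a non-executable-stack note and rename OpenSSL's capability word to the symbol gnutls uses. As a minimal Perl sketch of what a single ELF target amounts to (the generator and output paths below are only an example):

    #!/usr/bin/env perl
    # Rough equivalent of one cfg.mk rule, for a single ELF target.
    use strict; use warnings;
    my $pl  = "devel/perlasm/sha1-ssse3-x86_64.pl";            # example generator
    my $out = "lib/accelerated/x86/elf/sha1-ssse3-x86_64.s";   # example target
    system("cat devel/perlasm/license.txt > $out") == 0  or die "license";
    system("perl $pl elf >> $out") == 0                   or die "perlasm";
    open my $fh, '>>', $out or die $!;
    print $fh "\n.section .note.GNU-stack,\"\",%progbits\n";   # non-executable stack
    close $fh;
    # gnutls keeps the CPU capability word under its own symbol name
    system("sed -i 's/OPENSSL_ia32cap_P/_gnutls_x86_cpuid_s/g' $out") == 0 or die "sed";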
diff --git a/devel/perlasm/openssl-cpuid-x86.pl b/devel/perlasm/openssl-cpuid-x86.pl
new file mode 100644
index 0000000000..ef1216a8b2
--- /dev/null
+++ b/devel/perlasm/openssl-cpuid-x86.pl
@@ -0,0 +1,477 @@
+#!/usr/bin/env perl
+
+$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
+push(@INC, "${dir}perlasm", "perlasm");
+require "x86asm.pl";
+
+&asm_init($ARGV[0],"x86cpuid");
+
+for (@ARGV) { $sse2=1 if (/-DOPENSSL_IA32_SSE2/); }
+
+&function_begin("OPENSSL_ia32_cpuid");
+ &xor ("edx","edx");
+ &pushf ();
+ &pop ("eax");
+ &mov ("ecx","eax");
+ &xor ("eax",1<<21);
+ &push ("eax");
+ &popf ();
+ &pushf ();
+ &pop ("eax");
+ &xor ("ecx","eax");
+ &xor ("eax","eax");
+ &bt ("ecx",21);
+ &jnc (&label("nocpuid"));
+ &mov ("esi",&wparam(0));
+ &mov (&DWP(8,"esi"),"eax"); # clear 3rd word
+ &cpuid ();
+ &mov ("edi","eax"); # max value for standard query level
+
+ &xor ("eax","eax");
+ &cmp ("ebx",0x756e6547); # "Genu"
+ &setne (&LB("eax"));
+ &mov ("ebp","eax");
+ &cmp ("edx",0x49656e69); # "ineI"
+ &setne (&LB("eax"));
+ &or ("ebp","eax");
+ &cmp ("ecx",0x6c65746e); # "ntel"
+ &setne (&LB("eax"));
+ &or ("ebp","eax"); # 0 indicates Intel CPU
+ &jz (&label("intel"));
+
+ &cmp ("ebx",0x68747541); # "Auth"
+ &setne (&LB("eax"));
+ &mov ("esi","eax");
+ &cmp ("edx",0x69746E65); # "enti"
+ &setne (&LB("eax"));
+ &or ("esi","eax");
+ &cmp ("ecx",0x444D4163); # "cAMD"
+ &setne (&LB("eax"));
+ &or ("esi","eax"); # 0 indicates AMD CPU
+ &jnz (&label("intel"));
+
+ # AMD specific
+ &mov ("eax",0x80000000);
+ &cpuid ();
+ &cmp ("eax",0x80000001);
+ &jb (&label("intel"));
+ &mov ("esi","eax");
+ &mov ("eax",0x80000001);
+ &cpuid ();
+ &or ("ebp","ecx");
+ &and ("ebp",1<<11|1); # isolate XOP bit
+ &cmp ("esi",0x80000008);
+ &jb (&label("intel"));
+
+ &mov ("eax",0x80000008);
+ &cpuid ();
+ &movz ("esi",&LB("ecx")); # number of cores - 1
+ &inc ("esi"); # number of cores
+
+ &mov ("eax",1);
+ &xor ("ecx","ecx");
+ &cpuid ();
+ &bt ("edx",28);
+ &jnc (&label("generic"));
+ &shr ("ebx",16);
+ &and ("ebx",0xff);
+ &cmp ("ebx","esi");
+ &ja (&label("generic"));
+ &and ("edx",0xefffffff); # clear hyper-threading bit
+ &jmp (&label("generic"));
+
+&set_label("intel");
+ &cmp ("edi",7);
+ &jb (&label("cacheinfo"));
+
+ &mov ("esi",&wparam(0));
+ &mov ("eax",7);
+ &xor ("ecx","ecx");
+ &cpuid ();
+ &mov (&DWP(8,"esi"),"ebx");
+
+&set_label("cacheinfo");
+ &cmp ("edi",4);
+ &mov ("edi",-1);
+ &jb (&label("nocacheinfo"));
+
+ &mov ("eax",4);
+ &mov ("ecx",0); # query L1D
+ &cpuid ();
+ &mov ("edi","eax");
+ &shr ("edi",14);
+ &and ("edi",0xfff); # number of cores -1 per L1D
+
+&set_label("nocacheinfo");
+ &mov ("eax",1);
+ &xor ("ecx","ecx");
+ &cpuid ();
+ &and ("edx",0xbfefffff); # force reserved bits #20, #30 to 0
+ &cmp ("ebp",0);
+ &jne (&label("notintel"));
+ &or ("edx",1<<30); # set reserved bit#30 on Intel CPUs
+ &and (&HB("eax"),15); # family ID
+ &cmp (&HB("eax"),15); # P4?
+ &jne (&label("notintel"));
+ &or ("edx",1<<20); # set reserved bit#20 to engage RC4_CHAR
+&set_label("notintel");
+ &bt ("edx",28); # test hyper-threading bit
+ &jnc (&label("generic"));
+ &and ("edx",0xefffffff);
+ &cmp ("edi",0);
+ &je (&label("generic"));
+
+ &or ("edx",0x10000000);
+ &shr ("ebx",16);
+ &cmp (&LB("ebx"),1);
+ &ja (&label("generic"));
+ &and ("edx",0xefffffff); # clear hyper-threading bit if not
+
+&set_label("generic");
+ &and ("ebp",1<<11); # isolate AMD XOP flag
+ &and ("ecx",0xfffff7ff); # force 11th bit to 0
+ &mov ("esi","edx");
+ &or ("ebp","ecx"); # merge AMD XOP flag
+
+ &bt ("ecx",27); # check OSXSAVE bit
+ &jnc (&label("clear_avx"));
+ &xor ("ecx","ecx");
+ &data_byte(0x0f,0x01,0xd0); # xgetbv
+ &and ("eax",6);
+ &cmp ("eax",6);
+ &je (&label("done"));
+ &cmp ("eax",2);
+ &je (&label("clear_avx"));
+&set_label("clear_xmm");
+ &and ("ebp",0xfdfffffd); # clear AESNI and PCLMULQDQ bits
+ &and ("esi",0xfeffffff); # clear FXSR
+&set_label("clear_avx");
+ &and ("ebp",0xefffe7ff); # clear AVX, FMA and AMD XOP bits
+ &mov ("edi",&wparam(0));
+ &and (&DWP(8,"edi"),0xffffffdf); # clear AVX2
+&set_label("done");
+ &mov ("eax","esi");
+ &mov ("edx","ebp");
+&set_label("nocpuid");
+&function_end("OPENSSL_ia32_cpuid");
+
+&external_label("OPENSSL_ia32cap_P");
+
+&function_begin_B("OPENSSL_rdtsc","EXTRN\t_OPENSSL_ia32cap_P:DWORD");
+ &xor ("eax","eax");
+ &xor ("edx","edx");
+ &picmeup("ecx","OPENSSL_ia32cap_P");
+ &bt (&DWP(0,"ecx"),4);
+ &jnc (&label("notsc"));
+ &rdtsc ();
+&set_label("notsc");
+ &ret ();
+&function_end_B("OPENSSL_rdtsc");
+
+# This works in Ring 0 only [read DJGPP+MS-DOS+privileged DPMI host],
+# but it's safe to call it on any [supported] 32-bit platform...
+# Just check for [non-]zero return value...
+&function_begin_B("OPENSSL_instrument_halt","EXTRN\t_OPENSSL_ia32cap_P:DWORD");
+ &picmeup("ecx","OPENSSL_ia32cap_P");
+ &bt (&DWP(0,"ecx"),4);
+ &jnc (&label("nohalt")); # no TSC
+
+ &data_word(0x9058900e); # push %cs; pop %eax
+ &and ("eax",3);
+ &jnz (&label("nohalt")); # not enough privileges
+
+ &pushf ();
+ &pop ("eax");
+ &bt ("eax",9);
+ &jnc (&label("nohalt")); # interrupts are disabled
+
+ &rdtsc ();
+ &push ("edx");
+ &push ("eax");
+ &halt ();
+ &rdtsc ();
+
+ &sub ("eax",&DWP(0,"esp"));
+ &sbb ("edx",&DWP(4,"esp"));
+ &add ("esp",8);
+ &ret ();
+
+&set_label("nohalt");
+ &xor ("eax","eax");
+ &xor ("edx","edx");
+ &ret ();
+&function_end_B("OPENSSL_instrument_halt");
+
+# Essentially there is only one use for this function. Under DJGPP:
+#
+# #include <go32.h>
+# ...
+# i=OPENSSL_far_spin(_dos_ds,0x46c);
+# ...
+# to obtain the number of spins till closest timer interrupt.
+
+&function_begin_B("OPENSSL_far_spin");
+ &pushf ();
+ &pop ("eax");
+ &bt ("eax",9);
+ &jnc (&label("nospin")); # interrupts are disabled
+
+ &mov ("eax",&DWP(4,"esp"));
+ &mov ("ecx",&DWP(8,"esp"));
+ &data_word (0x90d88e1e); # push %ds, mov %eax,%ds
+ &xor ("eax","eax");
+ &mov ("edx",&DWP(0,"ecx"));
+ &jmp (&label("spin"));
+
+ &align (16);
+&set_label("spin");
+ &inc ("eax");
+ &cmp ("edx",&DWP(0,"ecx"));
+ &je (&label("spin"));
+
+ &data_word (0x1f909090); # pop %ds
+ &ret ();
+
+&set_label("nospin");
+ &xor ("eax","eax");
+ &xor ("edx","edx");
+ &ret ();
+&function_end_B("OPENSSL_far_spin");
+
+&function_begin_B("OPENSSL_wipe_cpu","EXTRN\t_OPENSSL_ia32cap_P:DWORD");
+ &xor ("eax","eax");
+ &xor ("edx","edx");
+ &picmeup("ecx","OPENSSL_ia32cap_P");
+ &mov ("ecx",&DWP(0,"ecx"));
+ &bt (&DWP(0,"ecx"),1);
+ &jnc (&label("no_x87"));
+ if ($sse2) {
+ &and ("ecx",1<<26|1<<24); # check SSE2 and FXSR bits
+ &cmp ("ecx",1<<26|1<<24);
+ &jne (&label("no_sse2"));
+ &pxor ("xmm0","xmm0");
+ &pxor ("xmm1","xmm1");
+ &pxor ("xmm2","xmm2");
+ &pxor ("xmm3","xmm3");
+ &pxor ("xmm4","xmm4");
+ &pxor ("xmm5","xmm5");
+ &pxor ("xmm6","xmm6");
+ &pxor ("xmm7","xmm7");
+ &set_label("no_sse2");
+ }
+ # just a bunch of fldz to zap the fp/mm bank followed by finit...
+ &data_word(0xeed9eed9,0xeed9eed9,0xeed9eed9,0xeed9eed9,0x90e3db9b);
+&set_label("no_x87");
+ &lea ("eax",&DWP(4,"esp"));
+ &ret ();
+&function_end_B("OPENSSL_wipe_cpu");
+
+&function_begin_B("OPENSSL_atomic_add");
+ &mov ("edx",&DWP(4,"esp")); # fetch the pointer, 1st arg
+ &mov ("ecx",&DWP(8,"esp")); # fetch the increment, 2nd arg
+ &push ("ebx");
+ &nop ();
+ &mov ("eax",&DWP(0,"edx"));
+&set_label("spin");
+ &lea ("ebx",&DWP(0,"eax","ecx"));
+ &nop ();
+ &data_word(0x1ab10ff0); # lock; cmpxchg %ebx,(%edx) # %eax is involved and is always reloaded
+ &jne (&label("spin"));
+ &mov ("eax","ebx"); # OpenSSL expects the new value
+ &pop ("ebx");
+ &ret ();
+&function_end_B("OPENSSL_atomic_add");
+
+# This function can become handy under Win32 in situations when
+# we don't know which calling convention, __stdcall or __cdecl(*),
+# indirect callee is using. In C it can be deployed as
+#
+#ifdef OPENSSL_CPUID_OBJ
+# type OPENSSL_indirect_call(void *f,...);
+# ...
+# OPENSSL_indirect_call(func,[up to $max arguments]);
+#endif
+#
+# (*) it's designed to work even for __fastcall if number of
+# arguments is 1 or 2!
+&function_begin_B("OPENSSL_indirect_call");
+ {
+ my ($max,$i)=(7,); # $max has to be chosen as 4*n-1
+ # in order to preserve eventual
+ # stack alignment
+ &push ("ebp");
+ &mov ("ebp","esp");
+ &sub ("esp",$max*4);
+ &mov ("ecx",&DWP(12,"ebp"));
+ &mov (&DWP(0,"esp"),"ecx");
+ &mov ("edx",&DWP(16,"ebp"));
+ &mov (&DWP(4,"esp"),"edx");
+ for($i=2;$i<$max;$i++)
+ {
+ # Some copies will be redundant/bogus...
+ &mov ("eax",&DWP(12+$i*4,"ebp"));
+ &mov (&DWP(0+$i*4,"esp"),"eax");
+ }
+ &call_ptr (&DWP(8,"ebp"));# make the call...
+ &mov ("esp","ebp"); # ... and just restore the stack pointer
+ # without paying attention to what we called,
+ # (__cdecl *func) or (__stdcall *one).
+ &pop ("ebp");
+ &ret ();
+ }
+&function_end_B("OPENSSL_indirect_call");
+
+&function_begin_B("OPENSSL_cleanse");
+ &mov ("edx",&wparam(0));
+ &mov ("ecx",&wparam(1));
+ &xor ("eax","eax");
+ &cmp ("ecx",7);
+ &jae (&label("lot"));
+ &cmp ("ecx",0);
+ &je (&label("ret"));
+&set_label("little");
+ &mov (&BP(0,"edx"),"al");
+ &sub ("ecx",1);
+ &lea ("edx",&DWP(1,"edx"));
+ &jnz (&label("little"));
+&set_label("ret");
+ &ret ();
+
+&set_label("lot",16);
+ &test ("edx",3);
+ &jz (&label("aligned"));
+ &mov (&BP(0,"edx"),"al");
+ &lea ("ecx",&DWP(-1,"ecx"));
+ &lea ("edx",&DWP(1,"edx"));
+ &jmp (&label("lot"));
+&set_label("aligned");
+ &mov (&DWP(0,"edx"),"eax");
+ &lea ("ecx",&DWP(-4,"ecx"));
+ &test ("ecx",-4);
+ &lea ("edx",&DWP(4,"edx"));
+ &jnz (&label("aligned"));
+ &cmp ("ecx",0);
+ &jne (&label("little"));
+ &ret ();
+&function_end_B("OPENSSL_cleanse");
+
+{
+my $lasttick = "esi";
+my $lastdiff = "ebx";
+my $out = "edi";
+my $cnt = "ecx";
+my $max = "ebp";
+
+&function_begin("OPENSSL_instrument_bus");
+ &mov ("eax",0);
+ if ($sse2) {
+ &picmeup("edx","OPENSSL_ia32cap_P");
+ &bt (&DWP(0,"edx"),4);
+ &jnc (&label("nogo")); # no TSC
+ &bt (&DWP(0,"edx"),19);
+ &jnc (&label("nogo")); # no CLFLUSH
+
+ &mov ($out,&wparam(0)); # load arguments
+ &mov ($cnt,&wparam(1));
+
+ # collect 1st tick
+ &rdtsc ();
+ &mov ($lasttick,"eax"); # lasttick = tick
+ &mov ($lastdiff,0); # lastdiff = 0
+ &clflush(&DWP(0,$out));
+ &data_byte(0xf0); # lock
+ &add (&DWP(0,$out),$lastdiff);
+ &jmp (&label("loop"));
+
+&set_label("loop",16);
+ &rdtsc ();
+ &mov ("edx","eax"); # put aside tick (yes, I neglect edx)
+ &sub ("eax",$lasttick); # diff
+ &mov ($lasttick,"edx"); # lasttick = tick
+ &mov ($lastdiff,"eax"); # lastdiff = diff
+ &clflush(&DWP(0,$out));
+ &data_byte(0xf0); # lock
+ &add (&DWP(0,$out),"eax"); # accumulate diff
+ &lea ($out,&DWP(4,$out)); # ++$out
+ &sub ($cnt,1); # --$cnt
+ &jnz (&label("loop"));
+
+ &mov ("eax",&wparam(1));
+&set_label("nogo");
+ }
+&function_end("OPENSSL_instrument_bus");
+
+&function_begin("OPENSSL_instrument_bus2");
+ &mov ("eax",0);
+ if ($sse2) {
+ &picmeup("edx","OPENSSL_ia32cap_P");
+ &bt (&DWP(0,"edx"),4);
+ &jnc (&label("nogo")); # no TSC
+ &bt (&DWP(0,"edx"),19);
+ &jnc (&label("nogo")); # no CLFLUSH
+
+ &mov ($out,&wparam(0)); # load arguments
+ &mov ($cnt,&wparam(1));
+ &mov ($max,&wparam(2));
+
+ &rdtsc (); # collect 1st tick
+ &mov ($lasttick,"eax"); # lasttick = tick
+ &mov ($lastdiff,0); # lastdiff = 0
+
+ &clflush(&DWP(0,$out));
+ &data_byte(0xf0); # lock
+ &add (&DWP(0,$out),$lastdiff);
+
+ &rdtsc (); # collect 1st diff
+ &mov ("edx","eax"); # put aside tick (yes, I neglect edx)
+ &sub ("eax",$lasttick); # diff
+ &mov ($lasttick,"edx"); # lasttick = tick
+ &mov ($lastdiff,"eax"); # lastdiff = diff
+ &jmp (&label("loop2"));
+
+&set_label("loop2",16);
+ &clflush(&DWP(0,$out));
+ &data_byte(0xf0); # lock
+ &add (&DWP(0,$out),"eax"); # accumulate diff
+
+ &sub ($max,1);
+ &jz (&label("done2"));
+
+ &rdtsc ();
+ &mov ("edx","eax"); # put aside tick (yes, I neglect edx)
+ &sub ("eax",$lasttick); # diff
+ &mov ($lasttick,"edx"); # lasttick = tick
+ &cmp ("eax",$lastdiff);
+ &mov ($lastdiff,"eax"); # lastdiff = diff
+ &mov ("edx",0);
+ &setne ("dl");
+ &sub ($cnt,"edx"); # conditional --$cnt
+ &lea ($out,&DWP(0,$out,"edx",4)); # conditional ++$out
+ &jnz (&label("loop2"));
+
+&set_label("done2");
+ &mov ("eax",&wparam(1));
+ &sub ("eax",$cnt);
+&set_label("nogo");
+ }
+&function_end("OPENSSL_instrument_bus2");
+}
+
+&function_begin_B("OPENSSL_ia32_rdrand");
+ &mov ("ecx",8);
+&set_label("loop");
+ &rdrand ("eax");
+ &jc (&label("break"));
+ &loop (&label("loop"));
+&set_label("break");
+ &cmp ("eax",0);
+ &cmove ("eax","ecx");
+ &ret ();
+&function_end_B("OPENSSL_ia32_rdrand");
+
+&initseg("OPENSSL_cpuid_setup");
+
+&hidden("OPENSSL_cpuid_setup");
+&hidden("OPENSSL_ia32cap_P");
+
+&asm_finish();
diff --git a/devel/perlasm/sha1-ssse3-x86.pl b/devel/perlasm/sha1-ssse3-x86.pl
new file mode 100644
index 0000000000..632dbbe122
--- /dev/null
+++ b/devel/perlasm/sha1-ssse3-x86.pl
@@ -0,0 +1,1266 @@
+#!/usr/bin/env perl
+
+# ====================================================================
+# [Re]written by Andy Polyakov <appro@openssl.org> for the OpenSSL
+# project. The module is, however, dual licensed under OpenSSL and
+# CRYPTOGAMS licenses depending on where you obtain it. For further
+# details see http://www.openssl.org/~appro/cryptogams/.
+# ====================================================================
+
+# "[Re]written" was achieved in two major overhauls. In 2004 BODY_*
+# functions were re-implemented to address P4 performance issue [see
+# commentary below], and in 2006 the rest was rewritten in order to
+# gain freedom to liberate licensing terms.
+
+# January, September 2004.
+#
+# It was noted that Intel IA-32 C compiler generates code which
+# performs ~30% *faster* on P4 CPU than original *hand-coded*
+# SHA1 assembler implementation. To address this problem (and
+# prove that humans are still better than machines:-), the
+# original code was overhauled, which resulted in following
+# performance changes:
+#
+# compared with original compared with Intel cc
+# assembler impl. generated code
+# Pentium -16% +48%
+# PIII/AMD +8% +16%
+# P4 +85%(!) +45%
+#
+# As you can see, Pentium came out as the loser:-( Yet I reckoned that
+# the improvement on P4 outweighs the loss and incorporated this
+# re-tuned code into 0.9.7 and later.
+# ----------------------------------------------------------------
+# <appro@fy.chalmers.se>
+
+# August 2009.
+#
+# George Spelvin has tipped that F_40_59(b,c,d) can be rewritten as
+# '(c&d) + (b&(c^d))', which makes it possible to accumulate partial results
+# and lighten "pressure" on scratch registers. This resulted in
+# >12% performance improvement on contemporary AMD cores (with no
+# degradation on other CPUs:-). Also, the code was revised to maximize
+# "distance" between instructions producing input to 'lea' instruction
+# and the 'lea' instruction itself, which is essential for Intel Atom
+# core and resulted in ~15% improvement.
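+# The rewrite is easy to convince oneself of: the two terms of
+# '(c&d) + (b&(c^d))' can never have a set bit in the same position, so the
+# addition cannot carry and equals the bitwise OR form of the majority
+# function. A brute-force check in Perl, for instance:
+#
+#     for (1..1000) {
+#         my ($b,$c,$d) = map { int(rand(2**32)) } 1..3;
+#         my $maj = ($b&$c)|($b&$d)|($c&$d);                # majority, F_40_59
+#         my $alt = (($c&$d) + ($b&($c^$d))) & 0xffffffff;  # rewritten form
+#         die "mismatch" if $maj != $alt;
+#     }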
+
+# October 2010.
+#
+# Add SSSE3, Supplemental[!] SSE3, implementation. The idea behind it
+# is to offload message schedule denoted by Wt in NIST specification,
+# or Xupdate in OpenSSL source, to SIMD unit. The idea is not novel,
+# and in SSE2 context was first explored by Dean Gaudet in 2004, see
+# http://arctic.org/~dean/crypto/sha1.html. Since then several things
+# have changed that made it interesting again:
+#
+# a) XMM units became faster and wider;
+# b) instruction set became more versatile;
+# c) an important observation was made by Max Locktyukhin, which made
+# it possible to reduce amount of instructions required to perform
+# the operation in question, for further details see
+# http://software.intel.com/en-us/articles/improving-the-performance-of-the-secure-hash-algorithm-1/.
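+# For reference, the schedule being offloaded is the standard SHA-1
+# recurrence (the SIMD code below produces four elements of W[] per step):
+#
+#     sub rol32 { my ($x,$n)=@_; (($x<<$n)|($x>>(32-$n))) & 0xffffffff }
+#     $W[$_] = rol32($W[$_-3] ^ $W[$_-8] ^ $W[$_-14] ^ $W[$_-16], 1) for (16..79);
+#
+# and the observation referred to in (c) is that for t>=32 this is
+# equivalent to
+#
+#     $W[$_] = rol32($W[$_-6] ^ $W[$_-16] ^ $W[$_-28] ^ $W[$_-32], 2) for (32..79);
+#
+# which eliminates the W[t-3] dependency within a 4-wide vector.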
+
+# April 2011.
+#
+# Add AVX code path, probably most controversial... The thing is that
+# switch to AVX alone improves performance by as little as 4% in
+# comparison to SSSE3 code path. But below result doesn't look like
+# 4% improvement... Trouble is that Sandy Bridge decodes 'ro[rl]' as
+# pair of µ-ops, and it's the additional µ-ops, two per round, that
+# make it run slower than Core2 and Westmere. But 'sh[rl]d' is decoded
+# as single µ-op by Sandy Bridge and it's replacing 'ro[rl]' with
+# equivalent 'sh[rl]d' that is responsible for the impressive 5.1
+# cycles per processed byte. But 'sh[rl]d' is not something that used
+# to be fast, nor does it appear to be fast in upcoming Bulldozer
+# [according to its optimization manual]. Which is why AVX code path
+# is guarded by *both* AVX and synthetic bit denoting Intel CPUs.
+# One can argue that it's unfair to AMD, but without 'sh[rl]d' it
+# makes no sense to keep the AVX code path. If somebody feels that
+# strongly, it's probably more appropriate to discuss possibility of
+# using vector rotate XOP on AMD...
+
+######################################################################
+# Current performance is summarized in following table. Numbers are
+# CPU clock cycles spent to process single byte (less is better).
+#
+# x86 SSSE3 AVX
+# Pentium 15.7 -
+# PIII 11.5 -
+# P4 10.6 -
+# AMD K8 7.1 -
+# Core2 7.3 6.0/+22% -
+# Atom 12.5 9.3(*)/+35% -
+# Westmere 7.3 5.5/+33% -
+# Sandy Bridge 8.8 6.2/+40% 5.1(**)/+73%
+# Ivy Bridge 7.2 4.8/+51% 4.7(**)/+53%
+# Bulldozer 11.6 6.0/+92%
+# VIA Nano 10.6 7.4/+43%
+#
+# (*) The loop is 1056 instructions long and the expected result is ~8.25.
+# It remains a mystery [to me] why ILP is limited to 1.7.
+#
+# (**) As per above comment, the result is for AVX *plus* sh[rl]d.
+
+$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
+push(@INC,"${dir}","${dir}../../perlasm");
+require "x86asm.pl";
+
+&asm_init($ARGV[0],"sha1-586.pl",$ARGV[$#ARGV] eq "386");
+
+$xmm=$ymm=0;
+for (@ARGV) { $xmm=1 if (/-DOPENSSL_IA32_SSE2/); }
+
+$ymm=1 if ($xmm &&
+ `$ENV{CC} -Wa,-v -c -o /dev/null -x assembler /dev/null 2>&1`
+ =~ /GNU assembler version ([2-9]\.[0-9]+)/ &&
+ $1>=2.19); # first version supporting AVX
+
+$ymm=1 if ($xmm && !$ymm && $ARGV[0] eq "win32n" &&
+ `nasm -v 2>&1` =~ /NASM version ([2-9]\.[0-9]+)/ &&
+ $1>=2.03); # first version supporting AVX
+
+$ymm=1 if ($xmm && !$ymm && $ARGV[0] eq "win32" &&
+ `ml 2>&1` =~ /Version ([0-9]+)\./ &&
+ $1>=10); # first version supporting AVX
+
+&external_label("OPENSSL_ia32cap_P") if ($xmm);
+
+
+$A="eax";
+$B="ebx";
+$C="ecx";
+$D="edx";
+$E="edi";
+$T="esi";
+$tmp1="ebp";
+
+@V=($A,$B,$C,$D,$E,$T);
+
+$alt=0; # 1 denotes alternative IALU implementation, which performs
+ # 8% *worse* on P4, same on Westmere and Atom, 2% better on
+ # Sandy Bridge...
+
+sub BODY_00_15
+ {
+ local($n,$a,$b,$c,$d,$e,$f)=@_;
+
+ &comment("00_15 $n");
+
+ &mov($f,$c); # f to hold F_00_19(b,c,d)
+ if ($n==0) { &mov($tmp1,$a); }
+ else { &mov($a,$tmp1); }
+ &rotl($tmp1,5); # tmp1=ROTATE(a,5)
+ &xor($f,$d);
+ &add($tmp1,$e); # tmp1+=e;
+ &mov($e,&swtmp($n%16)); # e becomes volatile and is loaded
+ # with xi, also note that e becomes
+ # f in next round...
+ &and($f,$b);
+ &rotr($b,2); # b=ROTATE(b,30)
+ &xor($f,$d); # f holds F_00_19(b,c,d)
+ &lea($tmp1,&DWP(0x5a827999,$tmp1,$e)); # tmp1+=K_00_19+xi
+
+ if ($n==15) { &mov($e,&swtmp(($n+1)%16));# pre-fetch f for next round
+ &add($f,$tmp1); } # f+=tmp1
+ else { &add($tmp1,$f); } # f becomes a in next round
+ &mov($tmp1,$a) if ($alt && $n==15);
+ }
+
+sub BODY_16_19
+ {
+ local($n,$a,$b,$c,$d,$e,$f)=@_;
+
+ &comment("16_19 $n");
+
+if ($alt) {
+ &xor($c,$d);
+ &xor($f,&swtmp(($n+2)%16)); # f to hold Xupdate(xi,xa,xb,xc,xd)
+ &and($tmp1,$c); # tmp1 to hold F_00_19(b,c,d), b&=c^d
+ &xor($f,&swtmp(($n+8)%16));
+ &xor($tmp1,$d); # tmp1=F_00_19(b,c,d)
+ &xor($f,&swtmp(($n+13)%16)); # f holds xa^xb^xc^xd
+ &rotl($f,1); # f=ROTATE(f,1)
+ &add($e,$tmp1); # e+=F_00_19(b,c,d)
+ &xor($c,$d); # restore $c
+ &mov($tmp1,$a); # b in next round
+ &rotr($b,$n==16?2:7); # b=ROTATE(b,30)
+ &mov(&swtmp($n%16),$f); # xi=f
+ &rotl($a,5); # ROTATE(a,5)
+ &lea($f,&DWP(0x5a827999,$f,$e));# f+=F_00_19(b,c,d)+e
+ &mov($e,&swtmp(($n+1)%16)); # pre-fetch f for next round
+ &add($f,$a); # f+=ROTATE(a,5)
+} else {
+ &mov($tmp1,$c); # tmp1 to hold F_00_19(b,c,d)
+ &xor($f,&swtmp(($n+2)%16)); # f to hold Xupdate(xi,xa,xb,xc,xd)
+ &xor($tmp1,$d);
+ &xor($f,&swtmp(($n+8)%16));
+ &and($tmp1,$b);
+ &xor($f,&swtmp(($n+13)%16)); # f holds xa^xb^xc^xd
+ &rotl($f,1); # f=ROTATE(f,1)
+ &xor($tmp1,$d); # tmp1=F_00_19(b,c,d)
+ &add($e,$tmp1); # e+=F_00_19(b,c,d)
+ &mov($tmp1,$a);
+ &rotr($b,2); # b=ROTATE(b,30)
+ &mov(&swtmp($n%16),$f); # xi=f
+ &rotl($tmp1,5); # ROTATE(a,5)
+ &lea($f,&DWP(0x5a827999,$f,$e));# f+=F_00_19(b,c,d)+e
+ &mov($e,&swtmp(($n+1)%16)); # pre-fetch f for next round
+ &add($f,$tmp1); # f+=ROTATE(a,5)
+}
+ }
+
+sub BODY_20_39
+ {
+ local($n,$a,$b,$c,$d,$e,$f)=@_;
+ local $K=($n<40)?0x6ed9eba1:0xca62c1d6;
+
+ &comment("20_39 $n");
+
+if ($alt) {
+ &xor($tmp1,$c); # tmp1 to hold F_20_39(b,c,d), b^=c
+ &xor($f,&swtmp(($n+2)%16)); # f to hold Xupdate(xi,xa,xb,xc,xd)
+ &xor($tmp1,$d); # tmp1 holds F_20_39(b,c,d)
+ &xor($f,&swtmp(($n+8)%16));
+ &add($e,$tmp1); # e+=F_20_39(b,c,d)
+ &xor($f,&swtmp(($n+13)%16)); # f holds xa^xb^xc^xd
+ &rotl($f,1); # f=ROTATE(f,1)
+ &mov($tmp1,$a); # b in next round
+ &rotr($b,7); # b=ROTATE(b,30)
+ &mov(&swtmp($n%16),$f) if($n<77);# xi=f
+ &rotl($a,5); # ROTATE(a,5)
+ &xor($b,$c) if($n==39);# warm up for BODY_40_59
+ &and($tmp1,$b) if($n==39);
+ &lea($f,&DWP($K,$f,$e)); # f+=e+K_XX_YY
+ &mov($e,&swtmp(($n+1)%16)) if($n<79);# pre-fetch f for next round
+ &add($f,$a); # f+=ROTATE(a,5)
+ &rotr($a,5) if ($n==79);
+} else {
+ &mov($tmp1,$b); # tmp1 to hold F_20_39(b,c,d)
+ &xor($f,&swtmp(($n+2)%16)); # f to hold Xupdate(xi,xa,xb,xc,xd)
+ &xor($tmp1,$c);
+ &xor($f,&swtmp(($n+8)%16));
+ &xor($tmp1,$d); # tmp1 holds F_20_39(b,c,d)
+ &xor($f,&swtmp(($n+13)%16)); # f holds xa^xb^xc^xd
+ &rotl($f,1); # f=ROTATE(f,1)
+ &add($e,$tmp1); # e+=F_20_39(b,c,d)
+ &rotr($b,2); # b=ROTATE(b,30)
+ &mov($tmp1,$a);
+ &rotl($tmp1,5); # ROTATE(a,5)
+ &mov(&swtmp($n%16),$f) if($n<77);# xi=f
+ &lea($f,&DWP($K,$f,$e)); # f+=e+K_XX_YY
+ &mov($e,&swtmp(($n+1)%16)) if($n<79);# pre-fetch f for next round
+ &add($f,$tmp1); # f+=ROTATE(a,5)
+}
+ }
+
+sub BODY_40_59
+ {
+ local($n,$a,$b,$c,$d,$e,$f)=@_;
+
+ &comment("40_59 $n");
+
+if ($alt) {
+ &add($e,$tmp1); # e+=b&(c^d)
+ &xor($f,&swtmp(($n+2)%16)); # f to hold Xupdate(xi,xa,xb,xc,xd)
+ &mov($tmp1,$d);
+ &xor($f,&swtmp(($n+8)%16));
+ &xor($c,$d); # restore $c
+ &xor($f,&swtmp(($n+13)%16)); # f holds xa^xb^xc^xd
+ &rotl($f,1); # f=ROTATE(f,1)
+ &and($tmp1,$c);
+ &rotr($b,7); # b=ROTATE(b,30)
+ &add($e,$tmp1); # e+=c&d
+ &mov($tmp1,$a); # b in next round
+ &mov(&swtmp($n%16),$f); # xi=f
+ &rotl($a,5); # ROTATE(a,5)
+ &xor($b,$c) if ($n<59);
+ &and($tmp1,$b) if ($n<59);# tmp1 to hold F_40_59(b,c,d)
+ &lea($f,&DWP(0x8f1bbcdc,$f,$e));# f+=K_40_59+e+(b&(c^d))
+ &mov($e,&swtmp(($n+1)%16)); # pre-fetch f for next round
+ &add($f,$a); # f+=ROTATE(a,5)
+} else {
+ &mov($tmp1,$c); # tmp1 to hold F_40_59(b,c,d)
+ &xor($f,&swtmp(($n+2)%16)); # f to hold Xupdate(xi,xa,xb,xc,xd)
+ &xor($tmp1,$d);
+ &xor($f,&swtmp(($n+8)%16));
+ &and($tmp1,$b);
+ &xor($f,&swtmp(($n+13)%16)); # f holds xa^xb^xc^xd
+ &rotl($f,1); # f=ROTATE(f,1)
+ &add($tmp1,$e); # b&(c^d)+=e
+ &rotr($b,2); # b=ROTATE(b,30)
+ &mov($e,$a); # e becomes volatile
+ &rotl($e,5); # ROTATE(a,5)
+ &mov(&swtmp($n%16),$f); # xi=f
+ &lea($f,&DWP(0x8f1bbcdc,$f,$tmp1));# f+=K_40_59+e+(b&(c^d))
+ &mov($tmp1,$c);
+ &add($f,$e); # f+=ROTATE(a,5)
+ &and($tmp1,$d);
+ &mov($e,&swtmp(($n+1)%16)); # pre-fetch f for next round
+ &add($f,$tmp1); # f+=c&d
+}
+ }
+
+&function_begin("sha1_block_data_order");
+if ($xmm) {
+ &static_label("ssse3_shortcut");
+ &static_label("avx_shortcut") if ($ymm);
+ &static_label("K_XX_XX");
+
+ &call (&label("pic_point")); # make it PIC!
+ &set_label("pic_point");
+ &blindpop($tmp1);
+ &picmeup($T,"OPENSSL_ia32cap_P",$tmp1,&label("pic_point"));
+ &lea ($tmp1,&DWP(&label("K_XX_XX")."-".&label("pic_point"),$tmp1));
+
+ &mov ($A,&DWP(0,$T));
+ &mov ($D,&DWP(4,$T));
+ &test ($D,1<<9); # check SSSE3 bit
+ &jz (&label("x86"));
+ &test ($A,1<<24); # check FXSR bit
+ &jz (&label("x86"));
+ if ($ymm) {
+ &and ($D,1<<28); # mask AVX bit
+ &and ($A,1<<30); # mask "Intel CPU" bit
+ &or ($A,$D);
+ &cmp ($A,1<<28|1<<30);
+ &je (&label("avx_shortcut"));
+ }
+ &jmp (&label("ssse3_shortcut"));
+ &set_label("x86",16);
+}
+ &mov($tmp1,&wparam(0)); # SHA_CTX *c
+ &mov($T,&wparam(1)); # const void *input
+ &mov($A,&wparam(2)); # size_t num
+ &stack_push(16+3); # allocate X[16]
+ &shl($A,6);
+ &add($A,$T);
+ &mov(&wparam(2),$A); # pointer beyond the end of input
+ &mov($E,&DWP(16,$tmp1));# pre-load E
+ &jmp(&label("loop"));
+
+&set_label("loop",16);
+
+ # copy input chunk to X, but reversing byte order!
+ for ($i=0; $i<16; $i+=4)
+ {
+ &mov($A,&DWP(4*($i+0),$T));
+ &mov($B,&DWP(4*($i+1),$T));
+ &mov($C,&DWP(4*($i+2),$T));
+ &mov($D,&DWP(4*($i+3),$T));
+ &bswap($A);
+ &bswap($B);
+ &bswap($C);
+ &bswap($D);
+ &mov(&swtmp($i+0),$A);
+ &mov(&swtmp($i+1),$B);
+ &mov(&swtmp($i+2),$C);
+ &mov(&swtmp($i+3),$D);
+ }
+ &mov(&wparam(1),$T); # redundant in 1st spin
+
+ &mov($A,&DWP(0,$tmp1)); # load SHA_CTX
+ &mov($B,&DWP(4,$tmp1));
+ &mov($C,&DWP(8,$tmp1));
+ &mov($D,&DWP(12,$tmp1));
+ # E is pre-loaded
+
+ for($i=0;$i<16;$i++) { &BODY_00_15($i,@V); unshift(@V,pop(@V)); }
+ for(;$i<20;$i++) { &BODY_16_19($i,@V); unshift(@V,pop(@V)); }
+ for(;$i<40;$i++) { &BODY_20_39($i,@V); unshift(@V,pop(@V)); }
+ for(;$i<60;$i++) { &BODY_40_59($i,@V); unshift(@V,pop(@V)); }
+ for(;$i<80;$i++) { &BODY_20_39($i,@V); unshift(@V,pop(@V)); }
+
+ (($V[5] eq $D) and ($V[0] eq $E)) or die; # double-check
+
+ &mov($tmp1,&wparam(0)); # re-load SHA_CTX*
+ &mov($D,&wparam(1)); # D is last "T" and is discarded
+
+ &add($E,&DWP(0,$tmp1)); # E is last "A"...
+ &add($T,&DWP(4,$tmp1));
+ &add($A,&DWP(8,$tmp1));
+ &add($B,&DWP(12,$tmp1));
+ &add($C,&DWP(16,$tmp1));
+
+ &mov(&DWP(0,$tmp1),$E); # update SHA_CTX
+ &add($D,64); # advance input pointer
+ &mov(&DWP(4,$tmp1),$T);
+ &cmp($D,&wparam(2)); # have we reached the end yet?
+ &mov(&DWP(8,$tmp1),$A);
+ &mov($E,$C); # C is last "E" which needs to be "pre-loaded"
+ &mov(&DWP(12,$tmp1),$B);
+ &mov($T,$D); # input pointer
+ &mov(&DWP(16,$tmp1),$C);
+ &jb(&label("loop"));
+
+ &stack_pop(16+3);
+&function_end("sha1_block_data_order");
+
+if ($xmm) {
+######################################################################
+# The SSSE3 implementation.
+#
+# %xmm[0-7] are used as ring @X[] buffer containing quadruples of last
+# 32 elements of the message schedule or Xupdate outputs. First 4
+# quadruples are simply byte-swapped input, next 4 are calculated
+# according to method originally suggested by Dean Gaudet (modulo
+# being implemented in SSSE3). Once 8 quadruples or 32 elements are
+# collected, it switches to routine proposed by Max Locktyukhin.
+#
+# Calculations inevitably require temporary registers, and there are
+# no %xmm registers left to spare. For this reason part of the ring
+# buffer, X[2..4] to be specific, is offloaded to a 3-quadruple ring
+# buffer on the stack. Keep in mind that X[2] is an alias of X[-6],
+# X[3] of X[-5], and X[4] of X[-4]...
+#
+# Another notable optimization is aggressive stack frame compression
+# aiming to minimize amount of 9-byte instructions...
+#
+# Yet another notable optimization is the "jumping" $B variable. It means
+# that there is no register permanently allocated for the $B value. This
+# made it possible to eliminate one instruction from body_20_39...
+#
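+# A note on the indexing used below: @X is treated as a ring of eight
+# registers and negative "logical" indices are folded into it with a
+# bitwise AND, e.g.
+#
+#     @X[-4&7]            # -4 & 7 == 4, i.e. physical slot 4 of the ring
+#     push(@X,shift(@X)); # advance the ring by one quadruple per Xupdate
+#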
+my $Xi=4; # 4xSIMD Xupdate round, start pre-seeded
+my @X=map("xmm$_",(4..7,0..3)); # pre-seeded for $Xi=4
+my @V=($A,$B,$C,$D,$E);
+my $j=0; # hash round
+my $rx=0;
+my @T=($T,$tmp1);
+my $inp;
+
+my $_rol=sub { &rol(@_) };
+my $_ror=sub { &ror(@_) };
+
+&function_begin("_sha1_block_data_order_ssse3");
+ &call (&label("pic_point")); # make it PIC!
+ &set_label("pic_point");
+ &blindpop($tmp1);
+ &lea ($tmp1,&DWP(&label("K_XX_XX")."-".&label("pic_point"),$tmp1));
+&set_label("ssse3_shortcut");
+
+ &movdqa (@X[3],&QWP(0,$tmp1)); # K_00_19
+ &movdqa (@X[4],&QWP(16,$tmp1)); # K_20_39
+ &movdqa (@X[5],&QWP(32,$tmp1)); # K_40_59
+ &movdqa (@X[6],&QWP(48,$tmp1)); # K_60_79
+ &movdqa (@X[2],&QWP(64,$tmp1)); # pbswap mask
+
+ &mov ($E,&wparam(0)); # load argument block
+ &mov ($inp=@T[1],&wparam(1));
+ &mov ($D,&wparam(2));
+ &mov (@T[0],"esp");
+
+ # stack frame layout
+ #
+ # +0 X[0]+K X[1]+K X[2]+K X[3]+K # XMM->IALU xfer area
+ # X[4]+K X[5]+K X[6]+K X[7]+K
+ # X[8]+K X[9]+K X[10]+K X[11]+K
+ # X[12]+K X[13]+K X[14]+K X[15]+K
+ #
+ # +64 X[0] X[1] X[2] X[3] # XMM->XMM backtrace area
+ # X[4] X[5] X[6] X[7]
+ # X[8] X[9] X[10] X[11] # even borrowed for K_00_19
+ #
+ # +112 K_20_39 K_20_39 K_20_39 K_20_39 # constants
+ # K_40_59 K_40_59 K_40_59 K_40_59
+ # K_60_79 K_60_79 K_60_79 K_60_79
+ # K_00_19 K_00_19 K_00_19 K_00_19
+ # pbswap mask
+ #
+ # +192 ctx # argument block
+ # +196 inp
+ # +200 end
+ # +204 esp
+ &sub ("esp",208);
+ &and ("esp",-64);
+
+ &movdqa (&QWP(112+0,"esp"),@X[4]); # copy constants
+ &movdqa (&QWP(112+16,"esp"),@X[5]);
+ &movdqa (&QWP(112+32,"esp"),@X[6]);
+ &shl ($D,6); # len*64
+ &movdqa (&QWP(112+48,"esp"),@X[3]);
+ &add ($D,$inp); # end of input
+ &movdqa (&QWP(112+64,"esp"),@X[2]);
+ &add ($inp,64);
+ &mov (&DWP(192+0,"esp"),$E); # save argument block
+ &mov (&DWP(192+4,"esp"),$inp);
+ &mov (&DWP(192+8,"esp"),$D);
+ &mov (&DWP(192+12,"esp"),@T[0]); # save original %esp
+
+ &mov ($A,&DWP(0,$E)); # load context
+ &mov ($B,&DWP(4,$E));
+ &mov ($C,&DWP(8,$E));
+ &mov ($D,&DWP(12,$E));
+ &mov ($E,&DWP(16,$E));
+ &mov (@T[0],$B); # magic seed
+
+ &movdqu (@X[-4&7],&QWP(-64,$inp)); # load input to %xmm[0-3]
+ &movdqu (@X[-3&7],&QWP(-48,$inp));
+ &movdqu (@X[-2&7],&QWP(-32,$inp));
+ &movdqu (@X[-1&7],&QWP(-16,$inp));
+ &pshufb (@X[-4&7],@X[2]); # byte swap
+ &pshufb (@X[-3&7],@X[2]);
+ &pshufb (@X[-2&7],@X[2]);
+ &movdqa (&QWP(112-16,"esp"),@X[3]); # borrow last backtrace slot
+ &pshufb (@X[-1&7],@X[2]);
+ &paddd (@X[-4&7],@X[3]); # add K_00_19
+ &paddd (@X[-3&7],@X[3]);
+ &paddd (@X[-2&7],@X[3]);
+ &movdqa (&QWP(0,"esp"),@X[-4&7]); # X[]+K xfer to IALU
+ &psubd (@X[-4&7],@X[3]); # restore X[]
+ &movdqa (&QWP(0+16,"esp"),@X[-3&7]);
+ &psubd (@X[-3&7],@X[3]);
+ &movdqa (&QWP(0+32,"esp"),@X[-2&7]);
+ &mov (@T[1],$C);
+ &psubd (@X[-2&7],@X[3]);
+ &xor (@T[1],$D);
+ &movdqa (@X[0],@X[-3&7]);
+ &and (@T[0],@T[1]);
+ &jmp (&label("loop"));
+
+######################################################################
+# The SSE instruction sequence is first broken into groups of independent
+# instructions, independent with respect to their inputs and the shifter
+# (not all architectures have more than one). Then IALU instructions
+# are "knitted in" between the SSE groups. Distance is maintained for
+# SSE latency of 2 in hope that it fits better upcoming AMD Bulldozer
+# [which allegedly also implements SSSE3]...
+#
+# Temporary registers usage. X[2] is volatile at the entry and at the
+# end is restored from backtrace ring buffer. X[3] is expected to
+# contain the current K_XX_XX constant and is used to calculate X[-1]+K
+# from previous round, it becomes volatile the moment the value is
+# saved to stack for transfer to IALU. X[4] becomes volatile whenever
+# X[-4] is accumulated and offloaded to backtrace ring buffer, at the
+# end it is loaded with next K_XX_XX [which becomes X[3] in next
+# round]...
+#
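+# Mechanically the "knitting" looks as follows: each body_xx_xx() returns a
+# list of strings, one IALU instruction per element, four round bodies are
+# flattened into @insns, and the Xupdate routines below eval() one string at
+# a time in between the SSE instructions, e.g.
+#
+#     my @insns = (&$body,&$body,&$body,&$body); # ~40 instruction strings
+#     &palignr(@X[0],@X[-4&7],8);                # an SSE step is emitted...
+#     eval(shift(@insns));                       # ...then one IALU step
+#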
+sub Xupdate_ssse3_16_31() # recall that $Xi starts with 4
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 40 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &palignr(@X[0],@X[-4&7],8); # compose "X[-14]" in "X[0]"
+ &movdqa (@X[2],@X[-1&7]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &paddd (@X[3],@X[-1&7]);
+ &movdqa (&QWP(64+16*(($Xi-4)%3),"esp"),@X[-4&7]);# save X[] to backtrace buffer
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrldq (@X[2],4); # "X[-3]", 3 dwords
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pxor (@X[0],@X[-4&7]); # "X[0]"^="X[-16]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pxor (@X[2],@X[-2&7]); # "X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pxor (@X[0],@X[2]); # "X[0]"^="X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (&QWP(0+16*(($Xi-1)&3),"esp"),@X[3]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &movdqa (@X[4],@X[0]);
+ &movdqa (@X[2],@X[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pslldq (@X[4],12); # "X[0]"<<96, extract one dword
+ &paddd (@X[0],@X[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &psrld (@X[2],31);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (@X[3],@X[4]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &psrld (@X[4],30);
+ &por (@X[0],@X[2]); # "X[0]"<<<=1
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (@X[2],&QWP(64+16*(($Xi-6)%3),"esp")) if ($Xi>5); # restore X[] from backtrace buffer
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pslld (@X[3],2);
+ &pxor (@X[0],@X[4]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (@X[4],&QWP(112-16+16*(($Xi)/5),"esp")); # K_XX_XX
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pxor (@X[0],@X[3]); # "X[0]"^=("X[0]"<<96)<<<2
+ &movdqa (@X[1],@X[-2&7]) if ($Xi<7);
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ foreach (@insns) { eval; } # remaining instructions [if any]
+
+ $Xi++; push(@X,shift(@X)); # "rotate" X[]
+}
+
+sub Xupdate_ssse3_32_79()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 to 44 instructions
+ my ($a,$b,$c,$d,$e);
+
+ &movdqa (@X[2],@X[-1&7]) if ($Xi==8);
+ eval(shift(@insns)); # body_20_39
+ &pxor (@X[0],@X[-4&7]); # "X[0]"="X[-32]"^"X[-16]"
+ &palignr(@X[2],@X[-2&7],8); # compose "X[-6]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+
+ &pxor (@X[0],@X[-7&7]); # "X[0]"^="X[-28]"
+ &movdqa (&QWP(64+16*(($Xi-4)%3),"esp"),@X[-4&7]); # save X[] to backtrace buffer
+ eval(shift(@insns));
+ eval(shift(@insns));
+ if ($Xi%5) {
+ &movdqa (@X[4],@X[3]); # "perpetuate" K_XX_XX...
+ } else { # ... or load next one
+ &movdqa (@X[4],&QWP(112-16+16*($Xi/5),"esp"));
+ }
+ &paddd (@X[3],@X[-1&7]);
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &pxor (@X[0],@X[2]); # "X[0]"^="X[-6]"
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+
+ &movdqa (@X[2],@X[0]);
+ &movdqa (&QWP(0+16*(($Xi-1)&3),"esp"),@X[3]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &pslld (@X[0],2);
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ &psrld (@X[2],30);
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &por (@X[0],@X[2]); # "X[0]"<<<=2
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ &movdqa (@X[2],&QWP(64+16*(($Xi-6)%3),"esp")) if($Xi<19); # restore X[] from backtrace buffer
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ &movdqa (@X[3],@X[0]) if ($Xi<19);
+ eval(shift(@insns));
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ $Xi++; push(@X,shift(@X)); # "rotate" X[]
+}
+
+sub Xuplast_ssse3_80()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ &paddd (@X[3],@X[-1&7]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &movdqa (&QWP(0+16*(($Xi-1)&3),"esp"),@X[3]); # X[]+K xfer IALU
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ &mov ($inp=@T[1],&DWP(192+4,"esp"));
+ &cmp ($inp,&DWP(192+8,"esp"));
+ &je (&label("done"));
+
+ &movdqa (@X[3],&QWP(112+48,"esp")); # K_00_19
+ &movdqa (@X[2],&QWP(112+64,"esp")); # pbswap mask
+ &movdqu (@X[-4&7],&QWP(0,$inp)); # load input
+ &movdqu (@X[-3&7],&QWP(16,$inp));
+ &movdqu (@X[-2&7],&QWP(32,$inp));
+ &movdqu (@X[-1&7],&QWP(48,$inp));
+ &add ($inp,64);
+ &pshufb (@X[-4&7],@X[2]); # byte swap
+ &mov (&DWP(192+4,"esp"),$inp);
+ &movdqa (&QWP(112-16,"esp"),@X[3]); # borrow last backtrace slot
+
+ $Xi=0;
+}
+
+sub Xloop_ssse3()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pshufb (@X[($Xi-3)&7],@X[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &paddd (@X[($Xi-4)&7],@X[3]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (&QWP(0+16*$Xi,"esp"),@X[($Xi-4)&7]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psubd (@X[($Xi-4)&7],@X[3]);
+
+ foreach (@insns) { eval; }
+ $Xi++;
+}
+
+sub Xtail_ssse3()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ foreach (@insns) { eval; }
+}
+
+sub body_00_19 () { # ((c^d)&b)^d
+ # on start @T[0]=(c^d)&b
+ return &body_20_39() if ($rx==19); $rx++;
+ (
+ '($a,$b,$c,$d,$e)=@V;'.
+ '&$_ror ($b,$j?7:2);', # $b>>>2
+ '&xor (@T[0],$d);',
+ '&mov (@T[1],$a);', # $b in next round
+
+ '&add ($e,&DWP(4*($j&15),"esp"));', # X[]+K xfer
+ '&xor ($b,$c);', # $c^$d for next round
+
+ '&$_rol ($a,5);',
+ '&add ($e,@T[0]);',
+ '&and (@T[1],$b);', # ($b&($c^$d)) for next round
+
+ '&xor ($b,$c);', # restore $b
+ '&add ($e,$a);' .'$j++; unshift(@V,pop(@V)); unshift(@T,pop(@T));'
+ );
+}
+
+sub body_20_39 () { # b^d^c
+ # on entry @T[0]=b^d
+ return &body_40_59() if ($rx==39); $rx++;
+ (
+ '($a,$b,$c,$d,$e)=@V;'.
+ '&add ($e,&DWP(4*($j&15),"esp"));', # X[]+K xfer
+ '&xor (@T[0],$d) if($j==19);'.
+ '&xor (@T[0],$c) if($j> 19);', # ($b^$d^$c)
+ '&mov (@T[1],$a);', # $b in next round
+
+ '&$_rol ($a,5);',
+ '&add ($e,@T[0]);',
+ '&xor (@T[1],$c) if ($j< 79);', # $b^$d for next round
+
+ '&$_ror ($b,7);', # $b>>>2
+ '&add ($e,$a);' .'$j++; unshift(@V,pop(@V)); unshift(@T,pop(@T));'
+ );
+}
+
+sub body_40_59 () { # ((b^c)&(c^d))^c
+ # on entry @T[0]=(b^c), (c^=d)
+ $rx++;
+ (
+ '($a,$b,$c,$d,$e)=@V;'.
+ '&add ($e,&DWP(4*($j&15),"esp"));', # X[]+K xfer
+ '&and (@T[0],$c) if ($j>=40);', # (b^c)&(c^d)
+ '&xor ($c,$d) if ($j>=40);', # restore $c
+
+ '&$_ror ($b,7);', # $b>>>2
+ '&mov (@T[1],$a);', # $b for next round
+ '&xor (@T[0],$c);',
+
+ '&$_rol ($a,5);',
+ '&add ($e,@T[0]);',
+ '&xor (@T[1],$c) if ($j==59);'.
+ '&xor (@T[1],$b) if ($j< 59);', # b^c for next round
+
+ '&xor ($b,$c) if ($j< 59);', # c^d for next round
+ '&add ($e,$a);' .'$j++; unshift(@V,pop(@V)); unshift(@T,pop(@T));'
+ );
+}
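+# The expressions in the three sub headers above are rewritten forms of the
+# canonical SHA-1 round functions Ch, Parity and Maj; a one-bit truth-table
+# check confirms the non-obvious equivalences, e.g.
+#
+#     for my $b (0,1) { for my $c (0,1) { for my $d (0,1) {
+#         die unless ((($c^$d)&$b)^$d)      == (($b&$c)|((1-$b)&$d));       # Ch
+#         die unless ((($b^$c)&($c^$d))^$c) == (($b&$c)|($b&$d)|($c&$d));   # Maj
+#     }}}
+#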
+
+&set_label("loop",16);
+ &Xupdate_ssse3_16_31(\&body_00_19);
+ &Xupdate_ssse3_16_31(\&body_00_19);
+ &Xupdate_ssse3_16_31(\&body_00_19);
+ &Xupdate_ssse3_16_31(\&body_00_19);
+ &Xupdate_ssse3_32_79(\&body_00_19);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xuplast_ssse3_80(\&body_20_39); # can jump to "done"
+
+ $saved_j=$j; @saved_V=@V;
+
+ &Xloop_ssse3(\&body_20_39);
+ &Xloop_ssse3(\&body_20_39);
+ &Xloop_ssse3(\&body_20_39);
+
+ &mov (@T[1],&DWP(192,"esp")); # update context
+ &add ($A,&DWP(0,@T[1]));
+ &add (@T[0],&DWP(4,@T[1])); # $b
+ &add ($C,&DWP(8,@T[1]));
+ &mov (&DWP(0,@T[1]),$A);
+ &add ($D,&DWP(12,@T[1]));
+ &mov (&DWP(4,@T[1]),@T[0]);
+ &add ($E,&DWP(16,@T[1]));
+ &mov (&DWP(8,@T[1]),$C);
+ &mov ($B,$C);
+ &mov (&DWP(12,@T[1]),$D);
+ &xor ($B,$D);
+ &mov (&DWP(16,@T[1]),$E);
+ &and ($B,@T[0]);
+ &movdqa (@X[0],@X[-3&7]);
+ &xchg ($B,@T[0]);
+
+ &jmp (&label("loop"));
+
+&set_label("done",16); $j=$saved_j; @V=@saved_V;
+
+ &Xtail_ssse3(\&body_20_39);
+ &Xtail_ssse3(\&body_20_39);
+ &Xtail_ssse3(\&body_20_39);
+
+ &mov (@T[1],&DWP(192,"esp")); # update context
+ &add ($A,&DWP(0,@T[1]));
+ &mov ("esp",&DWP(192+12,"esp")); # restore %esp
+ &add (@T[0],&DWP(4,@T[1])); # $b
+ &add ($C,&DWP(8,@T[1]));
+ &mov (&DWP(0,@T[1]),$A);
+ &add ($D,&DWP(12,@T[1]));
+ &mov (&DWP(4,@T[1]),@T[0]);
+ &add ($E,&DWP(16,@T[1]));
+ &mov (&DWP(8,@T[1]),$C);
+ &mov (&DWP(12,@T[1]),$D);
+ &mov (&DWP(16,@T[1]),$E);
+
+&function_end("_sha1_block_data_order_ssse3");
+
+$rx=0; # reset
+
+if ($ymm) {
+my $Xi=4; # 4xSIMD Xupdate round, start pre-seeded
+my @X=map("xmm$_",(4..7,0..3)); # pre-seeded for $Xi=4
+my @V=($A,$B,$C,$D,$E);
+my $j=0; # hash round
+my @T=($T,$tmp1);
+my $inp;
+
+my $_rol=sub { &shld(@_[0],@_) };
+my $_ror=sub { &shrd(@_[0],@_) };
+
+&function_begin("_sha1_block_data_order_avx");
+ &call (&label("pic_point")); # make it PIC!
+ &set_label("pic_point");
+ &blindpop($tmp1);
+ &lea ($tmp1,&DWP(&label("K_XX_XX")."-".&label("pic_point"),$tmp1));
+&set_label("avx_shortcut");
+ &vzeroall();
+
+ &vmovdqa(@X[3],&QWP(0,$tmp1)); # K_00_19
+ &vmovdqa(@X[4],&QWP(16,$tmp1)); # K_20_39
+ &vmovdqa(@X[5],&QWP(32,$tmp1)); # K_40_59
+ &vmovdqa(@X[6],&QWP(48,$tmp1)); # K_60_79
+ &vmovdqa(@X[2],&QWP(64,$tmp1)); # pbswap mask
+
+ &mov ($E,&wparam(0)); # load argument block
+ &mov ($inp=@T[1],&wparam(1));
+ &mov ($D,&wparam(2));
+ &mov (@T[0],"esp");
+
+ # stack frame layout
+ #
+ # +0 X[0]+K X[1]+K X[2]+K X[3]+K # XMM->IALU xfer area
+ # X[4]+K X[5]+K X[6]+K X[7]+K
+ # X[8]+K X[9]+K X[10]+K X[11]+K
+ # X[12]+K X[13]+K X[14]+K X[15]+K
+ #
+ # +64 X[0] X[1] X[2] X[3] # XMM->XMM backtrace area
+ # X[4] X[5] X[6] X[7]
+ # X[8] X[9] X[10] X[11] # even borrowed for K_00_19
+ #
+ # +112 K_20_39 K_20_39 K_20_39 K_20_39 # constants
+ # K_40_59 K_40_59 K_40_59 K_40_59
+ # K_60_79 K_60_79 K_60_79 K_60_79
+ # K_00_19 K_00_19 K_00_19 K_00_19
+ # pbswap mask
+ #
+ # +192 ctx # argument block
+ # +196 inp
+ # +200 end
+ # +204 esp
+ &sub ("esp",208);
+ &and ("esp",-64);
+
+ &vmovdqa(&QWP(112+0,"esp"),@X[4]); # copy constants
+ &vmovdqa(&QWP(112+16,"esp"),@X[5]);
+ &vmovdqa(&QWP(112+32,"esp"),@X[6]);
+ &shl ($D,6); # len*64
+ &vmovdqa(&QWP(112+48,"esp"),@X[3]);
+ &add ($D,$inp); # end of input
+ &vmovdqa(&QWP(112+64,"esp"),@X[2]);
+ &add ($inp,64);
+ &mov (&DWP(192+0,"esp"),$E); # save argument block
+ &mov (&DWP(192+4,"esp"),$inp);
+ &mov (&DWP(192+8,"esp"),$D);
+ &mov (&DWP(192+12,"esp"),@T[0]); # save original %esp
+
+ &mov ($A,&DWP(0,$E)); # load context
+ &mov ($B,&DWP(4,$E));
+ &mov ($C,&DWP(8,$E));
+ &mov ($D,&DWP(12,$E));
+ &mov ($E,&DWP(16,$E));
+ &mov (@T[0],$B); # magic seed
+
+ &vmovdqu(@X[-4&7],&QWP(-64,$inp)); # load input to %xmm[0-3]
+ &vmovdqu(@X[-3&7],&QWP(-48,$inp));
+ &vmovdqu(@X[-2&7],&QWP(-32,$inp));
+ &vmovdqu(@X[-1&7],&QWP(-16,$inp));
+ &vpshufb(@X[-4&7],@X[-4&7],@X[2]); # byte swap
+ &vpshufb(@X[-3&7],@X[-3&7],@X[2]);
+ &vpshufb(@X[-2&7],@X[-2&7],@X[2]);
+ &vmovdqa(&QWP(112-16,"esp"),@X[3]); # borrow last backtrace slot
+ &vpshufb(@X[-1&7],@X[-1&7],@X[2]);
+ &vpaddd (@X[0],@X[-4&7],@X[3]); # add K_00_19
+ &vpaddd (@X[1],@X[-3&7],@X[3]);
+ &vpaddd (@X[2],@X[-2&7],@X[3]);
+ &vmovdqa(&QWP(0,"esp"),@X[0]); # X[]+K xfer to IALU
+ &mov (@T[1],$C);
+ &vmovdqa(&QWP(0+16,"esp"),@X[1]);
+ &xor (@T[1],$D);
+ &vmovdqa(&QWP(0+32,"esp"),@X[2]);
+ &and (@T[0],@T[1]);
+ &jmp (&label("loop"));
+
+sub Xupdate_avx_16_31() # recall that $Xi starts with 4
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 40 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpalignr(@X[0],@X[-3&7],@X[-4&7],8); # compose "X[-14]" in "X[0]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpaddd (@X[3],@X[3],@X[-1&7]);
+ &vmovdqa (&QWP(64+16*(($Xi-4)%3),"esp"),@X[-4&7]);# save X[] to backtrace buffer
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpsrldq(@X[2],@X[-1&7],4); # "X[-3]", 3 dwords
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor (@X[0],@X[0],@X[-4&7]); # "X[0]"^="X[-16]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[2],@X[2],@X[-2&7]); # "X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vmovdqa (&QWP(0+16*(($Xi-1)&3),"esp"),@X[3]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@X[2]); # "X[0]"^="X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpsrld (@X[2],@X[0],31);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpslldq(@X[4],@X[0],12); # "X[0]"<<96, extract one dword
+ &vpaddd (@X[0],@X[0],@X[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpsrld (@X[3],@X[4],30);
+ &vpor (@X[0],@X[0],@X[2]); # "X[0]"<<<=1
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpslld (@X[4],@X[4],2);
+ &vmovdqa (@X[2],&QWP(64+16*(($Xi-6)%3),"esp")) if ($Xi>5); # restore X[] from backtrace buffer
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor (@X[0],@X[0],@X[3]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@X[4]); # "X[0]"^=("X[0]"<<96)<<<2
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vmovdqa (@X[4],&QWP(112-16+16*(($Xi)/5),"esp")); # K_XX_XX
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ foreach (@insns) { eval; } # remaining instructions [if any]
+
+ $Xi++; push(@X,shift(@X)); # "rotate" X[]
+}
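+
+# For reference, the recurrence these Xupdate_* routines vectorize is the
+# scalar SHA-1 message schedule, W[t] = (W[t-3]^W[t-8]^W[t-14]^W[t-16])<<<1
+# for t>=16. A minimal illustrative sketch in plain Perl follows; it is
+# never called by this generator and the helper name is made up:
+sub _ref_sha1_schedule {
+	my @W = @_[0..15];			# sixteen 32-bit message words
+	for my $t (16..79) {
+		my $x = $W[$t-3]^$W[$t-8]^$W[$t-14]^$W[$t-16];
+		$W[$t] = (($x<<1)|($x>>31))&0xffffffff;	# rotate left by 1
+	}
+	return @W;
+}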
+
+sub Xupdate_avx_32_79()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 to 44 instructions
+ my ($a,$b,$c,$d,$e);
+
+ &vpalignr(@X[2],@X[-1&7],@X[-2&7],8); # compose "X[-6]"
+ &vpxor (@X[0],@X[0],@X[-4&7]); # "X[0]"="X[-32]"^"X[-16]"
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+
+ &vpxor (@X[0],@X[0],@X[-7&7]); # "X[0]"^="X[-28]"
+ &vmovdqa (&QWP(64+16*(($Xi-4)%3),"esp"),@X[-4&7]); # save X[] to backtrace buffer
+ eval(shift(@insns));
+ eval(shift(@insns));
+ if ($Xi%5) {
+ &vmovdqa (@X[4],@X[3]); # "perpetuate" K_XX_XX...
+ } else { # ... or load next one
+ &vmovdqa (@X[4],&QWP(112-16+16*($Xi/5),"esp"));
+ }
+ &vpaddd (@X[3],@X[3],@X[-1&7]);
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@X[2]); # "X[0]"^="X[-6]"
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+
+ &vpsrld (@X[2],@X[0],30);
+ &vmovdqa (&QWP(0+16*(($Xi-1)&3),"esp"),@X[3]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &vpslld (@X[0],@X[0],2);
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &vpor (@X[0],@X[0],@X[2]); # "X[0]"<<<=2
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ &vmovdqa (@X[2],&QWP(64+16*(($Xi-6)%3),"esp")) if($Xi<19); # restore X[] from backtrace buffer
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ $Xi++; push(@X,shift(@X)); # "rotate" X[]
+}
+
+sub Xuplast_avx_80()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ &vpaddd (@X[3],@X[3],@X[-1&7]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vmovdqa (&QWP(0+16*(($Xi-1)&3),"esp"),@X[3]); # X[]+K xfer IALU
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ &mov ($inp=@T[1],&DWP(192+4,"esp"));
+ &cmp ($inp,&DWP(192+8,"esp"));
+ &je (&label("done"));
+
+ &vmovdqa(@X[3],&QWP(112+48,"esp")); # K_00_19
+ &vmovdqa(@X[2],&QWP(112+64,"esp")); # pbswap mask
+ &vmovdqu(@X[-4&7],&QWP(0,$inp)); # load input
+ &vmovdqu(@X[-3&7],&QWP(16,$inp));
+ &vmovdqu(@X[-2&7],&QWP(32,$inp));
+ &vmovdqu(@X[-1&7],&QWP(48,$inp));
+ &add ($inp,64);
+ &vpshufb(@X[-4&7],@X[-4&7],@X[2]); # byte swap
+ &mov (&DWP(192+4,"esp"),$inp);
+ &vmovdqa(&QWP(112-16,"esp"),@X[3]); # borrow last backtrace slot
+
+ $Xi=0;
+}
+
+sub Xloop_avx()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpshufb (@X[($Xi-3)&7],@X[($Xi-3)&7],@X[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddd (@X[$Xi&7],@X[($Xi-4)&7],@X[3]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vmovdqa (&QWP(0+16*$Xi,"esp"),@X[$Xi&7]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ foreach (@insns) { eval; }
+ $Xi++;
+}
+
+sub Xtail_avx()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ foreach (@insns) { eval; }
+}
+
+&set_label("loop",16);
+ &Xupdate_avx_16_31(\&body_00_19);
+ &Xupdate_avx_16_31(\&body_00_19);
+ &Xupdate_avx_16_31(\&body_00_19);
+ &Xupdate_avx_16_31(\&body_00_19);
+ &Xupdate_avx_32_79(\&body_00_19);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xuplast_avx_80(\&body_20_39); # can jump to "done"
+
+ $saved_j=$j; @saved_V=@V;
+
+ &Xloop_avx(\&body_20_39);
+ &Xloop_avx(\&body_20_39);
+ &Xloop_avx(\&body_20_39);
+
+ &mov (@T[1],&DWP(192,"esp")); # update context
+ &add ($A,&DWP(0,@T[1]));
+ &add (@T[0],&DWP(4,@T[1])); # $b
+ &add ($C,&DWP(8,@T[1]));
+ &mov (&DWP(0,@T[1]),$A);
+ &add ($D,&DWP(12,@T[1]));
+ &mov (&DWP(4,@T[1]),@T[0]);
+ &add ($E,&DWP(16,@T[1]));
+ &mov ($B,$C);
+ &mov (&DWP(8,@T[1]),$C);
+ &xor ($B,$D);
+ &mov (&DWP(12,@T[1]),$D);
+ &and ($B,@T[0]);
+ &mov (&DWP(16,@T[1]),$E);
+ &xchg ($B,@T[0]);
+
+ &jmp (&label("loop"));
+
+&set_label("done",16); $j=$saved_j; @V=@saved_V;
+
+ &Xtail_avx(\&body_20_39);
+ &Xtail_avx(\&body_20_39);
+ &Xtail_avx(\&body_20_39);
+
+ &vzeroall();
+
+ &mov (@T[1],&DWP(192,"esp")); # update context
+ &add ($A,&DWP(0,@T[1]));
+ &mov ("esp",&DWP(192+12,"esp")); # restore %esp
+ &add (@T[0],&DWP(4,@T[1])); # $b
+ &add ($C,&DWP(8,@T[1]));
+ &mov (&DWP(0,@T[1]),$A);
+ &add ($D,&DWP(12,@T[1]));
+ &mov (&DWP(4,@T[1]),@T[0]);
+ &add ($E,&DWP(16,@T[1]));
+ &mov (&DWP(8,@T[1]),$C);
+ &mov (&DWP(12,@T[1]),$D);
+ &mov (&DWP(16,@T[1]),$E);
+&function_end("_sha1_block_data_order_avx");
+}
+&set_label("K_XX_XX",64);
+&data_word(0x5a827999,0x5a827999,0x5a827999,0x5a827999); # K_00_19
+&data_word(0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1); # K_20_39
+&data_word(0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc); # K_40_59
+&data_word(0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6); # K_60_79
+&data_word(0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f); # pbswap mask
+}
+&asciz("SHA1 block transform for x86, CRYPTOGAMS by <appro\@openssl.org>");
+
+&asm_finish();
diff --git a/devel/perlasm/sha1-ssse3-x86_64.pl b/devel/perlasm/sha1-ssse3-x86_64.pl
new file mode 100755
index 0000000000..2c89b1feea
--- /dev/null
+++ b/devel/perlasm/sha1-ssse3-x86_64.pl
@@ -0,0 +1,1815 @@
+#!/usr/bin/env perl
+#
+# ====================================================================
+# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
+# project. The module is, however, dual licensed under OpenSSL and
+# CRYPTOGAMS licenses depending on where you obtain it. For further
+# details see http://www.openssl.org/~appro/cryptogams/.
+# ====================================================================
+#
+# sha1_block procedure for x86_64.
+#
+# It was brought to my attention that on EM64T compiler-generated code
+# was far behind 32-bit assembler implementation. This is unlike on
+# Opteron where compiler-generated code was only 15% behind 32-bit
+# assembler, which originally made it hard to motivate the effort.
+# There was a suggestion to mechanically translate 32-bit code, but I
+# dismissed it, reasoning that x86_64 offers enough register bank
+# capacity to fully utilize SHA-1 parallelism. Therefore this fresh
+# implementation:-) However! While 64-bit code does perform better
+# on Opteron, I failed to beat 32-bit assembler on EM64T core. Well,
+# x86_64 does offer larger *addressable* bank, but out-of-order core
+# reaches for even more registers through dynamic aliasing, and EM64T
+# core must have managed to run-time optimize even 32-bit code just as
+# well as the 64-bit one. Performance improvement is summarized in the
+# following table:
+#
+# gcc 3.4 32-bit asm cycles/byte
+# Opteron +45% +20% 6.8
+# Xeon P4 +65% +0% 9.9
+# Core2 +60% +10% 7.0
+
+# August 2009.
+#
+# The code was revised to minimize code size and to maximize
+# "distance" between instructions producing input to 'lea'
+# instruction and the 'lea' instruction itself, which is essential
+# for Intel Atom core.
+
+# October 2010.
+#
+# Add SSSE3, Supplemental[!] SSE3, implementation. The idea behind it
+# is to offload message schedule denoted by Wt in NIST specification,
+# or Xupdate in OpenSSL source, to SIMD unit. See sha1-586.pl module
+# for background and implementation details. The only difference from
+# 32-bit code is that 64-bit code doesn't have to spill @X[] elements
+# to free temporary registers.
+
+# April 2011.
+#
+# Add AVX code path. See sha1-586.pl for further information.
+
+# May 2013.
+#
+# Add AVX2+BMI code path. Initial attempt (utilizing BMI instructions
+# and loading a pair of consecutive blocks into 256-bit %ymm registers)
+# did not provide impressive performance improvement till a crucial
+# hint regarding the number of Xupdate iterations to pre-compute in
+# advance was provided by Ilya Albrekht of Intel Corp.
+
+######################################################################
+# Current performance is summarized in following table. Numbers are
+# CPU clock cycles spent to process single byte (less is better).
+#
+# x86_64 SSSE3 AVX[2]
+# P4 9.8 -
+# Opteron 6.65 -
+# Core2 6.70 6.05/+11% -
+# Westmere 7.08 5.44/+30% -
+# Sandy Bridge 7.93 6.16/+28% 4.99/+59%
+# Ivy Bridge 6.30 4.63/+36% 4.60/+37%
+# Haswell 5.98 4.36/+37% 3.57/+67%
+# Bulldozer 10.9 5.95/+82%
+# VIA Nano 10.2 7.46/+37%
+# Atom 11.0 9.61/+14%
+
+$flavour = shift;
+$output = shift;
+if ($flavour =~ /\./) { $output = $flavour; undef $flavour; }
+
+$win64=0; $win64=1 if ($flavour =~ /[nm]asm|mingw64/ || $output =~ /\.asm$/);
+
+$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
+( $xlate="${dir}x86_64-xlate.pl" and -f $xlate ) or
+( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or
+die "can't locate x86_64-xlate.pl";
+
+if (`$ENV{CC} -Wa,-v -c -o /dev/null -x assembler /dev/null 2>&1`
+ =~ /GNU assembler version ([2-9]\.[0-9]+)/) {
+ $avx = ($1>=2.19) + ($1>=2.22);
+}
+
+if (!$avx && $win64 && ($flavour =~ /nasm/ || $ENV{ASM} =~ /nasm/) &&
+ `nasm -v 2>&1` =~ /NASM version ([2-9]\.[0-9]+)/) {
+ $avx = ($1>=2.09) + ($1>=2.10);
+}
+
+if (!$avx && $win64 && ($flavour =~ /masm/ || $ENV{ASM} =~ /ml64/) &&
+ `ml64 2>&1` =~ /Version ([0-9]+)\./) {
+ $avx = ($1>=10) + ($1>=11);
+}
+
+open OUT,"| \"$^X\" $xlate $flavour $output";
+*STDOUT=*OUT;
+
+$ctx="%rdi"; # 1st arg
+$inp="%rsi"; # 2nd arg
+$num="%rdx"; # 3rd arg
+
+# reassign arguments in order to produce more compact code
+$ctx="%r8";
+$inp="%r9";
+$num="%r10";
+
+$t0="%eax";
+$t1="%ebx";
+$t2="%ecx";
+@xi=("%edx","%ebp");
+$A="%esi";
+$B="%edi";
+$C="%r11d";
+$D="%r12d";
+$E="%r13d";
+
+@V=($A,$B,$C,$D,$E);
+
+sub BODY_00_19 {
+my ($i,$a,$b,$c,$d,$e)=@_;
+my $j=$i+1;
+$code.=<<___ if ($i==0);
+ mov `4*$i`($inp),$xi[0]
+ bswap $xi[0]
+ mov $xi[0],`4*$i`(%rsp)
+___
+$code.=<<___ if ($i<15);
+ mov $c,$t0
+ mov `4*$j`($inp),$xi[1]
+ mov $a,$t2
+ xor $d,$t0
+ bswap $xi[1]
+ rol \$5,$t2
+ lea 0x5a827999($xi[0],$e),$e
+ and $b,$t0
+ mov $xi[1],`4*$j`(%rsp)
+ add $t2,$e
+ xor $d,$t0
+ rol \$30,$b
+ add $t0,$e
+___
+$code.=<<___ if ($i>=15);
+ mov `4*($j%16)`(%rsp),$xi[1]
+ mov $c,$t0
+ mov $a,$t2
+ xor `4*(($j+2)%16)`(%rsp),$xi[1]
+ xor $d,$t0
+ rol \$5,$t2
+ xor `4*(($j+8)%16)`(%rsp),$xi[1]
+ and $b,$t0
+ lea 0x5a827999($xi[0],$e),$e
+ xor `4*(($j+13)%16)`(%rsp),$xi[1]
+ xor $d,$t0
+ rol \$1,$xi[1]
+ add $t2,$e
+ rol \$30,$b
+ mov $xi[1],`4*($j%16)`(%rsp)
+ add $t0,$e
+___
+unshift(@xi,pop(@xi));
+}
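+
+# The boolean function in rounds 0..19 is Ch(b,c,d) = (b&c)|(~b&d); the
+# "xor $d,$t0; and $b,$t0; xor $d,$t0" sequence above evaluates it
+# branchlessly as ((c^d)&b)^d, which is the same function.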
+
+sub BODY_20_39 {
+my ($i,$a,$b,$c,$d,$e)=@_;
+my $j=$i+1;
+my $K=($i<40)?0x6ed9eba1:0xca62c1d6;
+$code.=<<___ if ($i<79);
+ mov `4*($j%16)`(%rsp),$xi[1]
+ mov $c,$t0
+ mov $a,$t2
+ xor `4*(($j+2)%16)`(%rsp),$xi[1]
+ xor $b,$t0
+ rol \$5,$t2
+ lea $K($xi[0],$e),$e
+ xor `4*(($j+8)%16)`(%rsp),$xi[1]
+ xor $d,$t0
+ add $t2,$e
+ xor `4*(($j+13)%16)`(%rsp),$xi[1]
+ rol \$30,$b
+ add $t0,$e
+ rol \$1,$xi[1]
+___
+$code.=<<___ if ($i<76);
+ mov $xi[1],`4*($j%16)`(%rsp)
+___
+$code.=<<___ if ($i==79);
+ mov $c,$t0
+ mov $a,$t2
+ xor $b,$t0
+ lea $K($xi[0],$e),$e
+ rol \$5,$t2
+ xor $d,$t0
+ add $t2,$e
+ rol \$30,$b
+ add $t0,$e
+___
+unshift(@xi,pop(@xi));
+}
+
+sub BODY_40_59 {
+my ($i,$a,$b,$c,$d,$e)=@_;
+my $j=$i+1;
+$code.=<<___;
+ mov `4*($j%16)`(%rsp),$xi[1]
+ mov $c,$t0
+ mov $c,$t1
+ xor `4*(($j+2)%16)`(%rsp),$xi[1]
+ and $d,$t0
+ mov $a,$t2
+ xor `4*(($j+8)%16)`(%rsp),$xi[1]
+ xor $d,$t1
+ lea 0x8f1bbcdc($xi[0],$e),$e
+ rol \$5,$t2
+ xor `4*(($j+13)%16)`(%rsp),$xi[1]
+ add $t0,$e
+ and $b,$t1
+ rol \$1,$xi[1]
+ add $t1,$e
+ rol \$30,$b
+ mov $xi[1],`4*($j%16)`(%rsp)
+ add $t2,$e
+___
+unshift(@xi,pop(@xi));
+}
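+
+# Rounds 40..59 use Maj(b,c,d) = (b&c)|(b&d)|(c&d). The code above adds
+# (c&d) and ((c^d)&b) into $e separately; the two terms have no bits in
+# common, so the additions amount to OR-ing them together.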
+
+$code.=<<___;
+.text
+.extern OPENSSL_ia32cap_P
+
+.globl sha1_block_data_order
+.type sha1_block_data_order,\@function,3
+.align 16
+sha1_block_data_order:
+ mov OPENSSL_ia32cap_P+0(%rip),%r9d
+ mov OPENSSL_ia32cap_P+4(%rip),%r8d
+ mov OPENSSL_ia32cap_P+8(%rip),%r10d
+ test \$`1<<9`,%r8d # check SSSE3 bit
+ jz .Lialu
+___
+$code.=<<___ if ($avx>1);
+ and \$`1<<3|1<<5|1<<8`,%r10d # check AVX2+BMI1+BMI2
+ cmp \$`1<<3|1<<5|1<<8`,%r10d
+ je _avx2_shortcut
+___
+$code.=<<___ if ($avx);
+ and \$`1<<28`,%r8d # mask AVX bit
+ and \$`1<<30`,%r9d # mask "Intel CPU" bit
+ or %r9d,%r8d
+ cmp \$`1<<28|1<<30`,%r8d
+ je _avx_shortcut
+___
+$code.=<<___;
+ jmp _ssse3_shortcut
+
+.align 16
+.Lialu:
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ mov %rsp,%r11
+ mov %rdi,$ctx # reassigned argument
+ sub \$`8+16*4`,%rsp
+ mov %rsi,$inp # reassigned argument
+ and \$-64,%rsp
+ mov %rdx,$num # reassigned argument
+ mov %r11,`16*4`(%rsp)
+.Lprologue:
+
+ mov 0($ctx),$A
+ mov 4($ctx),$B
+ mov 8($ctx),$C
+ mov 12($ctx),$D
+ mov 16($ctx),$E
+ jmp .Lloop
+
+.align 16
+.Lloop:
+___
+for($i=0;$i<20;$i++) { &BODY_00_19($i,@V); unshift(@V,pop(@V)); }
+for(;$i<40;$i++) { &BODY_20_39($i,@V); unshift(@V,pop(@V)); }
+for(;$i<60;$i++) { &BODY_40_59($i,@V); unshift(@V,pop(@V)); }
+for(;$i<80;$i++) { &BODY_20_39($i,@V); unshift(@V,pop(@V)); }
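+# The working variables are renamed rather than moved: unshift(@V,pop(@V))
+# rotates the list so that the register which just accumulated the new
+# value (the old $e) serves as $a in the next round, while the "rol \$30,$b"
+# issued inside each body provides the usual c = b<<<30 step.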
+$code.=<<___;
+ add 0($ctx),$A
+ add 4($ctx),$B
+ add 8($ctx),$C
+ add 12($ctx),$D
+ add 16($ctx),$E
+ mov $A,0($ctx)
+ mov $B,4($ctx)
+ mov $C,8($ctx)
+ mov $D,12($ctx)
+ mov $E,16($ctx)
+
+ sub \$1,$num
+ lea `16*4`($inp),$inp
+ jnz .Lloop
+
+ mov `16*4`(%rsp),%rsi
+ mov (%rsi),%r13
+ mov 8(%rsi),%r12
+ mov 16(%rsi),%rbp
+ mov 24(%rsi),%rbx
+ lea 32(%rsi),%rsp
+.Lepilogue:
+ ret
+.size sha1_block_data_order,.-sha1_block_data_order
+___
+{{{
+my $Xi=4;
+my @X=map("%xmm$_",(4..7,0..3));
+my @Tx=map("%xmm$_",(8..10));
+my $Kx="%xmm11";
+my @V=($A,$B,$C,$D,$E)=("%eax","%ebx","%ecx","%edx","%ebp"); # size optimization
+my @T=("%esi","%edi");
+my $j=0;
+my $rx=0;
+my $K_XX_XX="%r11";
+
+my $_rol=sub { &rol(@_) };
+my $_ror=sub { &ror(@_) };
+
+{ my $sn;
+sub align32() {
+ ++$sn;
+$code.=<<___;
+ jmp .Lalign32_$sn # see "Decoded ICache" in manual
+.align 32
+.Lalign32_$sn:
+___
+}
+}
+
+$code.=<<___;
+.type sha1_block_data_order_ssse3,\@function,3
+.align 16
+sha1_block_data_order_ssse3:
+_ssse3_shortcut:
+ push %rbx
+ push %rbp
+ push %r12
+ lea `-64-($win64?6*16:0)`(%rsp),%rsp
+___
+$code.=<<___ if ($win64);
+ movaps %xmm6,64+0(%rsp)
+ movaps %xmm7,64+16(%rsp)
+ movaps %xmm8,64+32(%rsp)
+ movaps %xmm9,64+48(%rsp)
+ movaps %xmm10,64+64(%rsp)
+ movaps %xmm11,64+80(%rsp)
+.Lprologue_ssse3:
+___
+$code.=<<___;
+ mov %rdi,$ctx # reassigned argument
+ mov %rsi,$inp # reassigned argument
+ mov %rdx,$num # reassigned argument
+
+ shl \$6,$num
+ add $inp,$num
+ lea K_XX_XX+64(%rip),$K_XX_XX
+
+ mov 0($ctx),$A # load context
+ mov 4($ctx),$B
+ mov 8($ctx),$C
+ mov 12($ctx),$D
+ mov $B,@T[0] # magic seed
+ mov 16($ctx),$E
+ mov $C,@T[1]
+ xor $D,@T[1]
+ and @T[1],@T[0]
+
+ movdqa 64($K_XX_XX),@X[2] # pbswap mask
+ movdqa -64($K_XX_XX),@Tx[1] # K_00_19
+ movdqu 0($inp),@X[-4&7] # load input to %xmm[0-3]
+ movdqu 16($inp),@X[-3&7]
+ movdqu 32($inp),@X[-2&7]
+ movdqu 48($inp),@X[-1&7]
+ pshufb @X[2],@X[-4&7] # byte swap
+ add \$64,$inp
+ pshufb @X[2],@X[-3&7]
+ pshufb @X[2],@X[-2&7]
+ pshufb @X[2],@X[-1&7]
+ paddd @Tx[1],@X[-4&7] # add K_00_19
+ paddd @Tx[1],@X[-3&7]
+ paddd @Tx[1],@X[-2&7]
+ movdqa @X[-4&7],0(%rsp) # X[]+K xfer to IALU
+ psubd @Tx[1],@X[-4&7] # restore X[]
+ movdqa @X[-3&7],16(%rsp)
+ psubd @Tx[1],@X[-3&7]
+ movdqa @X[-2&7],32(%rsp)
+ psubd @Tx[1],@X[-2&7]
+ jmp .Loop_ssse3
+___
+
+sub AUTOLOAD() # thunk [simplified] 32-bit style perlasm
+{ my $opcode = $AUTOLOAD; $opcode =~ s/.*:://;
+ my $arg = pop;
+ $arg = "\$$arg" if ($arg*1 eq $arg);
+ $code .= "\t$opcode\t".join(',',$arg,reverse @_)."\n";
+}
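+# (for example, &psrldq(@Tx[0],4) below expands to "psrldq $4,%xmm8":
+# the last argument becomes the $-prefixed immediate and the remaining
+# operands are emitted in reversed, AT&T-style order)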
+
+sub Xupdate_ssse3_16_31()		# recall that $Xi starts with 4
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 40 instructions
+ my ($a,$b,$c,$d,$e);
+
+ &movdqa (@X[0],@X[-3&7]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (@Tx[0],@X[-1&7]);
+ &palignr(@X[0],@X[-4&7],8); # compose "X[-14]" in "X[0]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &paddd (@Tx[1],@X[-1&7]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrldq (@Tx[0],4); # "X[-3]", 3 dwords
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pxor (@X[0],@X[-4&7]); # "X[0]"^="X[-16]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pxor (@Tx[0],@X[-2&7]); # "X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pxor (@X[0],@Tx[0]); # "X[0]"^="X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (eval(16*(($Xi-1)&3))."(%rsp)",@Tx[1]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &movdqa (@Tx[2],@X[0]);
+ &movdqa (@Tx[0],@X[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pslldq (@Tx[2],12); # "X[0]"<<96, extract one dword
+ &paddd (@X[0],@X[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &psrld (@Tx[0],31);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (@Tx[1],@Tx[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &psrld (@Tx[2],30);
+ &por (@X[0],@Tx[0]); # "X[0]"<<<=1
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pslld (@Tx[1],2);
+ &pxor (@X[0],@Tx[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (@Tx[2],eval(2*16*(($Xi)/5)-64)."($K_XX_XX)"); # K_XX_XX
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &pxor (@X[0],@Tx[1]); # "X[0]"^=("X[0]">>96)<<<2
+
+ foreach (@insns) { eval; } # remaining instructions [if any]
+
+ $Xi++; push(@X,shift(@X)); # "rotate" X[]
+ push(@Tx,shift(@Tx));
+}
+
+sub Xupdate_ssse3_32_79()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 to 44 instructions
+ my ($a,$b,$c,$d,$e);
+
+ &movdqa (@Tx[0],@X[-1&7]) if ($Xi==8);
+ eval(shift(@insns)); # body_20_39
+ &pxor (@X[0],@X[-4&7]); # "X[0]"="X[-32]"^"X[-16]"
+ &palignr(@Tx[0],@X[-2&7],8); # compose "X[-6]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+
+ &pxor (@X[0],@X[-7&7]); # "X[0]"^="X[-28]"
+ eval(shift(@insns));
+ eval(shift(@insns)) if (@insns[0] !~ /&ro[rl]/);
+ if ($Xi%5) {
+ &movdqa (@Tx[2],@Tx[1]);# "perpetuate" K_XX_XX...
+ } else { # ... or load next one
+ &movdqa (@Tx[2],eval(2*16*($Xi/5)-64)."($K_XX_XX)");
+ }
+ &paddd (@Tx[1],@X[-1&7]);
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &pxor (@X[0],@Tx[0]); # "X[0]"^="X[-6]"
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+
+ &movdqa (@Tx[0],@X[0]);
+ &movdqa (eval(16*(($Xi-1)&3))."(%rsp)",@Tx[1]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &pslld (@X[0],2);
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ &psrld (@Tx[0],30);
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &por (@X[0],@Tx[0]); # "X[0]"<<<=2
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ &movdqa (@Tx[1],@X[0]) if ($Xi<19);
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ $Xi++; push(@X,shift(@X)); # "rotate" X[]
+ push(@Tx,shift(@Tx));
+}
+
+sub Xuplast_ssse3_80()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ &paddd (@Tx[1],@X[-1&7]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &movdqa (eval(16*(($Xi-1)&3))."(%rsp)",@Tx[1]); # X[]+K xfer IALU
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ &cmp ($inp,$num);
+ &je (".Ldone_ssse3");
+
+ unshift(@Tx,pop(@Tx));
+
+ &movdqa (@X[2],"64($K_XX_XX)"); # pbswap mask
+ &movdqa (@Tx[1],"-64($K_XX_XX)"); # K_00_19
+ &movdqu (@X[-4&7],"0($inp)"); # load input
+ &movdqu (@X[-3&7],"16($inp)");
+ &movdqu (@X[-2&7],"32($inp)");
+ &movdqu (@X[-1&7],"48($inp)");
+ &pshufb (@X[-4&7],@X[2]); # byte swap
+ &add ($inp,64);
+
+ $Xi=0;
+}
+
+sub Xloop_ssse3()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pshufb (@X[($Xi-3)&7],@X[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &paddd (@X[($Xi-4)&7],@Tx[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa (eval(16*$Xi)."(%rsp)",@X[($Xi-4)&7]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psubd (@X[($Xi-4)&7],@Tx[1]);
+
+ foreach (@insns) { eval; }
+ $Xi++;
+}
+
+sub Xtail_ssse3()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ foreach (@insns) { eval; }
+}
+
+sub body_00_19 () { # ((c^d)&b)^d
+ # on start @T[0]=(c^d)&b
+ return &body_20_39() if ($rx==19); $rx++;
+ (
+ '($a,$b,$c,$d,$e)=@V;'.
+ '&$_ror ($b,$j?7:2)', # $b>>>2
+ '&xor (@T[0],$d)',
+ '&mov (@T[1],$a)', # $b for next round
+
+ '&add ($e,eval(4*($j&15))."(%rsp)")', # X[]+K xfer
+ '&xor ($b,$c)', # $c^$d for next round
+
+ '&$_rol ($a,5)',
+ '&add ($e,@T[0])',
+ '&and (@T[1],$b)', # ($b&($c^$d)) for next round
+
+ '&xor ($b,$c)', # restore $b
+ '&add ($e,$a);' .'$j++; unshift(@V,pop(@V)); unshift(@T,pop(@T));'
+ );
+}
+
+sub body_20_39 () { # b^d^c
+ # on entry @T[0]=b^d
+ return &body_40_59() if ($rx==39); $rx++;
+ (
+ '($a,$b,$c,$d,$e)=@V;'.
+ '&add ($e,eval(4*($j&15))."(%rsp)")', # X[]+K xfer
+ '&xor (@T[0],$d) if($j==19);'.
+ '&xor (@T[0],$c) if($j> 19)', # ($b^$d^$c)
+ '&mov (@T[1],$a)', # $b for next round
+
+ '&$_rol ($a,5)',
+ '&add ($e,@T[0])',
+ '&xor (@T[1],$c) if ($j< 79)', # $b^$d for next round
+
+ '&$_ror ($b,7)', # $b>>>2
+ '&add ($e,$a);' .'$j++; unshift(@V,pop(@V)); unshift(@T,pop(@T));'
+ );
+}
+
+sub body_40_59 () { # ((b^c)&(c^d))^c
+ # on entry @T[0]=(b^c), (c^=d)
+ $rx++;
+ (
+ '($a,$b,$c,$d,$e)=@V;'.
+ '&add ($e,eval(4*($j&15))."(%rsp)")', # X[]+K xfer
+ '&and (@T[0],$c) if ($j>=40)', # (b^c)&(c^d)
+ '&xor ($c,$d) if ($j>=40)', # restore $c
+
+ '&$_ror ($b,7)', # $b>>>2
+ '&mov (@T[1],$a)', # $b for next round
+ '&xor (@T[0],$c)',
+
+ '&$_rol ($a,5)',
+ '&add ($e,@T[0])',
+ '&xor (@T[1],$c) if ($j==59);'.
+ '&xor (@T[1],$b) if ($j< 59)', # b^c for next round
+
+ '&xor ($b,$c) if ($j< 59)', # c^d for next round
+ '&add ($e,$a);' .'$j++; unshift(@V,pop(@V)); unshift(@T,pop(@T));'
+ );
+}
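+
+# Each body_* routine returns a list of strings, one scalar instruction per
+# element; the Xupdate_* routines above eval() them one at a time between
+# SIMD instructions, interleaving the integer rounds with the vectorized
+# message schedule for upcoming rounds.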
+$code.=<<___;
+.align 16
+.Loop_ssse3:
+___
+ &Xupdate_ssse3_16_31(\&body_00_19);
+ &Xupdate_ssse3_16_31(\&body_00_19);
+ &Xupdate_ssse3_16_31(\&body_00_19);
+ &Xupdate_ssse3_16_31(\&body_00_19);
+ &Xupdate_ssse3_32_79(\&body_00_19);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_40_59);
+ &Xupdate_ssse3_32_79(\&body_20_39);
+ &Xuplast_ssse3_80(\&body_20_39); # can jump to "done"
+
+ $saved_j=$j; @saved_V=@V;
+
+ &Xloop_ssse3(\&body_20_39);
+ &Xloop_ssse3(\&body_20_39);
+ &Xloop_ssse3(\&body_20_39);
+
+$code.=<<___;
+ add 0($ctx),$A # update context
+ add 4($ctx),@T[0]
+ add 8($ctx),$C
+ add 12($ctx),$D
+ mov $A,0($ctx)
+ add 16($ctx),$E
+ mov @T[0],4($ctx)
+ mov @T[0],$B # magic seed
+ mov $C,8($ctx)
+ mov $C,@T[1]
+ mov $D,12($ctx)
+ xor $D,@T[1]
+ mov $E,16($ctx)
+ and @T[1],@T[0]
+ jmp .Loop_ssse3
+
+.align 16
+.Ldone_ssse3:
+___
+ $j=$saved_j; @V=@saved_V;
+
+ &Xtail_ssse3(\&body_20_39);
+ &Xtail_ssse3(\&body_20_39);
+ &Xtail_ssse3(\&body_20_39);
+
+$code.=<<___;
+ add 0($ctx),$A # update context
+ add 4($ctx),@T[0]
+ add 8($ctx),$C
+ mov $A,0($ctx)
+ add 12($ctx),$D
+ mov @T[0],4($ctx)
+ add 16($ctx),$E
+ mov $C,8($ctx)
+ mov $D,12($ctx)
+ mov $E,16($ctx)
+___
+$code.=<<___ if ($win64);
+ movaps 64+0(%rsp),%xmm6
+ movaps 64+16(%rsp),%xmm7
+ movaps 64+32(%rsp),%xmm8
+ movaps 64+48(%rsp),%xmm9
+ movaps 64+64(%rsp),%xmm10
+ movaps 64+80(%rsp),%xmm11
+___
+$code.=<<___;
+ lea `64+($win64?6*16:0)`(%rsp),%rsi
+ mov 0(%rsi),%r12
+ mov 8(%rsi),%rbp
+ mov 16(%rsi),%rbx
+ lea 24(%rsi),%rsp
+.Lepilogue_ssse3:
+ ret
+.size sha1_block_data_order_ssse3,.-sha1_block_data_order_ssse3
+___
+
+if ($avx) {
+$Xi=4; # reset variables
+@X=map("%xmm$_",(4..7,0..3));
+@Tx=map("%xmm$_",(8..10));
+$j=0;
+$rx=0;
+
+my $done_avx_label=".Ldone_avx";
+
+my $_rol=sub { &shld(@_[0],@_) };
+my $_ror=sub { &shrd(@_[0],@_) };
+
+$code.=<<___;
+.type sha1_block_data_order_avx,\@function,3
+.align 16
+sha1_block_data_order_avx:
+_avx_shortcut:
+ push %rbx
+ push %rbp
+ push %r12
+ lea `-64-($win64?6*16:0)`(%rsp),%rsp
+___
+$code.=<<___ if ($win64);
+ movaps %xmm6,64+0(%rsp)
+ movaps %xmm7,64+16(%rsp)
+ movaps %xmm8,64+32(%rsp)
+ movaps %xmm9,64+48(%rsp)
+ movaps %xmm10,64+64(%rsp)
+ movaps %xmm11,64+80(%rsp)
+.Lprologue_avx:
+___
+$code.=<<___;
+ mov %rdi,$ctx # reassigned argument
+ mov %rsi,$inp # reassigned argument
+ mov %rdx,$num # reassigned argument
+ vzeroupper
+
+ shl \$6,$num
+ add $inp,$num
+ lea K_XX_XX+64(%rip),$K_XX_XX
+
+ mov 0($ctx),$A # load context
+ mov 4($ctx),$B
+ mov 8($ctx),$C
+ mov 12($ctx),$D
+ mov $B,@T[0] # magic seed
+ mov 16($ctx),$E
+ mov $C,@T[1]
+ xor $D,@T[1]
+ and @T[1],@T[0]
+
+ vmovdqa 64($K_XX_XX),@X[2] # pbswap mask
+ vmovdqa -64($K_XX_XX),$Kx # K_00_19
+ vmovdqu 0($inp),@X[-4&7] # load input to %xmm[0-3]
+ vmovdqu 16($inp),@X[-3&7]
+ vmovdqu 32($inp),@X[-2&7]
+ vmovdqu 48($inp),@X[-1&7]
+ vpshufb @X[2],@X[-4&7],@X[-4&7] # byte swap
+ add \$64,$inp
+ vpshufb @X[2],@X[-3&7],@X[-3&7]
+ vpshufb @X[2],@X[-2&7],@X[-2&7]
+ vpshufb @X[2],@X[-1&7],@X[-1&7]
+ vpaddd $Kx,@X[-4&7],@X[0] # add K_00_19
+ vpaddd $Kx,@X[-3&7],@X[1]
+ vpaddd $Kx,@X[-2&7],@X[2]
+ vmovdqa @X[0],0(%rsp) # X[]+K xfer to IALU
+ vmovdqa @X[1],16(%rsp)
+ vmovdqa @X[2],32(%rsp)
+ jmp .Loop_avx
+___
+
+sub Xupdate_avx_16_31()		# recall that $Xi starts with 4
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 40 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpalignr(@X[0],@X[-3&7],@X[-4&7],8); # compose "X[-14]" in "X[0]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpaddd (@Tx[1],$Kx,@X[-1&7]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpsrldq(@Tx[0],@X[-1&7],4); # "X[-3]", 3 dwords
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor (@X[0],@X[0],@X[-4&7]); # "X[0]"^="X[-16]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@Tx[0],@Tx[0],@X[-2&7]); # "X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@Tx[0]); # "X[0]"^="X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vmovdqa (eval(16*(($Xi-1)&3))."(%rsp)",@Tx[1]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpsrld (@Tx[0],@X[0],31);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpslldq(@Tx[2],@X[0],12); # "X[0]"<<96, extract one dword
+ &vpaddd (@X[0],@X[0],@X[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpsrld (@Tx[1],@Tx[2],30);
+ &vpor (@X[0],@X[0],@Tx[0]); # "X[0]"<<<=1
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpslld (@Tx[2],@Tx[2],2);
+ &vpxor (@X[0],@X[0],@Tx[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@Tx[2]); # "X[0]"^=("X[0]">>96)<<<2
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vmovdqa ($Kx,eval(2*16*(($Xi)/5)-64)."($K_XX_XX)") if ($Xi%5==0); # K_XX_XX
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+
+ foreach (@insns) { eval; } # remaining instructions [if any]
+
+ $Xi++; push(@X,shift(@X)); # "rotate" X[]
+}
+
+sub Xupdate_avx_32_79()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 to 44 instructions
+ my ($a,$b,$c,$d,$e);
+
+ &vpalignr(@Tx[0],@X[-1&7],@X[-2&7],8); # compose "X[-6]"
+ &vpxor (@X[0],@X[0],@X[-4&7]); # "X[0]"="X[-32]"^"X[-16]"
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+
+ &vpxor (@X[0],@X[0],@X[-7&7]); # "X[0]"^="X[-28]"
+ eval(shift(@insns));
+ eval(shift(@insns)) if (@insns[0] !~ /&ro[rl]/);
+ &vpaddd (@Tx[1],$Kx,@X[-1&7]);
+ &vmovdqa ($Kx,eval(2*16*($Xi/5)-64)."($K_XX_XX)") if ($Xi%5==0);
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@Tx[0]); # "X[0]"^="X[-6]"
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+
+ &vpsrld (@Tx[0],@X[0],30);
+ &vmovdqa (eval(16*(($Xi-1)&3))."(%rsp)",@Tx[1]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &vpslld (@X[0],@X[0],2);
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # ror
+ eval(shift(@insns));
+
+ &vpor (@X[0],@X[0],@Tx[0]); # "X[0]"<<<=2
+ eval(shift(@insns)); # body_20_39
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # rol
+ eval(shift(@insns));
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ $Xi++; push(@X,shift(@X)); # "rotate" X[]
+}
+
+sub Xuplast_avx_80()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ &vpaddd (@Tx[1],$Kx,@X[-1&7]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vmovdqa (eval(16*(($Xi-1)&3))."(%rsp)",@Tx[1]); # X[]+K xfer IALU
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ &cmp ($inp,$num);
+ &je ($done_avx_label);
+
+ &vmovdqa(@X[2],"64($K_XX_XX)"); # pbswap mask
+ &vmovdqa($Kx,"-64($K_XX_XX)"); # K_00_19
+ &vmovdqu(@X[-4&7],"0($inp)"); # load input
+ &vmovdqu(@X[-3&7],"16($inp)");
+ &vmovdqu(@X[-2&7],"32($inp)");
+ &vmovdqu(@X[-1&7],"48($inp)");
+ &vpshufb(@X[-4&7],@X[-4&7],@X[2]); # byte swap
+ &add ($inp,64);
+
+ $Xi=0;
+}
+
+sub Xloop_avx()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpshufb(@X[($Xi-3)&7],@X[($Xi-3)&7],@X[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddd (@X[$Xi&7],@X[($Xi-4)&7],$Kx);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vmovdqa(eval(16*$Xi)."(%rsp)",@X[$Xi&7]); # X[]+K xfer to IALU
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ foreach (@insns) { eval; }
+ $Xi++;
+}
+
+sub Xtail_avx()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ foreach (@insns) { eval; }
+}
+
+$code.=<<___;
+.align 16
+.Loop_avx:
+___
+ &Xupdate_avx_16_31(\&body_00_19);
+ &Xupdate_avx_16_31(\&body_00_19);
+ &Xupdate_avx_16_31(\&body_00_19);
+ &Xupdate_avx_16_31(\&body_00_19);
+ &Xupdate_avx_32_79(\&body_00_19);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_40_59);
+ &Xupdate_avx_32_79(\&body_20_39);
+ &Xuplast_avx_80(\&body_20_39); # can jump to "done"
+
+ $saved_j=$j; @saved_V=@V;
+
+ &Xloop_avx(\&body_20_39);
+ &Xloop_avx(\&body_20_39);
+ &Xloop_avx(\&body_20_39);
+
+$code.=<<___;
+ add 0($ctx),$A # update context
+ add 4($ctx),@T[0]
+ add 8($ctx),$C
+ add 12($ctx),$D
+ mov $A,0($ctx)
+ add 16($ctx),$E
+ mov @T[0],4($ctx)
+ mov @T[0],$B # magic seed
+ mov $C,8($ctx)
+ mov $C,@T[1]
+ mov $D,12($ctx)
+ xor $D,@T[1]
+ mov $E,16($ctx)
+ and @T[1],@T[0]
+ jmp .Loop_avx
+
+.align 16
+$done_avx_label:
+___
+ $j=$saved_j; @V=@saved_V;
+
+ &Xtail_avx(\&body_20_39);
+ &Xtail_avx(\&body_20_39);
+ &Xtail_avx(\&body_20_39);
+
+$code.=<<___;
+ vzeroupper
+
+ add 0($ctx),$A # update context
+ add 4($ctx),@T[0]
+ add 8($ctx),$C
+ mov $A,0($ctx)
+ add 12($ctx),$D
+ mov @T[0],4($ctx)
+ add 16($ctx),$E
+ mov $C,8($ctx)
+ mov $D,12($ctx)
+ mov $E,16($ctx)
+___
+$code.=<<___ if ($win64);
+ movaps 64+0(%rsp),%xmm6
+ movaps 64+16(%rsp),%xmm7
+ movaps 64+32(%rsp),%xmm8
+ movaps 64+48(%rsp),%xmm9
+ movaps 64+64(%rsp),%xmm10
+ movaps 64+80(%rsp),%xmm11
+___
+$code.=<<___;
+ lea `64+($win64?6*16:0)`(%rsp),%rsi
+ mov 0(%rsi),%r12
+ mov 8(%rsi),%rbp
+ mov 16(%rsi),%rbx
+ lea 24(%rsi),%rsp
+.Lepilogue_avx:
+ ret
+.size sha1_block_data_order_avx,.-sha1_block_data_order_avx
+___
+
+if ($avx>1) {
+use integer;
+$Xi=4; # reset variables
+@X=map("%ymm$_",(4..7,0..3));
+@Tx=map("%ymm$_",(8..10));
+$Kx="%ymm11";
+$j=0;
+
+my @ROTX=("%eax","%ebp","%ebx","%ecx","%edx","%esi");
+my ($a5,$t0)=("%r12d","%edi");
+
+my ($A,$F,$B,$C,$D,$E)=@ROTX;
+my $rx=0;
+my $frame="%r13";
+
+$code.=<<___;
+.type sha1_block_data_order_avx2,\@function,3
+.align 16
+sha1_block_data_order_avx2:
+_avx2_shortcut:
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ push %r14
+ lea (%rsp),%r14
+___
+$code.=<<___ if ($win64);
+ lea -6*16(%rsp),%rsp
+ movaps %xmm6,-6*16(%r14)
+ movaps %xmm7,-5*16(%r14)
+ movaps %xmm8,-4*16(%r14)
+ movaps %xmm9,-3*16(%r14)
+ movaps %xmm10,-2*16(%r14)
+ movaps %xmm11,-1*16(%r14)
+.Lprologue_avx2:
+___
+$code.=<<___;
+ mov %rdi,$ctx # reassigned argument
+ mov %rsi,$inp # reassigned argument
+ mov %rdx,$num # reassigned argument
+ vzeroupper
+
+ lea -640(%rsp),%rsp
+ shl \$6,$num
+ lea 64($inp),$frame
+ and \$-128,%rsp
+ add $inp,$num
+ lea K_XX_XX+64(%rip),$K_XX_XX
+
+ mov 0($ctx),$A # load context
+ cmp $num,$frame
+ cmovae $inp,$frame # next or same block
+ mov 4($ctx),$F
+ mov 8($ctx),$C
+ mov 12($ctx),$D
+ mov 16($ctx),$E
+ vmovdqu 64($K_XX_XX),@X[2] # pbswap mask
+
+ vmovdqu ($inp),%xmm0
+ vmovdqu 16($inp),%xmm1
+ vmovdqu 32($inp),%xmm2
+ vmovdqu 48($inp),%xmm3
+ lea 64($inp),$inp
+ vinserti128 \$1,($frame),@X[-4&7],@X[-4&7]
+ vinserti128 \$1,16($frame),@X[-3&7],@X[-3&7]
+ vpshufb @X[2],@X[-4&7],@X[-4&7]
+ vinserti128 \$1,32($frame),@X[-2&7],@X[-2&7]
+ vpshufb @X[2],@X[-3&7],@X[-3&7]
+ vinserti128 \$1,48($frame),@X[-1&7],@X[-1&7]
+ vpshufb @X[2],@X[-2&7],@X[-2&7]
+ vmovdqu -64($K_XX_XX),$Kx # K_00_19
+ vpshufb @X[2],@X[-1&7],@X[-1&7]
+
+ vpaddd $Kx,@X[-4&7],@X[0] # add K_00_19
+ vpaddd $Kx,@X[-3&7],@X[1]
+ vmovdqu @X[0],0(%rsp) # X[]+K xfer to IALU
+ vpaddd $Kx,@X[-2&7],@X[2]
+ vmovdqu @X[1],32(%rsp)
+ vpaddd $Kx,@X[-1&7],@X[3]
+ vmovdqu @X[2],64(%rsp)
+ vmovdqu @X[3],96(%rsp)
+___
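+# The AVX2 path schedules two consecutive 64-byte blocks at once: the
+# vinserti128 instructions above place the following block in the upper
+# 128-bit lane of each %ymm register, so every Xupdate pass produces X[]+K
+# material for both blocks (hence the 32-byte stride of the xfer area).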
+for (;$Xi<8;$Xi++) { # Xupdate_avx2_16_31
+ use integer;
+
+ &vpalignr(@X[0],@X[-3&7],@X[-4&7],8); # compose "X[-14]" in "X[0]"
+ &vpsrldq(@Tx[0],@X[-1&7],4); # "X[-3]", 3 dwords
+ &vpxor (@X[0],@X[0],@X[-4&7]); # "X[0]"^="X[-16]"
+ &vpxor (@Tx[0],@Tx[0],@X[-2&7]); # "X[-3]"^"X[-8]"
+ &vpxor (@X[0],@X[0],@Tx[0]); # "X[0]"^="X[-3]"^"X[-8]"
+ &vpsrld (@Tx[0],@X[0],31);
+ &vmovdqu($Kx,eval(2*16*(($Xi)/5)-64)."($K_XX_XX)") if ($Xi%5==0); # K_XX_XX
+ &vpslldq(@Tx[2],@X[0],12); # "X[0]"<<96, extract one dword
+ &vpaddd (@X[0],@X[0],@X[0]);
+ &vpsrld (@Tx[1],@Tx[2],30);
+ &vpor (@X[0],@X[0],@Tx[0]); # "X[0]"<<<=1
+ &vpslld (@Tx[2],@Tx[2],2);
+ &vpxor (@X[0],@X[0],@Tx[1]);
+ &vpxor (@X[0],@X[0],@Tx[2]); # "X[0]"^=("X[0]">>96)<<<2
+ &vpaddd (@Tx[1],@X[0],$Kx);
+ &vmovdqu("32*$Xi(%rsp)",@Tx[1]); # X[]+K xfer to IALU
+
+ push(@X,shift(@X)); # "rotate" X[]
+}
+$code.=<<___;
+ lea 128(%rsp),$frame
+ jmp .Loop_avx2
+.align 32
+.Loop_avx2:
+ rorx \$2,$F,$B
+ andn $D,$F,$t0
+ and $C,$F
+ xor $t0,$F
+___
+sub bodyx_00_19 () { # 8 instructions, 3 cycles critical path
+ # at start $f=(b&c)^(~b&d), $b>>>=2
+ return &bodyx_20_39() if ($rx==19); $rx++;
+ (
+ '($a,$f,$b,$c,$d,$e)=@ROTX;'.
+
+ '&add ($e,((32*($j/4)+4*($j%4))%256-128)."($frame)");'. # e+=X[i]+K
+ '&lea ($frame,"256($frame)") if ($j%32==31);',
+ '&andn ($t0,$a,$c)', # ~b&d for next round
+
+ '&add ($e,$f)', # e+=(b&c)^(~b&d)
+ '&rorx ($a5,$a,27)', # a<<<5
+ '&rorx ($f,$a,2)', # b>>>2 for next round
+ '&and ($a,$b)', # b&c for next round
+
+ '&add ($e,$a5)', # e+=a<<<5
+ '&xor ($a,$t0);'. # f=(b&c)^(~b&d) for next round
+
+ 'unshift(@ROTX,pop(@ROTX)); $j++;'
+ )
+}
+
+sub bodyx_20_39 () { # 7 instructions, 2 cycles critical path
+ # on entry $f=b^c^d, $b>>>=2
+ return &bodyx_40_59() if ($rx==39); $rx++;
+ (
+ '($a,$f,$b,$c,$d,$e)=@ROTX;'.
+
+ '&add ($e,((32*($j/4)+4*($j%4))%256-128)."($frame)");'. # e+=X[i]+K
+ '&lea ($frame,"256($frame)") if ($j%32==31);',
+
+ '&lea ($e,"($e,$f)")', # e+=b^c^d
+ '&rorx ($a5,$a,27)', # a<<<5
+ '&rorx ($f,$a,2) if ($j<79)', # b>>>2 in next round
+ '&xor ($a,$b) if ($j<79)', # b^c for next round
+
+ '&add ($e,$a5)', # e+=a<<<5
+ '&xor ($a,$c) if ($j<79);'. # f=b^c^d for next round
+
+ 'unshift(@ROTX,pop(@ROTX)); $j++;'
+ )
+}
+
+sub bodyx_40_59 () { # 10 instructions, 3 cycles critical path
+ # on entry $f=((b^c)&(c^d)), $b>>>=2
+ $rx++;
+ (
+ '($a,$f,$b,$c,$d,$e)=@ROTX;'.
+
+ '&add ($e,((32*($j/4)+4*($j%4))%256-128)."($frame)");'. # e+=X[i]+K
+ '&lea ($frame,"256($frame)") if ($j%32==31);',
+ '&xor ($f,$c) if ($j>39)', # (b^c)&(c^d)^c
+ '&mov ($t0,$b) if ($j<59)', # count on zero latency
+ '&xor ($t0,$c) if ($j<59)', # c^d for next round
+
+ '&lea ($e,"($e,$f)")', # e+=(b^c)&(c^d)^c
+ '&rorx ($a5,$a,27)', # a<<<5
+ '&rorx ($f,$a,2)', # b>>>2 in next round
+ '&xor ($a,$b)', # b^c for next round
+
+ '&add ($e,$a5)', # e+=a<<<5
+ '&and ($a,$t0) if ($j< 59);'. # f=(b^c)&(c^d) for next round
+ '&xor ($a,$c) if ($j==59);'. # f=b^c^d for next round
+
+ 'unshift(@ROTX,pop(@ROTX)); $j++;'
+ )
+}
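+
+# The bodyx_* variants lean on BMI: andn computes ~x&y in one instruction
+# and rorx is a non-destructive rotate that leaves the flags untouched,
+# which is what keeps the critical paths quoted above so short.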
+
+sub Xupdate_avx2_16_31()	# recall that $Xi starts with 4
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body,&$body); # 35 instructions
+ my ($a,$b,$c,$d,$e);
+
+ &vpalignr(@X[0],@X[-3&7],@X[-4&7],8); # compose "X[-14]" in "X[0]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpsrldq(@Tx[0],@X[-1&7],4); # "X[-3]", 3 dwords
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@X[-4&7]); # "X[0]"^="X[-16]"
+ &vpxor (@Tx[0],@Tx[0],@X[-2&7]); # "X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@Tx[0]); # "X[0]"^="X[-3]"^"X[-8]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpsrld (@Tx[0],@X[0],31);
+ &vmovdqu($Kx,eval(2*16*(($Xi)/5)-64)."($K_XX_XX)") if ($Xi%5==0); # K_XX_XX
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpslldq(@Tx[2],@X[0],12); # "X[0]"<<96, extract one dword
+ &vpaddd (@X[0],@X[0],@X[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpsrld (@Tx[1],@Tx[2],30);
+ &vpor (@X[0],@X[0],@Tx[0]); # "X[0]"<<<=1
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpslld (@Tx[2],@Tx[2],2);
+ &vpxor (@X[0],@X[0],@Tx[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@Tx[2]); # "X[0]"^=("X[0]">>96)<<<2
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpaddd (@Tx[1],@X[0],$Kx);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vmovdqu(eval(32*($Xi))."(%rsp)",@Tx[1]); # X[]+K xfer to IALU
+
+ foreach (@insns) { eval; } # remaining instructions [if any]
+
+ $Xi++;
+ push(@X,shift(@X)); # "rotate" X[]
+}
+
+sub Xupdate_avx2_32_79()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body,&$body); # 35 to 50 instructions
+ my ($a,$b,$c,$d,$e);
+
+ &vpalignr(@Tx[0],@X[-1&7],@X[-2&7],8); # compose "X[-6]"
+ &vpxor (@X[0],@X[0],@X[-4&7]); # "X[0]"="X[-32]"^"X[-16]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@X[-7&7]); # "X[0]"^="X[-28]"
+ &vmovdqu($Kx,eval(2*16*($Xi/5)-64)."($K_XX_XX)") if ($Xi%5==0);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpxor (@X[0],@X[0],@Tx[0]); # "X[0]"^="X[-6]"
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpsrld (@Tx[0],@X[0],30);
+ &vpslld (@X[0],@X[0],2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ #&vpslld (@X[0],@X[0],2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpor (@X[0],@X[0],@Tx[0]); # "X[0]"<<<=2
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vpaddd (@Tx[1],@X[0],$Kx);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+
+ &vmovdqu("32*$Xi(%rsp)",@Tx[1]); # X[]+K xfer to IALU
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ $Xi++;
+ push(@X,shift(@X)); # "rotate" X[]
+}
+
+sub Xloop_avx2()
+{ use integer;
+ my $body = shift;
+ my @insns = (&$body,&$body,&$body,&$body,&$body); # 32 instructions
+ my ($a,$b,$c,$d,$e);
+
+ foreach (@insns) { eval; }
+}
+
+ &align32();
+ &Xupdate_avx2_32_79(\&bodyx_00_19);
+ &Xupdate_avx2_32_79(\&bodyx_00_19);
+ &Xupdate_avx2_32_79(\&bodyx_00_19);
+ &Xupdate_avx2_32_79(\&bodyx_00_19);
+
+ &Xupdate_avx2_32_79(\&bodyx_20_39);
+ &Xupdate_avx2_32_79(\&bodyx_20_39);
+ &Xupdate_avx2_32_79(\&bodyx_20_39);
+ &Xupdate_avx2_32_79(\&bodyx_20_39);
+
+ &align32();
+ &Xupdate_avx2_32_79(\&bodyx_40_59);
+ &Xupdate_avx2_32_79(\&bodyx_40_59);
+ &Xupdate_avx2_32_79(\&bodyx_40_59);
+ &Xupdate_avx2_32_79(\&bodyx_40_59);
+
+ &Xloop_avx2(\&bodyx_20_39);
+ &Xloop_avx2(\&bodyx_20_39);
+ &Xloop_avx2(\&bodyx_20_39);
+ &Xloop_avx2(\&bodyx_20_39);
+
+$code.=<<___;
+ lea 128($inp),$frame
+ lea 128($inp),%rdi # borrow $t0
+ cmp $num,$frame
+ cmovae $inp,$frame # next or previous block
+
+ # output is d-e-[a]-f-b-c => A=d,F=e,C=f,D=b,E=c
+ add 0($ctx),@ROTX[0] # update context
+ add 4($ctx),@ROTX[1]
+ add 8($ctx),@ROTX[3]
+ mov @ROTX[0],0($ctx)
+ add 12($ctx),@ROTX[4]
+ mov @ROTX[1],4($ctx)
+ mov @ROTX[0],$A # A=d
+ add 16($ctx),@ROTX[5]
+ mov @ROTX[3],$a5
+ mov @ROTX[3],8($ctx)
+ mov @ROTX[4],$D # D=b
+ #xchg @ROTX[5],$F # F=c, C=f
+ mov @ROTX[4],12($ctx)
+ mov @ROTX[1],$F # F=e
+ mov @ROTX[5],16($ctx)
+ #mov $F,16($ctx)
+ mov @ROTX[5],$E # E=c
+ mov $a5,$C # C=f
+ #xchg $F,$E # E=c, F=e
+
+ cmp $num,$inp
+ je .Ldone_avx2
+___
+
+$Xi=4; # reset variables
+@X=map("%ymm$_",(4..7,0..3));
+
+$code.=<<___;
+ vmovdqu 64($K_XX_XX),@X[2] # pbswap mask
+ cmp $num,%rdi # borrowed $t0
+ ja .Last_avx2
+
+ vmovdqu -64(%rdi),%xmm0 # low part of @X[-4&7]
+ vmovdqu -48(%rdi),%xmm1
+ vmovdqu -32(%rdi),%xmm2
+ vmovdqu -16(%rdi),%xmm3
+ vinserti128 \$1,0($frame),@X[-4&7],@X[-4&7]
+ vinserti128 \$1,16($frame),@X[-3&7],@X[-3&7]
+ vinserti128 \$1,32($frame),@X[-2&7],@X[-2&7]
+ vinserti128 \$1,48($frame),@X[-1&7],@X[-1&7]
+ jmp .Last_avx2
+
+.align 32
+.Last_avx2:
+ lea 128+16(%rsp),$frame
+ rorx \$2,$F,$B
+ andn $D,$F,$t0
+ and $C,$F
+ xor $t0,$F
+ sub \$-128,$inp
+___
+ $rx=$j=0; @ROTX=($A,$F,$B,$C,$D,$E);
+
+ &Xloop_avx2 (\&bodyx_00_19);
+ &Xloop_avx2 (\&bodyx_00_19);
+ &Xloop_avx2 (\&bodyx_00_19);
+ &Xloop_avx2 (\&bodyx_00_19);
+
+ &Xloop_avx2 (\&bodyx_20_39);
+ &vmovdqu ($Kx,"-64($K_XX_XX)"); # K_00_19
+ &vpshufb (@X[-4&7],@X[-4&7],@X[2]); # byte swap
+ &Xloop_avx2 (\&bodyx_20_39);
+ &vpshufb (@X[-3&7],@X[-3&7],@X[2]);
+ &vpaddd (@Tx[0],@X[-4&7],$Kx); # add K_00_19
+ &Xloop_avx2 (\&bodyx_20_39);
+ &vmovdqu ("0(%rsp)",@Tx[0]);
+ &vpshufb (@X[-2&7],@X[-2&7],@X[2]);
+ &vpaddd (@Tx[1],@X[-3&7],$Kx);
+ &Xloop_avx2 (\&bodyx_20_39);
+ &vmovdqu ("32(%rsp)",@Tx[1]);
+ &vpshufb (@X[-1&7],@X[-1&7],@X[2]);
+ &vpaddd (@X[2],@X[-2&7],$Kx);
+
+ &Xloop_avx2 (\&bodyx_40_59);
+ &align32 ();
+ &vmovdqu ("64(%rsp)",@X[2]);
+ &vpaddd (@X[3],@X[-1&7],$Kx);
+ &Xloop_avx2 (\&bodyx_40_59);
+ &vmovdqu ("96(%rsp)",@X[3]);
+ &Xloop_avx2 (\&bodyx_40_59);
+ &Xupdate_avx2_16_31(\&bodyx_40_59);
+
+ &Xupdate_avx2_16_31(\&bodyx_20_39);
+ &Xupdate_avx2_16_31(\&bodyx_20_39);
+ &Xupdate_avx2_16_31(\&bodyx_20_39);
+ &Xloop_avx2 (\&bodyx_20_39);
+
+$code.=<<___;
+ lea 128(%rsp),$frame
+
+ # output is d-e-[a]-f-b-c => A=d,F=e,C=f,D=b,E=c
+ add 0($ctx),@ROTX[0] # update context
+ add 4($ctx),@ROTX[1]
+ add 8($ctx),@ROTX[3]
+ mov @ROTX[0],0($ctx)
+ add 12($ctx),@ROTX[4]
+ mov @ROTX[1],4($ctx)
+ mov @ROTX[0],$A # A=d
+ add 16($ctx),@ROTX[5]
+ mov @ROTX[3],$a5
+ mov @ROTX[3],8($ctx)
+ mov @ROTX[4],$D # D=b
+ #xchg @ROTX[5],$F # F=c, C=f
+ mov @ROTX[4],12($ctx)
+ mov @ROTX[1],$F # F=e
+ mov @ROTX[5],16($ctx)
+ #mov $F,16($ctx)
+ mov @ROTX[5],$E # E=c
+ mov $a5,$C # C=f
+ #xchg $F,$E # E=c, F=e
+
+ cmp $num,$inp
+ jbe .Loop_avx2
+
+.Ldone_avx2:
+ vzeroupper
+___
+$code.=<<___ if ($win64);
+ movaps -6*16(%r14),%xmm6
+ movaps -5*16(%r14),%xmm7
+ movaps -4*16(%r14),%xmm8
+ movaps -3*16(%r14),%xmm9
+ movaps -2*16(%r14),%xmm10
+ movaps -1*16(%r14),%xmm11
+___
+$code.=<<___;
+ lea (%r14),%rsi
+ mov 0(%rsi),%r14
+ mov 8(%rsi),%r13
+ mov 16(%rsi),%r12
+ mov 24(%rsi),%rbp
+ mov 32(%rsi),%rbx
+ lea 40(%rsi),%rsp
+.Lepilogue_avx2:
+ ret
+.size sha1_block_data_order_avx2,.-sha1_block_data_order_avx2
+___
+}
+}
+$code.=<<___;
+.align 64
+K_XX_XX:
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 # K_00_19
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 # K_00_19
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 # K_20_39
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 # K_20_39
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc # K_40_59
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc # K_40_59
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 # K_60_79
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 # K_60_79
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f # pbswap mask
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f # pbswap mask
+___
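+# Each constant is emitted twice so that the AVX2 path can load a full
+# 256-bit value (the same K in both 128-bit lanes) from the same table the
+# 128-bit SSSE3/AVX paths index into.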
+}}}
+$code.=<<___;
+.asciz "SHA1 block transform for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
+.align 64
+___
+
+# EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
+# CONTEXT *context,DISPATCHER_CONTEXT *disp)
+if ($win64) {
+$rec="%rcx";
+$frame="%rdx";
+$context="%r8";
+$disp="%r9";
+
+$code.=<<___;
+.extern __imp_RtlVirtualUnwind
+.type se_handler,\@abi-omnipotent
+.align 16
+se_handler:
+ push %rsi
+ push %rdi
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ push %r14
+ push %r15
+ pushfq
+ sub \$64,%rsp
+
+ mov 120($context),%rax # pull context->Rax
+ mov 248($context),%rbx # pull context->Rip
+
+ lea .Lprologue(%rip),%r10
+ cmp %r10,%rbx # context->Rip<.Lprologue
+ jb .Lcommon_seh_tail
+
+ mov 152($context),%rax # pull context->Rsp
+
+ lea .Lepilogue(%rip),%r10
+ cmp %r10,%rbx # context->Rip>=.Lepilogue
+ jae .Lcommon_seh_tail
+
+ mov `16*4`(%rax),%rax # pull saved stack pointer
+ lea 32(%rax),%rax
+
+ mov -8(%rax),%rbx
+ mov -16(%rax),%rbp
+ mov -24(%rax),%r12
+ mov -32(%rax),%r13
+ mov %rbx,144($context) # restore context->Rbx
+ mov %rbp,160($context) # restore context->Rbp
+ mov %r12,216($context) # restore context->R12
+ mov %r13,224($context) # restore context->R13
+
+ jmp .Lcommon_seh_tail
+.size se_handler,.-se_handler
+
+.type ssse3_handler,\@abi-omnipotent
+.align 16
+ssse3_handler:
+ push %rsi
+ push %rdi
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ push %r14
+ push %r15
+ pushfq
+ sub \$64,%rsp
+
+ mov 120($context),%rax # pull context->Rax
+ mov 248($context),%rbx # pull context->Rip
+
+ mov 8($disp),%rsi # disp->ImageBase
+ mov 56($disp),%r11 # disp->HandlerData
+
+ mov 0(%r11),%r10d # HandlerData[0]
+ lea (%rsi,%r10),%r10 # prologue label
+ cmp %r10,%rbx # context->Rip<prologue label
+ jb .Lcommon_seh_tail
+
+ mov 152($context),%rax # pull context->Rsp
+
+ mov 4(%r11),%r10d # HandlerData[1]
+ lea (%rsi,%r10),%r10 # epilogue label
+ cmp %r10,%rbx # context->Rip>=epilogue label
+ jae .Lcommon_seh_tail
+
+ lea 64(%rax),%rsi
+ lea 512($context),%rdi # &context.Xmm6
+ mov \$12,%ecx
+ .long 0xa548f3fc # cld; rep movsq
+ lea `24+64+6*16`(%rax),%rax # adjust stack pointer
+
+ mov -8(%rax),%rbx
+ mov -16(%rax),%rbp
+ mov -24(%rax),%r12
+ mov %rbx,144($context) # restore context->Rbx
+ mov %rbp,160($context) # restore context->Rbp
+	mov	%r12,216($context)	# restore context->R12
+
+.Lcommon_seh_tail:
+ mov 8(%rax),%rdi
+ mov 16(%rax),%rsi
+ mov %rax,152($context) # restore context->Rsp
+ mov %rsi,168($context) # restore context->Rsi
+ mov %rdi,176($context) # restore context->Rdi
+
+ mov 40($disp),%rdi # disp->ContextRecord
+ mov $context,%rsi # context
+ mov \$154,%ecx # sizeof(CONTEXT)
+ .long 0xa548f3fc # cld; rep movsq
+
+ mov $disp,%rsi
+ xor %rcx,%rcx # arg1, UNW_FLAG_NHANDLER
+ mov 8(%rsi),%rdx # arg2, disp->ImageBase
+ mov 0(%rsi),%r8 # arg3, disp->ControlPc
+ mov 16(%rsi),%r9 # arg4, disp->FunctionEntry
+ mov 40(%rsi),%r10 # disp->ContextRecord
+ lea 56(%rsi),%r11 # &disp->HandlerData
+ lea 24(%rsi),%r12 # &disp->EstablisherFrame
+ mov %r10,32(%rsp) # arg5
+ mov %r11,40(%rsp) # arg6
+ mov %r12,48(%rsp) # arg7
+ mov %rcx,56(%rsp) # arg8, (NULL)
+ call *__imp_RtlVirtualUnwind(%rip)
+
+ mov \$1,%eax # ExceptionContinueSearch
+ add \$64,%rsp
+ popfq
+ pop %r15
+ pop %r14
+ pop %r13
+ pop %r12
+ pop %rbp
+ pop %rbx
+ pop %rdi
+ pop %rsi
+ ret
+.size ssse3_handler,.-ssse3_handler
+
+.section .pdata
+.align 4
+ .rva .LSEH_begin_sha1_block_data_order
+ .rva .LSEH_end_sha1_block_data_order
+ .rva .LSEH_info_sha1_block_data_order
+ .rva .LSEH_begin_sha1_block_data_order_ssse3
+ .rva .LSEH_end_sha1_block_data_order_ssse3
+ .rva .LSEH_info_sha1_block_data_order_ssse3
+___
+$code.=<<___ if ($avx);
+ .rva .LSEH_begin_sha1_block_data_order_avx
+ .rva .LSEH_end_sha1_block_data_order_avx
+ .rva .LSEH_info_sha1_block_data_order_avx
+___
+$code.=<<___ if ($avx>1);
+ .rva .LSEH_begin_sha1_block_data_order_avx2
+ .rva .LSEH_end_sha1_block_data_order_avx2
+ .rva .LSEH_info_sha1_block_data_order_avx2
+___
+$code.=<<___;
+.section .xdata
+.align 8
+.LSEH_info_sha1_block_data_order:
+ .byte 9,0,0,0
+ .rva se_handler
+.LSEH_info_sha1_block_data_order_ssse3:
+ .byte 9,0,0,0
+ .rva ssse3_handler
+ .rva .Lprologue_ssse3,.Lepilogue_ssse3 # HandlerData[]
+___
+$code.=<<___ if ($avx);
+.LSEH_info_sha1_block_data_order_avx:
+ .byte 9,0,0,0
+ .rva ssse3_handler
+ .rva .Lprologue_avx,.Lepilogue_avx # HandlerData[]
+___
+$code.=<<___ if ($avx>1);
+.LSEH_info_sha1_block_data_order_avx2:
+ .byte 9,0,0,0
+ .rva ssse3_handler
+ .rva .Lprologue_avx2,.Lepilogue_avx2 # HandlerData[]
+___
+}
+
+####################################################################
+
+$code =~ s/\`([^\`]*)\`/eval $1/gem;
+print $code;
+close STDOUT;
diff --git a/devel/perlasm/sha256-ssse3-x86.pl b/devel/perlasm/sha256-ssse3-x86.pl
new file mode 100644
index 0000000000..bd48b638c5
--- /dev/null
+++ b/devel/perlasm/sha256-ssse3-x86.pl
@@ -0,0 +1,1125 @@
+#!/usr/bin/env perl
+#
+# ====================================================================
+# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
+# project. The module is, however, dual licensed under OpenSSL and
+# CRYPTOGAMS licenses depending on where you obtain it. For further
+# details see http://www.openssl.org/~appro/cryptogams/.
+# ====================================================================
+#
+# SHA256 block transform for x86. September 2007.
+#
+# Performance improvement over compiler generated code varies from
+# 10% to 40% [see below]. Not very impressive on some µ-archs, but
+# it's 5 times smaller and optimizes the amount of writes.
+#
+# May 2012.
+#
+# Optimization including two of Pavel Semjanov's ideas, alternative
+# Maj and full unroll, resulted in ~20-25% improvement on most CPUs,
+# ~7% on Pentium, ~40% on Atom. As the fully unrolled loop body is almost
+# 15x larger, 8KB vs. 560B, it's used only for longer inputs. But not
+# on P4, where it kills performance, nor Sandy Bridge, where folded
+# loop is approximately as fast...
+#
+# June 2012.
+#
+# Add AMD XOP-specific code path, >30% improvement on Bulldozer over
+# May version, >60% over original. Add AVX+shrd code path, >25%
+# improvement on Sandy Bridge over May version, 60% over original.
+#
+# May 2013.
+#
+# Replace AMD XOP code path with SSSE3 to cover more processors.
+# (Biggest improvement coefficient is on upcoming Atom Silvermont,
+# not shown.) Add AVX+BMI code path.
+#
+# Performance in clock cycles per processed byte (less is better):
+#
+# gcc icc x86 asm(*) SIMD x86_64 asm(**)
+# Pentium 46 57 40/38 - -
+# PIII 36 33 27/24 - -
+# P4 41 38 28 - 17.3
+# AMD K8 27 25 19/15.5 - 14.9
+# Core2 26 23 18/15.6 14.3 13.8
+# Westmere 27 - 19/15.7 13.4 12.3
+# Sandy Bridge 25 - 15.9 12.4 11.6
+# Ivy Bridge 24 - 15.0 11.4 10.3
+# Haswell 22 - 13.9 9.46 7.80
+# Bulldozer 36 - 27/22 17.0 13.6
+# VIA Nano 36 - 25/22 16.8 16.5
+# Atom 50 - 30/25 21.9 18.9
+#
+# (*) numbers after slash are for unrolled loop, where applicable;
+# (**) x86_64 assembly performance is presented for reference
+# purposes, results are best-available;
+
+$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
+push(@INC,"${dir}","${dir}../../perlasm");
+require "x86asm.pl";
+
+&asm_init($ARGV[0],"sha512-586.pl",$ARGV[$#ARGV] eq "386");
+
+$xmm=$avx=0;
+for (@ARGV) { $xmm=1 if (/-DOPENSSL_IA32_SSE2/); }
+
+if ($xmm && `$ENV{CC} -Wa,-v -c -o /dev/null -x assembler /dev/null 2>&1`
+ =~ /GNU assembler version ([2-9]\.[0-9]+)/) {
+ $avx = ($1>=2.19) + ($1>=2.22);
+}
+
+if ($xmm && !$avx && $ARGV[0] eq "win32n" &&
+ `nasm -v 2>&1` =~ /NASM version ([2-9]\.[0-9]+)/) {
+ $avx = ($1>=2.03) + ($1>=2.10);
+}
+
+if ($xmm && !$avx && $ARGV[0] eq "win32" &&
+ `ml 2>&1` =~ /Version ([0-9]+)\./) {
+ $avx = ($1>=10) + ($1>=11);
+}
+
+$unroll_after = 64*4;		# If pre-evicted from L1P cache, the first
+				# spin of the fully unrolled loop was measured to run about
+ # 3-4x slower. If slowdown coefficient is N and
+ # unrolled loop is m times faster, then you break
+ # even at (N-1)/(m-1) blocks. Then it needs to be
+ # adjusted for probability of code being evicted,
+ # code size/cache size=1/4. Typical m is 1.15...
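+				# E.g. with N~3.5 and m~1.15 the break-even
+				# point is (3.5-1)/(1.15-1) ~ 17 blocks; scaled
+				# by the ~1/4 eviction probability that gives
+				# ~4 blocks, i.e. 4*64=256 bytes, hence 64*4.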
+
+$A="eax";
+$E="edx";
+$T="ebx";
+$Aoff=&DWP(4,"esp");
+$Boff=&DWP(8,"esp");
+$Coff=&DWP(12,"esp");
+$Doff=&DWP(16,"esp");
+$Eoff=&DWP(20,"esp");
+$Foff=&DWP(24,"esp");
+$Goff=&DWP(28,"esp");
+$Hoff=&DWP(32,"esp");
+$Xoff=&DWP(36,"esp");
+$K256="ebp";
+
+sub BODY_16_63() {
+ &mov ($T,"ecx"); # "ecx" is preloaded
+ &mov ("esi",&DWP(4*(9+15+16-14),"esp"));
+ &ror ("ecx",18-7);
+ &mov ("edi","esi");
+ &ror ("esi",19-17);
+ &xor ("ecx",$T);
+ &shr ($T,3);
+ &ror ("ecx",7);
+ &xor ("esi","edi");
+ &xor ($T,"ecx"); # T = sigma0(X[-15])
+ &ror ("esi",17);
+ &add ($T,&DWP(4*(9+15+16),"esp")); # T += X[-16]
+ &shr ("edi",10);
+ &add ($T,&DWP(4*(9+15+16-9),"esp")); # T += X[-7]
+ #&xor ("edi","esi") # sigma1(X[-2])
+ # &add ($T,"edi"); # T += sigma1(X[-2])
+ # &mov (&DWP(4*(9+15),"esp"),$T); # save X[0]
+
+ &BODY_00_15(1);
+}
+sub BODY_00_15() {
+ my $in_16_63=shift;
+
+ &mov ("ecx",$E);
+ &xor ("edi","esi") if ($in_16_63); # sigma1(X[-2])
+ &mov ("esi",$Foff);
+ &ror ("ecx",25-11);
+ &add ($T,"edi") if ($in_16_63); # T += sigma1(X[-2])
+ &mov ("edi",$Goff);
+ &xor ("ecx",$E);
+ &xor ("esi","edi");
+ &mov ($T,&DWP(4*(9+15),"esp")) if (!$in_16_63);
+ &mov (&DWP(4*(9+15),"esp"),$T) if ($in_16_63); # save X[0]
+ &ror ("ecx",11-6);
+ &and ("esi",$E);
+ &mov ($Eoff,$E); # modulo-scheduled
+ &xor ($E,"ecx");
+ &add ($T,$Hoff); # T += h
+ &xor ("esi","edi"); # Ch(e,f,g)
+ &ror ($E,6); # Sigma1(e)
+ &mov ("ecx",$A);
+ &add ($T,"esi"); # T += Ch(e,f,g)
+
+ &ror ("ecx",22-13);
+ &add ($T,$E); # T += Sigma1(e)
+ &mov ("edi",$Boff);
+ &xor ("ecx",$A);
+ &mov ($Aoff,$A); # modulo-scheduled
+ &lea ("esp",&DWP(-4,"esp"));
+ &ror ("ecx",13-2);
+ &mov ("esi",&DWP(0,$K256));
+ &xor ("ecx",$A);
+ &mov ($E,$Eoff); # e in next iteration, d in this one
+ &xor ($A,"edi"); # a ^= b
+ &ror ("ecx",2); # Sigma0(a)
+
+ &add ($T,"esi"); # T+= K[i]
+ &mov (&DWP(0,"esp"),$A); # (b^c) in next round
+ &add ($E,$T); # d += T
+ &and ($A,&DWP(4,"esp")); # a &= (b^c)
+ &add ($T,"ecx"); # T += Sigma0(a)
+ &xor ($A,"edi"); # h = Maj(a,b,c) = Ch(a^b,c,b)
+ &mov ("ecx",&DWP(4*(9+15+16-1),"esp")) if ($in_16_63); # preload T
+ &add ($K256,4);
+ &add ($A,$T); # h += T
+}
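+# Note on the "alternative Maj" above: because b^c from the previous round
+# is kept at (esp) ["magic" below], Maj(a,b,c) can be computed as
+# ((a^b)&(b^c))^b, i.e. Ch(a^b,c,b), saving work per round.
+# Illustrative check of the identity (not part of this module):
+#	for my $a (0,1) { for my $b (0,1) { for my $c (0,1) {
+#	    die unless ((($a^$b)&($b^$c))^$b) == (($a&$b)|($a&$c)|($b&$c));
+#	}}}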
+
+&external_label("OPENSSL_ia32cap_P") if (!$i386);
+
+&function_begin("sha256_block_data_order");
+ &mov ("esi",wparam(0)); # ctx
+ &mov ("edi",wparam(1)); # inp
+ &mov ("eax",wparam(2)); # num
+ &mov ("ebx","esp"); # saved sp
+
+ &call (&label("pic_point")); # make it PIC!
+&set_label("pic_point");
+ &blindpop($K256);
+ &lea ($K256,&DWP(&label("K256")."-".&label("pic_point"),$K256));
+
+ &sub ("esp",16);
+ &and ("esp",-64);
+
+ &shl ("eax",6);
+ &add ("eax","edi");
+ &mov (&DWP(0,"esp"),"esi"); # ctx
+ &mov (&DWP(4,"esp"),"edi"); # inp
+ &mov (&DWP(8,"esp"),"eax"); # inp+num*128
+ &mov (&DWP(12,"esp"),"ebx"); # saved sp
+ if (!$i386) {
+ &picmeup("edx","OPENSSL_ia32cap_P",$K256,&label("K256"));
+ &mov ("ecx",&DWP(0,"edx"));
+ &mov ("ebx",&DWP(4,"edx"));
+ &test ("ecx",1<<20); # check for P4
+ &jnz (&label("loop"));
+ &and ("ecx",1<<30); # mask "Intel CPU" bit
+ &and ("ebx",1<<28|1<<9); # mask AVX and SSSE3 bits
+ &or ("ecx","ebx");
+ &and ("ecx",1<<28|1<<30);
+ &cmp ("ecx",1<<28|1<<30);
+ if ($xmm) {
+ &je (&label("AVX")) if ($avx);
+ &test ("ebx",1<<9); # check for SSSE3
+ &jnz (&label("SSSE3"));
+ } else {
+ &je (&label("loop_shrd"));
+ }
+ if ($unroll_after) {
+ &sub ("eax","edi");
+ &cmp ("eax",$unroll_after);
+ &jae (&label("unrolled"));
+ } }
+ &jmp (&label("loop"));
+
+sub COMPACT_LOOP() {
+my $suffix=shift;
+
+&set_label("loop$suffix",$suffix?32:16);
+ # copy input block to stack reversing byte and dword order
+ for($i=0;$i<4;$i++) {
+ &mov ("eax",&DWP($i*16+0,"edi"));
+ &mov ("ebx",&DWP($i*16+4,"edi"));
+ &mov ("ecx",&DWP($i*16+8,"edi"));
+ &bswap ("eax");
+ &mov ("edx",&DWP($i*16+12,"edi"));
+ &bswap ("ebx");
+ &push ("eax");
+ &bswap ("ecx");
+ &push ("ebx");
+ &bswap ("edx");
+ &push ("ecx");
+ &push ("edx");
+ }
+ &add ("edi",64);
+ &lea ("esp",&DWP(-4*9,"esp"));# place for A,B,C,D,E,F,G,H
+ &mov (&DWP(4*(9+16)+4,"esp"),"edi");
+
+ # copy ctx->h[0-7] to A,B,C,D,E,F,G,H on stack
+ &mov ($A,&DWP(0,"esi"));
+ &mov ("ebx",&DWP(4,"esi"));
+ &mov ("ecx",&DWP(8,"esi"));
+ &mov ("edi",&DWP(12,"esi"));
+ # &mov ($Aoff,$A);
+ &mov ($Boff,"ebx");
+ &xor ("ebx","ecx");
+ &mov ($Coff,"ecx");
+ &mov ($Doff,"edi");
+ &mov (&DWP(0,"esp"),"ebx"); # magic
+ &mov ($E,&DWP(16,"esi"));
+ &mov ("ebx",&DWP(20,"esi"));
+ &mov ("ecx",&DWP(24,"esi"));
+ &mov ("edi",&DWP(28,"esi"));
+ # &mov ($Eoff,$E);
+ &mov ($Foff,"ebx");
+ &mov ($Goff,"ecx");
+ &mov ($Hoff,"edi");
+
+&set_label("00_15$suffix",16);
+
+ &BODY_00_15();
+
+ &cmp ("esi",0xc19bf174);
+ &jne (&label("00_15$suffix"));
+
+ &mov ("ecx",&DWP(4*(9+15+16-1),"esp")); # preloaded in BODY_00_15(1)
+ &jmp (&label("16_63$suffix"));
+
+&set_label("16_63$suffix",16);
+
+ &BODY_16_63();
+
+ &cmp ("esi",0xc67178f2);
+ &jne (&label("16_63$suffix"));
+
+ &mov ("esi",&DWP(4*(9+16+64)+0,"esp"));#ctx
+ # &mov ($A,$Aoff);
+ &mov ("ebx",$Boff);
+ # &mov ("edi",$Coff);
+ &mov ("ecx",$Doff);
+ &add ($A,&DWP(0,"esi"));
+ &add ("ebx",&DWP(4,"esi"));
+ &add ("edi",&DWP(8,"esi"));
+ &add ("ecx",&DWP(12,"esi"));
+ &mov (&DWP(0,"esi"),$A);
+ &mov (&DWP(4,"esi"),"ebx");
+ &mov (&DWP(8,"esi"),"edi");
+ &mov (&DWP(12,"esi"),"ecx");
+ # &mov ($E,$Eoff);
+ &mov ("eax",$Foff);
+ &mov ("ebx",$Goff);
+ &mov ("ecx",$Hoff);
+ &mov ("edi",&DWP(4*(9+16+64)+4,"esp"));#inp
+ &add ($E,&DWP(16,"esi"));
+ &add ("eax",&DWP(20,"esi"));
+ &add ("ebx",&DWP(24,"esi"));
+ &add ("ecx",&DWP(28,"esi"));
+ &mov (&DWP(16,"esi"),$E);
+ &mov (&DWP(20,"esi"),"eax");
+ &mov (&DWP(24,"esi"),"ebx");
+ &mov (&DWP(28,"esi"),"ecx");
+
+ &lea ("esp",&DWP(4*(9+16+64),"esp"));# destroy frame
+ &sub ($K256,4*64); # rewind K
+
+ &cmp ("edi",&DWP(8,"esp")); # are we done yet?
+ &jb (&label("loop$suffix"));
+}
+ &COMPACT_LOOP();
+ &mov ("esp",&DWP(12,"esp")); # restore sp
+&function_end_A();
+ if (!$i386 && !$xmm) {
+ # ~20% improvement on Sandy Bridge
+ local *ror = sub { &shrd(@_[0],@_) };
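+ # (shrd with identical source and destination registers acts as a
+ # rotate; on Sandy Bridge it turns out faster than ror, hence the
+ # drop-in override)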
+ &COMPACT_LOOP("_shrd");
+ &mov ("esp",&DWP(12,"esp")); # restore sp
+&function_end_A();
+ }
+
+&set_label("K256",64); # Yes! I keep it in the code segment!
+@K256=( 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5,
+ 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5,
+ 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3,
+ 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174,
+ 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc,
+ 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da,
+ 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7,
+ 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967,
+ 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13,
+ 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85,
+ 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3,
+ 0xd192e819,0xd6990624,0xf40e3585,0x106aa070,
+ 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5,
+ 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3,
+ 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208,
+ 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 );
+&data_word(@K256);
+&data_word(0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f); # byte swap mask
+&asciz("SHA256 block transform for x86, CRYPTOGAMS by <appro\@openssl.org>");
+
+($a,$b,$c,$d,$e,$f,$g,$h)=(0..7); # offsets
+sub off { &DWP(4*(((shift)-$i)&7),"esp"); }
+
+if (!$i386 && $unroll_after) {
+my @AH=($A,$K256);
+
+&set_label("unrolled",16);
+ &lea ("esp",&DWP(-96,"esp"));
+ # copy ctx->h[0-7] to A,B,C,D,E,F,G,H on stack
+ &mov ($AH[0],&DWP(0,"esi"));
+ &mov ($AH[1],&DWP(4,"esi"));
+ &mov ("ecx",&DWP(8,"esi"));
+ &mov ("ebx",&DWP(12,"esi"));
+ #&mov (&DWP(0,"esp"),$AH[0]);
+ &mov (&DWP(4,"esp"),$AH[1]);
+ &xor ($AH[1],"ecx"); # magic
+ &mov (&DWP(8,"esp"),"ecx");
+ &mov (&DWP(12,"esp"),"ebx");
+ &mov ($E,&DWP(16,"esi"));
+ &mov ("ebx",&DWP(20,"esi"));
+ &mov ("ecx",&DWP(24,"esi"));
+ &mov ("esi",&DWP(28,"esi"));
+ #&mov (&DWP(16,"esp"),$E);
+ &mov (&DWP(20,"esp"),"ebx");
+ &mov (&DWP(24,"esp"),"ecx");
+ &mov (&DWP(28,"esp"),"esi");
+ &jmp (&label("grand_loop"));
+
+&set_label("grand_loop",16);
+ # copy input block to stack reversing byte order
+ for($i=0;$i<5;$i++) {
+ &mov ("ebx",&DWP(12*$i+0,"edi"));
+ &mov ("ecx",&DWP(12*$i+4,"edi"));
+ &bswap ("ebx");
+ &mov ("esi",&DWP(12*$i+8,"edi"));
+ &bswap ("ecx");
+ &mov (&DWP(32+12*$i+0,"esp"),"ebx");
+ &bswap ("esi");
+ &mov (&DWP(32+12*$i+4,"esp"),"ecx");
+ &mov (&DWP(32+12*$i+8,"esp"),"esi");
+ }
+ &mov ("ebx",&DWP($i*12,"edi"));
+ &add ("edi",64);
+ &bswap ("ebx");
+ &mov (&DWP(96+4,"esp"),"edi");
+ &mov (&DWP(32+12*$i,"esp"),"ebx");
+
+ my ($t1,$t2) = ("ecx","esi");
+
+ for ($i=0;$i<64;$i++) {
+
+ if ($i>=16) {
+ &mov ($T,$t1); # $t1 is preloaded
+ # &mov ($t2,&DWP(32+4*(($i+14)&15),"esp"));
+ &ror ($t1,18-7);
+ &mov ("edi",$t2);
+ &ror ($t2,19-17);
+ &xor ($t1,$T);
+ &shr ($T,3);
+ &ror ($t1,7);
+ &xor ($t2,"edi");
+ &xor ($T,$t1); # T = sigma0(X[-15])
+ &ror ($t2,17);
+ &add ($T,&DWP(32+4*($i&15),"esp")); # T += X[-16]
+ &shr ("edi",10);
+ &add ($T,&DWP(32+4*(($i+9)&15),"esp")); # T += X[-7]
+ #&xor ("edi",$t2) # sigma1(X[-2])
+ # &add ($T,"edi"); # T += sigma1(X[-2])
+ # &mov (&DWP(4*(9+15),"esp"),$T); # save X[0]
+ }
+ &mov ($t1,$E);
+ &xor ("edi",$t2) if ($i>=16); # sigma1(X[-2])
+ &mov ($t2,&off($f));
+ &ror ($E,25-11);
+ &add ($T,"edi") if ($i>=16); # T += sigma1(X[-2])
+ &mov ("edi",&off($g));
+ &xor ($E,$t1);
+ &mov ($T,&DWP(32+4*($i&15),"esp")) if ($i<16); # X[i]
+ &mov (&DWP(32+4*($i&15),"esp"),$T) if ($i>=16 && $i<62); # save X[0]
+ &xor ($t2,"edi");
+ &ror ($E,11-6);
+ &and ($t2,$t1);
+ &mov (&off($e),$t1); # save $E, modulo-scheduled
+ &xor ($E,$t1);
+ &add ($T,&off($h)); # T += h
+ &xor ("edi",$t2); # Ch(e,f,g)
+ &ror ($E,6); # Sigma1(e)
+ &mov ($t1,$AH[0]);
+ &add ($T,"edi"); # T += Ch(e,f,g)
+
+ &ror ($t1,22-13);
+ &mov ($t2,$AH[0]);
+ &mov ("edi",&off($b));
+ &xor ($t1,$AH[0]);
+ &mov (&off($a),$AH[0]); # save $A, modulo-scheduled
+ &xor ($AH[0],"edi"); # a ^= b, (b^c) in next round
+ &ror ($t1,13-2);
+ &and ($AH[1],$AH[0]); # (b^c) &= (a^b)
+	&lea	($E,&DWP(@K256[$i],$T,$E));	# T += Sigma1(e)+K[i]
+ &xor ($t1,$t2);
+ &xor ($AH[1],"edi"); # h = Maj(a,b,c) = Ch(a^b,c,b)
+ &mov ($t2,&DWP(32+4*(($i+2)&15),"esp")) if ($i>=15 && $i<63);
+ &ror ($t1,2); # Sigma0(a)
+
+ &add ($AH[1],$E); # h += T
+ &add ($E,&off($d)); # d += T
+ &add ($AH[1],$t1); # h += Sigma0(a)
+ &mov ($t1,&DWP(32+4*(($i+15)&15),"esp")) if ($i>=15 && $i<63);
+
+ @AH = reverse(@AH); # rotate(a,h)
+ ($t1,$t2) = ($t2,$t1); # rotate(t1,t2)
+ }
+ &mov ("esi",&DWP(96,"esp")); #ctx
+ #&mov ($AH[0],&DWP(0,"esp"));
+ &xor ($AH[1],"edi"); #&mov ($AH[1],&DWP(4,"esp"));
+ #&mov ("edi", &DWP(8,"esp"));
+ &mov ("ecx",&DWP(12,"esp"));
+ &add ($AH[0],&DWP(0,"esi"));
+ &add ($AH[1],&DWP(4,"esi"));
+ &add ("edi",&DWP(8,"esi"));
+ &add ("ecx",&DWP(12,"esi"));
+ &mov (&DWP(0,"esi"),$AH[0]);
+ &mov (&DWP(4,"esi"),$AH[1]);
+ &mov (&DWP(8,"esi"),"edi");
+ &mov (&DWP(12,"esi"),"ecx");
+ #&mov (&DWP(0,"esp"),$AH[0]);
+ &mov (&DWP(4,"esp"),$AH[1]);
+ &xor ($AH[1],"edi"); # magic
+ &mov (&DWP(8,"esp"),"edi");
+ &mov (&DWP(12,"esp"),"ecx");
+ #&mov ($E,&DWP(16,"esp"));
+ &mov ("edi",&DWP(20,"esp"));
+ &mov ("ebx",&DWP(24,"esp"));
+ &mov ("ecx",&DWP(28,"esp"));
+ &add ($E,&DWP(16,"esi"));
+ &add ("edi",&DWP(20,"esi"));
+ &add ("ebx",&DWP(24,"esi"));
+ &add ("ecx",&DWP(28,"esi"));
+ &mov (&DWP(16,"esi"),$E);
+ &mov (&DWP(20,"esi"),"edi");
+ &mov (&DWP(24,"esi"),"ebx");
+ &mov (&DWP(28,"esi"),"ecx");
+ #&mov (&DWP(16,"esp"),$E);
+ &mov (&DWP(20,"esp"),"edi");
+ &mov ("edi",&DWP(96+4,"esp")); # inp
+ &mov (&DWP(24,"esp"),"ebx");
+ &mov (&DWP(28,"esp"),"ecx");
+
+ &cmp ("edi",&DWP(96+8,"esp")); # are we done yet?
+ &jb (&label("grand_loop"));
+
+ &mov ("esp",&DWP(96+12,"esp")); # restore sp
+&function_end_A();
+}
+ if (!$i386 && $xmm) {{{
+my @X = map("xmm$_",(0..3));
+my ($t0,$t1,$t2,$t3) = map("xmm$_",(4..7));
+my @AH = ($A,$T);
+
+&set_label("SSSE3",32);
+ &lea ("esp",&DWP(-96,"esp"));
+ # copy ctx->h[0-7] to A,B,C,D,E,F,G,H on stack
+ &mov ($AH[0],&DWP(0,"esi"));
+ &mov ($AH[1],&DWP(4,"esi"));
+ &mov ("ecx",&DWP(8,"esi"));
+ &mov ("edi",&DWP(12,"esi"));
+ #&mov (&DWP(0,"esp"),$AH[0]);
+ &mov (&DWP(4,"esp"),$AH[1]);
+ &xor ($AH[1],"ecx"); # magic
+ &mov (&DWP(8,"esp"),"ecx");
+ &mov (&DWP(12,"esp"),"edi");
+ &mov ($E,&DWP(16,"esi"));
+ &mov ("edi",&DWP(20,"esi"));
+ &mov ("ecx",&DWP(24,"esi"));
+ &mov ("esi",&DWP(28,"esi"));
+ #&mov (&DWP(16,"esp"),$E);
+ &mov (&DWP(20,"esp"),"edi");
+ &mov ("edi",&DWP(96+4,"esp")); # inp
+ &mov (&DWP(24,"esp"),"ecx");
+ &mov (&DWP(28,"esp"),"esi");
+ &movdqa ($t3,&QWP(256,$K256));
+ &jmp (&label("grand_ssse3"));
+
+&set_label("grand_ssse3",16);
+ # load input, reverse byte order, add K256[0..15], save to stack
+ &movdqu (@X[0],&QWP(0,"edi"));
+ &movdqu (@X[1],&QWP(16,"edi"));
+ &movdqu (@X[2],&QWP(32,"edi"));
+ &movdqu (@X[3],&QWP(48,"edi"));
+ &add ("edi",64);
+ &pshufb (@X[0],$t3);
+ &mov (&DWP(96+4,"esp"),"edi");
+ &pshufb (@X[1],$t3);
+ &movdqa ($t0,&QWP(0,$K256));
+ &pshufb (@X[2],$t3);
+ &movdqa ($t1,&QWP(16,$K256));
+ &paddd ($t0,@X[0]);
+ &pshufb (@X[3],$t3);
+ &movdqa ($t2,&QWP(32,$K256));
+ &paddd ($t1,@X[1]);
+ &movdqa ($t3,&QWP(48,$K256));
+ &movdqa (&QWP(32+0,"esp"),$t0);
+ &paddd ($t2,@X[2]);
+ &movdqa (&QWP(32+16,"esp"),$t1);
+ &paddd ($t3,@X[3]);
+ &movdqa (&QWP(32+32,"esp"),$t2);
+ &movdqa (&QWP(32+48,"esp"),$t3);
+ &jmp (&label("ssse3_00_47"));
+
+&set_label("ssse3_00_47",16);
+ &add ($K256,64);
+
+sub SSSE3_00_47 () {
+my $j = shift;
+my $body = shift;
+my @X = @_;
+my @insns = (&$body,&$body,&$body,&$body); # 120 instructions
+
+ eval(shift(@insns));
+ &movdqa ($t0,@X[1]);
+ eval(shift(@insns)); # @
+ eval(shift(@insns));
+ &movdqa ($t3,@X[3]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &palignr ($t0,@X[0],4); # X[1..4]
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ eval(shift(@insns));
+ &palignr ($t3,@X[2],4); # X[9..12]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa ($t1,$t0);
+ eval(shift(@insns)); # @
+ eval(shift(@insns));
+ &movdqa ($t2,$t0);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrld ($t0,3);
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &paddd (@X[0],$t3); # X[0..3] += X[9..12]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrld ($t2,7);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ eval(shift(@insns));
+ &pshufd ($t3,@X[3],0b11111010); # X[14..15]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pslld ($t1,32-18);
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &pxor ($t0,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrld ($t2,18-7);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &pxor ($t0,$t1);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pslld ($t1,18-7);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &pxor ($t0,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa ($t2,$t3);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &pxor ($t0,$t1); # sigma0(X[1..4])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrld ($t3,10);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &paddd (@X[0],$t0); # X[0..3] += sigma0(X[1..4])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrlq ($t2,17);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &pxor ($t3,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrlq ($t2,19-17);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &pxor ($t3,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pshufd ($t3,$t3,0b10000000);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ eval(shift(@insns));
+ &psrldq ($t3,8);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &paddd (@X[0],$t3); # X[0..1] += sigma1(X[14..15])
+ eval(shift(@insns)); # @
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ eval(shift(@insns));
+ &pshufd ($t3,@X[0],0b01010000); # X[16..17]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa ($t2,$t3);
+ eval(shift(@insns)); # @
+ &psrld ($t3,10);
+ eval(shift(@insns));
+ &psrlq ($t2,17);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &pxor ($t3,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrlq ($t2,19-17);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &pxor ($t3,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pshufd ($t3,$t3,0b00001000);
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &movdqa ($t2,&QWP(16*$j,$K256));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pslldq ($t3,8);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); # @
+ &paddd (@X[0],$t3); # X[2..3] += sigma1(X[16..17])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &paddd ($t2,@X[0]);
+ eval(shift(@insns)); # @
+
+ foreach (@insns) { eval; } # remaining instructions
+
+ &movdqa (&QWP(32+16*$j,"esp"),$t2);
+}
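+# For reference, this is the standard FIPS 180-4 SHA-256 message schedule
+# that the routine above computes four words at a time:
+#	W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16]
+#	sigma0(x) = ROTR(x,7) ^ ROTR(x,18) ^ (x>>3)
+#	sigma1(x) = ROTR(x,17) ^ ROTR(x,19) ^ (x>>10)
+# K256[t] is added to W[t] up front and the sums are parked on the stack,
+# so the scalar round body picks up X[i]+K[i] with a single load.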
+
+sub body_00_15 () {
+ (
+ '&mov ("ecx",$E);',
+ '&ror ($E,25-11);',
+ '&mov ("esi",&off($f));',
+ '&xor ($E,"ecx");',
+ '&mov ("edi",&off($g));',
+ '&xor ("esi","edi");',
+ '&ror ($E,11-6);',
+ '&and ("esi","ecx");',
+ '&mov (&off($e),"ecx");', # save $E, modulo-scheduled
+ '&xor ($E,"ecx");',
+ '&xor ("edi","esi");', # Ch(e,f,g)
+ '&ror ($E,6);', # T = Sigma1(e)
+ '&mov ("ecx",$AH[0]);',
+ '&add ($E,"edi");', # T += Ch(e,f,g)
+ '&mov ("edi",&off($b));',
+ '&mov ("esi",$AH[0]);',
+
+ '&ror ("ecx",22-13);',
+ '&mov (&off($a),$AH[0]);', # save $A, modulo-scheduled
+ '&xor ("ecx",$AH[0]);',
+ '&xor ($AH[0],"edi");', # a ^= b, (b^c) in next round
+ '&add ($E,&off($h));', # T += h
+ '&ror ("ecx",13-2);',
+ '&and ($AH[1],$AH[0]);', # (b^c) &= (a^b)
+ '&xor ("ecx","esi");',
+ '&add ($E,&DWP(32+4*($i&15),"esp"));', # T += K[i]+X[i]
+ '&xor ($AH[1],"edi");', # h = Maj(a,b,c) = Ch(a^b,c,b)
+ '&ror ("ecx",2);', # Sigma0(a)
+
+ '&add ($AH[1],$E);', # h += T
+ '&add ($E,&off($d));', # d += T
+ '&add ($AH[1],"ecx");'. # h += Sigma0(a)
+
+ '@AH = reverse(@AH); $i++;' # rotate(a,h)
+ );
+}
+
+ for ($i=0,$j=0; $j<4; $j++) {
+ &SSSE3_00_47($j,\&body_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &cmp (&DWP(16*$j,$K256),0x00010203);
+ &jne (&label("ssse3_00_47"));
+
+ for ($i=0; $i<16; ) {
+ foreach(body_00_15()) { eval; }
+ }
+
+ &mov ("esi",&DWP(96,"esp")); #ctx
+ #&mov ($AH[0],&DWP(0,"esp"));
+ &xor ($AH[1],"edi"); #&mov ($AH[1],&DWP(4,"esp"));
+ #&mov ("edi", &DWP(8,"esp"));
+ &mov ("ecx",&DWP(12,"esp"));
+ &add ($AH[0],&DWP(0,"esi"));
+ &add ($AH[1],&DWP(4,"esi"));
+ &add ("edi",&DWP(8,"esi"));
+ &add ("ecx",&DWP(12,"esi"));
+ &mov (&DWP(0,"esi"),$AH[0]);
+ &mov (&DWP(4,"esi"),$AH[1]);
+ &mov (&DWP(8,"esi"),"edi");
+ &mov (&DWP(12,"esi"),"ecx");
+ #&mov (&DWP(0,"esp"),$AH[0]);
+ &mov (&DWP(4,"esp"),$AH[1]);
+ &xor ($AH[1],"edi"); # magic
+ &mov (&DWP(8,"esp"),"edi");
+ &mov (&DWP(12,"esp"),"ecx");
+ #&mov ($E,&DWP(16,"esp"));
+ &mov ("edi",&DWP(20,"esp"));
+ &mov ("ecx",&DWP(24,"esp"));
+ &add ($E,&DWP(16,"esi"));
+ &add ("edi",&DWP(20,"esi"));
+ &add ("ecx",&DWP(24,"esi"));
+ &mov (&DWP(16,"esi"),$E);
+ &mov (&DWP(20,"esi"),"edi");
+ &mov (&DWP(20,"esp"),"edi");
+ &mov ("edi",&DWP(28,"esp"));
+ &mov (&DWP(24,"esi"),"ecx");
+ #&mov (&DWP(16,"esp"),$E);
+ &add ("edi",&DWP(28,"esi"));
+ &mov (&DWP(24,"esp"),"ecx");
+ &mov (&DWP(28,"esi"),"edi");
+ &mov (&DWP(28,"esp"),"edi");
+ &mov ("edi",&DWP(96+4,"esp")); # inp
+
+ &movdqa ($t3,&QWP(64,$K256));
+ &sub ($K256,3*64); # rewind K
+ &cmp ("edi",&DWP(96+8,"esp")); # are we done yet?
+ &jb (&label("grand_ssse3"));
+
+ &mov ("esp",&DWP(96+12,"esp")); # restore sp
+&function_end_A();
+ if ($avx) {
+&set_label("AVX",32);
+ if ($avx>1) {
+ &mov ("edx",&DWP(8,"edx"));
+ &and ("edx",1<<8|1<<3); # check for BMI2+BMI1
+ &cmp ("edx",1<<8|1<<3);
+ &je (&label("AVX_BMI"));
+ }
+ &lea ("esp",&DWP(-96,"esp"));
+ &vzeroall ();
+ # copy ctx->h[0-7] to A,B,C,D,E,F,G,H on stack
+ &mov ($AH[0],&DWP(0,"esi"));
+ &mov ($AH[1],&DWP(4,"esi"));
+ &mov ("ecx",&DWP(8,"esi"));
+ &mov ("edi",&DWP(12,"esi"));
+ #&mov (&DWP(0,"esp"),$AH[0]);
+ &mov (&DWP(4,"esp"),$AH[1]);
+ &xor ($AH[1],"ecx"); # magic
+ &mov (&DWP(8,"esp"),"ecx");
+ &mov (&DWP(12,"esp"),"edi");
+ &mov ($E,&DWP(16,"esi"));
+ &mov ("edi",&DWP(20,"esi"));
+ &mov ("ecx",&DWP(24,"esi"));
+ &mov ("esi",&DWP(28,"esi"));
+ #&mov (&DWP(16,"esp"),$E);
+ &mov (&DWP(20,"esp"),"edi");
+ &mov ("edi",&DWP(96+4,"esp")); # inp
+ &mov (&DWP(24,"esp"),"ecx");
+ &mov (&DWP(28,"esp"),"esi");
+ &vmovdqa ($t3,&QWP(256,$K256));
+ &jmp (&label("grand_avx"));
+
+&set_label("grand_avx",32);
+ # load input, reverse byte order, add K256[0..15], save to stack
+ &vmovdqu (@X[0],&QWP(0,"edi"));
+ &vmovdqu (@X[1],&QWP(16,"edi"));
+ &vmovdqu (@X[2],&QWP(32,"edi"));
+ &vmovdqu (@X[3],&QWP(48,"edi"));
+ &add ("edi",64);
+ &vpshufb (@X[0],@X[0],$t3);
+ &mov (&DWP(96+4,"esp"),"edi");
+ &vpshufb (@X[1],@X[1],$t3);
+ &vpshufb (@X[2],@X[2],$t3);
+ &vpaddd ($t0,@X[0],&QWP(0,$K256));
+ &vpshufb (@X[3],@X[3],$t3);
+ &vpaddd ($t1,@X[1],&QWP(16,$K256));
+ &vpaddd ($t2,@X[2],&QWP(32,$K256));
+ &vpaddd ($t3,@X[3],&QWP(48,$K256));
+ &vmovdqa (&QWP(32+0,"esp"),$t0);
+ &vmovdqa (&QWP(32+16,"esp"),$t1);
+ &vmovdqa (&QWP(32+32,"esp"),$t2);
+ &vmovdqa (&QWP(32+48,"esp"),$t3);
+ &jmp (&label("avx_00_47"));
+
+&set_label("avx_00_47",16);
+ &add ($K256,64);
+
+sub Xupdate_AVX () {
+ (
+ '&vpalignr ($t0,@X[1],@X[0],4);', # X[1..4]
+ '&vpalignr ($t3,@X[3],@X[2],4);', # X[9..12]
+ '&vpsrld ($t2,$t0,7);',
+	'&vpaddd	(@X[0],@X[0],$t3);',	# X[0..3] += X[9..12]
+ '&vpsrld ($t3,$t0,3);',
+ '&vpslld ($t1,$t0,14);',
+ '&vpxor ($t0,$t3,$t2);',
+ '&vpshufd ($t3,@X[3],0b11111010)',# X[14..15]
+ '&vpsrld ($t2,$t2,18-7);',
+ '&vpxor ($t0,$t0,$t1);',
+ '&vpslld ($t1,$t1,25-14);',
+ '&vpxor ($t0,$t0,$t2);',
+ '&vpsrld ($t2,$t3,10);',
+ '&vpxor ($t0,$t0,$t1);', # sigma0(X[1..4])
+ '&vpsrlq ($t1,$t3,17);',
+ '&vpaddd (@X[0],@X[0],$t0);', # X[0..3] += sigma0(X[1..4])
+ '&vpxor ($t2,$t2,$t1);',
+ '&vpsrlq ($t3,$t3,19);',
+	'&vpxor	($t2,$t2,$t3);',	# sigma1(X[14..15])
+ '&vpshufd ($t3,$t2,0b10000100);',
+ '&vpsrldq ($t3,$t3,8);',
+ '&vpaddd (@X[0],@X[0],$t3);', # X[0..1] += sigma1(X[14..15])
+ '&vpshufd ($t3,@X[0],0b01010000)',# X[16..17]
+ '&vpsrld ($t2,$t3,10);',
+ '&vpsrlq ($t1,$t3,17);',
+ '&vpxor ($t2,$t2,$t1);',
+ '&vpsrlq ($t3,$t3,19);',
+	'&vpxor	($t2,$t2,$t3);',	# sigma1(X[16..17])
+ '&vpshufd ($t3,$t2,0b11101000);',
+ '&vpslldq ($t3,$t3,8);',
+ '&vpaddd (@X[0],@X[0],$t3);' # X[2..3] += sigma1(X[16..17])
+ );
+}
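+# Note: SSE/AVX (prior to AVX-512) has no 32-bit vector rotate, so every
+# ROTR above is synthesized from a shift-right/shift-left/xor triple
+# (vpsrld/vpslld/vpxor); where a register is reused the shift counts show
+# up as differences, e.g. 18-7 on top of an earlier shift by 7.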
+
+local *ror = sub { &shrd(@_[0],@_) };
+sub AVX_00_47 () {
+my $j = shift;
+my $body = shift;
+my @X = @_;
+my @insns = (&$body,&$body,&$body,&$body); # 120 instructions
+my $insn;
+
+ foreach (Xupdate_AVX()) { # 31 instructions
+ eval;
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval($insn = shift(@insns));
+ eval(shift(@insns)) if ($insn =~ /rorx/ && @insns[0] =~ /rorx/);
+ }
+ &vpaddd ($t2,@X[0],&QWP(16*$j,$K256));
+ foreach (@insns) { eval; } # remaining instructions
+ &vmovdqa (&QWP(32+16*$j,"esp"),$t2);
+}
+
+ for ($i=0,$j=0; $j<4; $j++) {
+ &AVX_00_47($j,\&body_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &cmp (&DWP(16*$j,$K256),0x00010203);
+ &jne (&label("avx_00_47"));
+
+ for ($i=0; $i<16; ) {
+ foreach(body_00_15()) { eval; }
+ }
+
+ &mov ("esi",&DWP(96,"esp")); #ctx
+ #&mov ($AH[0],&DWP(0,"esp"));
+ &xor ($AH[1],"edi"); #&mov ($AH[1],&DWP(4,"esp"));
+ #&mov ("edi", &DWP(8,"esp"));
+ &mov ("ecx",&DWP(12,"esp"));
+ &add ($AH[0],&DWP(0,"esi"));
+ &add ($AH[1],&DWP(4,"esi"));
+ &add ("edi",&DWP(8,"esi"));
+ &add ("ecx",&DWP(12,"esi"));
+ &mov (&DWP(0,"esi"),$AH[0]);
+ &mov (&DWP(4,"esi"),$AH[1]);
+ &mov (&DWP(8,"esi"),"edi");
+ &mov (&DWP(12,"esi"),"ecx");
+ #&mov (&DWP(0,"esp"),$AH[0]);
+ &mov (&DWP(4,"esp"),$AH[1]);
+ &xor ($AH[1],"edi"); # magic
+ &mov (&DWP(8,"esp"),"edi");
+ &mov (&DWP(12,"esp"),"ecx");
+ #&mov ($E,&DWP(16,"esp"));
+ &mov ("edi",&DWP(20,"esp"));
+ &mov ("ecx",&DWP(24,"esp"));
+ &add ($E,&DWP(16,"esi"));
+ &add ("edi",&DWP(20,"esi"));
+ &add ("ecx",&DWP(24,"esi"));
+ &mov (&DWP(16,"esi"),$E);
+ &mov (&DWP(20,"esi"),"edi");
+ &mov (&DWP(20,"esp"),"edi");
+ &mov ("edi",&DWP(28,"esp"));
+ &mov (&DWP(24,"esi"),"ecx");
+ #&mov (&DWP(16,"esp"),$E);
+ &add ("edi",&DWP(28,"esi"));
+ &mov (&DWP(24,"esp"),"ecx");
+ &mov (&DWP(28,"esi"),"edi");
+ &mov (&DWP(28,"esp"),"edi");
+ &mov ("edi",&DWP(96+4,"esp")); # inp
+
+ &vmovdqa ($t3,&QWP(64,$K256));
+ &sub ($K256,3*64); # rewind K
+ &cmp ("edi",&DWP(96+8,"esp")); # are we done yet?
+ &jb (&label("grand_avx"));
+
+ &mov ("esp",&DWP(96+12,"esp")); # restore sp
+ &vzeroall ();
+&function_end_A();
+ if ($avx>1) {
+sub bodyx_00_15 () { # +10%
+ (
+ '&rorx ("ecx",$E,6)',
+ '&rorx ("esi",$E,11)',
+ '&mov (&off($e),$E)', # save $E, modulo-scheduled
+ '&rorx ("edi",$E,25)',
+ '&xor ("ecx","esi")',
+ '&andn ("esi",$E,&off($g))',
+ '&xor ("ecx","edi")', # Sigma1(e)
+ '&and ($E,&off($f))',
+ '&mov (&off($a),$AH[0]);', # save $A, modulo-scheduled
+ '&or ($E,"esi")', # T = Ch(e,f,g)
+
+ '&rorx ("edi",$AH[0],2)',
+ '&rorx ("esi",$AH[0],13)',
+ '&lea ($E,&DWP(0,$E,"ecx"))', # T += Sigma1(e)
+ '&rorx ("ecx",$AH[0],22)',
+ '&xor ("esi","edi")',
+ '&mov ("edi",&off($b))',
+ '&xor ("ecx","esi")', # Sigma0(a)
+
+ '&xor ($AH[0],"edi")', # a ^= b, (b^c) in next round
+ '&add ($E,&off($h))', # T += h
+ '&and ($AH[1],$AH[0])', # (b^c) &= (a^b)
+ '&add ($E,&DWP(32+4*($i&15),"esp"))', # T += K[i]+X[i]
+ '&xor ($AH[1],"edi")', # h = Maj(a,b,c) = Ch(a^b,c,b)
+
+ '&add ("ecx",$E)', # h += T
+ '&add ($E,&off($d))', # d += T
+ '&lea ($AH[1],&DWP(0,$AH[1],"ecx"));'. # h += Sigma0(a)
+
+ '@AH = reverse(@AH); $i++;' # rotate(a,h)
+ );
+}
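+# The BMI path relies on rorx (a rotate with a separate destination that
+# leaves EFLAGS untouched) and andn, so Ch(e,f,g) is assembled as
+# (e&f)|(~e&g), and three-operand additions are folded into lea where the
+# flags are not needed.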
+
+&set_label("AVX_BMI",32);
+ &lea ("esp",&DWP(-96,"esp"));
+ &vzeroall ();
+ # copy ctx->h[0-7] to A,B,C,D,E,F,G,H on stack
+ &mov ($AH[0],&DWP(0,"esi"));
+ &mov ($AH[1],&DWP(4,"esi"));
+ &mov ("ecx",&DWP(8,"esi"));
+ &mov ("edi",&DWP(12,"esi"));
+ #&mov (&DWP(0,"esp"),$AH[0]);
+ &mov (&DWP(4,"esp"),$AH[1]);
+ &xor ($AH[1],"ecx"); # magic
+ &mov (&DWP(8,"esp"),"ecx");
+ &mov (&DWP(12,"esp"),"edi");
+ &mov ($E,&DWP(16,"esi"));
+ &mov ("edi",&DWP(20,"esi"));
+ &mov ("ecx",&DWP(24,"esi"));
+ &mov ("esi",&DWP(28,"esi"));
+ #&mov (&DWP(16,"esp"),$E);
+ &mov (&DWP(20,"esp"),"edi");
+ &mov ("edi",&DWP(96+4,"esp")); # inp
+ &mov (&DWP(24,"esp"),"ecx");
+ &mov (&DWP(28,"esp"),"esi");
+ &vmovdqa ($t3,&QWP(256,$K256));
+ &jmp (&label("grand_avx_bmi"));
+
+&set_label("grand_avx_bmi",32);
+ # load input, reverse byte order, add K256[0..15], save to stack
+ &vmovdqu (@X[0],&QWP(0,"edi"));
+ &vmovdqu (@X[1],&QWP(16,"edi"));
+ &vmovdqu (@X[2],&QWP(32,"edi"));
+ &vmovdqu (@X[3],&QWP(48,"edi"));
+ &add ("edi",64);
+ &vpshufb (@X[0],@X[0],$t3);
+ &mov (&DWP(96+4,"esp"),"edi");
+ &vpshufb (@X[1],@X[1],$t3);
+ &vpshufb (@X[2],@X[2],$t3);
+ &vpaddd ($t0,@X[0],&QWP(0,$K256));
+ &vpshufb (@X[3],@X[3],$t3);
+ &vpaddd ($t1,@X[1],&QWP(16,$K256));
+ &vpaddd ($t2,@X[2],&QWP(32,$K256));
+ &vpaddd ($t3,@X[3],&QWP(48,$K256));
+ &vmovdqa (&QWP(32+0,"esp"),$t0);
+ &vmovdqa (&QWP(32+16,"esp"),$t1);
+ &vmovdqa (&QWP(32+32,"esp"),$t2);
+ &vmovdqa (&QWP(32+48,"esp"),$t3);
+ &jmp (&label("avx_bmi_00_47"));
+
+&set_label("avx_bmi_00_47",16);
+ &add ($K256,64);
+
+ for ($i=0,$j=0; $j<4; $j++) {
+ &AVX_00_47($j,\&bodyx_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &cmp (&DWP(16*$j,$K256),0x00010203);
+ &jne (&label("avx_bmi_00_47"));
+
+ for ($i=0; $i<16; ) {
+ foreach(bodyx_00_15()) { eval; }
+ }
+
+ &mov ("esi",&DWP(96,"esp")); #ctx
+ #&mov ($AH[0],&DWP(0,"esp"));
+ &xor ($AH[1],"edi"); #&mov ($AH[1],&DWP(4,"esp"));
+ #&mov ("edi", &DWP(8,"esp"));
+ &mov ("ecx",&DWP(12,"esp"));
+ &add ($AH[0],&DWP(0,"esi"));
+ &add ($AH[1],&DWP(4,"esi"));
+ &add ("edi",&DWP(8,"esi"));
+ &add ("ecx",&DWP(12,"esi"));
+ &mov (&DWP(0,"esi"),$AH[0]);
+ &mov (&DWP(4,"esi"),$AH[1]);
+ &mov (&DWP(8,"esi"),"edi");
+ &mov (&DWP(12,"esi"),"ecx");
+ #&mov (&DWP(0,"esp"),$AH[0]);
+ &mov (&DWP(4,"esp"),$AH[1]);
+ &xor ($AH[1],"edi"); # magic
+ &mov (&DWP(8,"esp"),"edi");
+ &mov (&DWP(12,"esp"),"ecx");
+ #&mov ($E,&DWP(16,"esp"));
+ &mov ("edi",&DWP(20,"esp"));
+ &mov ("ecx",&DWP(24,"esp"));
+ &add ($E,&DWP(16,"esi"));
+ &add ("edi",&DWP(20,"esi"));
+ &add ("ecx",&DWP(24,"esi"));
+ &mov (&DWP(16,"esi"),$E);
+ &mov (&DWP(20,"esi"),"edi");
+ &mov (&DWP(20,"esp"),"edi");
+ &mov ("edi",&DWP(28,"esp"));
+ &mov (&DWP(24,"esi"),"ecx");
+ #&mov (&DWP(16,"esp"),$E);
+ &add ("edi",&DWP(28,"esi"));
+ &mov (&DWP(24,"esp"),"ecx");
+ &mov (&DWP(28,"esi"),"edi");
+ &mov (&DWP(28,"esp"),"edi");
+ &mov ("edi",&DWP(96+4,"esp")); # inp
+
+ &vmovdqa ($t3,&QWP(64,$K256));
+ &sub ($K256,3*64); # rewind K
+ &cmp ("edi",&DWP(96+8,"esp")); # are we done yet?
+ &jb (&label("grand_avx_bmi"));
+
+ &mov ("esp",&DWP(96+12,"esp")); # restore sp
+ &vzeroall ();
+&function_end_A();
+ }
+ }
+ }}}
+&function_end_B("sha256_block_data_order");
+
+&asm_finish();
diff --git a/devel/perlasm/sha512-ssse3-x86.pl b/devel/perlasm/sha512-ssse3-x86.pl
new file mode 100644
index 0000000000..9fc792964f
--- /dev/null
+++ b/devel/perlasm/sha512-ssse3-x86.pl
@@ -0,0 +1,910 @@
+#!/usr/bin/env perl
+#
+# ====================================================================
+# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
+# project. The module is, however, dual licensed under OpenSSL and
+# CRYPTOGAMS licenses depending on where you obtain it. For further
+# details see http://www.openssl.org/~appro/cryptogams/.
+# ====================================================================
+#
+# SHA512 block transform for x86. September 2007.
+#
+# May 2013.
+#
+# Add SSSE3 code path, 20-25% improvement [over original SSE2 code].
+#
+# Performance in clock cycles per processed byte (less is better):
+#
+# gcc icc x86 asm SIMD(*) x86_64(**)
+# Pentium 100 97 61 - -
+# PIII 75 77 56 - -
+# P4 116 95 82 34.6 30.8
+# AMD K8 54 55 36 20.7 9.57
+# Core2 66 57 40 15.9 9.97
+# Westmere 70 - 38 12.2 9.58
+# Sandy Bridge 58 - 35 11.9 11.2
+# Ivy Bridge 50 - 33 11.5 8.17
+# Haswell 46 - 29 11.3 7.66
+# Bulldozer 121 - 50 14.0 13.5
+# VIA Nano 91 - 52 33 14.7
+# Atom 126 - 68 48(***) 14.7
+#
+# (*)	whichever best applies.
+# (**)	x86_64 assembler performance is presented for reference
+#	purposes, the results are for integer-only code.
+# (***)	paddq is incredibly slow on Atom.
+#
+# The IALU code-path is optimized for older Pentiums. On vanilla Pentium
+# the performance improvement over compiler-generated code reaches ~60%,
+# while on PIII it's ~35%. On newer µ-archs the improvement varies from 15%
+# to 50%, but it's less important, as they are expected to execute the SSE2
+# code-path, which is commonly ~2-3x faster [than compiler-generated
+# code]. The SSE2 code-path is as fast as the original sha512-sse2.pl, even
+# though it does not use 128-bit operations. The latter means that an
+# SSE2-aware kernel is no longer required to execute the code. Another
+# difference is that the new code optimizes the amount of writes, but at
+# the cost of a data cache "footprint" increased by 1/2KB.
+
+$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
+push(@INC,"${dir}","${dir}../../perlasm");
+require "x86asm.pl";
+
+&asm_init($ARGV[0],"sha512-586.pl",$ARGV[$#ARGV] eq "386");
+
+$sse2=0;
+for (@ARGV) { $sse2=1 if (/-DOPENSSL_IA32_SSE2/); }
+
+&external_label("OPENSSL_ia32cap_P") if ($sse2);
+
+$Tlo=&DWP(0,"esp"); $Thi=&DWP(4,"esp");
+$Alo=&DWP(8,"esp"); $Ahi=&DWP(8+4,"esp");
+$Blo=&DWP(16,"esp"); $Bhi=&DWP(16+4,"esp");
+$Clo=&DWP(24,"esp"); $Chi=&DWP(24+4,"esp");
+$Dlo=&DWP(32,"esp"); $Dhi=&DWP(32+4,"esp");
+$Elo=&DWP(40,"esp"); $Ehi=&DWP(40+4,"esp");
+$Flo=&DWP(48,"esp"); $Fhi=&DWP(48+4,"esp");
+$Glo=&DWP(56,"esp"); $Ghi=&DWP(56+4,"esp");
+$Hlo=&DWP(64,"esp"); $Hhi=&DWP(64+4,"esp");
+$K512="ebp";
+
+$Asse2=&QWP(0,"esp");
+$Bsse2=&QWP(8,"esp");
+$Csse2=&QWP(16,"esp");
+$Dsse2=&QWP(24,"esp");
+$Esse2=&QWP(32,"esp");
+$Fsse2=&QWP(40,"esp");
+$Gsse2=&QWP(48,"esp");
+$Hsse2=&QWP(56,"esp");
+
+$A="mm0"; # B-D and
+$E="mm4"; # F-H are commonly loaded to respectively mm1-mm3 and
+ # mm5-mm7, but it's done on on-demand basis...
+$BxC="mm2"; # ... except for B^C
+
+sub BODY_00_15_sse2 {
+ my $phase=shift;
+
+ #&movq ("mm5",$Fsse2); # load f
+ #&movq ("mm6",$Gsse2); # load g
+
+ &movq ("mm1",$E); # %mm1 is sliding right
+ &pxor ("mm5","mm6"); # f^=g
+ &psrlq ("mm1",14);
+ &movq ($Esse2,$E); # modulo-scheduled save e
+ &pand ("mm5",$E); # f&=e
+ &psllq ($E,23); # $E is sliding left
+ &movq ($A,"mm3") if ($phase<2);
+ &movq (&QWP(8*9,"esp"),"mm7"); # save X[i]
+ &movq ("mm3","mm1"); # %mm3 is T1
+ &psrlq ("mm1",4);
+ &pxor ("mm5","mm6"); # Ch(e,f,g)
+ &pxor ("mm3",$E);
+ &psllq ($E,23);
+ &pxor ("mm3","mm1");
+ &movq ($Asse2,$A); # modulo-scheduled save a
+ &paddq ("mm7","mm5"); # X[i]+=Ch(e,f,g)
+ &pxor ("mm3",$E);
+ &psrlq ("mm1",23);
+ &paddq ("mm7",$Hsse2); # X[i]+=h
+ &pxor ("mm3","mm1");
+ &psllq ($E,4);
+ &paddq ("mm7",QWP(0,$K512)); # X[i]+=K512[i]
+ &pxor ("mm3",$E); # T1=Sigma1_512(e)
+
+ &movq ($E,$Dsse2); # e = load d, e in next round
+ &paddq ("mm3","mm7"); # T1+=X[i]
+ &movq ("mm5",$A); # %mm5 is sliding right
+ &psrlq ("mm5",28);
+ &paddq ($E,"mm3"); # d += T1
+ &movq ("mm6",$A); # %mm6 is sliding left
+ &movq ("mm7","mm5");
+ &psllq ("mm6",25);
+ &movq ("mm1",$Bsse2); # load b
+ &psrlq ("mm5",6);
+ &pxor ("mm7","mm6");
+ &sub ("esp",8);
+ &psllq ("mm6",5);
+ &pxor ("mm7","mm5");
+ &pxor ($A,"mm1"); # a^b, b^c in next round
+ &psrlq ("mm5",5);
+ &pxor ("mm7","mm6");
+ &pand ($BxC,$A); # (b^c)&(a^b)
+ &psllq ("mm6",6);
+ &pxor ("mm7","mm5");
+ &pxor ($BxC,"mm1"); # [h=]Maj(a,b,c)
+ &pxor ("mm6","mm7"); # Sigma0_512(a)
+ &movq ("mm7",&QWP(8*(9+16-1),"esp")) if ($phase!=0); # pre-fetch
+ &movq ("mm5",$Fsse2) if ($phase==0); # load f
+
+ if ($phase>1) {
+ &paddq ($BxC,"mm6"); # h+=Sigma0(a)
+ &add ($K512,8);
+ #&paddq ($BxC,"mm3"); # h+=T1
+
+ ($A,$BxC) = ($BxC,$A); # rotate registers
+ } else {
+ &paddq ("mm3",$BxC); # T1+=Maj(a,b,c)
+ &movq ($BxC,$A);
+ &add ($K512,8);
+ &paddq ("mm3","mm6"); # T1+=Sigma0(a)
+ &movq ("mm6",$Gsse2) if ($phase==0); # load g
+ #&movq ($A,"mm3"); # h=T1
+ }
+}
+
+sub BODY_00_15_x86 {
+ #define Sigma1(x) (ROTR((x),14) ^ ROTR((x),18) ^ ROTR((x),41))
+ # LO lo>>14^hi<<18 ^ lo>>18^hi<<14 ^ hi>>9^lo<<23
+ # HI hi>>14^lo<<18 ^ hi>>18^lo<<14 ^ lo>>9^hi<<23
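+ # (a 64-bit ROTR by r<32 splits into LO = lo>>r ^ hi<<(32-r) and
+ # HI = hi>>r ^ lo<<(32-r); for r>=32, e.g. 41, the two halves swap
+ # roles, which is why the last term above reads hi>>9 ^ lo<<23)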
+ &mov ("ecx",$Elo);
+ &mov ("edx",$Ehi);
+ &mov ("esi","ecx");
+
+ &shr ("ecx",9); # lo>>9
+ &mov ("edi","edx");
+ &shr ("edx",9); # hi>>9
+ &mov ("ebx","ecx");
+ &shl ("esi",14); # lo<<14
+ &mov ("eax","edx");
+ &shl ("edi",14); # hi<<14
+ &xor ("ebx","esi");
+
+ &shr ("ecx",14-9); # lo>>14
+ &xor ("eax","edi");
+ &shr ("edx",14-9); # hi>>14
+ &xor ("eax","ecx");
+ &shl ("esi",18-14); # lo<<18
+ &xor ("ebx","edx");
+ &shl ("edi",18-14); # hi<<18
+ &xor ("ebx","esi");
+
+ &shr ("ecx",18-14); # lo>>18
+ &xor ("eax","edi");
+ &shr ("edx",18-14); # hi>>18
+ &xor ("eax","ecx");
+ &shl ("esi",23-18); # lo<<23
+ &xor ("ebx","edx");
+ &shl ("edi",23-18); # hi<<23
+ &xor ("eax","esi");
+ &xor ("ebx","edi"); # T1 = Sigma1(e)
+
+ &mov ("ecx",$Flo);
+ &mov ("edx",$Fhi);
+ &mov ("esi",$Glo);
+ &mov ("edi",$Ghi);
+ &add ("eax",$Hlo);
+ &adc ("ebx",$Hhi); # T1 += h
+ &xor ("ecx","esi");
+ &xor ("edx","edi");
+ &and ("ecx",$Elo);
+ &and ("edx",$Ehi);
+ &add ("eax",&DWP(8*(9+15)+0,"esp"));
+ &adc ("ebx",&DWP(8*(9+15)+4,"esp")); # T1 += X[0]
+ &xor ("ecx","esi");
+ &xor ("edx","edi"); # Ch(e,f,g) = (f^g)&e)^g
+
+ &mov ("esi",&DWP(0,$K512));
+ &mov ("edi",&DWP(4,$K512)); # K[i]
+ &add ("eax","ecx");
+ &adc ("ebx","edx"); # T1 += Ch(e,f,g)
+ &mov ("ecx",$Dlo);
+ &mov ("edx",$Dhi);
+ &add ("eax","esi");
+ &adc ("ebx","edi"); # T1 += K[i]
+ &mov ($Tlo,"eax");
+ &mov ($Thi,"ebx"); # put T1 away
+ &add ("eax","ecx");
+ &adc ("ebx","edx"); # d += T1
+
+ #define Sigma0(x) (ROTR((x),28) ^ ROTR((x),34) ^ ROTR((x),39))
+ # LO lo>>28^hi<<4 ^ hi>>2^lo<<30 ^ hi>>7^lo<<25
+ # HI hi>>28^lo<<4 ^ lo>>2^hi<<30 ^ lo>>7^hi<<25
+ &mov ("ecx",$Alo);
+ &mov ("edx",$Ahi);
+ &mov ($Dlo,"eax");
+ &mov ($Dhi,"ebx");
+ &mov ("esi","ecx");
+
+ &shr ("ecx",2); # lo>>2
+ &mov ("edi","edx");
+ &shr ("edx",2); # hi>>2
+ &mov ("ebx","ecx");
+ &shl ("esi",4); # lo<<4
+ &mov ("eax","edx");
+ &shl ("edi",4); # hi<<4
+ &xor ("ebx","esi");
+
+ &shr ("ecx",7-2); # lo>>7
+ &xor ("eax","edi");
+ &shr ("edx",7-2); # hi>>7
+ &xor ("ebx","ecx");
+ &shl ("esi",25-4); # lo<<25
+ &xor ("eax","edx");
+ &shl ("edi",25-4); # hi<<25
+ &xor ("eax","esi");
+
+ &shr ("ecx",28-7); # lo>>28
+ &xor ("ebx","edi");
+ &shr ("edx",28-7); # hi>>28
+ &xor ("eax","ecx");
+ &shl ("esi",30-25); # lo<<30
+ &xor ("ebx","edx");
+ &shl ("edi",30-25); # hi<<30
+ &xor ("eax","esi");
+ &xor ("ebx","edi"); # Sigma0(a)
+
+ &mov ("ecx",$Alo);
+ &mov ("edx",$Ahi);
+ &mov ("esi",$Blo);
+ &mov ("edi",$Bhi);
+ &add ("eax",$Tlo);
+ &adc ("ebx",$Thi); # T1 = Sigma0(a)+T1
+ &or ("ecx","esi");
+ &or ("edx","edi");
+ &and ("ecx",$Clo);
+ &and ("edx",$Chi);
+ &and ("esi",$Alo);
+ &and ("edi",$Ahi);
+ &or ("ecx","esi");
+ &or ("edx","edi"); # Maj(a,b,c) = ((a|b)&c)|(a&b)
+
+ &add ("eax","ecx");
+ &adc ("ebx","edx"); # T1 += Maj(a,b,c)
+ &mov ($Tlo,"eax");
+ &mov ($Thi,"ebx");
+
+ &mov (&LB("edx"),&BP(0,$K512)); # pre-fetch LSB of *K
+ &sub ("esp",8);
+ &lea ($K512,&DWP(8,$K512)); # K++
+}
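+# Note: in this pure-x86 fallback every 64-bit quantity is kept as a lo/hi
+# dword pair, so 64-bit additions become add/adc chains, and Maj(a,b,c) is
+# computed in its classical ((a|b)&c)|(a&b) form rather than via the b^c
+# trick used by the SHA-256 module.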
+
+
+&function_begin("sha512_block_data_order");
+ &mov ("esi",wparam(0)); # ctx
+ &mov ("edi",wparam(1)); # inp
+ &mov ("eax",wparam(2)); # num
+ &mov ("ebx","esp"); # saved sp
+
+ &call (&label("pic_point")); # make it PIC!
+&set_label("pic_point");
+ &blindpop($K512);
+ &lea ($K512,&DWP(&label("K512")."-".&label("pic_point"),$K512));
+
+ &sub ("esp",16);
+ &and ("esp",-64);
+
+ &shl ("eax",7);
+ &add ("eax","edi");
+ &mov (&DWP(0,"esp"),"esi"); # ctx
+ &mov (&DWP(4,"esp"),"edi"); # inp
+ &mov (&DWP(8,"esp"),"eax"); # inp+num*128
+ &mov (&DWP(12,"esp"),"ebx"); # saved sp
+
+if ($sse2) {
+ &picmeup("edx","OPENSSL_ia32cap_P",$K512,&label("K512"));
+ &mov ("ecx",&DWP(0,"edx"));
+ &test ("ecx",1<<26);
+ &jz (&label("loop_x86"));
+
+ &mov ("edx",&DWP(4,"edx"));
+
+ # load ctx->h[0-7]
+ &movq ($A,&QWP(0,"esi"));
+ &and ("ecx",1<<24); # XMM registers availability
+ &movq ("mm1",&QWP(8,"esi"));
+ &and ("edx",1<<9); # SSSE3 bit
+ &movq ($BxC,&QWP(16,"esi"));
+ &or ("ecx","edx");
+ &movq ("mm3",&QWP(24,"esi"));
+ &movq ($E,&QWP(32,"esi"));
+ &movq ("mm5",&QWP(40,"esi"));
+ &movq ("mm6",&QWP(48,"esi"));
+ &movq ("mm7",&QWP(56,"esi"));
+ &cmp ("ecx",1<<24|1<<9);
+ &je (&label("SSSE3"));
+ &sub ("esp",8*10);
+ &jmp (&label("loop_sse2"));
+
+&set_label("loop_sse2",16);
+ #&movq ($Asse2,$A);
+ &movq ($Bsse2,"mm1");
+ &movq ($Csse2,$BxC);
+ &movq ($Dsse2,"mm3");
+ #&movq ($Esse2,$E);
+ &movq ($Fsse2,"mm5");
+ &movq ($Gsse2,"mm6");
+ &pxor ($BxC,"mm1"); # magic
+ &movq ($Hsse2,"mm7");
+ &movq ("mm3",$A); # magic
+
+ &mov ("eax",&DWP(0,"edi"));
+ &mov ("ebx",&DWP(4,"edi"));
+ &add ("edi",8);
+ &mov ("edx",15); # counter
+ &bswap ("eax");
+ &bswap ("ebx");
+ &jmp (&label("00_14_sse2"));
+
+&set_label("00_14_sse2",16);
+ &movd ("mm1","eax");
+ &mov ("eax",&DWP(0,"edi"));
+ &movd ("mm7","ebx");
+ &mov ("ebx",&DWP(4,"edi"));
+ &add ("edi",8);
+ &bswap ("eax");
+ &bswap ("ebx");
+ &punpckldq("mm7","mm1");
+
+ &BODY_00_15_sse2();
+
+ &dec ("edx");
+ &jnz (&label("00_14_sse2"));
+
+ &movd ("mm1","eax");
+ &movd ("mm7","ebx");
+ &punpckldq("mm7","mm1");
+
+ &BODY_00_15_sse2(1);
+
+ &pxor ($A,$A); # A is in %mm3
+ &mov ("edx",32); # counter
+ &jmp (&label("16_79_sse2"));
+
+&set_label("16_79_sse2",16);
+ for ($j=0;$j<2;$j++) { # 2x unroll
+ #&movq ("mm7",&QWP(8*(9+16-1),"esp")); # prefetched in BODY_00_15
+ &movq ("mm5",&QWP(8*(9+16-14),"esp"));
+ &movq ("mm1","mm7");
+ &psrlq ("mm7",1);
+ &movq ("mm6","mm5");
+ &psrlq ("mm5",6);
+ &psllq ("mm1",56);
+ &paddq ($A,"mm3"); # from BODY_00_15
+ &movq ("mm3","mm7");
+ &psrlq ("mm7",7-1);
+ &pxor ("mm3","mm1");
+ &psllq ("mm1",63-56);
+ &pxor ("mm3","mm7");
+ &psrlq ("mm7",8-7);
+ &pxor ("mm3","mm1");
+ &movq ("mm1","mm5");
+ &psrlq ("mm5",19-6);
+ &pxor ("mm7","mm3"); # sigma0
+
+ &psllq ("mm6",3);
+ &pxor ("mm1","mm5");
+ &paddq ("mm7",&QWP(8*(9+16),"esp"));
+ &pxor ("mm1","mm6");
+ &psrlq ("mm5",61-19);
+ &paddq ("mm7",&QWP(8*(9+16-9),"esp"));
+ &pxor ("mm1","mm5");
+ &psllq ("mm6",45-3);
+ &movq ("mm5",$Fsse2); # load f
+ &pxor ("mm1","mm6"); # sigma1
+ &movq ("mm6",$Gsse2); # load g
+
+ &paddq ("mm7","mm1"); # X[i]
+ #&movq (&QWP(8*9,"esp"),"mm7"); # moved to BODY_00_15
+
+ &BODY_00_15_sse2(2);
+ }
+ &dec ("edx");
+ &jnz (&label("16_79_sse2"));
+
+ #&movq ($A,$Asse2);
+ &paddq ($A,"mm3"); # from BODY_00_15
+ &movq ("mm1",$Bsse2);
+ #&movq ($BxC,$Csse2);
+ &movq ("mm3",$Dsse2);
+ #&movq ($E,$Esse2);
+ &movq ("mm5",$Fsse2);
+ &movq ("mm6",$Gsse2);
+ &movq ("mm7",$Hsse2);
+
+ &pxor ($BxC,"mm1"); # de-magic
+ &paddq ($A,&QWP(0,"esi"));
+ &paddq ("mm1",&QWP(8,"esi"));
+ &paddq ($BxC,&QWP(16,"esi"));
+ &paddq ("mm3",&QWP(24,"esi"));
+ &paddq ($E,&QWP(32,"esi"));
+ &paddq ("mm5",&QWP(40,"esi"));
+ &paddq ("mm6",&QWP(48,"esi"));
+ &paddq ("mm7",&QWP(56,"esi"));
+
+ &mov ("eax",8*80);
+ &movq (&QWP(0,"esi"),$A);
+ &movq (&QWP(8,"esi"),"mm1");
+ &movq (&QWP(16,"esi"),$BxC);
+ &movq (&QWP(24,"esi"),"mm3");
+ &movq (&QWP(32,"esi"),$E);
+ &movq (&QWP(40,"esi"),"mm5");
+ &movq (&QWP(48,"esi"),"mm6");
+ &movq (&QWP(56,"esi"),"mm7");
+
+ &lea ("esp",&DWP(0,"esp","eax")); # destroy frame
+ &sub ($K512,"eax"); # rewind K
+
+ &cmp ("edi",&DWP(8*10+8,"esp")); # are we done yet?
+ &jb (&label("loop_sse2"));
+
+ &mov ("esp",&DWP(8*10+12,"esp")); # restore sp
+ &emms ();
+&function_end_A();
+
+&set_label("SSSE3",32);
+{ my ($cnt,$frame)=("ecx","edx");
+ my @X=map("xmm$_",(0..7));
+ my $j;
+ my $i=0;
+
+ &lea ($frame,&DWP(-64,"esp"));
+ &sub ("esp",256);
+
+ # fixed stack frame layout
+ #
+ # +0 A B C D E F G H # backing store
+ # +64 X[0]+K[i] .. X[15]+K[i] # XMM->MM xfer area
+ # +192 # XMM off-load ring buffer
+ # +256 # saved parameters
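+ # (note: $frame ends up 192 bytes above the adjusted esp, since the lea
+ # takes the incoming esp-64 and the following sub drops esp by 256; the
+ # xfer area is therefore addressed as $frame-128 and the off-load ring
+ # buffer as $frame+0..48)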
+
+ &movdqa (@X[1],&QWP(80*8,$K512)); # byte swap mask
+ &movdqu (@X[0],&QWP(0,"edi"));
+ &pshufb (@X[0],@X[1]);
+ for ($j=0;$j<8;$j++) {
+ &movdqa (&QWP(16*(($j-1)%4),$frame),@X[3]) if ($j>4); # off-load
+ &movdqa (@X[3],&QWP(16*($j%8),$K512));
+ &movdqa (@X[2],@X[1]) if ($j<7); # perpetuate byte swap mask
+ &movdqu (@X[1],&QWP(16*($j+1),"edi")) if ($j<7); # next input
+ &movdqa (@X[1],&QWP(16*(($j+1)%4),$frame)) if ($j==7);# restore @X[0]
+ &paddq (@X[3],@X[0]);
+ &pshufb (@X[1],@X[2]) if ($j<7);
+ &movdqa (&QWP(16*($j%8)-128,$frame),@X[3]); # xfer X[i]+K[i]
+
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ #&jmp (&label("loop_ssse3"));
+ &nop ();
+
+&set_label("loop_ssse3",32);
+ &movdqa (@X[2],&QWP(16*(($j+1)%4),$frame)); # pre-restore @X[1]
+ &movdqa (&QWP(16*(($j-1)%4),$frame),@X[3]); # off-load @X[3]
+ &lea ($K512,&DWP(16*8,$K512));
+
+ #&movq ($Asse2,$A); # off-load A-H
+ &movq ($Bsse2,"mm1");
+ &mov ("ebx","edi");
+ &movq ($Csse2,$BxC);
+ &lea ("edi",&DWP(128,"edi")); # advance input
+ &movq ($Dsse2,"mm3");
+ &cmp ("edi","eax");
+ #&movq ($Esse2,$E);
+ &movq ($Fsse2,"mm5");
+ &cmovb ("ebx","edi");
+ &movq ($Gsse2,"mm6");
+ &mov ("ecx",4); # loop counter
+ &pxor ($BxC,"mm1"); # magic
+ &movq ($Hsse2,"mm7");
+ &pxor ("mm3","mm3"); # magic
+
+ &jmp (&label("00_47_ssse3"));
+
+sub BODY_00_15_ssse3 { # "phase-less" copy of BODY_00_15_sse2
+ (
+ '&movq ("mm1",$E)', # %mm1 is sliding right
+ '&movq ("mm7",&QWP(((-8*$i)%128)-128,$frame))',# X[i]+K[i]
+ '&pxor ("mm5","mm6")', # f^=g
+ '&psrlq ("mm1",14)',
+ '&movq (&QWP(8*($i+4)%64,"esp"),$E)', # modulo-scheduled save e
+ '&pand ("mm5",$E)', # f&=e
+ '&psllq ($E,23)', # $E is sliding left
+ '&paddq ($A,"mm3")', # [h+=Maj(a,b,c)]
+ '&movq ("mm3","mm1")', # %mm3 is T1
+ '&psrlq("mm1",4)',
+ '&pxor ("mm5","mm6")', # Ch(e,f,g)
+ '&pxor ("mm3",$E)',
+ '&psllq($E,23)',
+ '&pxor ("mm3","mm1")',
+ '&movq (&QWP(8*$i%64,"esp"),$A)', # modulo-scheduled save a
+ '&paddq("mm7","mm5")', # X[i]+=Ch(e,f,g)
+ '&pxor ("mm3",$E)',
+ '&psrlq("mm1",23)',
+ '&paddq("mm7",&QWP(8*($i+7)%64,"esp"))', # X[i]+=h
+ '&pxor ("mm3","mm1")',
+ '&psllq($E,4)',
+ '&pxor ("mm3",$E)', # T1=Sigma1_512(e)
+
+ '&movq ($E,&QWP(8*($i+3)%64,"esp"))', # e = load d, e in next round
+ '&paddq ("mm3","mm7")', # T1+=X[i]
+ '&movq ("mm5",$A)', # %mm5 is sliding right
+ '&psrlq("mm5",28)',
+ '&paddq ($E,"mm3")', # d += T1
+ '&movq ("mm6",$A)', # %mm6 is sliding left
+ '&movq ("mm7","mm5")',
+ '&psllq("mm6",25)',
+ '&movq ("mm1",&QWP(8*($i+1)%64,"esp"))', # load b
+ '&psrlq("mm5",6)',
+ '&pxor ("mm7","mm6")',
+ '&psllq("mm6",5)',
+ '&pxor ("mm7","mm5")',
+ '&pxor ($A,"mm1")', # a^b, b^c in next round
+ '&psrlq("mm5",5)',
+ '&pxor ("mm7","mm6")',
+ '&pand ($BxC,$A)', # (b^c)&(a^b)
+ '&psllq("mm6",6)',
+ '&pxor ("mm7","mm5")',
+ '&pxor ($BxC,"mm1")', # [h=]Maj(a,b,c)
+ '&pxor ("mm6","mm7")', # Sigma0_512(a)
+ '&movq ("mm5",&QWP(8*($i+5-1)%64,"esp"))', # pre-load f
+ '&paddq ($BxC,"mm6")', # h+=Sigma0(a)
+ '&movq ("mm6",&QWP(8*($i+6-1)%64,"esp"))', # pre-load g
+
+ '($A,$BxC) = ($BxC,$A); $i--;'
+ );
+}
+
+&set_label("00_47_ssse3",32);
+
+ for(;$j<16;$j++) {
+ my ($t0,$t2,$t1)=@X[2..4];
+ my @insns = (&BODY_00_15_ssse3(),&BODY_00_15_ssse3());
+
+ &movdqa ($t2,@X[5]);
+ &movdqa (@X[1],$t0); # restore @X[1]
+ &palignr ($t0,@X[0],8); # X[1..2]
+ &movdqa (&QWP(16*($j%4),$frame),@X[4]); # off-load @X[4]
+ &palignr ($t2,@X[4],8); # X[9..10]
+
+ &movdqa ($t1,$t0);
+ &psrlq ($t0,7);
+ &paddq (@X[0],$t2); # X[0..1] += X[9..10]
+ &movdqa ($t2,$t1);
+ &psrlq ($t1,1);
+ &psllq ($t2,64-8);
+ &pxor ($t0,$t1);
+ &psrlq ($t1,8-1);
+ &pxor ($t0,$t2);
+ &psllq ($t2,8-1);
+ &pxor ($t0,$t1);
+ &movdqa ($t1,@X[7]);
+ &pxor ($t0,$t2); # sigma0(X[1..2])
+ &movdqa ($t2,@X[7]);
+ &psrlq ($t1,6);
+ &paddq (@X[0],$t0); # X[0..1] += sigma0(X[1..2])
+
+ &movdqa ($t0,@X[7]);
+ &psrlq ($t2,19);
+ &psllq ($t0,64-61);
+ &pxor ($t1,$t2);
+ &psrlq ($t2,61-19);
+ &pxor ($t1,$t0);
+ &psllq ($t0,61-19);
+ &pxor ($t1,$t2);
+ &movdqa ($t2,&QWP(16*(($j+2)%4),$frame));# pre-restore @X[1]
+ &pxor ($t1,$t0); # sigma1(X[14..15])
+ &movdqa ($t0,&QWP(16*($j%8),$K512));
+ eval(shift(@insns));
+ &paddq (@X[0],$t1); # X[0..1] += sigma1(X[14..15])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &paddq ($t0,@X[0]);
+ foreach(@insns) { eval; }
+ &movdqa (&QWP(16*($j%8)-128,$frame),$t0);# xfer X[i]+K[i]
+
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &lea ($K512,&DWP(16*8,$K512));
+ &dec ("ecx");
+ &jnz (&label("00_47_ssse3"));
+
+ &movdqa (@X[1],&QWP(0,$K512)); # byte swap mask
+ &lea ($K512,&DWP(-80*8,$K512)); # rewind
+ &movdqu (@X[0],&QWP(0,"ebx"));
+ &pshufb (@X[0],@X[1]);
+
+ for ($j=0;$j<8;$j++) { # load next or same block
+ my @insns = (&BODY_00_15_ssse3(),&BODY_00_15_ssse3());
+
+ &movdqa (&QWP(16*(($j-1)%4),$frame),@X[3]) if ($j>4); # off-load
+ &movdqa (@X[3],&QWP(16*($j%8),$K512));
+ &movdqa (@X[2],@X[1]) if ($j<7); # perpetuate byte swap mask
+ &movdqu (@X[1],&QWP(16*($j+1),"ebx")) if ($j<7); # next input
+ &movdqa (@X[1],&QWP(16*(($j+1)%4),$frame)) if ($j==7);# restore @X[0]
+ &paddq (@X[3],@X[0]);
+ &pshufb (@X[1],@X[2]) if ($j<7);
+ foreach(@insns) { eval; }
+ &movdqa (&QWP(16*($j%8)-128,$frame),@X[3]);# xfer X[i]+K[i]
+
+ push(@X,shift(@X)); # rotate(@X)
+ }
+
+ #&movq ($A,$Asse2); # load A-H
+ &movq ("mm1",$Bsse2);
+ &paddq ($A,"mm3"); # from BODY_00_15
+ #&movq ($BxC,$Csse2);
+ &movq ("mm3",$Dsse2);
+ #&movq ($E,$Esse2);
+ #&movq ("mm5",$Fsse2);
+ #&movq ("mm6",$Gsse2);
+ &movq ("mm7",$Hsse2);
+
+ &pxor ($BxC,"mm1"); # de-magic
+ &paddq ($A,&QWP(0,"esi"));
+ &paddq ("mm1",&QWP(8,"esi"));
+ &paddq ($BxC,&QWP(16,"esi"));
+ &paddq ("mm3",&QWP(24,"esi"));
+ &paddq ($E,&QWP(32,"esi"));
+ &paddq ("mm5",&QWP(40,"esi"));
+ &paddq ("mm6",&QWP(48,"esi"));
+ &paddq ("mm7",&QWP(56,"esi"));
+
+ &movq (&QWP(0,"esi"),$A);
+ &movq (&QWP(8,"esi"),"mm1");
+ &movq (&QWP(16,"esi"),$BxC);
+ &movq (&QWP(24,"esi"),"mm3");
+ &movq (&QWP(32,"esi"),$E);
+ &movq (&QWP(40,"esi"),"mm5");
+ &movq (&QWP(48,"esi"),"mm6");
+ &movq (&QWP(56,"esi"),"mm7");
+
+ &cmp ("edi","eax") # are we done yet?
+ &jb (&label("loop_ssse3"));
+
+ &mov ("esp",&DWP(64+12,$frame)); # restore sp
+ &emms ();
+}
+&function_end_A();
+}
+&set_label("loop_x86",16);
+ # copy input block to stack reversing byte and qword order
+ for ($i=0;$i<8;$i++) {
+ &mov ("eax",&DWP($i*16+0,"edi"));
+ &mov ("ebx",&DWP($i*16+4,"edi"));
+ &mov ("ecx",&DWP($i*16+8,"edi"));
+ &mov ("edx",&DWP($i*16+12,"edi"));
+ &bswap ("eax");
+ &bswap ("ebx");
+ &bswap ("ecx");
+ &bswap ("edx");
+ &push ("eax");
+ &push ("ebx");
+ &push ("ecx");
+ &push ("edx");
+ }
+ &add ("edi",128);
+ &sub ("esp",9*8); # place for T,A,B,C,D,E,F,G,H
+ &mov (&DWP(8*(9+16)+4,"esp"),"edi");
+
+ # copy ctx->h[0-7] to A,B,C,D,E,F,G,H on stack
+ &lea ("edi",&DWP(8,"esp"));
+ &mov ("ecx",16);
+ &data_word(0xA5F3F689); # rep movsd
+
+&set_label("00_15_x86",16);
+ &BODY_00_15_x86();
+
+ &cmp (&LB("edx"),0x94);
+ &jne (&label("00_15_x86"));
+
+&set_label("16_79_x86",16);
+ #define sigma0(x) (ROTR((x),1) ^ ROTR((x),8) ^ ((x)>>7))
+ # LO lo>>1^hi<<31 ^ lo>>8^hi<<24 ^ lo>>7^hi<<25
+ # HI hi>>1^lo<<31 ^ hi>>8^lo<<24 ^ hi>>7
+ &mov ("ecx",&DWP(8*(9+15+16-1)+0,"esp"));
+ &mov ("edx",&DWP(8*(9+15+16-1)+4,"esp"));
+ &mov ("esi","ecx");
+
+ &shr ("ecx",1); # lo>>1
+ &mov ("edi","edx");
+ &shr ("edx",1); # hi>>1
+ &mov ("eax","ecx");
+ &shl ("esi",24); # lo<<24
+ &mov ("ebx","edx");
+ &shl ("edi",24); # hi<<24
+ &xor ("ebx","esi");
+
+ &shr ("ecx",7-1); # lo>>7
+ &xor ("eax","edi");
+ &shr ("edx",7-1); # hi>>7
+ &xor ("eax","ecx");
+ &shl ("esi",31-24); # lo<<31
+ &xor ("ebx","edx");
+ &shl ("edi",25-24); # hi<<25
+ &xor ("ebx","esi");
+
+ &shr ("ecx",8-7); # lo>>8
+ &xor ("eax","edi");
+ &shr ("edx",8-7); # hi>>8
+ &xor ("eax","ecx");
+ &shl ("edi",31-25); # hi<<31
+ &xor ("ebx","edx");
+ &xor ("eax","edi"); # T1 = sigma0(X[-15])
+
+ &mov (&DWP(0,"esp"),"eax");
+ &mov (&DWP(4,"esp"),"ebx"); # put T1 away
+
+ #define sigma1(x) (ROTR((x),19) ^ ROTR((x),61) ^ ((x)>>6))
+ # LO lo>>19^hi<<13 ^ hi>>29^lo<<3 ^ lo>>6^hi<<26
+ # HI hi>>19^lo<<13 ^ lo>>29^hi<<3 ^ hi>>6
+ &mov ("ecx",&DWP(8*(9+15+16-14)+0,"esp"));
+ &mov ("edx",&DWP(8*(9+15+16-14)+4,"esp"));
+ &mov ("esi","ecx");
+
+ &shr ("ecx",6); # lo>>6
+ &mov ("edi","edx");
+ &shr ("edx",6); # hi>>6
+ &mov ("eax","ecx");
+ &shl ("esi",3); # lo<<3
+ &mov ("ebx","edx");
+ &shl ("edi",3); # hi<<3
+ &xor ("eax","esi");
+
+ &shr ("ecx",19-6); # lo>>19
+ &xor ("ebx","edi");
+ &shr ("edx",19-6); # hi>>19
+ &xor ("eax","ecx");
+ &shl ("esi",13-3); # lo<<13
+ &xor ("ebx","edx");
+ &shl ("edi",13-3); # hi<<13
+ &xor ("ebx","esi");
+
+ &shr ("ecx",29-19); # lo>>29
+ &xor ("eax","edi");
+ &shr ("edx",29-19); # hi>>29
+ &xor ("ebx","ecx");
+ &shl ("edi",26-13); # hi<<26
+ &xor ("eax","edx");
+ &xor ("eax","edi"); # sigma1(X[-2])
+
+ &mov ("ecx",&DWP(8*(9+15+16)+0,"esp"));
+ &mov ("edx",&DWP(8*(9+15+16)+4,"esp"));
+ &add ("eax",&DWP(0,"esp"));
+ &adc ("ebx",&DWP(4,"esp")); # T1 = sigma1(X[-2])+T1
+ &mov ("esi",&DWP(8*(9+15+16-9)+0,"esp"));
+ &mov ("edi",&DWP(8*(9+15+16-9)+4,"esp"));
+ &add ("eax","ecx");
+ &adc ("ebx","edx"); # T1 += X[-16]
+ &add ("eax","esi");
+ &adc ("ebx","edi"); # T1 += X[-7]
+ &mov (&DWP(8*(9+15)+0,"esp"),"eax");
+ &mov (&DWP(8*(9+15)+4,"esp"),"ebx"); # save X[0]
+
+ &BODY_00_15_x86();
+
+ &cmp (&LB("edx"),0x17);
+ &jne (&label("16_79_x86"));
+
+ &mov ("esi",&DWP(8*(9+16+80)+0,"esp"));# ctx
+ &mov ("edi",&DWP(8*(9+16+80)+4,"esp"));# inp
+ for($i=0;$i<4;$i++) {
+ &mov ("eax",&DWP($i*16+0,"esi"));
+ &mov ("ebx",&DWP($i*16+4,"esi"));
+ &mov ("ecx",&DWP($i*16+8,"esi"));
+ &mov ("edx",&DWP($i*16+12,"esi"));
+ &add ("eax",&DWP(8+($i*16)+0,"esp"));
+ &adc ("ebx",&DWP(8+($i*16)+4,"esp"));
+ &mov (&DWP($i*16+0,"esi"),"eax");
+ &mov (&DWP($i*16+4,"esi"),"ebx");
+ &add ("ecx",&DWP(8+($i*16)+8,"esp"));
+ &adc ("edx",&DWP(8+($i*16)+12,"esp"));
+ &mov (&DWP($i*16+8,"esi"),"ecx");
+ &mov (&DWP($i*16+12,"esi"),"edx");
+ }
+ &add ("esp",8*(9+16+80)); # destroy frame
+ &sub ($K512,8*80); # rewind K
+
+ &cmp ("edi",&DWP(8,"esp")); # are we done yet?
+ &jb (&label("loop_x86"));
+
+ &mov ("esp",&DWP(12,"esp")); # restore sp
+&function_end_A();
+
+&set_label("K512",64); # Yes! I keep it in the code segment!
+ &data_word(0xd728ae22,0x428a2f98); # u64
+ &data_word(0x23ef65cd,0x71374491); # u64
+ &data_word(0xec4d3b2f,0xb5c0fbcf); # u64
+ &data_word(0x8189dbbc,0xe9b5dba5); # u64
+ &data_word(0xf348b538,0x3956c25b); # u64
+ &data_word(0xb605d019,0x59f111f1); # u64
+ &data_word(0xaf194f9b,0x923f82a4); # u64
+ &data_word(0xda6d8118,0xab1c5ed5); # u64
+ &data_word(0xa3030242,0xd807aa98); # u64
+ &data_word(0x45706fbe,0x12835b01); # u64
+ &data_word(0x4ee4b28c,0x243185be); # u64
+ &data_word(0xd5ffb4e2,0x550c7dc3); # u64
+ &data_word(0xf27b896f,0x72be5d74); # u64
+ &data_word(0x3b1696b1,0x80deb1fe); # u64
+ &data_word(0x25c71235,0x9bdc06a7); # u64
+ &data_word(0xcf692694,0xc19bf174); # u64
+ &data_word(0x9ef14ad2,0xe49b69c1); # u64
+ &data_word(0x384f25e3,0xefbe4786); # u64
+ &data_word(0x8b8cd5b5,0x0fc19dc6); # u64
+ &data_word(0x77ac9c65,0x240ca1cc); # u64
+ &data_word(0x592b0275,0x2de92c6f); # u64
+ &data_word(0x6ea6e483,0x4a7484aa); # u64
+ &data_word(0xbd41fbd4,0x5cb0a9dc); # u64
+ &data_word(0x831153b5,0x76f988da); # u64
+ &data_word(0xee66dfab,0x983e5152); # u64
+ &data_word(0x2db43210,0xa831c66d); # u64
+ &data_word(0x98fb213f,0xb00327c8); # u64
+ &data_word(0xbeef0ee4,0xbf597fc7); # u64
+ &data_word(0x3da88fc2,0xc6e00bf3); # u64
+ &data_word(0x930aa725,0xd5a79147); # u64
+ &data_word(0xe003826f,0x06ca6351); # u64
+ &data_word(0x0a0e6e70,0x14292967); # u64
+ &data_word(0x46d22ffc,0x27b70a85); # u64
+ &data_word(0x5c26c926,0x2e1b2138); # u64
+ &data_word(0x5ac42aed,0x4d2c6dfc); # u64
+ &data_word(0x9d95b3df,0x53380d13); # u64
+ &data_word(0x8baf63de,0x650a7354); # u64
+ &data_word(0x3c77b2a8,0x766a0abb); # u64
+ &data_word(0x47edaee6,0x81c2c92e); # u64
+ &data_word(0x1482353b,0x92722c85); # u64
+ &data_word(0x4cf10364,0xa2bfe8a1); # u64
+ &data_word(0xbc423001,0xa81a664b); # u64
+ &data_word(0xd0f89791,0xc24b8b70); # u64
+ &data_word(0x0654be30,0xc76c51a3); # u64
+ &data_word(0xd6ef5218,0xd192e819); # u64
+ &data_word(0x5565a910,0xd6990624); # u64
+ &data_word(0x5771202a,0xf40e3585); # u64
+ &data_word(0x32bbd1b8,0x106aa070); # u64
+ &data_word(0xb8d2d0c8,0x19a4c116); # u64
+ &data_word(0x5141ab53,0x1e376c08); # u64
+ &data_word(0xdf8eeb99,0x2748774c); # u64
+ &data_word(0xe19b48a8,0x34b0bcb5); # u64
+ &data_word(0xc5c95a63,0x391c0cb3); # u64
+ &data_word(0xe3418acb,0x4ed8aa4a); # u64
+ &data_word(0x7763e373,0x5b9cca4f); # u64
+ &data_word(0xd6b2b8a3,0x682e6ff3); # u64
+ &data_word(0x5defb2fc,0x748f82ee); # u64
+ &data_word(0x43172f60,0x78a5636f); # u64
+ &data_word(0xa1f0ab72,0x84c87814); # u64
+ &data_word(0x1a6439ec,0x8cc70208); # u64
+ &data_word(0x23631e28,0x90befffa); # u64
+ &data_word(0xde82bde9,0xa4506ceb); # u64
+ &data_word(0xb2c67915,0xbef9a3f7); # u64
+ &data_word(0xe372532b,0xc67178f2); # u64
+ &data_word(0xea26619c,0xca273ece); # u64
+ &data_word(0x21c0c207,0xd186b8c7); # u64
+ &data_word(0xcde0eb1e,0xeada7dd6); # u64
+ &data_word(0xee6ed178,0xf57d4f7f); # u64
+ &data_word(0x72176fba,0x06f067aa); # u64
+ &data_word(0xa2c898a6,0x0a637dc5); # u64
+ &data_word(0xbef90dae,0x113f9804); # u64
+ &data_word(0x131c471b,0x1b710b35); # u64
+ &data_word(0x23047d84,0x28db77f5); # u64
+ &data_word(0x40c72493,0x32caab7b); # u64
+ &data_word(0x15c9bebc,0x3c9ebe0a); # u64
+ &data_word(0x9c100d4c,0x431d67c4); # u64
+ &data_word(0xcb3e42b6,0x4cc5d4be); # u64
+ &data_word(0xfc657e2a,0x597f299c); # u64
+ &data_word(0x3ad6faec,0x5fcb6fab); # u64
+ &data_word(0x4a475817,0x6c44198c); # u64
+
+ &data_word(0x04050607,0x00010203); # byte swap
+ &data_word(0x0c0d0e0f,0x08090a0b); # mask
+&function_end_B("sha512_block_data_order");
+&asciz("SHA512 block transform for x86, CRYPTOGAMS by <appro\@openssl.org>");
+
+&asm_finish();
diff --git a/devel/perlasm/sha512-ssse3-x86_64.pl b/devel/perlasm/sha512-ssse3-x86_64.pl
new file mode 100755
index 0000000000..8070d09c94
--- /dev/null
+++ b/devel/perlasm/sha512-ssse3-x86_64.pl
@@ -0,0 +1,2152 @@
+#!/usr/bin/env perl
+#
+# ====================================================================
+# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
+# project. Rights for redistribution and usage in source and binary
+# forms are granted according to the OpenSSL license.
+# ====================================================================
+#
+# sha256/512_block procedure for x86_64.
+#
+# 40% improvement over compiler-generated code on Opteron. On EM64T
+# sha256 was observed to run >80% faster and sha512 - >40%. No magical
+# tricks, just straight implementation... I really wonder why gcc
+# [being armed with inline assembler] fails to generate as fast code.
+# The only thing which is cool about this module is that it's the very
+# same instruction sequence used for both SHA-256 and SHA-512. In the
+# former case the instructions operate on 32-bit operands, while in the
+# latter - on 64-bit ones. All I had to do was to get one flavor right,
+# the other one passed the test right away:-)
+#
+# sha256_block runs in ~1005 cycles on Opteron, which gives you
+# asymptotic performance of 64*1000/1005=63.7MBps times CPU clock
+# frequency in GHz. sha512_block runs in ~1275 cycles, which results
+# in 128*1000/1275=100MBps per GHz. Is there room for improvement?
+# Well, if you compare it to the IA-64 implementation, which maintains
+# X[16] in the register bank[!], tends to 4 instructions per CPU clock
+# cycle and runs in 1003 cycles, 1275 is a very good result for a 3-way
+# issue Opteron pipeline with X[16] maintained in memory. So that *if*
+# there is a way to improve it, *then* the only way would be to try to
+# offload X[16] updates to SSE unit, but that would require "deeper"
+# loop unroll, which in turn would naturally cause size blow-up, not
+# to mention increased complexity! And once again, only *if* it's
+# actually possible to noticeably improve overall ILP, instruction
+# level parallelism, on a given CPU implementation in this case.
+#
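As a quick cross-check of the arithmetic quoted above, the cycle counts translate into per-GHz throughput as follows; a standalone throwaway snippet, not part of the patch:

    #include <stdio.h>

    int main(void)
    {
        /* bytes per block * cycles per second (at 1 GHz) / cycles per block */
        printf("SHA-256: %.1f MB/s per GHz\n", 64.0  * 1e9 / 1005.0 / 1e6); /* ~63.7  */
        printf("SHA-512: %.1f MB/s per GHz\n", 128.0 * 1e9 / 1275.0 / 1e6); /* ~100.4 */
        return 0;
    }
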
+# Special note on Intel EM64T. While Opteron CPU exhibits perfect
+# performance ratio of 1.5 between 64- and 32-bit flavors [see above],
+# [currently available] EM64T CPUs apparently are far from it. On the
+# contrary, 64-bit version, sha512_block, is ~30% *slower* than 32-bit
+# sha256_block:-( This is presumably because 64-bit shifts/rotates
+# apparently are not atomic instructions, but implemented in microcode.
+#
+# May 2012.
+#
+# An optimization including one of Pavel Semjanov's ideas, the
+# alternative Maj, resulted in a >=5% improvement on most CPUs: +20% for
+# SHA256 and unfortunately -2% for SHA512 on P4 [which nobody should
+# care about that much].
+#
+# June 2012.
+#
+# Add SIMD code paths, see below for improvement coefficients. An SSSE3
+# code path was not attempted for SHA512, because the estimated
+# improvement, noticeably less than 9%, is not high enough to justify
+# the effort, at least not on pre-AVX processors. [The obvious exclusion
+# is VIA Nano, but it has a SHA512 instruction that is faster and
+# should be used instead.] For reference, the corresponding estimated
+# upper limit for improvement for SSSE3 SHA256 is 28%. The fact that
+# higher coefficients are observed on VIA Nano and Bulldozer has more
+# to do with the specifics of their architecture [which is a topic for
+# separate discussion].
+#
+# November 2012.
+#
+# Add AVX2 code path. Two consecutive input blocks are loaded into
+# 256-bit %ymm registers, with data from the first block in the least
+# significant 128-bit halves and data from the second in the most
+# significant. The data is then processed with the same SIMD instruction
+# sequence as for AVX, but with %ymm registers as operands. A side effect
+# is an increased stack frame: 448 additional bytes for SHA256, 1152 for SHA512.
+
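As a rough illustration of the layout described above (SHA-256 flavor), here is a minimal intrinsics sketch of packing two consecutive 64-byte blocks into %ymm registers, block 0 in the low 128-bit lanes and block 1 in the high lanes. The helper name is made up for illustration; the real code is the vmovdqu/vinserti128 sequence emitted by the perlasm further down.

    #include <immintrin.h>
    #include <stdint.h>

    /* Pack two consecutive 64-byte message blocks into four 256-bit vectors:
     * low 128-bit lane <- block 0, high 128-bit lane <- block 1. */
    static inline void load_two_blocks(const uint8_t *inp, __m256i X[4])
    {
        const uint8_t *blk0 = inp;        /* first block  */
        const uint8_t *blk1 = inp + 64;   /* second block */

        for (int i = 0; i < 4; i++) {
            __m128i lo = _mm_loadu_si128((const __m128i *)(blk0 + 16 * i));
            __m128i hi = _mm_loadu_si128((const __m128i *)(blk1 + 16 * i));
            /* mirrors "vinserti128 $1,(%r12),X,X" in the AVX2 loop below */
            X[i] = _mm256_inserti128_si256(_mm256_castsi128_si256(lo), hi, 1);
        }
    }
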
+######################################################################
+# Current performance in cycles per processed byte (less is better):
+#
+# SHA256 SSSE3 AVX/XOP(*) SHA512 AVX/XOP(*)
+#
+# AMD K8 14.9 - - 9.57 -
+# P4 17.3 - - 30.8 -
+# Core 2 15.6 13.8(+13%) - 9.97 -
+# Westmere 14.8 12.3(+19%) - 9.58 -
+# Sandy Bridge 17.4 14.2(+23%) 11.6(+50%(**)) 11.2 8.10(+38%(**))
+# Ivy Bridge 12.6 10.5(+20%) 10.3(+22%) 8.17 7.22(+13%)
+# Haswell 12.2 9.28(+31%) 7.80(+56%) 7.66 5.40(+42%)
+# Bulldozer 21.1 13.6(+54%) 13.6(+54%(***)) 13.5 8.58(+57%)
+# VIA Nano 23.0 16.5(+39%) - 14.7 -
+# Atom 23.0 18.9(+22%) - 14.7 -
+#
+# (*) whichever best applicable;
+# (**) switch from ror to shrd stands for fair share of improvement;
+# (***) execution time is fully determined by remaining integer-only
+# part, body_00_15; reducing the amount of SIMD instructions
+# below certain limit makes no difference/sense; to conserve
+# space SHA256 XOP code path is therefore omitted;
+
+$flavour = shift;
+$output = shift;
+if ($flavour =~ /\./) { $output = $flavour; undef $flavour; }
+
+$win64=0; $win64=1 if ($flavour =~ /[nm]asm|mingw64/ || $output =~ /\.asm$/);
+
+$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
+( $xlate="${dir}x86_64-xlate.pl" and -f $xlate ) or
+( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or
+die "can't locate x86_64-xlate.pl";
+
+if (`$ENV{CC} -Wa,-v -c -o /dev/null -x assembler /dev/null 2>&1`
+ =~ /GNU assembler version ([2-9]\.[0-9]+)/) {
+ $avx = ($1>=2.19) + ($1>=2.22);
+}
+
+if (!$avx && $win64 && ($flavour =~ /nasm/ || $ENV{ASM} =~ /nasm/) &&
+ `nasm -v 2>&1` =~ /NASM version ([2-9]\.[0-9]+)/) {
+ $avx = ($1>=2.09) + ($1>=2.10);
+}
+
+if (!$avx && $win64 && ($flavour =~ /masm/ || $ENV{ASM} =~ /ml64/) &&
+ `ml64 2>&1` =~ /Version ([0-9]+)\./) {
+ $avx = ($1>=10) + ($1>=11);
+}
+
+open OUT,"| \"$^X\" $xlate $flavour $output";
+*STDOUT=*OUT;
+
+if ($output =~ /512/) {
+ $func="sha512_block_data_order";
+ $TABLE="K512";
+ $SZ=8;
+ @ROT=($A,$B,$C,$D,$E,$F,$G,$H)=("%rax","%rbx","%rcx","%rdx",
+ "%r8", "%r9", "%r10","%r11");
+ ($T1,$a0,$a1,$a2,$a3)=("%r12","%r13","%r14","%r15","%rdi");
+ @Sigma0=(28,34,39);
+ @Sigma1=(14,18,41);
+ @sigma0=(1, 8, 7);
+ @sigma1=(19,61, 6);
+ $rounds=80;
+} else {
+ $func="sha256_block_data_order";
+ $TABLE="K256";
+ $SZ=4;
+ @ROT=($A,$B,$C,$D,$E,$F,$G,$H)=("%eax","%ebx","%ecx","%edx",
+ "%r8d","%r9d","%r10d","%r11d");
+ ($T1,$a0,$a1,$a2,$a3)=("%r12d","%r13d","%r14d","%r15d","%edi");
+ @Sigma0=( 2,13,22);
+ @Sigma1=( 6,11,25);
+ @sigma0=( 7,18, 3);
+ @sigma1=(17,19,10);
+ $rounds=64;
+}
+
+$ctx="%rdi"; # 1st arg, zapped by $a3
+$inp="%rsi"; # 2nd arg
+$Tbl="%rbp";
+
+$_ctx="16*$SZ+0*8(%rsp)";
+$_inp="16*$SZ+1*8(%rsp)";
+$_end="16*$SZ+2*8(%rsp)";
+$_rsp="16*$SZ+3*8(%rsp)";
+$framesz="16*$SZ+4*8";
+
+
+sub ROUND_00_15()
+{ my ($i,$a,$b,$c,$d,$e,$f,$g,$h) = @_;
+ my $STRIDE=$SZ;
+ $STRIDE += 16 if ($i%(16/$SZ)==(16/$SZ-1));
+
+$code.=<<___;
+ ror \$`$Sigma1[2]-$Sigma1[1]`,$a0
+ mov $f,$a2
+
+ xor $e,$a0
+ ror \$`$Sigma0[2]-$Sigma0[1]`,$a1
+ xor $g,$a2 # f^g
+
+ mov $T1,`$SZ*($i&0xf)`(%rsp)
+ xor $a,$a1
+ and $e,$a2 # (f^g)&e
+
+ ror \$`$Sigma1[1]-$Sigma1[0]`,$a0
+ add $h,$T1 # T1+=h
+ xor $g,$a2 # Ch(e,f,g)=((f^g)&e)^g
+
+ ror \$`$Sigma0[1]-$Sigma0[0]`,$a1
+ xor $e,$a0
+ add $a2,$T1 # T1+=Ch(e,f,g)
+
+ mov $a,$a2
+ add ($Tbl),$T1 # T1+=K[round]
+ xor $a,$a1
+
+ xor $b,$a2 # a^b, b^c in next round
+ ror \$$Sigma1[0],$a0 # Sigma1(e)
+ mov $b,$h
+
+ and $a2,$a3
+ ror \$$Sigma0[0],$a1 # Sigma0(a)
+ add $a0,$T1 # T1+=Sigma1(e)
+
+ xor $a3,$h # h=Maj(a,b,c)=Ch(a^b,c,b)
+ add $T1,$d # d+=T1
+ add $T1,$h # h+=T1
+
+ lea $STRIDE($Tbl),$Tbl # round++
+___
+$code.=<<___ if ($i<15);
+ add $a1,$h # h+=Sigma0(a)
+___
+ ($a2,$a3) = ($a3,$a2);
+}
+
+sub ROUND_16_XX()
+{ my ($i,$a,$b,$c,$d,$e,$f,$g,$h) = @_;
+
+$code.=<<___;
+ mov `$SZ*(($i+1)&0xf)`(%rsp),$a0
+ mov `$SZ*(($i+14)&0xf)`(%rsp),$a2
+
+ mov $a0,$T1
+ ror \$`$sigma0[1]-$sigma0[0]`,$a0
+ add $a1,$a # modulo-scheduled h+=Sigma0(a)
+ mov $a2,$a1
+ ror \$`$sigma1[1]-$sigma1[0]`,$a2
+
+ xor $T1,$a0
+ shr \$$sigma0[2],$T1
+ ror \$$sigma0[0],$a0
+ xor $a1,$a2
+ shr \$$sigma1[2],$a1
+
+ ror \$$sigma1[0],$a2
+ xor $a0,$T1 # sigma0(X[(i+1)&0xf])
+ xor $a1,$a2 # sigma1(X[(i+14)&0xf])
+ add `$SZ*(($i+9)&0xf)`(%rsp),$T1
+
+ add `$SZ*($i&0xf)`(%rsp),$T1
+ mov $e,$a0
+ add $a2,$T1
+ mov $a,$a1
+___
+ &ROUND_00_15(@_);
+}
+
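For readers who do not want to expand the macros by hand, here is a plain-C reference of what ROUND_00_15 and ROUND_16_XX compute for the SHA-256 flavor ($SZ==4), using the Ch and Maj identities from the comments. The helper names are invented for illustration and are not part of the patch; the SHA-512 flavor is structurally identical, just with 64-bit words and the rotate counts listed above.

    #include <stdint.h>

    #define ROR32(x,n) (((x) >> (n)) | ((x) << (32 - (n))))

    /* One round: state s[0..7] = a..h, Xi = schedule word, Ki = round constant. */
    static inline void sha256_round(uint32_t s[8], uint32_t Xi, uint32_t Ki)
    {
        uint32_t a = s[0], b = s[1], c = s[2], d = s[3];
        uint32_t e = s[4], f = s[5], g = s[6], h = s[7];

        uint32_t ch  = ((f ^ g) & e) ^ g;                         /* Ch(e,f,g)=((f^g)&e)^g  */
        uint32_t S1  = ROR32(e, 6) ^ ROR32(e, 11) ^ ROR32(e, 25); /* Sigma1(e)              */
        uint32_t S0  = ROR32(a, 2) ^ ROR32(a, 13) ^ ROR32(a, 22); /* Sigma0(a)              */
        uint32_t maj = ((c ^ b) & (a ^ b)) ^ b;                   /* Maj(a,b,c)=Ch(a^b,c,b) */
        uint32_t T1  = h + S1 + ch + Ki + Xi;

        s[7] = g; s[6] = f; s[5] = e; s[4] = d + T1;              /* e' = d + T1             */
        s[3] = c; s[2] = b; s[1] = a; s[0] = T1 + S0 + maj;       /* a' = T1 + Sigma0 + Maj  */
    }

    /* Message schedule step over the 16-word circular buffer kept on the stack:
     * X[i] += sigma0(X[i+1]) + X[i+9] + sigma1(X[i+14])   (indices mod 16). */
    static inline uint32_t sha256_schedule(uint32_t X[16], int i)
    {
        uint32_t w1  = X[(i + 1)  & 15], w14 = X[(i + 14) & 15];
        uint32_t s0  = ROR32(w1, 7)   ^ ROR32(w1, 18)  ^ (w1  >> 3);
        uint32_t s1  = ROR32(w14, 17) ^ ROR32(w14, 19) ^ (w14 >> 10);

        X[i & 15] += s0 + X[(i + 9) & 15] + s1;
        return X[i & 15];
    }
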
+$code=<<___;
+.text
+
+.extern OPENSSL_ia32cap_P
+.globl $func
+.type $func,\@function,3
+.align 16
+$func:
+___
+$code.=<<___ if ($SZ==4 || $avx);
+ lea OPENSSL_ia32cap_P(%rip),%r11
+ mov 0(%r11),%r9d
+ mov 4(%r11),%r10d
+ mov 8(%r11),%r11d
+___
+$code.=<<___ if ($avx && $SZ==8);
+ test \$`1<<11`,%r10d # check for XOP
+ jnz .Lxop_shortcut
+___
+$code.=<<___ if ($avx>1);
+ and \$`1<<8|1<<5|1<<3`,%r11d # check for BMI2+AVX2+BMI1
+ cmp \$`1<<8|1<<5|1<<3`,%r11d
+ je .Lavx2_shortcut
+___
+$code.=<<___ if ($avx);
+ and \$`1<<30`,%r9d # mask "Intel CPU" bit
+ and \$`1<<28|1<<9`,%r10d # mask AVX and SSSE3 bits
+ or %r9d,%r10d
+ cmp \$`1<<28|1<<9|1<<30`,%r10d
+ je .Lavx_shortcut
+___
+$code.=<<___ if ($SZ==4);
+ test \$`1<<9`,%r10d
+ jnz .Lssse3_shortcut
+___
+$code.=<<___;
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ push %r14
+ push %r15
+ mov %rsp,%r11 # copy %rsp
+ shl \$4,%rdx # num*16
+ sub \$$framesz,%rsp
+ lea ($inp,%rdx,$SZ),%rdx # inp+num*16*$SZ
+ and \$-64,%rsp # align stack frame
+ mov $ctx,$_ctx # save ctx, 1st arg
+ mov $inp,$_inp # save inp, 2nd arg
+ mov %rdx,$_end # save end pointer, "3rd" arg
+ mov %r11,$_rsp # save copy of %rsp
+.Lprologue:
+
+ mov $SZ*0($ctx),$A
+ mov $SZ*1($ctx),$B
+ mov $SZ*2($ctx),$C
+ mov $SZ*3($ctx),$D
+ mov $SZ*4($ctx),$E
+ mov $SZ*5($ctx),$F
+ mov $SZ*6($ctx),$G
+ mov $SZ*7($ctx),$H
+ jmp .Lloop
+
+.align 16
+.Lloop:
+ mov $B,$a3
+ lea $TABLE(%rip),$Tbl
+ xor $C,$a3 # magic
+___
+ for($i=0;$i<16;$i++) {
+ $code.=" mov $SZ*$i($inp),$T1\n";
+ $code.=" mov @ROT[4],$a0\n";
+ $code.=" mov @ROT[0],$a1\n";
+ $code.=" bswap $T1\n";
+ &ROUND_00_15($i,@ROT);
+ unshift(@ROT,pop(@ROT));
+ }
+$code.=<<___;
+ jmp .Lrounds_16_xx
+.align 16
+.Lrounds_16_xx:
+___
+ for(;$i<32;$i++) {
+ &ROUND_16_XX($i,@ROT);
+ unshift(@ROT,pop(@ROT));
+ }
+
+$code.=<<___;
+ cmpb \$0,`$SZ-1`($Tbl)
+ jnz .Lrounds_16_xx
+
+ mov $_ctx,$ctx
+ add $a1,$A # modulo-scheduled h+=Sigma0(a)
+ lea 16*$SZ($inp),$inp
+
+ add $SZ*0($ctx),$A
+ add $SZ*1($ctx),$B
+ add $SZ*2($ctx),$C
+ add $SZ*3($ctx),$D
+ add $SZ*4($ctx),$E
+ add $SZ*5($ctx),$F
+ add $SZ*6($ctx),$G
+ add $SZ*7($ctx),$H
+
+ cmp $_end,$inp
+
+ mov $A,$SZ*0($ctx)
+ mov $B,$SZ*1($ctx)
+ mov $C,$SZ*2($ctx)
+ mov $D,$SZ*3($ctx)
+ mov $E,$SZ*4($ctx)
+ mov $F,$SZ*5($ctx)
+ mov $G,$SZ*6($ctx)
+ mov $H,$SZ*7($ctx)
+ jb .Lloop
+
+ mov $_rsp,%rsi
+ mov (%rsi),%r15
+ mov 8(%rsi),%r14
+ mov 16(%rsi),%r13
+ mov 24(%rsi),%r12
+ mov 32(%rsi),%rbp
+ mov 40(%rsi),%rbx
+ lea 48(%rsi),%rsp
+.Lepilogue:
+ ret
+.size $func,.-$func
+___
+
+if ($SZ==4) {
+$code.=<<___;
+.align 64
+.type $TABLE,\@object
+$TABLE:
+ .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+ .long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+ .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+ .long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+ .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+ .long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+ .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+ .long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+ .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+ .long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+ .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+ .long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+ .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+ .long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+ .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+ .long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+ .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+ .long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+ .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+ .long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+ .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+ .long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+ .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+ .long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+ .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+ .long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+ .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+ .long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+ .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+ .long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+ .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+ .long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+
+ .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+ .long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+ .long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+ .long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+ .long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+ .long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+ .asciz "SHA256 block transform for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
+___
+} else {
+$code.=<<___;
+.align 64
+.type $TABLE,\@object
+$TABLE:
+ .quad 0x428a2f98d728ae22,0x7137449123ef65cd
+ .quad 0x428a2f98d728ae22,0x7137449123ef65cd
+ .quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc
+ .quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc
+ .quad 0x3956c25bf348b538,0x59f111f1b605d019
+ .quad 0x3956c25bf348b538,0x59f111f1b605d019
+ .quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118
+ .quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118
+ .quad 0xd807aa98a3030242,0x12835b0145706fbe
+ .quad 0xd807aa98a3030242,0x12835b0145706fbe
+ .quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2
+ .quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2
+ .quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1
+ .quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1
+ .quad 0x9bdc06a725c71235,0xc19bf174cf692694
+ .quad 0x9bdc06a725c71235,0xc19bf174cf692694
+ .quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3
+ .quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3
+ .quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65
+ .quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65
+ .quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483
+ .quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483
+ .quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5
+ .quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5
+ .quad 0x983e5152ee66dfab,0xa831c66d2db43210
+ .quad 0x983e5152ee66dfab,0xa831c66d2db43210
+ .quad 0xb00327c898fb213f,0xbf597fc7beef0ee4
+ .quad 0xb00327c898fb213f,0xbf597fc7beef0ee4
+ .quad 0xc6e00bf33da88fc2,0xd5a79147930aa725
+ .quad 0xc6e00bf33da88fc2,0xd5a79147930aa725
+ .quad 0x06ca6351e003826f,0x142929670a0e6e70
+ .quad 0x06ca6351e003826f,0x142929670a0e6e70
+ .quad 0x27b70a8546d22ffc,0x2e1b21385c26c926
+ .quad 0x27b70a8546d22ffc,0x2e1b21385c26c926
+ .quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df
+ .quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df
+ .quad 0x650a73548baf63de,0x766a0abb3c77b2a8
+ .quad 0x650a73548baf63de,0x766a0abb3c77b2a8
+ .quad 0x81c2c92e47edaee6,0x92722c851482353b
+ .quad 0x81c2c92e47edaee6,0x92722c851482353b
+ .quad 0xa2bfe8a14cf10364,0xa81a664bbc423001
+ .quad 0xa2bfe8a14cf10364,0xa81a664bbc423001
+ .quad 0xc24b8b70d0f89791,0xc76c51a30654be30
+ .quad 0xc24b8b70d0f89791,0xc76c51a30654be30
+ .quad 0xd192e819d6ef5218,0xd69906245565a910
+ .quad 0xd192e819d6ef5218,0xd69906245565a910
+ .quad 0xf40e35855771202a,0x106aa07032bbd1b8
+ .quad 0xf40e35855771202a,0x106aa07032bbd1b8
+ .quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53
+ .quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53
+ .quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8
+ .quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8
+ .quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb
+ .quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb
+ .quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3
+ .quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3
+ .quad 0x748f82ee5defb2fc,0x78a5636f43172f60
+ .quad 0x748f82ee5defb2fc,0x78a5636f43172f60
+ .quad 0x84c87814a1f0ab72,0x8cc702081a6439ec
+ .quad 0x84c87814a1f0ab72,0x8cc702081a6439ec
+ .quad 0x90befffa23631e28,0xa4506cebde82bde9
+ .quad 0x90befffa23631e28,0xa4506cebde82bde9
+ .quad 0xbef9a3f7b2c67915,0xc67178f2e372532b
+ .quad 0xbef9a3f7b2c67915,0xc67178f2e372532b
+ .quad 0xca273eceea26619c,0xd186b8c721c0c207
+ .quad 0xca273eceea26619c,0xd186b8c721c0c207
+ .quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178
+ .quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178
+ .quad 0x06f067aa72176fba,0x0a637dc5a2c898a6
+ .quad 0x06f067aa72176fba,0x0a637dc5a2c898a6
+ .quad 0x113f9804bef90dae,0x1b710b35131c471b
+ .quad 0x113f9804bef90dae,0x1b710b35131c471b
+ .quad 0x28db77f523047d84,0x32caab7b40c72493
+ .quad 0x28db77f523047d84,0x32caab7b40c72493
+ .quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c
+ .quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c
+ .quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a
+ .quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a
+ .quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817
+ .quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817
+
+ .quad 0x0001020304050607,0x08090a0b0c0d0e0f
+ .quad 0x0001020304050607,0x08090a0b0c0d0e0f
+ .asciz "SHA512 block transform for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
+___
+}
+
+######################################################################
+# SIMD code paths
+#
+{{{
+
+my $a4=$T1;
+my ($a,$b,$c,$d,$e,$f,$g,$h);
+
+sub AUTOLOAD() # thunk [simplified] 32-bit style perlasm
+{ my $opcode = $AUTOLOAD; $opcode =~ s/.*:://;
+ my $arg = pop;
+ $arg = "\$$arg" if ($arg*1 eq $arg);
+ $code .= "\t$opcode\t".join(',',$arg,reverse @_)."\n";
+}
+
+sub body_00_15 () {
+ (
+ '($a,$b,$c,$d,$e,$f,$g,$h)=@ROT;'.
+
+ '&ror ($a0,$Sigma1[2]-$Sigma1[1])',
+ '&mov ($a,$a1)',
+ '&mov ($a4,$f)',
+
+ '&ror ($a1,$Sigma0[2]-$Sigma0[1])',
+ '&xor ($a0,$e)',
+ '&xor ($a4,$g)', # f^g
+
+ '&ror ($a0,$Sigma1[1]-$Sigma1[0])',
+ '&xor ($a1,$a)',
+ '&and ($a4,$e)', # (f^g)&e
+
+ '&xor ($a0,$e)',
+ '&add ($h,$SZ*($i&15)."(%rsp)")', # h+=X[i]+K[i]
+ '&mov ($a2,$a)',
+
+ '&xor ($a4,$g)', # Ch(e,f,g)=((f^g)&e)^g
+ '&ror ($a1,$Sigma0[1]-$Sigma0[0])',
+ '&xor ($a2,$b)', # a^b, b^c in next round
+
+ '&add ($h,$a4)', # h+=Ch(e,f,g)
+ '&ror ($a0,$Sigma1[0])', # Sigma1(e)
+ '&and ($a3,$a2)', # (b^c)&(a^b)
+
+ '&xor ($a1,$a)',
+ '&add ($h,$a0)', # h+=Sigma1(e)
+ '&xor ($a3,$b)', # Maj(a,b,c)=Ch(a^b,c,b)
+
+ '&ror ($a1,$Sigma0[0])', # Sigma0(a)
+ '&add ($d,$h)', # d+=h
+ '&add ($h,$a3)', # h+=Maj(a,b,c)
+
+ '&mov ($a0,$d)',
+ '&add ($a1,$h);'. # h+=Sigma0(a)
+ '($a2,$a3) = ($a3,$a2); unshift(@ROT,pop(@ROT)); $i++;'
+ );
+}
+
+######################################################################
+# SSSE3 code path
+#
+if ($SZ==4) { # SHA256 only
+my @X = map("%xmm$_",(0..3));
+my ($t0,$t1,$t2,$t3, $t4,$t5) = map("%xmm$_",(4..9));
+
+$code.=<<___;
+.type ${func}_ssse3,\@function,3
+.align 64
+${func}_ssse3:
+.Lssse3_shortcut:
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ push %r14
+ push %r15
+ mov %rsp,%r11 # copy %rsp
+ shl \$4,%rdx # num*16
+ sub \$`$framesz+$win64*16*4`,%rsp
+ lea ($inp,%rdx,$SZ),%rdx # inp+num*16*$SZ
+ and \$-64,%rsp # align stack frame
+ mov $ctx,$_ctx # save ctx, 1st arg
+ mov $inp,$_inp # save inp, 2nd arg
+ mov %rdx,$_end # save end pointer, "3rd" arg
+ mov %r11,$_rsp # save copy of %rsp
+___
+$code.=<<___ if ($win64);
+ movaps %xmm6,16*$SZ+32(%rsp)
+ movaps %xmm7,16*$SZ+48(%rsp)
+ movaps %xmm8,16*$SZ+64(%rsp)
+ movaps %xmm9,16*$SZ+80(%rsp)
+___
+$code.=<<___;
+.Lprologue_ssse3:
+
+ mov $SZ*0($ctx),$A
+ mov $SZ*1($ctx),$B
+ mov $SZ*2($ctx),$C
+ mov $SZ*3($ctx),$D
+ mov $SZ*4($ctx),$E
+ mov $SZ*5($ctx),$F
+ mov $SZ*6($ctx),$G
+ mov $SZ*7($ctx),$H
+___
+
+$code.=<<___;
+ #movdqa $TABLE+`$SZ*2*$rounds`+32(%rip),$t4
+ #movdqa $TABLE+`$SZ*2*$rounds`+64(%rip),$t5
+ jmp .Lloop_ssse3
+.align 16
+.Lloop_ssse3:
+ movdqa $TABLE+`$SZ*2*$rounds`(%rip),$t3
+ movdqu 0x00($inp),@X[0]
+ movdqu 0x10($inp),@X[1]
+ movdqu 0x20($inp),@X[2]
+ movdqu 0x30($inp),@X[3]
+ pshufb $t3,@X[0]
+ lea $TABLE(%rip),$Tbl
+ pshufb $t3,@X[1]
+ movdqa 0x00($Tbl),$t0
+ pshufb $t3,@X[2]
+ movdqa 0x20($Tbl),$t1
+ paddd @X[0],$t0
+ movdqa 0x40($Tbl),$t2
+ pshufb $t3,@X[3]
+ movdqa 0x60($Tbl),$t3
+ paddd @X[1],$t1
+ paddd @X[2],$t2
+ paddd @X[3],$t3
+ movdqa $t0,0x00(%rsp)
+ mov $A,$a1
+ movdqa $t1,0x10(%rsp)
+ mov $B,$a3
+ movdqa $t2,0x20(%rsp)
+ xor $C,$a3 # magic
+ movdqa $t3,0x30(%rsp)
+ mov $E,$a0
+ jmp .Lssse3_00_47
+
+.align 16
+.Lssse3_00_47:
+ sub \$-16*2*$SZ,$Tbl # size optimization
+___
+sub Xupdate_256_SSSE3 () {
+ (
+ '&movdqa ($t0,@X[1]);',
+ '&movdqa ($t3,@X[3])',
+ '&palignr ($t0,@X[0],$SZ)', # X[1..4]
+ '&palignr ($t3,@X[2],$SZ);', # X[9..12]
+ '&movdqa ($t1,$t0)',
+ '&movdqa ($t2,$t0);',
+ '&psrld ($t0,$sigma0[2])',
+ '&paddd (@X[0],$t3);', # X[0..3] += X[9..12]
+ '&psrld ($t2,$sigma0[0])',
+ '&pshufd ($t3,@X[3],0b11111010)',# X[14..15]
+ '&pslld ($t1,8*$SZ-$sigma0[1]);'.
+ '&pxor ($t0,$t2)',
+ '&psrld ($t2,$sigma0[1]-$sigma0[0]);'.
+ '&pxor ($t0,$t1)',
+ '&pslld ($t1,$sigma0[1]-$sigma0[0]);'.
+ '&pxor ($t0,$t2);',
+ '&movdqa ($t2,$t3)',
+ '&pxor ($t0,$t1);', # sigma0(X[1..4])
+ '&psrld ($t3,$sigma1[2])',
+ '&paddd (@X[0],$t0);', # X[0..3] += sigma0(X[1..4])
+ '&psrlq ($t2,$sigma1[0])',
+ '&pxor ($t3,$t2);',
+ '&psrlq ($t2,$sigma1[1]-$sigma1[0])',
+ '&pxor ($t3,$t2)',
+ '&pshufb ($t3,$t4)', # sigma1(X[14..15])
+ '&paddd (@X[0],$t3)', # X[0..1] += sigma1(X[14..15])
+ '&pshufd ($t3,@X[0],0b01010000)',# X[16..17]
+ '&movdqa ($t2,$t3);',
+ '&psrld ($t3,$sigma1[2])',
+ '&psrlq ($t2,$sigma1[0])',
+ '&pxor ($t3,$t2);',
+ '&psrlq ($t2,$sigma1[1]-$sigma1[0])',
+ '&pxor ($t3,$t2);',
+ '&movdqa ($t2,16*2*$j."($Tbl)")',
+ '&pshufb ($t3,$t5)',
+ '&paddd (@X[0],$t3)' # X[2..3] += sigma1(X[16..17])
+ );
+}
+
+sub SSSE3_256_00_47 () {
+my $j = shift;
+my $body = shift;
+my @X = @_;
+my @insns = (&$body,&$body,&$body,&$body); # 104 instructions
+
+ if (0) {
+ foreach (Xupdate_256_SSSE3()) { # 36 instructions
+ eval;
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ }
+ } else { # squeeze extra 4% on Westmere and 19% on Atom
+ eval(shift(@insns)); #@
+ &movdqa ($t0,@X[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa ($t3,@X[3]);
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ &palignr ($t0,@X[0],$SZ); # X[1..4]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &palignr ($t3,@X[2],$SZ); # X[9..12]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ &movdqa ($t1,$t0);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa ($t2,$t0);
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ &psrld ($t0,$sigma0[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &paddd (@X[0],$t3); # X[0..3] += X[9..12]
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ &psrld ($t2,$sigma0[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pshufd ($t3,@X[3],0b11111010); # X[14..15]
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ &pslld ($t1,8*$SZ-$sigma0[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pxor ($t0,$t2);
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ &psrld ($t2,$sigma0[1]-$sigma0[0]);
+ eval(shift(@insns));
+ &pxor ($t0,$t1);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pslld ($t1,$sigma0[1]-$sigma0[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pxor ($t0,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ &movdqa ($t2,$t3);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pxor ($t0,$t1); # sigma0(X[1..4])
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrld ($t3,$sigma1[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &paddd (@X[0],$t0); # X[0..3] += sigma0(X[1..4])
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ &psrlq ($t2,$sigma1[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pxor ($t3,$t2);
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ &psrlq ($t2,$sigma1[1]-$sigma1[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pxor ($t3,$t2);
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ eval(shift(@insns));
+ #&pshufb ($t3,$t4); # sigma1(X[14..15])
+ &pshufd ($t3,$t3,0b10000000);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrldq ($t3,8);
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ &paddd (@X[0],$t3); # X[0..1] += sigma1(X[14..15])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pshufd ($t3,@X[0],0b01010000); # X[16..17]
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ &movdqa ($t2,$t3);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &psrld ($t3,$sigma1[2]);
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ &psrlq ($t2,$sigma1[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pxor ($t3,$t2);
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ &psrlq ($t2,$sigma1[1]-$sigma1[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &pxor ($t3,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns)); #@
+ #&pshufb ($t3,$t5);
+ &pshufd ($t3,$t3,0b00001000);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &movdqa ($t2,16*2*$j."($Tbl)");
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ &pslldq ($t3,8);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &paddd (@X[0],$t3); # X[2..3] += sigma1(X[16..17])
+ eval(shift(@insns)); #@
+ eval(shift(@insns));
+ eval(shift(@insns));
+ }
+ &paddd ($t2,@X[0]);
+ foreach (@insns) { eval; } # remaining instructions
+ &movdqa (16*$j."(%rsp)",$t2);
+}
+
+ for ($i=0,$j=0; $j<4; $j++) {
+ &SSSE3_256_00_47($j,\&body_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &cmpb ($SZ-1+16*2*$SZ."($Tbl)",0);
+ &jne (".Lssse3_00_47");
+
+ for ($i=0; $i<16; ) {
+ foreach(body_00_15()) { eval; }
+ }
+$code.=<<___;
+ mov $_ctx,$ctx
+ mov $a1,$A
+
+ add $SZ*0($ctx),$A
+ lea 16*$SZ($inp),$inp
+ add $SZ*1($ctx),$B
+ add $SZ*2($ctx),$C
+ add $SZ*3($ctx),$D
+ add $SZ*4($ctx),$E
+ add $SZ*5($ctx),$F
+ add $SZ*6($ctx),$G
+ add $SZ*7($ctx),$H
+
+ cmp $_end,$inp
+
+ mov $A,$SZ*0($ctx)
+ mov $B,$SZ*1($ctx)
+ mov $C,$SZ*2($ctx)
+ mov $D,$SZ*3($ctx)
+ mov $E,$SZ*4($ctx)
+ mov $F,$SZ*5($ctx)
+ mov $G,$SZ*6($ctx)
+ mov $H,$SZ*7($ctx)
+ jb .Lloop_ssse3
+
+ mov $_rsp,%rsi
+___
+$code.=<<___ if ($win64);
+ movaps 16*$SZ+32(%rsp),%xmm6
+ movaps 16*$SZ+48(%rsp),%xmm7
+ movaps 16*$SZ+64(%rsp),%xmm8
+ movaps 16*$SZ+80(%rsp),%xmm9
+___
+$code.=<<___;
+ mov (%rsi),%r15
+ mov 8(%rsi),%r14
+ mov 16(%rsi),%r13
+ mov 24(%rsi),%r12
+ mov 32(%rsi),%rbp
+ mov 40(%rsi),%rbx
+ lea 48(%rsi),%rsp
+.Lepilogue_ssse3:
+ ret
+.size ${func}_ssse3,.-${func}_ssse3
+___
+}
+
+if ($avx) {{
+######################################################################
+# XOP code path
+#
+if ($SZ==8) { # SHA512 only
+$code.=<<___;
+.type ${func}_xop,\@function,3
+.align 64
+${func}_xop:
+.Lxop_shortcut:
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ push %r14
+ push %r15
+ mov %rsp,%r11 # copy %rsp
+ shl \$4,%rdx # num*16
+ sub \$`$framesz+$win64*16*($SZ==4?4:6)`,%rsp
+ lea ($inp,%rdx,$SZ),%rdx # inp+num*16*$SZ
+ and \$-64,%rsp # align stack frame
+ mov $ctx,$_ctx # save ctx, 1st arg
+ mov $inp,$_inp # save inp, 2nd arg
+ mov %rdx,$_end # save end pointer, "3rd" arg
+ mov %r11,$_rsp # save copy of %rsp
+___
+$code.=<<___ if ($win64);
+ movaps %xmm6,16*$SZ+32(%rsp)
+ movaps %xmm7,16*$SZ+48(%rsp)
+ movaps %xmm8,16*$SZ+64(%rsp)
+ movaps %xmm9,16*$SZ+80(%rsp)
+___
+$code.=<<___ if ($win64 && $SZ>4);
+ movaps %xmm10,16*$SZ+96(%rsp)
+ movaps %xmm11,16*$SZ+112(%rsp)
+___
+$code.=<<___;
+.Lprologue_xop:
+
+ vzeroupper
+ mov $SZ*0($ctx),$A
+ mov $SZ*1($ctx),$B
+ mov $SZ*2($ctx),$C
+ mov $SZ*3($ctx),$D
+ mov $SZ*4($ctx),$E
+ mov $SZ*5($ctx),$F
+ mov $SZ*6($ctx),$G
+ mov $SZ*7($ctx),$H
+ jmp .Lloop_xop
+___
+ if ($SZ==4) { # SHA256
+ my @X = map("%xmm$_",(0..3));
+ my ($t0,$t1,$t2,$t3) = map("%xmm$_",(4..7));
+
+$code.=<<___;
+.align 16
+.Lloop_xop:
+ vmovdqa $TABLE+`$SZ*2*$rounds`(%rip),$t3
+ vmovdqu 0x00($inp),@X[0]
+ vmovdqu 0x10($inp),@X[1]
+ vmovdqu 0x20($inp),@X[2]
+ vmovdqu 0x30($inp),@X[3]
+ vpshufb $t3,@X[0],@X[0]
+ lea $TABLE(%rip),$Tbl
+ vpshufb $t3,@X[1],@X[1]
+ vpshufb $t3,@X[2],@X[2]
+ vpaddd 0x00($Tbl),@X[0],$t0
+ vpshufb $t3,@X[3],@X[3]
+ vpaddd 0x20($Tbl),@X[1],$t1
+ vpaddd 0x40($Tbl),@X[2],$t2
+ vpaddd 0x60($Tbl),@X[3],$t3
+ vmovdqa $t0,0x00(%rsp)
+ mov $A,$a1
+ vmovdqa $t1,0x10(%rsp)
+ mov $B,$a3
+ vmovdqa $t2,0x20(%rsp)
+ xor $C,$a3 # magic
+ vmovdqa $t3,0x30(%rsp)
+ mov $E,$a0
+ jmp .Lxop_00_47
+
+.align 16
+.Lxop_00_47:
+ sub \$-16*2*$SZ,$Tbl # size optimization
+___
+sub XOP_256_00_47 () {
+my $j = shift;
+my $body = shift;
+my @X = @_;
+my @insns = (&$body,&$body,&$body,&$body); # 104 instructions
+
+ &vpalignr ($t0,@X[1],@X[0],$SZ); # X[1..4]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpalignr ($t3,@X[3],@X[2],$SZ); # X[9..12]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotd ($t1,$t0,8*$SZ-$sigma0[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpsrld ($t0,$t0,$sigma0[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddd (@X[0],@X[0],$t3); # X[0..3] += X[9..12]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotd ($t2,$t1,$sigma0[1]-$sigma0[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t0,$t0,$t1);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotd ($t3,@X[3],8*$SZ-$sigma1[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t0,$t0,$t2); # sigma0(X[1..4])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpsrld ($t2,@X[3],$sigma1[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddd (@X[0],@X[0],$t0); # X[0..3] += sigma0(X[1..4])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotd ($t1,$t3,$sigma1[1]-$sigma1[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t3,$t3,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t3,$t3,$t1); # sigma1(X[14..15])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpsrldq ($t3,$t3,8);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddd (@X[0],@X[0],$t3); # X[0..1] += sigma1(X[14..15])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotd ($t3,@X[0],8*$SZ-$sigma1[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpsrld ($t2,@X[0],$sigma1[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotd ($t1,$t3,$sigma1[1]-$sigma1[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t3,$t3,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t3,$t3,$t1); # sigma1(X[16..17])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpslldq ($t3,$t3,8); # 22 instructions
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddd (@X[0],@X[0],$t3); # X[2..3] += sigma1(X[16..17])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddd ($t2,@X[0],16*2*$j."($Tbl)");
+ foreach (@insns) { eval; } # remaining instructions
+ &vmovdqa (16*$j."(%rsp)",$t2);
+}
+
+ for ($i=0,$j=0; $j<4; $j++) {
+ &XOP_256_00_47($j,\&body_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &cmpb ($SZ-1+16*2*$SZ."($Tbl)",0);
+ &jne (".Lxop_00_47");
+
+ for ($i=0; $i<16; ) {
+ foreach(body_00_15()) { eval; }
+ }
+
+ } else { # SHA512
+ my @X = map("%xmm$_",(0..7));
+ my ($t0,$t1,$t2,$t3) = map("%xmm$_",(8..11));
+
+$code.=<<___;
+.align 16
+.Lloop_xop:
+ vmovdqa $TABLE+`$SZ*2*$rounds`(%rip),$t3
+ vmovdqu 0x00($inp),@X[0]
+ lea $TABLE+0x80(%rip),$Tbl # size optimization
+ vmovdqu 0x10($inp),@X[1]
+ vmovdqu 0x20($inp),@X[2]
+ vpshufb $t3,@X[0],@X[0]
+ vmovdqu 0x30($inp),@X[3]
+ vpshufb $t3,@X[1],@X[1]
+ vmovdqu 0x40($inp),@X[4]
+ vpshufb $t3,@X[2],@X[2]
+ vmovdqu 0x50($inp),@X[5]
+ vpshufb $t3,@X[3],@X[3]
+ vmovdqu 0x60($inp),@X[6]
+ vpshufb $t3,@X[4],@X[4]
+ vmovdqu 0x70($inp),@X[7]
+ vpshufb $t3,@X[5],@X[5]
+ vpaddq -0x80($Tbl),@X[0],$t0
+ vpshufb $t3,@X[6],@X[6]
+ vpaddq -0x60($Tbl),@X[1],$t1
+ vpshufb $t3,@X[7],@X[7]
+ vpaddq -0x40($Tbl),@X[2],$t2
+ vpaddq -0x20($Tbl),@X[3],$t3
+ vmovdqa $t0,0x00(%rsp)
+ vpaddq 0x00($Tbl),@X[4],$t0
+ vmovdqa $t1,0x10(%rsp)
+ vpaddq 0x20($Tbl),@X[5],$t1
+ vmovdqa $t2,0x20(%rsp)
+ vpaddq 0x40($Tbl),@X[6],$t2
+ vmovdqa $t3,0x30(%rsp)
+ vpaddq 0x60($Tbl),@X[7],$t3
+ vmovdqa $t0,0x40(%rsp)
+ mov $A,$a1
+ vmovdqa $t1,0x50(%rsp)
+ mov $B,$a3
+ vmovdqa $t2,0x60(%rsp)
+ xor $C,$a3 # magic
+ vmovdqa $t3,0x70(%rsp)
+ mov $E,$a0
+ jmp .Lxop_00_47
+
+.align 16
+.Lxop_00_47:
+ add \$16*2*$SZ,$Tbl
+___
+sub XOP_512_00_47 () {
+my $j = shift;
+my $body = shift;
+my @X = @_;
+my @insns = (&$body,&$body); # 52 instructions
+
+ &vpalignr ($t0,@X[1],@X[0],$SZ); # X[1..2]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpalignr ($t3,@X[5],@X[4],$SZ); # X[9..10]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotq ($t1,$t0,8*$SZ-$sigma0[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpsrlq ($t0,$t0,$sigma0[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddq (@X[0],@X[0],$t3); # X[0..1] += X[9..10]
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotq ($t2,$t1,$sigma0[1]-$sigma0[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t0,$t0,$t1);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotq ($t3,@X[7],8*$SZ-$sigma1[1]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t0,$t0,$t2); # sigma0(X[1..2])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpsrlq ($t2,@X[7],$sigma1[2]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddq (@X[0],@X[0],$t0); # X[0..1] += sigma0(X[1..2])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vprotq ($t1,$t3,$sigma1[1]-$sigma1[0]);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t3,$t3,$t2);
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpxor ($t3,$t3,$t1); # sigma1(X[14..15])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddq (@X[0],@X[0],$t3); # X[0..1] += sigma1(X[14..15])
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ &vpaddq ($t2,@X[0],16*2*$j-0x80."($Tbl)");
+ foreach (@insns) { eval; } # remaining instructions
+ &vmovdqa (16*$j."(%rsp)",$t2);
+}
+
+ for ($i=0,$j=0; $j<8; $j++) {
+ &XOP_512_00_47($j,\&body_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &cmpb ($SZ-1+16*2*$SZ-0x80."($Tbl)",0);
+ &jne (".Lxop_00_47");
+
+ for ($i=0; $i<16; ) {
+ foreach(body_00_15()) { eval; }
+ }
+}
+$code.=<<___;
+ mov $_ctx,$ctx
+ mov $a1,$A
+
+ add $SZ*0($ctx),$A
+ lea 16*$SZ($inp),$inp
+ add $SZ*1($ctx),$B
+ add $SZ*2($ctx),$C
+ add $SZ*3($ctx),$D
+ add $SZ*4($ctx),$E
+ add $SZ*5($ctx),$F
+ add $SZ*6($ctx),$G
+ add $SZ*7($ctx),$H
+
+ cmp $_end,$inp
+
+ mov $A,$SZ*0($ctx)
+ mov $B,$SZ*1($ctx)
+ mov $C,$SZ*2($ctx)
+ mov $D,$SZ*3($ctx)
+ mov $E,$SZ*4($ctx)
+ mov $F,$SZ*5($ctx)
+ mov $G,$SZ*6($ctx)
+ mov $H,$SZ*7($ctx)
+ jb .Lloop_xop
+
+ mov $_rsp,%rsi
+ vzeroupper
+___
+$code.=<<___ if ($win64);
+ movaps 16*$SZ+32(%rsp),%xmm6
+ movaps 16*$SZ+48(%rsp),%xmm7
+ movaps 16*$SZ+64(%rsp),%xmm8
+ movaps 16*$SZ+80(%rsp),%xmm9
+___
+$code.=<<___ if ($win64 && $SZ>4);
+ movaps 16*$SZ+96(%rsp),%xmm10
+ movaps 16*$SZ+112(%rsp),%xmm11
+___
+$code.=<<___;
+ mov (%rsi),%r15
+ mov 8(%rsi),%r14
+ mov 16(%rsi),%r13
+ mov 24(%rsi),%r12
+ mov 32(%rsi),%rbp
+ mov 40(%rsi),%rbx
+ lea 48(%rsi),%rsp
+.Lepilogue_xop:
+ ret
+.size ${func}_xop,.-${func}_xop
+___
+}
+######################################################################
+# AVX+shrd code path
+#
+local *ror = sub { &shrd(@_[0],@_) };
+
+$code.=<<___;
+.type ${func}_avx,\@function,3
+.align 64
+${func}_avx:
+.Lavx_shortcut:
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ push %r14
+ push %r15
+ mov %rsp,%r11 # copy %rsp
+ shl \$4,%rdx # num*16
+ sub \$`$framesz+$win64*16*($SZ==4?4:6)`,%rsp
+ lea ($inp,%rdx,$SZ),%rdx # inp+num*16*$SZ
+ and \$-64,%rsp # align stack frame
+ mov $ctx,$_ctx # save ctx, 1st arg
+ mov $inp,$_inp # save inp, 2nd arg
+ mov %rdx,$_end # save end pointer, "3rd" arg
+ mov %r11,$_rsp # save copy of %rsp
+___
+$code.=<<___ if ($win64);
+ movaps %xmm6,16*$SZ+32(%rsp)
+ movaps %xmm7,16*$SZ+48(%rsp)
+ movaps %xmm8,16*$SZ+64(%rsp)
+ movaps %xmm9,16*$SZ+80(%rsp)
+___
+$code.=<<___ if ($win64 && $SZ>4);
+ movaps %xmm10,16*$SZ+96(%rsp)
+ movaps %xmm11,16*$SZ+112(%rsp)
+___
+$code.=<<___;
+.Lprologue_avx:
+
+ vzeroupper
+ mov $SZ*0($ctx),$A
+ mov $SZ*1($ctx),$B
+ mov $SZ*2($ctx),$C
+ mov $SZ*3($ctx),$D
+ mov $SZ*4($ctx),$E
+ mov $SZ*5($ctx),$F
+ mov $SZ*6($ctx),$G
+ mov $SZ*7($ctx),$H
+___
+ if ($SZ==4) { # SHA256
+ my @X = map("%xmm$_",(0..3));
+ my ($t0,$t1,$t2,$t3, $t4,$t5) = map("%xmm$_",(4..9));
+
+$code.=<<___;
+ vmovdqa $TABLE+`$SZ*2*$rounds`+32(%rip),$t4
+ vmovdqa $TABLE+`$SZ*2*$rounds`+64(%rip),$t5
+ jmp .Lloop_avx
+.align 16
+.Lloop_avx:
+ vmovdqa $TABLE+`$SZ*2*$rounds`(%rip),$t3
+ vmovdqu 0x00($inp),@X[0]
+ vmovdqu 0x10($inp),@X[1]
+ vmovdqu 0x20($inp),@X[2]
+ vmovdqu 0x30($inp),@X[3]
+ vpshufb $t3,@X[0],@X[0]
+ lea $TABLE(%rip),$Tbl
+ vpshufb $t3,@X[1],@X[1]
+ vpshufb $t3,@X[2],@X[2]
+ vpaddd 0x00($Tbl),@X[0],$t0
+ vpshufb $t3,@X[3],@X[3]
+ vpaddd 0x20($Tbl),@X[1],$t1
+ vpaddd 0x40($Tbl),@X[2],$t2
+ vpaddd 0x60($Tbl),@X[3],$t3
+ vmovdqa $t0,0x00(%rsp)
+ mov $A,$a1
+ vmovdqa $t1,0x10(%rsp)
+ mov $B,$a3
+ vmovdqa $t2,0x20(%rsp)
+ xor $C,$a3 # magic
+ vmovdqa $t3,0x30(%rsp)
+ mov $E,$a0
+ jmp .Lavx_00_47
+
+.align 16
+.Lavx_00_47:
+ sub \$-16*2*$SZ,$Tbl # size optimization
+___
+sub Xupdate_256_AVX () {
+ (
+ '&vpalignr ($t0,@X[1],@X[0],$SZ)', # X[1..4]
+ '&vpalignr ($t3,@X[3],@X[2],$SZ)', # X[9..12]
+ '&vpsrld ($t2,$t0,$sigma0[0]);',
+ '&vpaddd (@X[0],@X[0],$t3)', # X[0..3] += X[9..12]
+ '&vpsrld ($t3,$t0,$sigma0[2])',
+ '&vpslld ($t1,$t0,8*$SZ-$sigma0[1]);',
+ '&vpxor ($t0,$t3,$t2)',
+ '&vpshufd ($t3,@X[3],0b11111010)',# X[14..15]
+ '&vpsrld ($t2,$t2,$sigma0[1]-$sigma0[0]);',
+ '&vpxor ($t0,$t0,$t1)',
+ '&vpslld ($t1,$t1,$sigma0[1]-$sigma0[0]);',
+ '&vpxor ($t0,$t0,$t2)',
+ '&vpsrld ($t2,$t3,$sigma1[2]);',
+ '&vpxor ($t0,$t0,$t1)', # sigma0(X[1..4])
+ '&vpsrlq ($t3,$t3,$sigma1[0]);',
+ '&vpaddd (@X[0],@X[0],$t0)', # X[0..3] += sigma0(X[1..4])
+ '&vpxor ($t2,$t2,$t3);',
+ '&vpsrlq ($t3,$t3,$sigma1[1]-$sigma1[0])',
+ '&vpxor ($t2,$t2,$t3)',
+ '&vpshufb ($t2,$t2,$t4)', # sigma1(X[14..15])
+ '&vpaddd (@X[0],@X[0],$t2)', # X[0..1] += sigma1(X[14..15])
+ '&vpshufd ($t3,@X[0],0b01010000)',# X[16..17]
+ '&vpsrld ($t2,$t3,$sigma1[2])',
+ '&vpsrlq ($t3,$t3,$sigma1[0])',
+ '&vpxor ($t2,$t2,$t3);',
+ '&vpsrlq ($t3,$t3,$sigma1[1]-$sigma1[0])',
+ '&vpxor ($t2,$t2,$t3)',
+ '&vpshufb ($t2,$t2,$t5)',
+ '&vpaddd (@X[0],@X[0],$t2)' # X[2..3] += sigma1(X[16..17])
+ );
+}
+
+sub AVX_256_00_47 () {
+my $j = shift;
+my $body = shift;
+my @X = @_;
+my @insns = (&$body,&$body,&$body,&$body); # 104 instructions
+
+ foreach (Xupdate_256_AVX()) { # 29 instructions
+ eval;
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ }
+ &vpaddd ($t2,@X[0],16*2*$j."($Tbl)");
+ foreach (@insns) { eval; } # remaining instructions
+ &vmovdqa (16*$j."(%rsp)",$t2);
+}
+
+ for ($i=0,$j=0; $j<4; $j++) {
+ &AVX_256_00_47($j,\&body_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &cmpb ($SZ-1+16*2*$SZ."($Tbl)",0);
+ &jne (".Lavx_00_47");
+
+ for ($i=0; $i<16; ) {
+ foreach(body_00_15()) { eval; }
+ }
+
+ } else { # SHA512
+ my @X = map("%xmm$_",(0..7));
+ my ($t0,$t1,$t2,$t3) = map("%xmm$_",(8..11));
+
+$code.=<<___;
+ jmp .Lloop_avx
+.align 16
+.Lloop_avx:
+ vmovdqa $TABLE+`$SZ*2*$rounds`(%rip),$t3
+ vmovdqu 0x00($inp),@X[0]
+ lea $TABLE+0x80(%rip),$Tbl # size optimization
+ vmovdqu 0x10($inp),@X[1]
+ vmovdqu 0x20($inp),@X[2]
+ vpshufb $t3,@X[0],@X[0]
+ vmovdqu 0x30($inp),@X[3]
+ vpshufb $t3,@X[1],@X[1]
+ vmovdqu 0x40($inp),@X[4]
+ vpshufb $t3,@X[2],@X[2]
+ vmovdqu 0x50($inp),@X[5]
+ vpshufb $t3,@X[3],@X[3]
+ vmovdqu 0x60($inp),@X[6]
+ vpshufb $t3,@X[4],@X[4]
+ vmovdqu 0x70($inp),@X[7]
+ vpshufb $t3,@X[5],@X[5]
+ vpaddq -0x80($Tbl),@X[0],$t0
+ vpshufb $t3,@X[6],@X[6]
+ vpaddq -0x60($Tbl),@X[1],$t1
+ vpshufb $t3,@X[7],@X[7]
+ vpaddq -0x40($Tbl),@X[2],$t2
+ vpaddq -0x20($Tbl),@X[3],$t3
+ vmovdqa $t0,0x00(%rsp)
+ vpaddq 0x00($Tbl),@X[4],$t0
+ vmovdqa $t1,0x10(%rsp)
+ vpaddq 0x20($Tbl),@X[5],$t1
+ vmovdqa $t2,0x20(%rsp)
+ vpaddq 0x40($Tbl),@X[6],$t2
+ vmovdqa $t3,0x30(%rsp)
+ vpaddq 0x60($Tbl),@X[7],$t3
+ vmovdqa $t0,0x40(%rsp)
+ mov $A,$a1
+ vmovdqa $t1,0x50(%rsp)
+ mov $B,$a3
+ vmovdqa $t2,0x60(%rsp)
+ xor $C,$a3 # magic
+ vmovdqa $t3,0x70(%rsp)
+ mov $E,$a0
+ jmp .Lavx_00_47
+
+.align 16
+.Lavx_00_47:
+ add \$16*2*$SZ,$Tbl
+___
+sub Xupdate_512_AVX () {
+ (
+ '&vpalignr ($t0,@X[1],@X[0],$SZ)', # X[1..2]
+ '&vpalignr ($t3,@X[5],@X[4],$SZ)', # X[9..10]
+ '&vpsrlq ($t2,$t0,$sigma0[0])',
+ '&vpaddq (@X[0],@X[0],$t3);', # X[0..1] += X[9..10]
+ '&vpsrlq ($t3,$t0,$sigma0[2])',
+ '&vpsllq ($t1,$t0,8*$SZ-$sigma0[1]);',
+ '&vpxor ($t0,$t3,$t2)',
+ '&vpsrlq ($t2,$t2,$sigma0[1]-$sigma0[0]);',
+ '&vpxor ($t0,$t0,$t1)',
+ '&vpsllq ($t1,$t1,$sigma0[1]-$sigma0[0]);',
+ '&vpxor ($t0,$t0,$t2)',
+ '&vpsrlq ($t3,@X[7],$sigma1[2]);',
+ '&vpxor ($t0,$t0,$t1)', # sigma0(X[1..2])
+ '&vpsllq ($t2,@X[7],8*$SZ-$sigma1[1]);',
+ '&vpaddq (@X[0],@X[0],$t0)', # X[0..1] += sigma0(X[1..2])
+ '&vpsrlq ($t1,@X[7],$sigma1[0]);',
+ '&vpxor ($t3,$t3,$t2)',
+ '&vpsllq ($t2,$t2,$sigma1[1]-$sigma1[0]);',
+ '&vpxor ($t3,$t3,$t1)',
+ '&vpsrlq ($t1,$t1,$sigma1[1]-$sigma1[0]);',
+ '&vpxor ($t3,$t3,$t2)',
+ '&vpxor ($t3,$t3,$t1)', # sigma1(X[14..15])
+ '&vpaddq (@X[0],@X[0],$t3)', # X[0..1] += sigma1(X[14..15])
+ );
+}
+
+sub AVX_512_00_47 () {
+my $j = shift;
+my $body = shift;
+my @X = @_;
+my @insns = (&$body,&$body); # 52 instructions
+
+ foreach (Xupdate_512_AVX()) { # 23 instructions
+ eval;
+ eval(shift(@insns));
+ eval(shift(@insns));
+ }
+ &vpaddq ($t2,@X[0],16*2*$j-0x80."($Tbl)");
+ foreach (@insns) { eval; } # remaining instructions
+ &vmovdqa (16*$j."(%rsp)",$t2);
+}
+
+ for ($i=0,$j=0; $j<8; $j++) {
+ &AVX_512_00_47($j,\&body_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &cmpb ($SZ-1+16*2*$SZ-0x80."($Tbl)",0);
+ &jne (".Lavx_00_47");
+
+ for ($i=0; $i<16; ) {
+ foreach(body_00_15()) { eval; }
+ }
+}
+$code.=<<___;
+ mov $_ctx,$ctx
+ mov $a1,$A
+
+ add $SZ*0($ctx),$A
+ lea 16*$SZ($inp),$inp
+ add $SZ*1($ctx),$B
+ add $SZ*2($ctx),$C
+ add $SZ*3($ctx),$D
+ add $SZ*4($ctx),$E
+ add $SZ*5($ctx),$F
+ add $SZ*6($ctx),$G
+ add $SZ*7($ctx),$H
+
+ cmp $_end,$inp
+
+ mov $A,$SZ*0($ctx)
+ mov $B,$SZ*1($ctx)
+ mov $C,$SZ*2($ctx)
+ mov $D,$SZ*3($ctx)
+ mov $E,$SZ*4($ctx)
+ mov $F,$SZ*5($ctx)
+ mov $G,$SZ*6($ctx)
+ mov $H,$SZ*7($ctx)
+ jb .Lloop_avx
+
+ mov $_rsp,%rsi
+ vzeroupper
+___
+$code.=<<___ if ($win64);
+ movaps 16*$SZ+32(%rsp),%xmm6
+ movaps 16*$SZ+48(%rsp),%xmm7
+ movaps 16*$SZ+64(%rsp),%xmm8
+ movaps 16*$SZ+80(%rsp),%xmm9
+___
+$code.=<<___ if ($win64 && $SZ>4);
+ movaps 16*$SZ+96(%rsp),%xmm10
+ movaps 16*$SZ+112(%rsp),%xmm11
+___
+$code.=<<___;
+ mov (%rsi),%r15
+ mov 8(%rsi),%r14
+ mov 16(%rsi),%r13
+ mov 24(%rsi),%r12
+ mov 32(%rsi),%rbp
+ mov 40(%rsi),%rbx
+ lea 48(%rsi),%rsp
+.Lepilogue_avx:
+ ret
+.size ${func}_avx,.-${func}_avx
+___
+
+if ($avx>1) {{
+######################################################################
+# AVX2+BMI code path
+#
+my $a5=$SZ==4?"%esi":"%rsi"; # zap $inp
+my $PUSH8=8*2*$SZ;
+use integer;
+
+sub bodyx_00_15 () {
+ # at start $a1 should be zero, $a3 should hold $b^$c, and $a4 a copy of $f
+ (
+ '($a,$b,$c,$d,$e,$f,$g,$h)=@ROT;'.
+
+ '&add ($h,(32*($i/(16/$SZ))+$SZ*($i%(16/$SZ)))%$PUSH8.$base)', # h+=X[i]+K[i]
+ '&and ($a4,$e)', # f&e
+ '&rorx ($a0,$e,$Sigma1[2])',
+ '&rorx ($a2,$e,$Sigma1[1])',
+
+ '&lea ($a,"($a,$a1)")', # h+=Sigma0(a) from the past
+ '&lea ($h,"($h,$a4)")',
+ '&andn ($a4,$e,$g)', # ~e&g
+ '&xor ($a0,$a2)',
+
+ '&rorx ($a1,$e,$Sigma1[0])',
+ '&lea ($h,"($h,$a4)")', # h+=Ch(e,f,g)=(e&f)+(~e&g)
+ '&xor ($a0,$a1)', # Sigma1(e)
+ '&mov ($a2,$a)',
+
+ '&rorx ($a4,$a,$Sigma0[2])',
+ '&lea ($h,"($h,$a0)")', # h+=Sigma1(e)
+ '&xor ($a2,$b)', # a^b, b^c in next round
+ '&rorx ($a1,$a,$Sigma0[1])',
+
+ '&rorx ($a0,$a,$Sigma0[0])',
+ '&lea ($d,"($d,$h)")', # d+=h
+ '&and ($a3,$a2)', # (b^c)&(a^b)
+ '&xor ($a1,$a4)',
+
+ '&xor ($a3,$b)', # Maj(a,b,c)=Ch(a^b,c,b)
+ '&xor ($a1,$a0)', # Sigma0(a)
+ '&lea ($h,"($h,$a3)");'. # h+=Maj(a,b,c)
+ '&mov ($a4,$e)', # copy of f in future
+
+ '($a2,$a3) = ($a3,$a2); unshift(@ROT,pop(@ROT)); $i++;'
+ );
+ # and at the finish one still has to do $a+=$a1
+}
+
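A minimal scalar sketch of the Ch flavor used above: with BMI1's andn the two terms of Ch(e,f,g)=(e&f)+(~e&g) are computed independently, and plain addition is safe because the two terms can never have a set bit in common (one requires e=1, the other e=0), so no carries occur; rorx likewise rotates without touching the flags. The helper name is made up for illustration.

    #include <stdint.h>

    static inline uint64_t ch_bmi_style(uint64_t e, uint64_t f, uint64_t g)
    {
        uint64_t ef  = e & f;      /* and             */
        uint64_t neg = ~e & g;     /* andn with BMI1  */
        return ef + neg;           /* == (e&f) ^ (~e&g): the terms are disjoint */
    }
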
+$code.=<<___;
+.type ${func}_avx2,\@function,3
+.align 64
+${func}_avx2:
+.Lavx2_shortcut:
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ push %r14
+ push %r15
+ mov %rsp,%r11 # copy %rsp
+ sub \$`2*$SZ*$rounds+4*8+$win64*16*($SZ==4?4:6)`,%rsp
+ shl \$4,%rdx # num*16
+ and \$-256*$SZ,%rsp # align stack frame
+ lea ($inp,%rdx,$SZ),%rdx # inp+num*16*$SZ
+ add \$`2*$SZ*($rounds-8)`,%rsp
+ mov $ctx,$_ctx # save ctx, 1st arg
+ mov $inp,$_inp # save inp, 2nd arg
+ mov %rdx,$_end # save end pointer, "3rd" arg
+ mov %r11,$_rsp # save copy of %rsp
+___
+$code.=<<___ if ($win64);
+ movaps %xmm6,16*$SZ+32(%rsp)
+ movaps %xmm7,16*$SZ+48(%rsp)
+ movaps %xmm8,16*$SZ+64(%rsp)
+ movaps %xmm9,16*$SZ+80(%rsp)
+___
+$code.=<<___ if ($win64 && $SZ>4);
+ movaps %xmm10,16*$SZ+96(%rsp)
+ movaps %xmm11,16*$SZ+112(%rsp)
+___
+$code.=<<___;
+.Lprologue_avx2:
+
+ vzeroupper
+ sub \$-16*$SZ,$inp # inp++, size optimization
+ mov $SZ*0($ctx),$A
+ mov $inp,%r12 # borrow $T1
+ mov $SZ*1($ctx),$B
+ cmp %rdx,$inp # $_end
+ mov $SZ*2($ctx),$C
+ cmove %rsp,%r12 # next block or random data
+ mov $SZ*3($ctx),$D
+ mov $SZ*4($ctx),$E
+ mov $SZ*5($ctx),$F
+ mov $SZ*6($ctx),$G
+ mov $SZ*7($ctx),$H
+___
+ if ($SZ==4) { # SHA256
+ my @X = map("%ymm$_",(0..3));
+ my ($t0,$t1,$t2,$t3, $t4,$t5) = map("%ymm$_",(4..9));
+
+$code.=<<___;
+ vmovdqa $TABLE+`$SZ*2*$rounds`+32(%rip),$t4
+ vmovdqa $TABLE+`$SZ*2*$rounds`+64(%rip),$t5
+ jmp .Loop_avx2
+.align 16
+.Loop_avx2:
+ vmovdqa $TABLE+`$SZ*2*$rounds`(%rip),$t3
+ vmovdqu -16*$SZ+0($inp),%xmm0
+ vmovdqu -16*$SZ+16($inp),%xmm1
+ vmovdqu -16*$SZ+32($inp),%xmm2
+ vmovdqu -16*$SZ+48($inp),%xmm3
+ #mov $inp,$_inp # offload $inp
+ vinserti128 \$1,(%r12),@X[0],@X[0]
+ vinserti128 \$1,16(%r12),@X[1],@X[1]
+ vpshufb $t3,@X[0],@X[0]
+ vinserti128 \$1,32(%r12),@X[2],@X[2]
+ vpshufb $t3,@X[1],@X[1]
+ vinserti128 \$1,48(%r12),@X[3],@X[3]
+
+ lea $TABLE(%rip),$Tbl
+ vpshufb $t3,@X[2],@X[2]
+ vpaddd 0x00($Tbl),@X[0],$t0
+ vpshufb $t3,@X[3],@X[3]
+ vpaddd 0x20($Tbl),@X[1],$t1
+ vpaddd 0x40($Tbl),@X[2],$t2
+ vpaddd 0x60($Tbl),@X[3],$t3
+ vmovdqa $t0,0x00(%rsp)
+ xor $a1,$a1
+ vmovdqa $t1,0x20(%rsp)
+ lea -$PUSH8(%rsp),%rsp
+ mov $B,$a3
+ vmovdqa $t2,0x00(%rsp)
+ xor $C,$a3 # magic
+ vmovdqa $t3,0x20(%rsp)
+ mov $F,$a4
+ sub \$-16*2*$SZ,$Tbl # size optimization
+ jmp .Lavx2_00_47
+
+.align 16
+.Lavx2_00_47:
+___
+
+sub AVX2_256_00_47 () {
+my $j = shift;
+my $body = shift;
+my @X = @_;
+my @insns = (&$body,&$body,&$body,&$body); # 96 instructions
+my $base = "+2*$PUSH8(%rsp)";
+
+ &lea ("%rsp","-$PUSH8(%rsp)") if (($j%2)==0);
+ foreach (Xupdate_256_AVX()) { # 29 instructions
+ eval;
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ }
+ &vpaddd ($t2,@X[0],16*2*$j."($Tbl)");
+ foreach (@insns) { eval; } # remaining instructions
+ &vmovdqa ((32*$j)%$PUSH8."(%rsp)",$t2);
+}
+
+ for ($i=0,$j=0; $j<4; $j++) {
+ &AVX2_256_00_47($j,\&bodyx_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &lea ($Tbl,16*2*$SZ."($Tbl)");
+ &cmpb (($SZ-1)."($Tbl)",0);
+ &jne (".Lavx2_00_47");
+
+ for ($i=0; $i<16; ) {
+ my $base=$i<8?"+$PUSH8(%rsp)":"(%rsp)";
+ foreach(bodyx_00_15()) { eval; }
+ }
+ } else { # SHA512
+ my @X = map("%ymm$_",(0..7));
+ my ($t0,$t1,$t2,$t3) = map("%ymm$_",(8..11));
+
+$code.=<<___;
+ jmp .Loop_avx2
+.align 16
+.Loop_avx2:
+ vmovdqu -16*$SZ($inp),%xmm0
+ vmovdqu -16*$SZ+16($inp),%xmm1
+ vmovdqu -16*$SZ+32($inp),%xmm2
+ lea $TABLE+0x80(%rip),$Tbl # size optimization
+ vmovdqu -16*$SZ+48($inp),%xmm3
+ vmovdqu -16*$SZ+64($inp),%xmm4
+ vmovdqu -16*$SZ+80($inp),%xmm5
+ vmovdqu -16*$SZ+96($inp),%xmm6
+ vmovdqu -16*$SZ+112($inp),%xmm7
+ #mov $inp,$_inp # offload $inp
+ vmovdqa `$SZ*2*$rounds-0x80`($Tbl),$t2
+ vinserti128 \$1,(%r12),@X[0],@X[0]
+ vinserti128 \$1,16(%r12),@X[1],@X[1]
+ vpshufb $t2,@X[0],@X[0]
+ vinserti128 \$1,32(%r12),@X[2],@X[2]
+ vpshufb $t2,@X[1],@X[1]
+ vinserti128 \$1,48(%r12),@X[3],@X[3]
+ vpshufb $t2,@X[2],@X[2]
+ vinserti128 \$1,64(%r12),@X[4],@X[4]
+ vpshufb $t2,@X[3],@X[3]
+ vinserti128 \$1,80(%r12),@X[5],@X[5]
+ vpshufb $t2,@X[4],@X[4]
+ vinserti128 \$1,96(%r12),@X[6],@X[6]
+ vpshufb $t2,@X[5],@X[5]
+ vinserti128 \$1,112(%r12),@X[7],@X[7]
+
+ vpaddq -0x80($Tbl),@X[0],$t0
+ vpshufb $t2,@X[6],@X[6]
+ vpaddq -0x60($Tbl),@X[1],$t1
+ vpshufb $t2,@X[7],@X[7]
+ vpaddq -0x40($Tbl),@X[2],$t2
+ vpaddq -0x20($Tbl),@X[3],$t3
+ vmovdqa $t0,0x00(%rsp)
+ vpaddq 0x00($Tbl),@X[4],$t0
+ vmovdqa $t1,0x20(%rsp)
+ vpaddq 0x20($Tbl),@X[5],$t1
+ vmovdqa $t2,0x40(%rsp)
+ vpaddq 0x40($Tbl),@X[6],$t2
+ vmovdqa $t3,0x60(%rsp)
+ lea -$PUSH8(%rsp),%rsp
+ vpaddq 0x60($Tbl),@X[7],$t3
+ vmovdqa $t0,0x00(%rsp)
+ xor $a1,$a1
+ vmovdqa $t1,0x20(%rsp)
+ mov $B,$a3
+ vmovdqa $t2,0x40(%rsp)
+ xor $C,$a3 # magic
+ vmovdqa $t3,0x60(%rsp)
+ mov $F,$a4
+ add \$16*2*$SZ,$Tbl
+ jmp .Lavx2_00_47
+
+.align 16
+.Lavx2_00_47:
+___
+
+sub AVX2_512_00_47 () {
+my $j = shift;
+my $body = shift;
+my @X = @_;
+my @insns = (&$body,&$body); # 48 instructions
+my $base = "+2*$PUSH8(%rsp)";
+
+ &lea ("%rsp","-$PUSH8(%rsp)") if (($j%4)==0);
+ foreach (Xupdate_512_AVX()) { # 23 instructions
+ eval;
+ if ($_ !~ /\;$/) {
+ eval(shift(@insns));
+ eval(shift(@insns));
+ eval(shift(@insns));
+ }
+ }
+ &vpaddq ($t2,@X[0],16*2*$j-0x80."($Tbl)");
+ foreach (@insns) { eval; } # remaining instructions
+ &vmovdqa ((32*$j)%$PUSH8."(%rsp)",$t2);
+}
+
+ for ($i=0,$j=0; $j<8; $j++) {
+ &AVX2_512_00_47($j,\&bodyx_00_15,@X);
+ push(@X,shift(@X)); # rotate(@X)
+ }
+ &lea ($Tbl,16*2*$SZ."($Tbl)");
+ &cmpb (($SZ-1-0x80)."($Tbl)",0);
+ &jne (".Lavx2_00_47");
+
+ for ($i=0; $i<16; ) {
+ my $base=$i<8?"+$PUSH8(%rsp)":"(%rsp)";
+ foreach(bodyx_00_15()) { eval; }
+ }
+}
+$code.=<<___;
+ mov `2*$SZ*$rounds`(%rsp),$ctx # $_ctx
+ add $a1,$A
+ #mov `2*$SZ*$rounds+8`(%rsp),$inp # $_inp
+ lea `2*$SZ*($rounds-8)`(%rsp),$Tbl
+
+ add $SZ*0($ctx),$A
+ add $SZ*1($ctx),$B
+ add $SZ*2($ctx),$C
+ add $SZ*3($ctx),$D
+ add $SZ*4($ctx),$E
+ add $SZ*5($ctx),$F
+ add $SZ*6($ctx),$G
+ add $SZ*7($ctx),$H
+
+ mov $A,$SZ*0($ctx)
+ mov $B,$SZ*1($ctx)
+ mov $C,$SZ*2($ctx)
+ mov $D,$SZ*3($ctx)
+ mov $E,$SZ*4($ctx)
+ mov $F,$SZ*5($ctx)
+ mov $G,$SZ*6($ctx)
+ mov $H,$SZ*7($ctx)
+
+ cmp `$PUSH8+2*8`($Tbl),$inp # $_end
+ je .Ldone_avx2
+
+ xor $a1,$a1
+ mov $B,$a3
+ xor $C,$a3 # magic
+ mov $F,$a4
+ jmp .Lower_avx2
+.align 16
+.Lower_avx2:
+___
+ for ($i=0; $i<8; ) {
+ my $base="+16($Tbl)";
+ foreach(bodyx_00_15()) { eval; }
+ }
+$code.=<<___;
+ lea -$PUSH8($Tbl),$Tbl
+ cmp %rsp,$Tbl
+ jae .Lower_avx2
+
+ mov `2*$SZ*$rounds`(%rsp),$ctx # $_ctx
+ add $a1,$A
+ #mov `2*$SZ*$rounds+8`(%rsp),$inp # $_inp
+ lea `2*$SZ*($rounds-8)`(%rsp),%rsp
+
+ add $SZ*0($ctx),$A
+ add $SZ*1($ctx),$B
+ add $SZ*2($ctx),$C
+ add $SZ*3($ctx),$D
+ add $SZ*4($ctx),$E
+ add $SZ*5($ctx),$F
+ lea `2*16*$SZ`($inp),$inp # inp+=2
+ add $SZ*6($ctx),$G
+ mov $inp,%r12
+ add $SZ*7($ctx),$H
+ cmp $_end,$inp
+
+ mov $A,$SZ*0($ctx)
+ cmove %rsp,%r12 # next block or stale data
+ mov $B,$SZ*1($ctx)
+ mov $C,$SZ*2($ctx)
+ mov $D,$SZ*3($ctx)
+ mov $E,$SZ*4($ctx)
+ mov $F,$SZ*5($ctx)
+ mov $G,$SZ*6($ctx)
+ mov $H,$SZ*7($ctx)
+
+ jbe .Loop_avx2
+ lea (%rsp),$Tbl
+
+.Ldone_avx2:
+ lea ($Tbl),%rsp
+ mov $_rsp,%rsi
+ vzeroupper
+___
+$code.=<<___ if ($win64);
+ movaps 16*$SZ+32(%rsp),%xmm6
+ movaps 16*$SZ+48(%rsp),%xmm7
+ movaps 16*$SZ+64(%rsp),%xmm8
+ movaps 16*$SZ+80(%rsp),%xmm9
+___
+$code.=<<___ if ($win64 && $SZ>4);
+ movaps 16*$SZ+96(%rsp),%xmm10
+ movaps 16*$SZ+112(%rsp),%xmm11
+___
+$code.=<<___;
+ mov (%rsi),%r15
+ mov 8(%rsi),%r14
+ mov 16(%rsi),%r13
+ mov 24(%rsi),%r12
+ mov 32(%rsi),%rbp
+ mov 40(%rsi),%rbx
+ lea 48(%rsi),%rsp
+.Lepilogue_avx2:
+ ret
+.size ${func}_avx2,.-${func}_avx2
+___
+}}
+}}}}}
+
+# EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
+# CONTEXT *context,DISPATCHER_CONTEXT *disp)
+if ($win64) {
+$rec="%rcx";
+$frame="%rdx";
+$context="%r8";
+$disp="%r9";
+
+$code.=<<___;
+.extern __imp_RtlVirtualUnwind
+.type se_handler,\@abi-omnipotent
+.align 16
+se_handler:
+ push %rsi
+ push %rdi
+ push %rbx
+ push %rbp
+ push %r12
+ push %r13
+ push %r14
+ push %r15
+ pushfq
+ sub \$64,%rsp
+
+ mov 120($context),%rax # pull context->Rax
+ mov 248($context),%rbx # pull context->Rip
+
+ mov 8($disp),%rsi # disp->ImageBase
+ mov 56($disp),%r11 # disp->HandlerData
+
+ mov 0(%r11),%r10d # HandlerData[0]
+ lea (%rsi,%r10),%r10 # prologue label
+ cmp %r10,%rbx # context->Rip<prologue label
+ jb .Lin_prologue
+
+ mov 152($context),%rax # pull context->Rsp
+
+ mov 4(%r11),%r10d # HandlerData[1]
+ lea (%rsi,%r10),%r10 # epilogue label
+ cmp %r10,%rbx # context->Rip>=epilogue label
+ jae .Lin_prologue
+___
+$code.=<<___ if ($avx>1);
+ lea .Lavx2_shortcut(%rip),%r10
+ cmp %r10,%rbx # context->Rip<avx2_shortcut
+ jb .Lnot_in_avx2
+
+ and \$-256*$SZ,%rax
+ add \$`2*$SZ*($rounds-8)`,%rax
+.Lnot_in_avx2:
+___
+$code.=<<___;
+ mov %rax,%rsi # put aside Rsp
+ mov 16*$SZ+3*8(%rax),%rax # pull $_rsp
+ lea 48(%rax),%rax
+
+ mov -8(%rax),%rbx
+ mov -16(%rax),%rbp
+ mov -24(%rax),%r12
+ mov -32(%rax),%r13
+ mov -40(%rax),%r14
+ mov -48(%rax),%r15
+ mov %rbx,144($context) # restore context->Rbx
+ mov %rbp,160($context) # restore context->Rbp
+ mov %r12,216($context) # restore context->R12
+ mov %r13,224($context) # restore context->R13
+ mov %r14,232($context) # restore context->R14
+ mov %r15,240($context) # restore context->R15
+
+ lea .Lepilogue(%rip),%r10
+ cmp %r10,%rbx
+ jb .Lin_prologue # non-AVX code
+
+ lea 16*$SZ+4*8(%rsi),%rsi # Xmm6- save area
+ lea 512($context),%rdi # &context.Xmm6
+ mov \$`$SZ==4?8:12`,%ecx
+ .long 0xa548f3fc # cld; rep movsq
+
+.Lin_prologue:
+ mov 8(%rax),%rdi
+ mov 16(%rax),%rsi
+ mov %rax,152($context) # restore context->Rsp
+ mov %rsi,168($context) # restore context->Rsi
+ mov %rdi,176($context) # restore context->Rdi
+
+ mov 40($disp),%rdi # disp->ContextRecord
+ mov $context,%rsi # context
+ mov \$154,%ecx # sizeof(CONTEXT)
+ .long 0xa548f3fc # cld; rep movsq
+
+ mov $disp,%rsi
+ xor %rcx,%rcx # arg1, UNW_FLAG_NHANDLER
+ mov 8(%rsi),%rdx # arg2, disp->ImageBase
+ mov 0(%rsi),%r8 # arg3, disp->ControlPc
+ mov 16(%rsi),%r9 # arg4, disp->FunctionEntry
+ mov 40(%rsi),%r10 # disp->ContextRecord
+ lea 56(%rsi),%r11 # &disp->HandlerData
+ lea 24(%rsi),%r12 # &disp->EstablisherFrame
+ mov %r10,32(%rsp) # arg5
+ mov %r11,40(%rsp) # arg6
+ mov %r12,48(%rsp) # arg7
+ mov %rcx,56(%rsp) # arg8, (NULL)
+ call *__imp_RtlVirtualUnwind(%rip)
+
+ mov \$1,%eax # ExceptionContinueSearch
+ add \$64,%rsp
+ popfq
+ pop %r15
+ pop %r14
+ pop %r13
+ pop %r12
+ pop %rbp
+ pop %rbx
+ pop %rdi
+ pop %rsi
+ ret
+.size se_handler,.-se_handler
+
+.section .pdata
+.align 4
+ .rva .LSEH_begin_$func
+ .rva .LSEH_end_$func
+ .rva .LSEH_info_$func
+___
+$code.=<<___ if ($SZ==4);
+ .rva .LSEH_begin_${func}_ssse3
+ .rva .LSEH_end_${func}_ssse3
+ .rva .LSEH_info_${func}_ssse3
+___
+$code.=<<___ if ($avx && $SZ==8);
+ .rva .LSEH_begin_${func}_xop
+ .rva .LSEH_end_${func}_xop
+ .rva .LSEH_info_${func}_xop
+___
+$code.=<<___ if ($avx);
+ .rva .LSEH_begin_${func}_avx
+ .rva .LSEH_end_${func}_avx
+ .rva .LSEH_info_${func}_avx
+___
+$code.=<<___ if ($avx>1);
+ .rva .LSEH_begin_${func}_avx2
+ .rva .LSEH_end_${func}_avx2
+ .rva .LSEH_info_${func}_avx2
+___
+$code.=<<___;
+.section .xdata
+.align 8
+.LSEH_info_$func:
+ .byte 9,0,0,0
+ .rva se_handler
+ .rva .Lprologue,.Lepilogue # HandlerData[]
+___
+$code.=<<___ if ($SZ==4);
+.LSEH_info_${func}_ssse3:
+ .byte 9,0,0,0
+ .rva se_handler
+ .rva .Lprologue_ssse3,.Lepilogue_ssse3 # HandlerData[]
+___
+$code.=<<___ if ($avx && $SZ==8);
+.LSEH_info_${func}_xop:
+ .byte 9,0,0,0
+ .rva se_handler
+ .rva .Lprologue_xop,.Lepilogue_xop # HandlerData[]
+___
+$code.=<<___ if ($avx);
+.LSEH_info_${func}_avx:
+ .byte 9,0,0,0
+ .rva se_handler
+ .rva .Lprologue_avx,.Lepilogue_avx # HandlerData[]
+___
+$code.=<<___ if ($avx>1);
+.LSEH_info_${func}_avx2:
+ .byte 9,0,0,0
+ .rva se_handler
+ .rva .Lprologue_avx2,.Lepilogue_avx2 # HandlerData[]
+___
+}
+
+$code =~ s/\`([^\`]*)\`/eval $1/gem;
+print $code;
+close STDOUT;
diff --git a/lib/accelerated/x86/Makefile.am b/lib/accelerated/x86/Makefile.am
index 8edcbbb5f0..e231103dad 100644
--- a/lib/accelerated/x86/Makefile.am
+++ b/lib/accelerated/x86/Makefile.am
@@ -31,41 +31,44 @@ if ENABLE_MINITASN1
AM_CPPFLAGS += -I$(srcdir)/../../minitasn1
endif
-EXTRA_DIST = README license.txt
+EXTRA_DIST = README license.txt files.mk
noinst_LTLIBRARIES = libx86.la
-libx86_la_SOURCES = sha-padlock.c hmac-padlock.c aes-x86.c aes-padlock.c aes-gcm-padlock.c aes-padlock.h aes-x86.h x86.h sha-padlock.h
+libx86_la_SOURCES = sha-padlock.c hmac-padlock.c aes-x86.c aes-padlock.c aes-gcm-padlock.c \
+ aes-padlock.h aes-x86.h x86.h sha-padlock.h sha-x86.c sha-x86.h hmac-x86.c
+
+include files.mk
if ASM_X86_64
AM_CFLAGS += -DASM_X86_64 -DASM_X86
libx86_la_SOURCES += aes-gcm-x86.c
if WINDOWS
-libx86_la_SOURCES += coff/appro-aes-x86-64-coff.s coff/padlock-x86-64-coff.s coff/cpuid-x86-64-coff.s coff/appro-aes-gcm-x86-64-coff.s
+libx86_la_SOURCES += $(X86_64_FILES_COFF)
endif
if MACOSX
-libx86_la_SOURCES += macosx/appro-aes-x86-64-macosx.s macosx/padlock-x86-64-macosx.s macosx/cpuid-x86-64-macosx.s macosx/appro-aes-gcm-x86-64-macosx.s
+libx86_la_SOURCES += $(X86_64_FILES_MACOSX)
endif
if ELF
-libx86_la_SOURCES += elf/appro-aes-x86-64.s elf/appro-aes-gcm-x86-64.s elf/padlock-x86-64.s elf/cpuid-x86-64.s
+libx86_la_SOURCES += $(X86_64_FILES_ELF)
endif
else #ASM_X86_64
AM_CFLAGS += -DASM_X86_32 -DASM_X86
if WINDOWS
-libx86_la_SOURCES += coff/appro-aes-x86-coff.s coff/padlock-x86-coff.s coff/cpuid-x86-coff.s
+libx86_la_SOURCES += $(X86_FILES_COFF)
endif
if MACOSX
-libx86_la_SOURCES += macosx/appro-aes-x86-macosx.s macosx/padlock-x86-macosx.s macosx/cpuid-x86-macosx.s
+libx86_la_SOURCES += $(X86_FILES_MACOSX)
endif
if ELF
-libx86_la_SOURCES += elf/appro-aes-x86.s elf/padlock-x86.s elf/cpuid-x86.s
+libx86_la_SOURCES += $(X86_FILES_ELF)
endif
endif #ASM_X86_64
diff --git a/lib/accelerated/x86/aes-padlock.h b/lib/accelerated/x86/aes-padlock.h
index cd5d437c8f..851b40b6f9 100644
--- a/lib/accelerated/x86/aes-padlock.h
+++ b/lib/accelerated/x86/aes-padlock.h
@@ -31,9 +31,6 @@ extern const gnutls_crypto_cipher_st aes_gcm_padlock_struct;
extern const gnutls_crypto_mac_st hmac_sha_padlock_struct;
extern const gnutls_crypto_digest_st sha_padlock_struct;
-extern const gnutls_crypto_mac_st hmac_sha_padlock_nano_struct;
-extern const gnutls_crypto_digest_st sha_padlock_nano_struct;
-
int padlock_aes_cipher_setkey(void *_ctx, const void *userkey,
size_t keysize);
diff --git a/lib/accelerated/x86/aes-x86.c b/lib/accelerated/x86/aes-x86.c
index 59e2b13280..c50836a168 100644
--- a/lib/accelerated/x86/aes-x86.c
+++ b/lib/accelerated/x86/aes-x86.c
@@ -30,6 +30,7 @@
#include <gnutls/crypto.h>
#include <gnutls_errors.h>
#include <aes-x86.h>
+#include <sha-x86.h>
#include <x86.h>
struct aes_ctx {
@@ -38,6 +39,8 @@ struct aes_ctx {
int enc;
};
+unsigned int _gnutls_x86_cpuid_s[4];
+
static int
aes_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx, int enc)
{
@@ -126,19 +129,18 @@ static const gnutls_crypto_cipher_st cipher_struct = {
static unsigned check_optimized_aes(void)
{
- unsigned int a, b, c, d;
- gnutls_cpuid(1, &a, &b, &c, &d);
+ return (_gnutls_x86_cpuid_s[2] & 0x2000000);
+}
- return (c & 0x2000000);
+static unsigned check_ssse3(void)
+{
+ return (_gnutls_x86_cpuid_s[2] & 0x0000200);
}
#ifdef ASM_X86_64
static unsigned check_pclmul(void)
{
- unsigned int a, b, c, d;
- gnutls_cpuid(1, &a, &b, &c, &d);
-
- return (c & 0x2);
+ return (_gnutls_x86_cpuid_s[2] & 0x2);
}
#endif
@@ -165,6 +167,88 @@ void register_x86_crypto(void)
if (check_intel_or_amd() == 0)
return;
+ gnutls_cpuid(1, &_gnutls_x86_cpuid_s[0], &_gnutls_x86_cpuid_s[1],
+ &_gnutls_x86_cpuid_s[2], &_gnutls_x86_cpuid_s[3]);
+
+ if (check_ssse3()) {
+ _gnutls_debug_log("Intel SSSE3 was detected\n");
+
+ ret =
+ gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA1,
+ 80,
+ &sha_x86_struct);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+ ret =
+ gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA224,
+ 80,
+ &sha_x86_struct);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+ ret =
+ gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA256,
+ 80,
+ &sha_x86_struct);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+
+ ret =
+ gnutls_crypto_single_mac_register(GNUTLS_DIG_SHA1,
+ 80,
+ &hmac_sha_x86_struct);
+ if (ret < 0)
+ gnutls_assert();
+
+ ret =
+ gnutls_crypto_single_mac_register(GNUTLS_DIG_SHA224,
+ 80,
+ &hmac_sha_x86_struct);
+ if (ret < 0)
+ gnutls_assert();
+
+ ret =
+ gnutls_crypto_single_mac_register(GNUTLS_DIG_SHA256,
+ 80,
+ &hmac_sha_x86_struct);
+ if (ret < 0)
+ gnutls_assert();
+
+#ifdef ENABLE_SHA512
+ ret =
+ gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA384,
+ 80,
+ &sha_x86_struct);
+ if (ret < 0)
+ gnutls_assert();
+
+ ret =
+ gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA512,
+ 80,
+ &sha_x86_struct);
+ if (ret < 0)
+ gnutls_assert();
+ ret =
+ gnutls_crypto_single_mac_register(GNUTLS_DIG_SHA384,
+ 80,
+ &hmac_sha_x86_struct);
+ if (ret < 0)
+ gnutls_assert();
+
+ ret =
+ gnutls_crypto_single_mac_register(GNUTLS_DIG_SHA512,
+ 80,
+ &hmac_sha_x86_struct);
+ if (ret < 0)
+ gnutls_assert();
+#endif
+ }
+
if (check_optimized_aes()) {
_gnutls_debug_log("Intel AES accelerator was detected\n");
ret =
@@ -211,5 +295,8 @@ void register_x86_crypto(void)
#endif
}
+ /* convert _gnutls_x86_cpuid_s to the layout the openssl asm expects */
+ _gnutls_x86_cpuid_s[1] = _gnutls_x86_cpuid_s[2];
+
return;
}
diff --git a/lib/accelerated/x86/coff/appro-aes-x86-coff.s b/lib/accelerated/x86/coff/aesni-x86.s
index d00c02f285..1970712d30 100644
--- a/lib/accelerated/x86/coff/appro-aes-x86-coff.s
+++ b/lib/accelerated/x86/coff/aesni-x86.s
@@ -2162,3 +2162,5 @@ _aesni_set_decrypt_key:
.byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
.byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
.byte 115,108,46,111,114,103,62,0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/appro-aes-x86-64-coff.s b/lib/accelerated/x86/coff/aesni-x86_64.s
index 224a226b0d..85b51085a5 100644
--- a/lib/accelerated/x86/coff/appro-aes-x86-64-coff.s
+++ b/lib/accelerated/x86/coff/aesni-x86_64.s
@@ -3420,3 +3420,5 @@ cbc_se_handler:
.LSEH_info_key:
.byte 0x01,0x04,0x01,0x00
.byte 0x04,0x02,0x00,0x00
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/cpuid-x86-coff.s b/lib/accelerated/x86/coff/cpuid-x86.s
index d8074ed62f..f35cfba63a 100644
--- a/lib/accelerated/x86/coff/cpuid-x86-coff.s
+++ b/lib/accelerated/x86/coff/cpuid-x86.s
@@ -68,3 +68,5 @@ _gnutls_have_cpuid:
andl $2097152,%eax
ret
.byte 67,80,85,73,68,32,102,111,114,32,120,56,54,0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/cpuid-x86-64-coff.s b/lib/accelerated/x86/coff/cpuid-x86_64.s
index 5662a4b586..033df92ebf 100644
--- a/lib/accelerated/x86/coff/cpuid-x86-64-coff.s
+++ b/lib/accelerated/x86/coff/cpuid-x86_64.s
@@ -67,3 +67,5 @@ gnutls_cpuid:
movq 16(%rsp),%rsi
.byte 0xf3,0xc3
.LSEH_end_gnutls_cpuid:
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/padlock-x86-coff.s b/lib/accelerated/x86/coff/e_padlock-x86.s
index d969f307b5..d51d62ff73 100644
--- a/lib/accelerated/x86/coff/padlock-x86-coff.s
+++ b/lib/accelerated/x86/coff/e_padlock-x86.s
@@ -1059,3 +1059,5 @@ _padlock_sha512_blocks:
.align 4
.Lpadlock_saved_context:
.long 0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/padlock-x86-64-coff.s b/lib/accelerated/x86/coff/e_padlock-x86_64.s
index a3a0e301e7..14c62fd176 100644
--- a/lib/accelerated/x86/coff/padlock-x86-64-coff.s
+++ b/lib/accelerated/x86/coff/e_padlock-x86_64.s
@@ -1187,3 +1187,5 @@ padlock_ctr32_encrypt:
.p2align 3
.Lpadlock_saved_context:
.quad 0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/appro-aes-gcm-x86-64-coff.s b/lib/accelerated/x86/coff/ghash-x86_64.s
index ceb9108c32..951ee891b9 100644
--- a/lib/accelerated/x86/coff/appro-aes-gcm-x86-64-coff.s
+++ b/lib/accelerated/x86/coff/ghash-x86_64.s
@@ -1525,3 +1525,5 @@ se_handler:
.byte 0x0c,0x78,0x01,0x00
.byte 0x08,0x68,0x00,0x00
.byte 0x04,0x01,0x15,0x00
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/openssl-cpuid-x86.s b/lib/accelerated/x86/coff/openssl-cpuid-x86.s
new file mode 100644
index 0000000000..2e1b08cd9a
--- /dev/null
+++ b/lib/accelerated/x86/coff/openssl-cpuid-x86.s
@@ -0,0 +1,396 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "x86cpuid.s"
+.text
+.globl _OPENSSL_ia32_cpuid
+.def _OPENSSL_ia32_cpuid; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_ia32_cpuid:
+.L_OPENSSL_ia32_cpuid_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ xorl %edx,%edx
+ pushfl
+ popl %eax
+ movl %eax,%ecx
+ xorl $2097152,%eax
+ pushl %eax
+ popfl
+ pushfl
+ popl %eax
+ xorl %eax,%ecx
+ xorl %eax,%eax
+ btl $21,%ecx
+ jnc .L000nocpuid
+ movl 20(%esp),%esi
+ movl %eax,8(%esi)
+ .byte 0x0f,0xa2
+ movl %eax,%edi
+ xorl %eax,%eax
+ cmpl $1970169159,%ebx
+ setne %al
+ movl %eax,%ebp
+ cmpl $1231384169,%edx
+ setne %al
+ orl %eax,%ebp
+ cmpl $1818588270,%ecx
+ setne %al
+ orl %eax,%ebp
+ jz .L001intel
+ cmpl $1752462657,%ebx
+ setne %al
+ movl %eax,%esi
+ cmpl $1769238117,%edx
+ setne %al
+ orl %eax,%esi
+ cmpl $1145913699,%ecx
+ setne %al
+ orl %eax,%esi
+ jnz .L001intel
+ movl $2147483648,%eax
+ .byte 0x0f,0xa2
+ cmpl $2147483649,%eax
+ jb .L001intel
+ movl %eax,%esi
+ movl $2147483649,%eax
+ .byte 0x0f,0xa2
+ orl %ecx,%ebp
+ andl $2049,%ebp
+ cmpl $2147483656,%esi
+ jb .L001intel
+ movl $2147483656,%eax
+ .byte 0x0f,0xa2
+ movzbl %cl,%esi
+ incl %esi
+ movl $1,%eax
+ xorl %ecx,%ecx
+ .byte 0x0f,0xa2
+ btl $28,%edx
+ jnc .L002generic
+ shrl $16,%ebx
+ andl $255,%ebx
+ cmpl %esi,%ebx
+ ja .L002generic
+ andl $4026531839,%edx
+ jmp .L002generic
+.L001intel:
+ cmpl $7,%edi
+ jb .L003cacheinfo
+ movl 20(%esp),%esi
+ movl $7,%eax
+ xorl %ecx,%ecx
+ .byte 0x0f,0xa2
+ movl %ebx,8(%esi)
+.L003cacheinfo:
+ cmpl $4,%edi
+ movl $-1,%edi
+ jb .L004nocacheinfo
+ movl $4,%eax
+ movl $0,%ecx
+ .byte 0x0f,0xa2
+ movl %eax,%edi
+ shrl $14,%edi
+ andl $4095,%edi
+.L004nocacheinfo:
+ movl $1,%eax
+ xorl %ecx,%ecx
+ .byte 0x0f,0xa2
+ andl $3220176895,%edx
+ cmpl $0,%ebp
+ jne .L005notintel
+ orl $1073741824,%edx
+ andb $15,%ah
+ cmpb $15,%ah
+ jne .L005notintel
+ orl $1048576,%edx
+.L005notintel:
+ btl $28,%edx
+ jnc .L002generic
+ andl $4026531839,%edx
+ cmpl $0,%edi
+ je .L002generic
+ orl $268435456,%edx
+ shrl $16,%ebx
+ cmpb $1,%bl
+ ja .L002generic
+ andl $4026531839,%edx
+.L002generic:
+ andl $2048,%ebp
+ andl $4294965247,%ecx
+ movl %edx,%esi
+ orl %ecx,%ebp
+ btl $27,%ecx
+ jnc .L006clear_avx
+ xorl %ecx,%ecx
+.byte 15,1,208
+ andl $6,%eax
+ cmpl $6,%eax
+ je .L007done
+ cmpl $2,%eax
+ je .L006clear_avx
+.L008clear_xmm:
+ andl $4261412861,%ebp
+ andl $4278190079,%esi
+.L006clear_avx:
+ andl $4026525695,%ebp
+ movl 20(%esp),%edi
+ andl $4294967263,8(%edi)
+.L007done:
+ movl %esi,%eax
+ movl %ebp,%edx
+.L000nocpuid:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _OPENSSL_rdtsc
+.def _OPENSSL_rdtsc; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_rdtsc:
+.L_OPENSSL_rdtsc_begin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ leal __gnutls_x86_cpuid_s,%ecx
+ btl $4,(%ecx)
+ jnc .L009notsc
+ .byte 0x0f,0x31
+.L009notsc:
+ ret
+.globl _OPENSSL_instrument_halt
+.def _OPENSSL_instrument_halt; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_instrument_halt:
+.L_OPENSSL_instrument_halt_begin:
+ leal __gnutls_x86_cpuid_s,%ecx
+ btl $4,(%ecx)
+ jnc .L010nohalt
+.long 2421723150
+ andl $3,%eax
+ jnz .L010nohalt
+ pushfl
+ popl %eax
+ btl $9,%eax
+ jnc .L010nohalt
+ .byte 0x0f,0x31
+ pushl %edx
+ pushl %eax
+ hlt
+ .byte 0x0f,0x31
+ subl (%esp),%eax
+ sbbl 4(%esp),%edx
+ addl $8,%esp
+ ret
+.L010nohalt:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ ret
+.globl _OPENSSL_far_spin
+.def _OPENSSL_far_spin; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_far_spin:
+.L_OPENSSL_far_spin_begin:
+ pushfl
+ popl %eax
+ btl $9,%eax
+ jnc .L011nospin
+ movl 4(%esp),%eax
+ movl 8(%esp),%ecx
+.long 2430111262
+ xorl %eax,%eax
+ movl (%ecx),%edx
+ jmp .L012spin
+.align 16
+.L012spin:
+ incl %eax
+ cmpl (%ecx),%edx
+ je .L012spin
+.long 529567888
+ ret
+.L011nospin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ ret
+.globl _OPENSSL_wipe_cpu
+.def _OPENSSL_wipe_cpu; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_wipe_cpu:
+.L_OPENSSL_wipe_cpu_begin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ leal __gnutls_x86_cpuid_s,%ecx
+ movl (%ecx),%ecx
+ btl $1,(%ecx)
+ jnc .L013no_x87
+.long 4007259865,4007259865,4007259865,4007259865,2430851995
+.L013no_x87:
+ leal 4(%esp),%eax
+ ret
+.globl _OPENSSL_atomic_add
+.def _OPENSSL_atomic_add; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_atomic_add:
+.L_OPENSSL_atomic_add_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%ecx
+ pushl %ebx
+ nop
+ movl (%edx),%eax
+.L014spin:
+ leal (%eax,%ecx,1),%ebx
+ nop
+.long 447811568
+ jne .L014spin
+ movl %ebx,%eax
+ popl %ebx
+ ret
+.globl _OPENSSL_indirect_call
+.def _OPENSSL_indirect_call; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_indirect_call:
+.L_OPENSSL_indirect_call_begin:
+ pushl %ebp
+ movl %esp,%ebp
+ subl $28,%esp
+ movl 12(%ebp),%ecx
+ movl %ecx,(%esp)
+ movl 16(%ebp),%edx
+ movl %edx,4(%esp)
+ movl 20(%ebp),%eax
+ movl %eax,8(%esp)
+ movl 24(%ebp),%eax
+ movl %eax,12(%esp)
+ movl 28(%ebp),%eax
+ movl %eax,16(%esp)
+ movl 32(%ebp),%eax
+ movl %eax,20(%esp)
+ movl 36(%ebp),%eax
+ movl %eax,24(%esp)
+ call *8(%ebp)
+ movl %ebp,%esp
+ popl %ebp
+ ret
+.globl _OPENSSL_cleanse
+.def _OPENSSL_cleanse; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_cleanse:
+.L_OPENSSL_cleanse_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%ecx
+ xorl %eax,%eax
+ cmpl $7,%ecx
+ jae .L015lot
+ cmpl $0,%ecx
+ je .L016ret
+.L017little:
+ movb %al,(%edx)
+ subl $1,%ecx
+ leal 1(%edx),%edx
+ jnz .L017little
+.L016ret:
+ ret
+.align 16
+.L015lot:
+ testl $3,%edx
+ jz .L018aligned
+ movb %al,(%edx)
+ leal -1(%ecx),%ecx
+ leal 1(%edx),%edx
+ jmp .L015lot
+.L018aligned:
+ movl %eax,(%edx)
+ leal -4(%ecx),%ecx
+ testl $-4,%ecx
+ leal 4(%edx),%edx
+ jnz .L018aligned
+ cmpl $0,%ecx
+ jne .L017little
+ ret
+.globl _OPENSSL_instrument_bus
+.def _OPENSSL_instrument_bus; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_instrument_bus:
+.L_OPENSSL_instrument_bus_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl $0,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _OPENSSL_instrument_bus2
+.def _OPENSSL_instrument_bus2; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_instrument_bus2:
+.L_OPENSSL_instrument_bus2_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl $0,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _OPENSSL_ia32_rdrand
+.def _OPENSSL_ia32_rdrand; .scl 2; .type 32; .endef
+.align 16
+_OPENSSL_ia32_rdrand:
+.L_OPENSSL_ia32_rdrand_begin:
+ movl $8,%ecx
+.L019loop:
+.byte 15,199,240
+ jc .L020break
+ loop .L019loop
+.L020break:
+ cmpl $0,%eax
+ cmovel %ecx,%eax
+ ret
+.comm __gnutls_x86_cpuid_s,16
+.section .ctors
+.long _OPENSSL_cpuid_setup
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/openssl-cpuid-x86_64.s b/lib/accelerated/x86/coff/openssl-cpuid-x86_64.s
new file mode 100644
index 0000000000..d1439e8c13
--- /dev/null
+++ b/lib/accelerated/x86/coff/openssl-cpuid-x86_64.s
@@ -0,0 +1,361 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+
+
+.section .ctors
+ .p2align 3
+ .quad OPENSSL_cpuid_setup
+
+
+.comm _gnutls_x86_cpuid_s,16,4
+
+.text
+
+.globl OPENSSL_atomic_add
+.def OPENSSL_atomic_add; .scl 2; .type 32; .endef
+.p2align 4
+OPENSSL_atomic_add:
+ movl (%rcx),%eax
+.Lspin: leaq (%rdx,%rax,1),%r8
+.byte 0xf0
+ cmpxchgl %r8d,(%rcx)
+ jne .Lspin
+ movl %r8d,%eax
+.byte 0x48,0x98
+ .byte 0xf3,0xc3
+
+
+.globl OPENSSL_rdtsc
+.def OPENSSL_rdtsc; .scl 2; .type 32; .endef
+.p2align 4
+OPENSSL_rdtsc:
+ rdtsc
+ shlq $32,%rdx
+ orq %rdx,%rax
+ .byte 0xf3,0xc3
+
+
+.globl OPENSSL_ia32_cpuid
+.def OPENSSL_ia32_cpuid; .scl 2; .type 32; .endef
+.p2align 4
+OPENSSL_ia32_cpuid:
+ movq %rdi,8(%rsp)
+ movq %rsi,16(%rsp)
+ movq %rsp,%rax
+.LSEH_begin_OPENSSL_ia32_cpuid:
+ movq %rcx,%rdi
+
+ movq %rbx,%r8
+
+ xorl %eax,%eax
+ movl %eax,8(%rdi)
+ cpuid
+ movl %eax,%r11d
+
+ xorl %eax,%eax
+ cmpl $1970169159,%ebx
+ setne %al
+ movl %eax,%r9d
+ cmpl $1231384169,%edx
+ setne %al
+ orl %eax,%r9d
+ cmpl $1818588270,%ecx
+ setne %al
+ orl %eax,%r9d
+ jz .Lintel
+
+ cmpl $1752462657,%ebx
+ setne %al
+ movl %eax,%r10d
+ cmpl $1769238117,%edx
+ setne %al
+ orl %eax,%r10d
+ cmpl $1145913699,%ecx
+ setne %al
+ orl %eax,%r10d
+ jnz .Lintel
+
+
+ movl $2147483648,%eax
+ cpuid
+ cmpl $2147483649,%eax
+ jb .Lintel
+ movl %eax,%r10d
+ movl $2147483649,%eax
+ cpuid
+ orl %ecx,%r9d
+ andl $2049,%r9d
+
+ cmpl $2147483656,%r10d
+ jb .Lintel
+
+ movl $2147483656,%eax
+ cpuid
+ movzbq %cl,%r10
+ incq %r10
+
+ movl $1,%eax
+ cpuid
+ btl $28,%edx
+ jnc .Lgeneric
+ shrl $16,%ebx
+ cmpb %r10b,%bl
+ ja .Lgeneric
+ andl $4026531839,%edx
+ jmp .Lgeneric
+
+.Lintel:
+ cmpl $4,%r11d
+ movl $-1,%r10d
+ jb .Lnocacheinfo
+
+ movl $4,%eax
+ movl $0,%ecx
+ cpuid
+ movl %eax,%r10d
+ shrl $14,%r10d
+ andl $4095,%r10d
+
+ cmpl $7,%r11d
+ jb .Lnocacheinfo
+
+ movl $7,%eax
+ xorl %ecx,%ecx
+ cpuid
+ movl %ebx,8(%rdi)
+
+.Lnocacheinfo:
+ movl $1,%eax
+ cpuid
+ andl $3220176895,%edx
+ cmpl $0,%r9d
+ jne .Lnotintel
+ orl $1073741824,%edx
+ andb $15,%ah
+ cmpb $15,%ah
+ jne .Lnotintel
+ orl $1048576,%edx
+.Lnotintel:
+ btl $28,%edx
+ jnc .Lgeneric
+ andl $4026531839,%edx
+ cmpl $0,%r10d
+ je .Lgeneric
+
+ orl $268435456,%edx
+ shrl $16,%ebx
+ cmpb $1,%bl
+ ja .Lgeneric
+ andl $4026531839,%edx
+.Lgeneric:
+ andl $2048,%r9d
+ andl $4294965247,%ecx
+ orl %ecx,%r9d
+
+ movl %edx,%r10d
+ btl $27,%r9d
+ jnc .Lclear_avx
+ xorl %ecx,%ecx
+.byte 0x0f,0x01,0xd0
+ andl $6,%eax
+ cmpl $6,%eax
+ je .Ldone
+.Lclear_avx:
+ movl $4026525695,%eax
+ andl %eax,%r9d
+ andl $4294967263,8(%rdi)
+.Ldone:
+ shlq $32,%r9
+ movl %r10d,%eax
+ movq %r8,%rbx
+ orq %r9,%rax
+ movq 8(%rsp),%rdi
+ movq 16(%rsp),%rsi
+ .byte 0xf3,0xc3
+.LSEH_end_OPENSSL_ia32_cpuid:
+
+.globl OPENSSL_cleanse
+.def OPENSSL_cleanse; .scl 2; .type 32; .endef
+.p2align 4
+OPENSSL_cleanse:
+ xorq %rax,%rax
+ cmpq $15,%rdx
+ jae .Lot
+ cmpq $0,%rdx
+ je .Lret
+.Little:
+ movb %al,(%rcx)
+ subq $1,%rdx
+ leaq 1(%rcx),%rcx
+ jnz .Little
+.Lret:
+ .byte 0xf3,0xc3
+.p2align 4
+.Lot:
+ testq $7,%rcx
+ jz .Laligned
+ movb %al,(%rcx)
+ leaq -1(%rdx),%rdx
+ leaq 1(%rcx),%rcx
+ jmp .Lot
+.Laligned:
+ movq %rax,(%rcx)
+ leaq -8(%rdx),%rdx
+ testq $-8,%rdx
+ leaq 8(%rcx),%rcx
+ jnz .Laligned
+ cmpq $0,%rdx
+ jne .Little
+ .byte 0xf3,0xc3
+
+.globl OPENSSL_wipe_cpu
+.def OPENSSL_wipe_cpu; .scl 2; .type 32; .endef
+.p2align 4
+OPENSSL_wipe_cpu:
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ xorq %rcx,%rcx
+ xorq %rdx,%rdx
+ xorq %r8,%r8
+ xorq %r9,%r9
+ xorq %r10,%r10
+ xorq %r11,%r11
+ leaq 8(%rsp),%rax
+ .byte 0xf3,0xc3
+
+.globl OPENSSL_instrument_bus
+.def OPENSSL_instrument_bus; .scl 2; .type 32; .endef
+.p2align 4
+OPENSSL_instrument_bus:
+ movq %rcx,%r10
+ movq %rdx,%rcx
+ movq %rdx,%r11
+
+ rdtsc
+ movl %eax,%r8d
+ movl $0,%r9d
+ clflush (%r10)
+.byte 0xf0
+ addl %r9d,(%r10)
+ jmp .Loop
+.p2align 4
+.Loop: rdtsc
+ movl %eax,%edx
+ subl %r8d,%eax
+ movl %edx,%r8d
+ movl %eax,%r9d
+ clflush (%r10)
+.byte 0xf0
+ addl %eax,(%r10)
+ leaq 4(%r10),%r10
+ subq $1,%rcx
+ jnz .Loop
+
+ movq %r11,%rax
+ .byte 0xf3,0xc3
+
+
+.globl OPENSSL_instrument_bus2
+.def OPENSSL_instrument_bus2; .scl 2; .type 32; .endef
+.p2align 4
+OPENSSL_instrument_bus2:
+ movq %rcx,%r10
+ movq %rdx,%rcx
+ movq %r8,%r11
+ movq %rcx,8(%rsp)
+
+ rdtsc
+ movl %eax,%r8d
+ movl $0,%r9d
+
+ clflush (%r10)
+.byte 0xf0
+ addl %r9d,(%r10)
+
+ rdtsc
+ movl %eax,%edx
+ subl %r8d,%eax
+ movl %edx,%r8d
+ movl %eax,%r9d
+.Loop2:
+ clflush (%r10)
+.byte 0xf0
+ addl %eax,(%r10)
+
+ subq $1,%r11
+ jz .Ldone2
+
+ rdtsc
+ movl %eax,%edx
+ subl %r8d,%eax
+ movl %edx,%r8d
+ cmpl %r9d,%eax
+ movl %eax,%r9d
+ movl $0,%edx
+ setne %dl
+ subq %rdx,%rcx
+ leaq (%r10,%rdx,4),%r10
+ jnz .Loop2
+
+.Ldone2:
+ movq 8(%rsp),%rax
+ subq %rcx,%rax
+ .byte 0xf3,0xc3
+
+.globl OPENSSL_ia32_rdrand
+.def OPENSSL_ia32_rdrand; .scl 2; .type 32; .endef
+.p2align 4
+OPENSSL_ia32_rdrand:
+ movl $8,%ecx
+.Loop_rdrand:
+.byte 72,15,199,240
+ jc .Lbreak_rdrand
+ loop .Loop_rdrand
+.Lbreak_rdrand:
+ cmpq $0,%rax
+ cmoveq %rcx,%rax
+ .byte 0xf3,0xc3
+
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/sha1-ssse3-x86.s b/lib/accelerated/x86/coff/sha1-ssse3-x86.s
new file mode 100644
index 0000000000..9bd41a0de4
--- /dev/null
+++ b/lib/accelerated/x86/coff/sha1-ssse3-x86.s
@@ -0,0 +1,1420 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "sha1-586.s"
+.text
+.globl _sha1_block_data_order
+.def _sha1_block_data_order; .scl 2; .type 32; .endef
+.align 16
+_sha1_block_data_order:
+.L_sha1_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%ebp
+ movl 24(%esp),%esi
+ movl 28(%esp),%eax
+ subl $76,%esp
+ shll $6,%eax
+ addl %esi,%eax
+ movl %eax,104(%esp)
+ movl 16(%ebp),%edi
+ jmp .L000loop
+.align 16
+.L000loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %edx,28(%esp)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edx,44(%esp)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,48(%esp)
+ movl %ebx,52(%esp)
+ movl %ecx,56(%esp)
+ movl %edx,60(%esp)
+ movl %esi,100(%esp)
+ movl (%ebp),%eax
+ movl 4(%ebp),%ebx
+ movl 8(%ebp),%ecx
+ movl 12(%ebp),%edx
+ # 00_15 0
+ movl %ecx,%esi
+ movl %eax,%ebp
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ movl (%esp),%edi
+ andl %ebx,%esi
+ rorl $2,%ebx
+ xorl %edx,%esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+ # 00_15 1
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ movl 4(%esp),%edx
+ andl %eax,%edi
+ rorl $2,%eax
+ xorl %ecx,%edi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+ # 00_15 2
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ movl 8(%esp),%ecx
+ andl %esi,%edx
+ rorl $2,%esi
+ xorl %ebx,%edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+ # 00_15 3
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ movl 12(%esp),%ebx
+ andl %edi,%ecx
+ rorl $2,%edi
+ xorl %eax,%ecx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+ # 00_15 4
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ movl 16(%esp),%eax
+ andl %edx,%ebx
+ rorl $2,%edx
+ xorl %esi,%ebx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+ # 00_15 5
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ movl 20(%esp),%esi
+ andl %ecx,%eax
+ rorl $2,%ecx
+ xorl %edi,%eax
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+ # 00_15 6
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ movl 24(%esp),%edi
+ andl %ebx,%esi
+ rorl $2,%ebx
+ xorl %edx,%esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+ # 00_15 7
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ movl 28(%esp),%edx
+ andl %eax,%edi
+ rorl $2,%eax
+ xorl %ecx,%edi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+ # 00_15 8
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ movl 32(%esp),%ecx
+ andl %esi,%edx
+ rorl $2,%esi
+ xorl %ebx,%edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+ # 00_15 9
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ movl 36(%esp),%ebx
+ andl %edi,%ecx
+ rorl $2,%edi
+ xorl %eax,%ecx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+ # 00_15 10
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ movl 40(%esp),%eax
+ andl %edx,%ebx
+ rorl $2,%edx
+ xorl %esi,%ebx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+ # 00_15 11
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ movl 44(%esp),%esi
+ andl %ecx,%eax
+ rorl $2,%ecx
+ xorl %edi,%eax
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+ # 00_15 12
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ movl 48(%esp),%edi
+ andl %ebx,%esi
+ rorl $2,%ebx
+ xorl %edx,%esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+ # 00_15 13
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ movl 52(%esp),%edx
+ andl %eax,%edi
+ rorl $2,%eax
+ xorl %ecx,%edi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+ # 00_15 14
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ movl 56(%esp),%ecx
+ andl %esi,%edx
+ rorl $2,%esi
+ xorl %ebx,%edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+ # 00_15 15
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ movl 60(%esp),%ebx
+ andl %edi,%ecx
+ rorl $2,%edi
+ xorl %eax,%ecx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ movl (%esp),%ebx
+ addl %ebp,%ecx
+ # 16_19 16
+ movl %edi,%ebp
+ xorl 8(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 32(%esp),%ebx
+ andl %edx,%ebp
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ xorl %esi,%ebp
+ addl %ebp,%eax
+ movl %ecx,%ebp
+ rorl $2,%edx
+ movl %ebx,(%esp)
+ roll $5,%ebp
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl 4(%esp),%eax
+ addl %ebp,%ebx
+ # 16_19 17
+ movl %edx,%ebp
+ xorl 12(%esp),%eax
+ xorl %edi,%ebp
+ xorl 36(%esp),%eax
+ andl %ecx,%ebp
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ xorl %edi,%ebp
+ addl %ebp,%esi
+ movl %ebx,%ebp
+ rorl $2,%ecx
+ movl %eax,4(%esp)
+ roll $5,%ebp
+ leal 1518500249(%eax,%esi,1),%eax
+ movl 8(%esp),%esi
+ addl %ebp,%eax
+ # 16_19 18
+ movl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 40(%esp),%esi
+ andl %ebx,%ebp
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ xorl %edx,%ebp
+ addl %ebp,%edi
+ movl %eax,%ebp
+ rorl $2,%ebx
+ movl %esi,8(%esp)
+ roll $5,%ebp
+ leal 1518500249(%esi,%edi,1),%esi
+ movl 12(%esp),%edi
+ addl %ebp,%esi
+ # 16_19 19
+ movl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 44(%esp),%edi
+ andl %eax,%ebp
+ xorl (%esp),%edi
+ roll $1,%edi
+ xorl %ecx,%ebp
+ addl %ebp,%edx
+ movl %esi,%ebp
+ rorl $2,%eax
+ movl %edi,12(%esp)
+ roll $5,%ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ movl 16(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 20
+ movl %esi,%ebp
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,16(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 20(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 21
+ movl %edi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,20(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 24(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 22
+ movl %edx,%ebp
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,24(%esp)
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl 28(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 23
+ movl %ecx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,28(%esp)
+ leal 1859775393(%eax,%esi,1),%eax
+ movl 32(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 24
+ movl %ebx,%ebp
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,32(%esp)
+ leal 1859775393(%esi,%edi,1),%esi
+ movl 36(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 25
+ movl %eax,%ebp
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,36(%esp)
+ leal 1859775393(%edi,%edx,1),%edi
+ movl 40(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 26
+ movl %esi,%ebp
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,40(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 44(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 27
+ movl %edi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,44(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 48(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 28
+ movl %edx,%ebp
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,48(%esp)
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl 52(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 29
+ movl %ecx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,52(%esp)
+ leal 1859775393(%eax,%esi,1),%eax
+ movl 56(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 30
+ movl %ebx,%ebp
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,56(%esp)
+ leal 1859775393(%esi,%edi,1),%esi
+ movl 60(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 31
+ movl %eax,%ebp
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,60(%esp)
+ leal 1859775393(%edi,%edx,1),%edi
+ movl (%esp),%edx
+ addl %ebp,%edi
+ # 20_39 32
+ movl %esi,%ebp
+ xorl 8(%esp),%edx
+ xorl %eax,%ebp
+ xorl 32(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 4(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 33
+ movl %edi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 56(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,4(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 8(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 34
+ movl %edx,%ebp
+ xorl 16(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 40(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,8(%esp)
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl 12(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 35
+ movl %ecx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edx,%ebp
+ xorl 44(%esp),%eax
+ xorl %edi,%ebp
+ xorl (%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,12(%esp)
+ leal 1859775393(%eax,%esi,1),%eax
+ movl 16(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 36
+ movl %ebx,%ebp
+ xorl 24(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%esi
+ xorl %edx,%ebp
+ xorl 4(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,16(%esp)
+ leal 1859775393(%esi,%edi,1),%esi
+ movl 20(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 37
+ movl %eax,%ebp
+ xorl 28(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 8(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,20(%esp)
+ leal 1859775393(%edi,%edx,1),%edi
+ movl 24(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 38
+ movl %esi,%ebp
+ xorl 32(%esp),%edx
+ xorl %eax,%ebp
+ xorl 56(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 12(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,24(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 28(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 39
+ movl %edi,%ebp
+ xorl 36(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 16(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,28(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 32(%esp),%ebx
+ addl %ebp,%ecx
+ # 40_59 40
+ movl %edi,%ebp
+ xorl 40(%esp),%ebx
+ xorl %esi,%ebp
+ xorl (%esp),%ebx
+ andl %edx,%ebp
+ xorl 20(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,32(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 36(%esp),%eax
+ addl %ebp,%ebx
+ # 40_59 41
+ movl %edx,%ebp
+ xorl 44(%esp),%eax
+ xorl %edi,%ebp
+ xorl 4(%esp),%eax
+ andl %ecx,%ebp
+ xorl 24(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,36(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl 40(%esp),%esi
+ addl %ebp,%eax
+ # 40_59 42
+ movl %ecx,%ebp
+ xorl 48(%esp),%esi
+ xorl %edx,%ebp
+ xorl 8(%esp),%esi
+ andl %ebx,%ebp
+ xorl 28(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ rorl $2,%ebx
+ movl %eax,%edi
+ roll $5,%edi
+ movl %esi,40(%esp)
+ leal 2400959708(%esi,%ebp,1),%esi
+ movl %ecx,%ebp
+ addl %edi,%esi
+ andl %edx,%ebp
+ movl 44(%esp),%edi
+ addl %ebp,%esi
+ # 40_59 43
+ movl %ebx,%ebp
+ xorl 52(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 12(%esp),%edi
+ andl %eax,%ebp
+ xorl 32(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ rorl $2,%eax
+ movl %esi,%edx
+ roll $5,%edx
+ movl %edi,44(%esp)
+ leal 2400959708(%edi,%ebp,1),%edi
+ movl %ebx,%ebp
+ addl %edx,%edi
+ andl %ecx,%ebp
+ movl 48(%esp),%edx
+ addl %ebp,%edi
+ # 40_59 44
+ movl %eax,%ebp
+ xorl 56(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 16(%esp),%edx
+ andl %esi,%ebp
+ xorl 36(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ rorl $2,%esi
+ movl %edi,%ecx
+ roll $5,%ecx
+ movl %edx,48(%esp)
+ leal 2400959708(%edx,%ebp,1),%edx
+ movl %eax,%ebp
+ addl %ecx,%edx
+ andl %ebx,%ebp
+ movl 52(%esp),%ecx
+ addl %ebp,%edx
+ # 40_59 45
+ movl %esi,%ebp
+ xorl 60(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 20(%esp),%ecx
+ andl %edi,%ebp
+ xorl 40(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ rorl $2,%edi
+ movl %edx,%ebx
+ roll $5,%ebx
+ movl %ecx,52(%esp)
+ leal 2400959708(%ecx,%ebp,1),%ecx
+ movl %esi,%ebp
+ addl %ebx,%ecx
+ andl %eax,%ebp
+ movl 56(%esp),%ebx
+ addl %ebp,%ecx
+ # 40_59 46
+ movl %edi,%ebp
+ xorl (%esp),%ebx
+ xorl %esi,%ebp
+ xorl 24(%esp),%ebx
+ andl %edx,%ebp
+ xorl 44(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,56(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 60(%esp),%eax
+ addl %ebp,%ebx
+ # 40_59 47
+ movl %edx,%ebp
+ xorl 4(%esp),%eax
+ xorl %edi,%ebp
+ xorl 28(%esp),%eax
+ andl %ecx,%ebp
+ xorl 48(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,60(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl (%esp),%esi
+ addl %ebp,%eax
+ # 40_59 48
+ movl %ecx,%ebp
+ xorl 8(%esp),%esi
+ xorl %edx,%ebp
+ xorl 32(%esp),%esi
+ andl %ebx,%ebp
+ xorl 52(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ rorl $2,%ebx
+ movl %eax,%edi
+ roll $5,%edi
+ movl %esi,(%esp)
+ leal 2400959708(%esi,%ebp,1),%esi
+ movl %ecx,%ebp
+ addl %edi,%esi
+ andl %edx,%ebp
+ movl 4(%esp),%edi
+ addl %ebp,%esi
+ # 40_59 49
+ movl %ebx,%ebp
+ xorl 12(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 36(%esp),%edi
+ andl %eax,%ebp
+ xorl 56(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ rorl $2,%eax
+ movl %esi,%edx
+ roll $5,%edx
+ movl %edi,4(%esp)
+ leal 2400959708(%edi,%ebp,1),%edi
+ movl %ebx,%ebp
+ addl %edx,%edi
+ andl %ecx,%ebp
+ movl 8(%esp),%edx
+ addl %ebp,%edi
+ # 40_59 50
+ movl %eax,%ebp
+ xorl 16(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 40(%esp),%edx
+ andl %esi,%ebp
+ xorl 60(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ rorl $2,%esi
+ movl %edi,%ecx
+ roll $5,%ecx
+ movl %edx,8(%esp)
+ leal 2400959708(%edx,%ebp,1),%edx
+ movl %eax,%ebp
+ addl %ecx,%edx
+ andl %ebx,%ebp
+ movl 12(%esp),%ecx
+ addl %ebp,%edx
+ # 40_59 51
+ movl %esi,%ebp
+ xorl 20(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 44(%esp),%ecx
+ andl %edi,%ebp
+ xorl (%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ rorl $2,%edi
+ movl %edx,%ebx
+ roll $5,%ebx
+ movl %ecx,12(%esp)
+ leal 2400959708(%ecx,%ebp,1),%ecx
+ movl %esi,%ebp
+ addl %ebx,%ecx
+ andl %eax,%ebp
+ movl 16(%esp),%ebx
+ addl %ebp,%ecx
+ # 40_59 52
+ movl %edi,%ebp
+ xorl 24(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 48(%esp),%ebx
+ andl %edx,%ebp
+ xorl 4(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,16(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 20(%esp),%eax
+ addl %ebp,%ebx
+ # 40_59 53
+ movl %edx,%ebp
+ xorl 28(%esp),%eax
+ xorl %edi,%ebp
+ xorl 52(%esp),%eax
+ andl %ecx,%ebp
+ xorl 8(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,20(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl 24(%esp),%esi
+ addl %ebp,%eax
+ # 40_59 54
+ movl %ecx,%ebp
+ xorl 32(%esp),%esi
+ xorl %edx,%ebp
+ xorl 56(%esp),%esi
+ andl %ebx,%ebp
+ xorl 12(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ rorl $2,%ebx
+ movl %eax,%edi
+ roll $5,%edi
+ movl %esi,24(%esp)
+ leal 2400959708(%esi,%ebp,1),%esi
+ movl %ecx,%ebp
+ addl %edi,%esi
+ andl %edx,%ebp
+ movl 28(%esp),%edi
+ addl %ebp,%esi
+ # 40_59 55
+ movl %ebx,%ebp
+ xorl 36(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 60(%esp),%edi
+ andl %eax,%ebp
+ xorl 16(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ rorl $2,%eax
+ movl %esi,%edx
+ roll $5,%edx
+ movl %edi,28(%esp)
+ leal 2400959708(%edi,%ebp,1),%edi
+ movl %ebx,%ebp
+ addl %edx,%edi
+ andl %ecx,%ebp
+ movl 32(%esp),%edx
+ addl %ebp,%edi
+ # 40_59 56
+ movl %eax,%ebp
+ xorl 40(%esp),%edx
+ xorl %ebx,%ebp
+ xorl (%esp),%edx
+ andl %esi,%ebp
+ xorl 20(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ rorl $2,%esi
+ movl %edi,%ecx
+ roll $5,%ecx
+ movl %edx,32(%esp)
+ leal 2400959708(%edx,%ebp,1),%edx
+ movl %eax,%ebp
+ addl %ecx,%edx
+ andl %ebx,%ebp
+ movl 36(%esp),%ecx
+ addl %ebp,%edx
+ # 40_59 57
+ movl %esi,%ebp
+ xorl 44(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 4(%esp),%ecx
+ andl %edi,%ebp
+ xorl 24(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ rorl $2,%edi
+ movl %edx,%ebx
+ roll $5,%ebx
+ movl %ecx,36(%esp)
+ leal 2400959708(%ecx,%ebp,1),%ecx
+ movl %esi,%ebp
+ addl %ebx,%ecx
+ andl %eax,%ebp
+ movl 40(%esp),%ebx
+ addl %ebp,%ecx
+ # 40_59 58
+ movl %edi,%ebp
+ xorl 48(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 8(%esp),%ebx
+ andl %edx,%ebp
+ xorl 28(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,40(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 44(%esp),%eax
+ addl %ebp,%ebx
+ # 40_59 59
+ movl %edx,%ebp
+ xorl 52(%esp),%eax
+ xorl %edi,%ebp
+ xorl 12(%esp),%eax
+ andl %ecx,%ebp
+ xorl 32(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,44(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl 48(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 60
+ movl %ebx,%ebp
+ xorl 56(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 36(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,48(%esp)
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 52(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 61
+ movl %eax,%ebp
+ xorl 60(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,52(%esp)
+ leal 3395469782(%edi,%edx,1),%edi
+ movl 56(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 62
+ movl %esi,%ebp
+ xorl (%esp),%edx
+ xorl %eax,%ebp
+ xorl 24(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,56(%esp)
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl 60(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 63
+ movl %edi,%ebp
+ xorl 4(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 48(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,60(%esp)
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl (%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 64
+ movl %edx,%ebp
+ xorl 8(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 32(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,(%esp)
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl 4(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 65
+ movl %ecx,%ebp
+ xorl 12(%esp),%eax
+ xorl %edx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edi,%ebp
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,4(%esp)
+ leal 3395469782(%eax,%esi,1),%eax
+ movl 8(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 66
+ movl %ebx,%ebp
+ xorl 16(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%esi
+ xorl %edx,%ebp
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,8(%esp)
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 12(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 67
+ movl %eax,%ebp
+ xorl 20(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edi
+ xorl %ecx,%ebp
+ xorl (%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,12(%esp)
+ leal 3395469782(%edi,%edx,1),%edi
+ movl 16(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 68
+ movl %esi,%ebp
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,16(%esp)
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl 20(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 69
+ movl %edi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,20(%esp)
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl 24(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 70
+ movl %edx,%ebp
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,24(%esp)
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl 28(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 71
+ movl %ecx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,28(%esp)
+ leal 3395469782(%eax,%esi,1),%eax
+ movl 32(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 72
+ movl %ebx,%ebp
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,32(%esp)
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 36(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 73
+ movl %eax,%ebp
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,36(%esp)
+ leal 3395469782(%edi,%edx,1),%edi
+ movl 40(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 74
+ movl %esi,%ebp
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,40(%esp)
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl 44(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 75
+ movl %edi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,44(%esp)
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl 48(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 76
+ movl %edx,%ebp
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,48(%esp)
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl 52(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 77
+ movl %ecx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl 56(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 78
+ movl %ebx,%ebp
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 60(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 79
+ movl %eax,%ebp
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ addl %ebp,%edi
+ movl 96(%esp),%ebp
+ movl 100(%esp),%edx
+ addl (%ebp),%edi
+ addl 4(%ebp),%esi
+ addl 8(%ebp),%eax
+ addl 12(%ebp),%ebx
+ addl 16(%ebp),%ecx
+ movl %edi,(%ebp)
+ addl $64,%edx
+ movl %esi,4(%ebp)
+ cmpl 104(%esp),%edx
+ movl %eax,8(%ebp)
+ movl %ecx,%edi
+ movl %ebx,12(%ebp)
+ movl %edx,%esi
+ movl %ecx,16(%ebp)
+ jb .L000loop
+ addl $76,%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
+.byte 102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
+.byte 89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
+.byte 114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/sha1-ssse3-x86_64.s b/lib/accelerated/x86/coff/sha1-ssse3-x86_64.s
new file mode 100644
index 0000000000..75868a42c6
--- /dev/null
+++ b/lib/accelerated/x86/coff/sha1-ssse3-x86_64.s
@@ -0,0 +1,2693 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.text
+
+
+.globl sha1_block_data_order
+.def sha1_block_data_order; .scl 2; .type 32; .endef
+.p2align 4
+sha1_block_data_order:
+ movq %rdi,8(%rsp)
+ movq %rsi,16(%rsp)
+ movq %rsp,%rax
+.LSEH_begin_sha1_block_data_order:
+ movq %rcx,%rdi
+ movq %rdx,%rsi
+ movq %r8,%rdx
+
+ movl _gnutls_x86_cpuid_s+0(%rip),%r9d
+ movl _gnutls_x86_cpuid_s+4(%rip),%r8d
+ movl _gnutls_x86_cpuid_s+8(%rip),%r10d
+ testl $512,%r8d
+ jz .Lialu
+ jmp _ssse3_shortcut
+
+.p2align 4
+.Lialu:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ movq %rsp,%r11
+ movq %rdi,%r8
+ subq $72,%rsp
+ movq %rsi,%r9
+ andq $-64,%rsp
+ movq %rdx,%r10
+ movq %r11,64(%rsp)
+.Lprologue:
+
+ movl 0(%r8),%esi
+ movl 4(%r8),%edi
+ movl 8(%r8),%r11d
+ movl 12(%r8),%r12d
+ movl 16(%r8),%r13d
+ jmp .Lloop
+
+.p2align 4
+.Lloop:
+ movl 0(%r9),%edx
+ bswapl %edx
+ movl %edx,0(%rsp)
+ movl %r11d,%eax
+ movl 4(%r9),%ebp
+ movl %esi,%ecx
+ xorl %r12d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r13,1),%r13d
+ andl %edi,%eax
+ movl %ebp,4(%rsp)
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl %edi,%eax
+ movl 8(%r9),%edx
+ movl %r13d,%ecx
+ xorl %r11d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r12,1),%r12d
+ andl %esi,%eax
+ movl %edx,8(%rsp)
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl %esi,%eax
+ movl 12(%r9),%ebp
+ movl %r12d,%ecx
+ xorl %edi,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r11,1),%r11d
+ andl %r13d,%eax
+ movl %ebp,12(%rsp)
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl %r13d,%eax
+ movl 16(%r9),%edx
+ movl %r11d,%ecx
+ xorl %esi,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%rdi,1),%edi
+ andl %r12d,%eax
+ movl %edx,16(%rsp)
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl %r12d,%eax
+ movl 20(%r9),%ebp
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%rsi,1),%esi
+ andl %r11d,%eax
+ movl %ebp,20(%rsp)
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl %r11d,%eax
+ movl 24(%r9),%edx
+ movl %esi,%ecx
+ xorl %r12d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r13,1),%r13d
+ andl %edi,%eax
+ movl %edx,24(%rsp)
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl %edi,%eax
+ movl 28(%r9),%ebp
+ movl %r13d,%ecx
+ xorl %r11d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r12,1),%r12d
+ andl %esi,%eax
+ movl %ebp,28(%rsp)
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl %esi,%eax
+ movl 32(%r9),%edx
+ movl %r12d,%ecx
+ xorl %edi,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r11,1),%r11d
+ andl %r13d,%eax
+ movl %edx,32(%rsp)
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl %r13d,%eax
+ movl 36(%r9),%ebp
+ movl %r11d,%ecx
+ xorl %esi,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%rdi,1),%edi
+ andl %r12d,%eax
+ movl %ebp,36(%rsp)
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl %r12d,%eax
+ movl 40(%r9),%edx
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%rsi,1),%esi
+ andl %r11d,%eax
+ movl %edx,40(%rsp)
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl %r11d,%eax
+ movl 44(%r9),%ebp
+ movl %esi,%ecx
+ xorl %r12d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r13,1),%r13d
+ andl %edi,%eax
+ movl %ebp,44(%rsp)
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl %edi,%eax
+ movl 48(%r9),%edx
+ movl %r13d,%ecx
+ xorl %r11d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r12,1),%r12d
+ andl %esi,%eax
+ movl %edx,48(%rsp)
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl %esi,%eax
+ movl 52(%r9),%ebp
+ movl %r12d,%ecx
+ xorl %edi,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r11,1),%r11d
+ andl %r13d,%eax
+ movl %ebp,52(%rsp)
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl %r13d,%eax
+ movl 56(%r9),%edx
+ movl %r11d,%ecx
+ xorl %esi,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%rdi,1),%edi
+ andl %r12d,%eax
+ movl %edx,56(%rsp)
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl %r12d,%eax
+ movl 60(%r9),%ebp
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%rsi,1),%esi
+ andl %r11d,%eax
+ movl %ebp,60(%rsp)
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl 0(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 8(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 32(%rsp),%edx
+ andl %edi,%eax
+ leal 1518500249(%rbp,%r13,1),%r13d
+ xorl 52(%rsp),%edx
+ xorl %r12d,%eax
+ roll $1,%edx
+ addl %ecx,%r13d
+ roll $30,%edi
+ movl %edx,0(%rsp)
+ addl %eax,%r13d
+ movl 4(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 36(%rsp),%ebp
+ andl %esi,%eax
+ leal 1518500249(%rdx,%r12,1),%r12d
+ xorl 56(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $1,%ebp
+ addl %ecx,%r12d
+ roll $30,%esi
+ movl %ebp,4(%rsp)
+ addl %eax,%r12d
+ movl 8(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 40(%rsp),%edx
+ andl %r13d,%eax
+ leal 1518500249(%rbp,%r11,1),%r11d
+ xorl 60(%rsp),%edx
+ xorl %edi,%eax
+ roll $1,%edx
+ addl %ecx,%r11d
+ roll $30,%r13d
+ movl %edx,8(%rsp)
+ addl %eax,%r11d
+ movl 12(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 44(%rsp),%ebp
+ andl %r12d,%eax
+ leal 1518500249(%rdx,%rdi,1),%edi
+ xorl 0(%rsp),%ebp
+ xorl %esi,%eax
+ roll $1,%ebp
+ addl %ecx,%edi
+ roll $30,%r12d
+ movl %ebp,12(%rsp)
+ addl %eax,%edi
+ movl 16(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 48(%rsp),%edx
+ andl %r11d,%eax
+ leal 1518500249(%rbp,%rsi,1),%esi
+ xorl 4(%rsp),%edx
+ xorl %r13d,%eax
+ roll $1,%edx
+ addl %ecx,%esi
+ roll $30,%r11d
+ movl %edx,16(%rsp)
+ addl %eax,%esi
+ movl 20(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r13,1),%r13d
+ xorl 52(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 8(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,20(%rsp)
+ movl 24(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 32(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r12,1),%r12d
+ xorl 56(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 12(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,24(%rsp)
+ movl 28(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r11,1),%r11d
+ xorl 60(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 16(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,28(%rsp)
+ movl 32(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rdi,1),%edi
+ xorl 0(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 20(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,32(%rsp)
+ movl 36(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rsi,1),%esi
+ xorl 4(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 24(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,36(%rsp)
+ movl 40(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 48(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r13,1),%r13d
+ xorl 8(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 28(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,40(%rsp)
+ movl 44(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 52(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r12,1),%r12d
+ xorl 12(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 32(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl %ebp,44(%rsp)
+ movl 48(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 56(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r11,1),%r11d
+ xorl 16(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 36(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl %edx,48(%rsp)
+ movl 52(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rdi,1),%edi
+ xorl 20(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 40(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %ebp,52(%rsp)
+ movl 56(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rsi,1),%esi
+ xorl 24(%rsp),%edx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 44(%rsp),%edx
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ movl %edx,56(%rsp)
+ movl 60(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r13,1),%r13d
+ xorl 28(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 48(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,60(%rsp)
+ movl 0(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r12,1),%r12d
+ xorl 32(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 52(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,0(%rsp)
+ movl 4(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r11,1),%r11d
+ xorl 36(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 56(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,4(%rsp)
+ movl 8(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rdi,1),%edi
+ xorl 40(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 60(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,8(%rsp)
+ movl 12(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rsi,1),%esi
+ xorl 44(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 0(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,12(%rsp)
+ movl 16(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r13,1),%r13d
+ xorl 48(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 4(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,16(%rsp)
+ movl 20(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r12,1),%r12d
+ xorl 52(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 8(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl %ebp,20(%rsp)
+ movl 24(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 32(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r11,1),%r11d
+ xorl 56(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 12(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl %edx,24(%rsp)
+ movl 28(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rdi,1),%edi
+ xorl 60(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 16(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %ebp,28(%rsp)
+ movl 32(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rsi,1),%esi
+ xorl 0(%rsp),%edx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 20(%rsp),%edx
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ movl %edx,32(%rsp)
+ movl 36(%rsp),%ebp
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 44(%rsp),%ebp
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r12d,%ebx
+ leal -1894007588(%rdx,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 24(%rsp),%ebp
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %ebp,36(%rsp)
+ addl %ecx,%r13d
+ movl 40(%rsp),%edx
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 48(%rsp),%edx
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %r11d,%ebx
+ leal -1894007588(%rbp,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 28(%rsp),%edx
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%edx
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %edx,40(%rsp)
+ addl %ecx,%r12d
+ movl 44(%rsp),%ebp
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 52(%rsp),%ebp
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %edi,%ebx
+ leal -1894007588(%rdx,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 32(%rsp),%ebp
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%ebp
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %ebp,44(%rsp)
+ addl %ecx,%r11d
+ movl 48(%rsp),%edx
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 56(%rsp),%edx
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %esi,%ebx
+ leal -1894007588(%rbp,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 36(%rsp),%edx
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%edx
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %edx,48(%rsp)
+ addl %ecx,%edi
+ movl 52(%rsp),%ebp
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 60(%rsp),%ebp
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %r13d,%ebx
+ leal -1894007588(%rdx,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 40(%rsp),%ebp
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%ebp
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %ebp,52(%rsp)
+ addl %ecx,%esi
+ movl 56(%rsp),%edx
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 0(%rsp),%edx
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r12d,%ebx
+ leal -1894007588(%rbp,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 44(%rsp),%edx
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%edx
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %edx,56(%rsp)
+ addl %ecx,%r13d
+ movl 60(%rsp),%ebp
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 4(%rsp),%ebp
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %r11d,%ebx
+ leal -1894007588(%rdx,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 48(%rsp),%ebp
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %ebp,60(%rsp)
+ addl %ecx,%r12d
+ movl 0(%rsp),%edx
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 8(%rsp),%edx
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 32(%rsp),%edx
+ xorl %edi,%ebx
+ leal -1894007588(%rbp,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 52(%rsp),%edx
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%edx
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %edx,0(%rsp)
+ addl %ecx,%r11d
+ movl 4(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 12(%rsp),%ebp
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %esi,%ebx
+ leal -1894007588(%rdx,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 56(%rsp),%ebp
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%ebp
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %ebp,4(%rsp)
+ addl %ecx,%edi
+ movl 8(%rsp),%edx
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 16(%rsp),%edx
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r13d,%ebx
+ leal -1894007588(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 60(%rsp),%edx
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%edx
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %edx,8(%rsp)
+ addl %ecx,%esi
+ movl 12(%rsp),%ebp
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 20(%rsp),%ebp
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r12d,%ebx
+ leal -1894007588(%rdx,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 0(%rsp),%ebp
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %ebp,12(%rsp)
+ addl %ecx,%r13d
+ movl 16(%rsp),%edx
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 24(%rsp),%edx
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 48(%rsp),%edx
+ xorl %r11d,%ebx
+ leal -1894007588(%rbp,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 4(%rsp),%edx
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%edx
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %edx,16(%rsp)
+ addl %ecx,%r12d
+ movl 20(%rsp),%ebp
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 28(%rsp),%ebp
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 52(%rsp),%ebp
+ xorl %edi,%ebx
+ leal -1894007588(%rdx,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 8(%rsp),%ebp
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%ebp
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %ebp,20(%rsp)
+ addl %ecx,%r11d
+ movl 24(%rsp),%edx
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 32(%rsp),%edx
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 56(%rsp),%edx
+ xorl %esi,%ebx
+ leal -1894007588(%rbp,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 12(%rsp),%edx
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%edx
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %edx,24(%rsp)
+ addl %ecx,%edi
+ movl 28(%rsp),%ebp
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 36(%rsp),%ebp
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %r13d,%ebx
+ leal -1894007588(%rdx,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 16(%rsp),%ebp
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%ebp
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %ebp,28(%rsp)
+ addl %ecx,%esi
+ movl 32(%rsp),%edx
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 40(%rsp),%edx
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r12d,%ebx
+ leal -1894007588(%rbp,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 20(%rsp),%edx
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%edx
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %edx,32(%rsp)
+ addl %ecx,%r13d
+ movl 36(%rsp),%ebp
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 44(%rsp),%ebp
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r11d,%ebx
+ leal -1894007588(%rdx,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 24(%rsp),%ebp
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %ebp,36(%rsp)
+ addl %ecx,%r12d
+ movl 40(%rsp),%edx
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 48(%rsp),%edx
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %edi,%ebx
+ leal -1894007588(%rbp,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 28(%rsp),%edx
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%edx
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %edx,40(%rsp)
+ addl %ecx,%r11d
+ movl 44(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 52(%rsp),%ebp
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %esi,%ebx
+ leal -1894007588(%rdx,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 32(%rsp),%ebp
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%ebp
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %ebp,44(%rsp)
+ addl %ecx,%edi
+ movl 48(%rsp),%edx
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 56(%rsp),%edx
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 16(%rsp),%edx
+ xorl %r13d,%ebx
+ leal -1894007588(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 36(%rsp),%edx
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%edx
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %edx,48(%rsp)
+ addl %ecx,%esi
+ movl 52(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r13,1),%r13d
+ xorl 20(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 40(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,52(%rsp)
+ movl 56(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 0(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r12,1),%r12d
+ xorl 24(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 44(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,56(%rsp)
+ movl 60(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r11,1),%r11d
+ xorl 28(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 48(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,60(%rsp)
+ movl 0(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%rdi,1),%edi
+ xorl 32(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 52(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,0(%rsp)
+ movl 4(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rsi,1),%esi
+ xorl 36(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 56(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,4(%rsp)
+ movl 8(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 16(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r13,1),%r13d
+ xorl 40(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 60(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,8(%rsp)
+ movl 12(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r12,1),%r12d
+ xorl 44(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 0(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl %ebp,12(%rsp)
+ movl 16(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r11,1),%r11d
+ xorl 48(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 4(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl %edx,16(%rsp)
+ movl 20(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rdi,1),%edi
+ xorl 52(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 8(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %ebp,20(%rsp)
+ movl 24(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 32(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%rsi,1),%esi
+ xorl 56(%rsp),%edx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 12(%rsp),%edx
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ movl %edx,24(%rsp)
+ movl 28(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r13,1),%r13d
+ xorl 60(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 16(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,28(%rsp)
+ movl 32(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 40(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r12,1),%r12d
+ xorl 0(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 20(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,32(%rsp)
+ movl 36(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r11,1),%r11d
+ xorl 4(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 24(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,36(%rsp)
+ movl 40(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 48(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%rdi,1),%edi
+ xorl 8(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 28(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,40(%rsp)
+ movl 44(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 52(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rsi,1),%esi
+ xorl 12(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 32(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,44(%rsp)
+ movl 48(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 56(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r13,1),%r13d
+ xorl 16(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 36(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,48(%rsp)
+ movl 52(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r12,1),%r12d
+ xorl 20(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 40(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl 56(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r11,1),%r11d
+ xorl 24(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 44(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl 60(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rdi,1),%edi
+ xorl 28(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 48(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl %r11d,%eax
+ leal -899497514(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ addl 0(%r8),%esi
+ addl 4(%r8),%edi
+ addl 8(%r8),%r11d
+ addl 12(%r8),%r12d
+ addl 16(%r8),%r13d
+ movl %esi,0(%r8)
+ movl %edi,4(%r8)
+ movl %r11d,8(%r8)
+ movl %r12d,12(%r8)
+ movl %r13d,16(%r8)
+
+ subq $1,%r10
+ leaq 64(%r9),%r9
+ jnz .Lloop
+
+ movq 64(%rsp),%rsi
+ movq (%rsi),%r13
+ movq 8(%rsi),%r12
+ movq 16(%rsi),%rbp
+ movq 24(%rsi),%rbx
+ leaq 32(%rsi),%rsp
+.Lepilogue:
+ movq 8(%rsp),%rdi
+ movq 16(%rsp),%rsi
+ .byte 0xf3,0xc3
+.LSEH_end_sha1_block_data_order:
+.def sha1_block_data_order_ssse3; .scl 3; .type 32; .endef
+.p2align 4
+sha1_block_data_order_ssse3:
+ movq %rdi,8(%rsp)
+ movq %rsi,16(%rsp)
+ movq %rsp,%rax
+.LSEH_begin_sha1_block_data_order_ssse3:
+ movq %rcx,%rdi
+ movq %rdx,%rsi
+ movq %r8,%rdx
+
+_ssse3_shortcut:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ leaq -160(%rsp),%rsp
+ movaps %xmm6,64+0(%rsp)
+ movaps %xmm7,64+16(%rsp)
+ movaps %xmm8,64+32(%rsp)
+ movaps %xmm9,64+48(%rsp)
+ movaps %xmm10,64+64(%rsp)
+ movaps %xmm11,64+80(%rsp)
+.Lprologue_ssse3:
+ movq %rdi,%r8
+ movq %rsi,%r9
+ movq %rdx,%r10
+
+ shlq $6,%r10
+ addq %r9,%r10
+ leaq K_XX_XX+64(%rip),%r11
+
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movl %ebx,%esi
+ movl 16(%r8),%ebp
+ movl %ecx,%edi
+ xorl %edx,%edi
+ andl %edi,%esi
+
+ movdqa 64(%r11),%xmm6
+ movdqa -64(%r11),%xmm9
+ movdqu 0(%r9),%xmm0
+ movdqu 16(%r9),%xmm1
+ movdqu 32(%r9),%xmm2
+ movdqu 48(%r9),%xmm3
+.byte 102,15,56,0,198
+ addq $64,%r9
+.byte 102,15,56,0,206
+.byte 102,15,56,0,214
+.byte 102,15,56,0,222
+ paddd %xmm9,%xmm0
+ paddd %xmm9,%xmm1
+ paddd %xmm9,%xmm2
+ movdqa %xmm0,0(%rsp)
+ psubd %xmm9,%xmm0
+ movdqa %xmm1,16(%rsp)
+ psubd %xmm9,%xmm1
+ movdqa %xmm2,32(%rsp)
+ psubd %xmm9,%xmm2
+ jmp .Loop_ssse3
+.p2align 4
+.Loop_ssse3:
+ movdqa %xmm1,%xmm4
+ rorl $2,%ebx
+ xorl %edx,%esi
+ movdqa %xmm3,%xmm8
+.byte 102,15,58,15,224,8
+ movl %eax,%edi
+ addl 0(%rsp),%ebp
+ paddd %xmm3,%xmm9
+ xorl %ecx,%ebx
+ roll $5,%eax
+ psrldq $4,%xmm8
+ addl %esi,%ebp
+ andl %ebx,%edi
+ pxor %xmm0,%xmm4
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ pxor %xmm2,%xmm8
+ rorl $7,%eax
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 4(%rsp),%edx
+ pxor %xmm8,%xmm4
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa %xmm9,48(%rsp)
+ addl %edi,%edx
+ andl %eax,%esi
+ movdqa %xmm4,%xmm10
+ movdqa %xmm4,%xmm8
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ xorl %ebx,%esi
+ pslldq $12,%xmm10
+ paddd %xmm4,%xmm4
+ movl %edx,%edi
+ addl 8(%rsp),%ecx
+ xorl %eax,%ebp
+ roll $5,%edx
+ psrld $31,%xmm8
+ addl %esi,%ecx
+ andl %ebp,%edi
+ movdqa %xmm10,%xmm9
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ psrld $30,%xmm10
+ por %xmm8,%xmm4
+ rorl $7,%edx
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 12(%rsp),%ebx
+ pslld $2,%xmm9
+ pxor %xmm10,%xmm4
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa -64(%r11),%xmm10
+ addl %edi,%ebx
+ andl %edx,%esi
+ pxor %xmm9,%xmm4
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ movdqa %xmm2,%xmm5
+ rorl $7,%ecx
+ xorl %ebp,%esi
+ movdqa %xmm4,%xmm9
+.byte 102,15,58,15,233,8
+ movl %ebx,%edi
+ addl 16(%rsp),%eax
+ paddd %xmm4,%xmm10
+ xorl %edx,%ecx
+ roll $5,%ebx
+ psrldq $4,%xmm9
+ addl %esi,%eax
+ andl %ecx,%edi
+ pxor %xmm1,%xmm5
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ pxor %xmm3,%xmm9
+ rorl $7,%ebx
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 20(%rsp),%ebp
+ pxor %xmm9,%xmm5
+ xorl %ecx,%ebx
+ roll $5,%eax
+ movdqa %xmm10,0(%rsp)
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movdqa %xmm5,%xmm8
+ movdqa %xmm5,%xmm9
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ rorl $7,%eax
+ xorl %ecx,%esi
+ pslldq $12,%xmm8
+ paddd %xmm5,%xmm5
+ movl %ebp,%edi
+ addl 24(%rsp),%edx
+ xorl %ebx,%eax
+ roll $5,%ebp
+ psrld $31,%xmm9
+ addl %esi,%edx
+ andl %eax,%edi
+ movdqa %xmm8,%xmm10
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ psrld $30,%xmm8
+ por %xmm9,%xmm5
+ rorl $7,%ebp
+ xorl %ebx,%edi
+ movl %edx,%esi
+ addl 28(%rsp),%ecx
+ pslld $2,%xmm10
+ pxor %xmm8,%xmm5
+ xorl %eax,%ebp
+ roll $5,%edx
+ movdqa -32(%r11),%xmm8
+ addl %edi,%ecx
+ andl %ebp,%esi
+ pxor %xmm10,%xmm5
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ movdqa %xmm3,%xmm6
+ rorl $7,%edx
+ xorl %eax,%esi
+ movdqa %xmm5,%xmm10
+.byte 102,15,58,15,242,8
+ movl %ecx,%edi
+ addl 32(%rsp),%ebx
+ paddd %xmm5,%xmm8
+ xorl %ebp,%edx
+ roll $5,%ecx
+ psrldq $4,%xmm10
+ addl %esi,%ebx
+ andl %edx,%edi
+ pxor %xmm2,%xmm6
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ pxor %xmm4,%xmm10
+ rorl $7,%ecx
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ addl 36(%rsp),%eax
+ pxor %xmm10,%xmm6
+ xorl %edx,%ecx
+ roll $5,%ebx
+ movdqa %xmm8,16(%rsp)
+ addl %edi,%eax
+ andl %ecx,%esi
+ movdqa %xmm6,%xmm9
+ movdqa %xmm6,%xmm10
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ rorl $7,%ebx
+ xorl %edx,%esi
+ pslldq $12,%xmm9
+ paddd %xmm6,%xmm6
+ movl %eax,%edi
+ addl 40(%rsp),%ebp
+ xorl %ecx,%ebx
+ roll $5,%eax
+ psrld $31,%xmm10
+ addl %esi,%ebp
+ andl %ebx,%edi
+ movdqa %xmm9,%xmm8
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ psrld $30,%xmm9
+ por %xmm10,%xmm6
+ rorl $7,%eax
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 44(%rsp),%edx
+ pslld $2,%xmm8
+ pxor %xmm9,%xmm6
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa -32(%r11),%xmm9
+ addl %edi,%edx
+ andl %eax,%esi
+ pxor %xmm8,%xmm6
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ movdqa %xmm4,%xmm7
+ rorl $7,%ebp
+ xorl %ebx,%esi
+ movdqa %xmm6,%xmm8
+.byte 102,15,58,15,251,8
+ movl %edx,%edi
+ addl 48(%rsp),%ecx
+ paddd %xmm6,%xmm9
+ xorl %eax,%ebp
+ roll $5,%edx
+ psrldq $4,%xmm8
+ addl %esi,%ecx
+ andl %ebp,%edi
+ pxor %xmm3,%xmm7
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ pxor %xmm5,%xmm8
+ rorl $7,%edx
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 52(%rsp),%ebx
+ pxor %xmm8,%xmm7
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa %xmm9,32(%rsp)
+ addl %edi,%ebx
+ andl %edx,%esi
+ movdqa %xmm7,%xmm10
+ movdqa %xmm7,%xmm8
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ rorl $7,%ecx
+ xorl %ebp,%esi
+ pslldq $12,%xmm10
+ paddd %xmm7,%xmm7
+ movl %ebx,%edi
+ addl 56(%rsp),%eax
+ xorl %edx,%ecx
+ roll $5,%ebx
+ psrld $31,%xmm8
+ addl %esi,%eax
+ andl %ecx,%edi
+ movdqa %xmm10,%xmm9
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ psrld $30,%xmm10
+ por %xmm8,%xmm7
+ rorl $7,%ebx
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 60(%rsp),%ebp
+ pslld $2,%xmm9
+ pxor %xmm10,%xmm7
+ xorl %ecx,%ebx
+ roll $5,%eax
+ movdqa -32(%r11),%xmm10
+ addl %edi,%ebp
+ andl %ebx,%esi
+ pxor %xmm9,%xmm7
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ movdqa %xmm7,%xmm9
+ rorl $7,%eax
+ pxor %xmm4,%xmm0
+.byte 102,68,15,58,15,206,8
+ xorl %ecx,%esi
+ movl %ebp,%edi
+ addl 0(%rsp),%edx
+ pxor %xmm1,%xmm0
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa %xmm10,%xmm8
+ paddd %xmm7,%xmm10
+ addl %esi,%edx
+ andl %eax,%edi
+ pxor %xmm9,%xmm0
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ xorl %ebx,%edi
+ movdqa %xmm0,%xmm9
+ movdqa %xmm10,48(%rsp)
+ movl %edx,%esi
+ addl 4(%rsp),%ecx
+ xorl %eax,%ebp
+ roll $5,%edx
+ pslld $2,%xmm0
+ addl %edi,%ecx
+ andl %ebp,%esi
+ psrld $30,%xmm9
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ rorl $7,%edx
+ xorl %eax,%esi
+ movl %ecx,%edi
+ addl 8(%rsp),%ebx
+ por %xmm9,%xmm0
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa %xmm0,%xmm10
+ addl %esi,%ebx
+ andl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 12(%rsp),%eax
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 16(%rsp),%ebp
+ pxor %xmm5,%xmm1
+.byte 102,68,15,58,15,215,8
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm2,%xmm1
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm8,%xmm9
+ paddd %xmm0,%xmm8
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pxor %xmm10,%xmm1
+ addl 20(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm1,%xmm10
+ movdqa %xmm8,0(%rsp)
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ pslld $2,%xmm1
+ addl 24(%rsp),%ecx
+ xorl %eax,%esi
+ psrld $30,%xmm10
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ por %xmm10,%xmm1
+ addl 28(%rsp),%ebx
+ xorl %ebp,%edi
+ movdqa %xmm1,%xmm8
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 32(%rsp),%eax
+ pxor %xmm6,%xmm2
+.byte 102,68,15,58,15,192,8
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ pxor %xmm3,%xmm2
+ addl %esi,%eax
+ xorl %edx,%edi
+ movdqa 0(%r11),%xmm10
+ paddd %xmm1,%xmm9
+ rorl $7,%ecx
+ addl %ebx,%eax
+ pxor %xmm8,%xmm2
+ addl 36(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ movdqa %xmm2,%xmm8
+ movdqa %xmm9,16(%rsp)
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pslld $2,%xmm2
+ addl 40(%rsp),%edx
+ xorl %ebx,%esi
+ psrld $30,%xmm8
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ por %xmm8,%xmm2
+ addl 44(%rsp),%ecx
+ xorl %eax,%edi
+ movdqa %xmm2,%xmm9
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 48(%rsp),%ebx
+ pxor %xmm7,%xmm3
+.byte 102,68,15,58,15,201,8
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ pxor %xmm4,%xmm3
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ movdqa %xmm10,%xmm8
+ paddd %xmm2,%xmm10
+ rorl $7,%edx
+ addl %ecx,%ebx
+ pxor %xmm9,%xmm3
+ addl 52(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ movdqa %xmm3,%xmm9
+ movdqa %xmm10,32(%rsp)
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ pslld $2,%xmm3
+ addl 56(%rsp),%ebp
+ xorl %ecx,%esi
+ psrld $30,%xmm9
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ por %xmm9,%xmm3
+ addl 60(%rsp),%edx
+ xorl %ebx,%edi
+ movdqa %xmm3,%xmm10
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 0(%rsp),%ecx
+ pxor %xmm0,%xmm4
+.byte 102,68,15,58,15,210,8
+ xorl %eax,%esi
+ movl %edx,%edi
+ roll $5,%edx
+ pxor %xmm5,%xmm4
+ addl %esi,%ecx
+ xorl %eax,%edi
+ movdqa %xmm8,%xmm9
+ paddd %xmm3,%xmm8
+ rorl $7,%ebp
+ addl %edx,%ecx
+ pxor %xmm10,%xmm4
+ addl 4(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ movdqa %xmm4,%xmm10
+ movdqa %xmm8,48(%rsp)
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ pslld $2,%xmm4
+ addl 8(%rsp),%eax
+ xorl %edx,%esi
+ psrld $30,%xmm10
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ por %xmm10,%xmm4
+ addl 12(%rsp),%ebp
+ xorl %ecx,%edi
+ movdqa %xmm4,%xmm8
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 16(%rsp),%edx
+ pxor %xmm1,%xmm5
+.byte 102,68,15,58,15,195,8
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ pxor %xmm6,%xmm5
+ addl %esi,%edx
+ xorl %ebx,%edi
+ movdqa %xmm9,%xmm10
+ paddd %xmm4,%xmm9
+ rorl $7,%eax
+ addl %ebp,%edx
+ pxor %xmm8,%xmm5
+ addl 20(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ movdqa %xmm5,%xmm8
+ movdqa %xmm9,0(%rsp)
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ pslld $2,%xmm5
+ addl 24(%rsp),%ebx
+ xorl %ebp,%esi
+ psrld $30,%xmm8
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ por %xmm8,%xmm5
+ addl 28(%rsp),%eax
+ movdqa %xmm5,%xmm9
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %ecx,%esi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 32(%rsp),%ebp
+ pxor %xmm2,%xmm6
+.byte 102,68,15,58,15,204,8
+ andl %ecx,%esi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ pxor %xmm7,%xmm6
+ movl %eax,%edi
+ xorl %ecx,%esi
+ movdqa %xmm10,%xmm8
+ paddd %xmm5,%xmm10
+ roll $5,%eax
+ addl %esi,%ebp
+ pxor %xmm9,%xmm6
+ xorl %ebx,%edi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 36(%rsp),%edx
+ movdqa %xmm6,%xmm9
+ movdqa %xmm10,16(%rsp)
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movl %ebp,%esi
+ pslld $2,%xmm6
+ xorl %ebx,%edi
+ roll $5,%ebp
+ psrld $30,%xmm9
+ addl %edi,%edx
+ xorl %eax,%esi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 40(%rsp),%ecx
+ andl %eax,%esi
+ por %xmm9,%xmm6
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movdqa %xmm6,%xmm10
+ movl %edx,%edi
+ xorl %eax,%esi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %ebp,%edi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 44(%rsp),%ebx
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movl %ecx,%esi
+ xorl %ebp,%edi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %edx,%esi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 48(%rsp),%eax
+ pxor %xmm3,%xmm7
+.byte 102,68,15,58,15,213,8
+ andl %edx,%esi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ pxor %xmm0,%xmm7
+ movl %ebx,%edi
+ xorl %edx,%esi
+ movdqa 32(%r11),%xmm9
+ paddd %xmm6,%xmm8
+ roll $5,%ebx
+ addl %esi,%eax
+ pxor %xmm10,%xmm7
+ xorl %ecx,%edi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 52(%rsp),%ebp
+ movdqa %xmm7,%xmm10
+ movdqa %xmm8,32(%rsp)
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movl %eax,%esi
+ pslld $2,%xmm7
+ xorl %ecx,%edi
+ roll $5,%eax
+ psrld $30,%xmm10
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 56(%rsp),%edx
+ andl %ebx,%esi
+ por %xmm10,%xmm7
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movdqa %xmm7,%xmm8
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %eax,%edi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 60(%rsp),%ecx
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movl %edx,%esi
+ xorl %eax,%edi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 0(%rsp),%ebx
+ pxor %xmm4,%xmm0
+.byte 102,68,15,58,15,198,8
+ andl %ebp,%esi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ pxor %xmm1,%xmm0
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ movdqa %xmm9,%xmm10
+ paddd %xmm7,%xmm9
+ roll $5,%ecx
+ addl %esi,%ebx
+ pxor %xmm8,%xmm0
+ xorl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 4(%rsp),%eax
+ movdqa %xmm0,%xmm8
+ movdqa %xmm9,48(%rsp)
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movl %ebx,%esi
+ pslld $2,%xmm0
+ xorl %edx,%edi
+ roll $5,%ebx
+ psrld $30,%xmm8
+ addl %edi,%eax
+ xorl %ecx,%esi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 8(%rsp),%ebp
+ andl %ecx,%esi
+ por %xmm8,%xmm0
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movdqa %xmm0,%xmm9
+ movl %eax,%edi
+ xorl %ecx,%esi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ebx,%edi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 12(%rsp),%edx
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movl %ebp,%esi
+ xorl %ebx,%edi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %eax,%esi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 16(%rsp),%ecx
+ pxor %xmm5,%xmm1
+.byte 102,68,15,58,15,207,8
+ andl %eax,%esi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ pxor %xmm2,%xmm1
+ movl %edx,%edi
+ xorl %eax,%esi
+ movdqa %xmm10,%xmm8
+ paddd %xmm0,%xmm10
+ roll $5,%edx
+ addl %esi,%ecx
+ pxor %xmm9,%xmm1
+ xorl %ebp,%edi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 20(%rsp),%ebx
+ movdqa %xmm1,%xmm9
+ movdqa %xmm10,0(%rsp)
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movl %ecx,%esi
+ pslld $2,%xmm1
+ xorl %ebp,%edi
+ roll $5,%ecx
+ psrld $30,%xmm9
+ addl %edi,%ebx
+ xorl %edx,%esi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 24(%rsp),%eax
+ andl %edx,%esi
+ por %xmm9,%xmm1
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movdqa %xmm1,%xmm10
+ movl %ebx,%edi
+ xorl %edx,%esi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %ecx,%edi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 28(%rsp),%ebp
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movl %eax,%esi
+ xorl %ecx,%edi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 32(%rsp),%edx
+ pxor %xmm6,%xmm2
+.byte 102,68,15,58,15,208,8
+ andl %ebx,%esi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ pxor %xmm3,%xmm2
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ movdqa %xmm8,%xmm9
+ paddd %xmm1,%xmm8
+ roll $5,%ebp
+ addl %esi,%edx
+ pxor %xmm10,%xmm2
+ xorl %eax,%edi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 36(%rsp),%ecx
+ movdqa %xmm2,%xmm10
+ movdqa %xmm8,16(%rsp)
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movl %edx,%esi
+ pslld $2,%xmm2
+ xorl %eax,%edi
+ roll $5,%edx
+ psrld $30,%xmm10
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 40(%rsp),%ebx
+ andl %ebp,%esi
+ por %xmm10,%xmm2
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movdqa %xmm2,%xmm8
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 44(%rsp),%eax
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ addl %ebx,%eax
+ addl 48(%rsp),%ebp
+ pxor %xmm7,%xmm3
+.byte 102,68,15,58,15,193,8
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm4,%xmm3
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm9,%xmm10
+ paddd %xmm2,%xmm9
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pxor %xmm8,%xmm3
+ addl 52(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm3,%xmm8
+ movdqa %xmm9,32(%rsp)
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ pslld $2,%xmm3
+ addl 56(%rsp),%ecx
+ xorl %eax,%esi
+ psrld $30,%xmm8
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ por %xmm8,%xmm3
+ addl 60(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 0(%rsp),%eax
+ paddd %xmm3,%xmm10
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ movdqa %xmm10,48(%rsp)
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 4(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 8(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 12(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ cmpq %r10,%r9
+ je .Ldone_ssse3
+ movdqa 64(%r11),%xmm6
+ movdqa -64(%r11),%xmm9
+ movdqu 0(%r9),%xmm0
+ movdqu 16(%r9),%xmm1
+ movdqu 32(%r9),%xmm2
+ movdqu 48(%r9),%xmm3
+.byte 102,15,56,0,198
+ addq $64,%r9
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+.byte 102,15,56,0,206
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ paddd %xmm9,%xmm0
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ movdqa %xmm0,0(%rsp)
+ xorl %edx,%edi
+ movl %ebx,%esi
+ psubd %xmm9,%xmm0
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+.byte 102,15,56,0,214
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ paddd %xmm9,%xmm1
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ movdqa %xmm1,16(%rsp)
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ psubd %xmm9,%xmm1
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+.byte 102,15,56,0,222
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ paddd %xmm9,%xmm2
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ movdqa %xmm2,32(%rsp)
+ xorl %eax,%edi
+ movl %edx,%esi
+ psubd %xmm9,%xmm2
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 0(%r8),%eax
+ addl 4(%r8),%esi
+ addl 8(%r8),%ecx
+ addl 12(%r8),%edx
+ movl %eax,0(%r8)
+ addl 16(%r8),%ebp
+ movl %esi,4(%r8)
+ movl %esi,%ebx
+ movl %ecx,8(%r8)
+ movl %ecx,%edi
+ movl %edx,12(%r8)
+ xorl %edx,%edi
+ movl %ebp,16(%r8)
+ andl %edi,%esi
+ jmp .Loop_ssse3
+
+.p2align 4
+.Ldone_ssse3:
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 0(%r8),%eax
+ addl 4(%r8),%esi
+ addl 8(%r8),%ecx
+ movl %eax,0(%r8)
+ addl 12(%r8),%edx
+ movl %esi,4(%r8)
+ addl 16(%r8),%ebp
+ movl %ecx,8(%r8)
+ movl %edx,12(%r8)
+ movl %ebp,16(%r8)
+ movaps 64+0(%rsp),%xmm6
+ movaps 64+16(%rsp),%xmm7
+ movaps 64+32(%rsp),%xmm8
+ movaps 64+48(%rsp),%xmm9
+ movaps 64+64(%rsp),%xmm10
+ movaps 64+80(%rsp),%xmm11
+ leaq 160(%rsp),%rsi
+ movq 0(%rsi),%r12
+ movq 8(%rsi),%rbp
+ movq 16(%rsi),%rbx
+ leaq 24(%rsi),%rsp
+.Lepilogue_ssse3:
+ movq 8(%rsp),%rdi
+ movq 16(%rsp),%rsi
+ .byte 0xf3,0xc3
+.LSEH_end_sha1_block_data_order_ssse3:
+.p2align 6
+K_XX_XX:
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.p2align 6
+
+.def se_handler; .scl 3; .type 32; .endef
+.p2align 4
+se_handler:
+ pushq %rsi
+ pushq %rdi
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ pushfq
+ subq $64,%rsp
+
+ movq 120(%r8),%rax
+ movq 248(%r8),%rbx
+
+ leaq .Lprologue(%rip),%r10
+ cmpq %r10,%rbx
+ jb .Lcommon_seh_tail
+
+ movq 152(%r8),%rax
+
+ leaq .Lepilogue(%rip),%r10
+ cmpq %r10,%rbx
+ jae .Lcommon_seh_tail
+
+ movq 64(%rax),%rax
+ leaq 32(%rax),%rax
+
+ movq -8(%rax),%rbx
+ movq -16(%rax),%rbp
+ movq -24(%rax),%r12
+ movq -32(%rax),%r13
+ movq %rbx,144(%r8)
+ movq %rbp,160(%r8)
+ movq %r12,216(%r8)
+ movq %r13,224(%r8)
+
+ jmp .Lcommon_seh_tail
+
+
+.def ssse3_handler; .scl 3; .type 32; .endef
+.p2align 4
+ssse3_handler:
+ pushq %rsi
+ pushq %rdi
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ pushfq
+ subq $64,%rsp
+
+ movq 120(%r8),%rax
+ movq 248(%r8),%rbx
+
+ movq 8(%r9),%rsi
+ movq 56(%r9),%r11
+
+ movl 0(%r11),%r10d
+ leaq (%rsi,%r10,1),%r10
+ cmpq %r10,%rbx
+ jb .Lcommon_seh_tail
+
+ movq 152(%r8),%rax
+
+ movl 4(%r11),%r10d
+ leaq (%rsi,%r10,1),%r10
+ cmpq %r10,%rbx
+ jae .Lcommon_seh_tail
+
+ leaq 64(%rax),%rsi
+ leaq 512(%r8),%rdi
+ movl $12,%ecx
+.long 0xa548f3fc
+ leaq 184(%rax),%rax
+
+ movq -8(%rax),%rbx
+ movq -16(%rax),%rbp
+ movq -24(%rax),%r12
+ movq %rbx,144(%r8)
+ movq %rbp,160(%r8)
+ movq %r12,216(%r8)
+
+.Lcommon_seh_tail:
+ movq 8(%rax),%rdi
+ movq 16(%rax),%rsi
+ movq %rax,152(%r8)
+ movq %rsi,168(%r8)
+ movq %rdi,176(%r8)
+
+ movq 40(%r9),%rdi
+ movq %r8,%rsi
+ movl $154,%ecx
+.long 0xa548f3fc
+
+ movq %r9,%rsi
+ xorq %rcx,%rcx
+ movq 8(%rsi),%rdx
+ movq 0(%rsi),%r8
+ movq 16(%rsi),%r9
+ movq 40(%rsi),%r10
+ leaq 56(%rsi),%r11
+ leaq 24(%rsi),%r12
+ movq %r10,32(%rsp)
+ movq %r11,40(%rsp)
+ movq %r12,48(%rsp)
+ movq %rcx,56(%rsp)
+ call *__imp_RtlVirtualUnwind(%rip)
+
+ movl $1,%eax
+ addq $64,%rsp
+ popfq
+ popq %r15
+ popq %r14
+ popq %r13
+ popq %r12
+ popq %rbp
+ popq %rbx
+ popq %rdi
+ popq %rsi
+ .byte 0xf3,0xc3
+
+
+.section .pdata
+.p2align 2
+.rva .LSEH_begin_sha1_block_data_order
+.rva .LSEH_end_sha1_block_data_order
+.rva .LSEH_info_sha1_block_data_order
+.rva .LSEH_begin_sha1_block_data_order_ssse3
+.rva .LSEH_end_sha1_block_data_order_ssse3
+.rva .LSEH_info_sha1_block_data_order_ssse3
+.section .xdata
+.p2align 3
+.LSEH_info_sha1_block_data_order:
+.byte 9,0,0,0
+.rva se_handler
+.LSEH_info_sha1_block_data_order_ssse3:
+.byte 9,0,0,0
+.rva ssse3_handler
+.rva .Lprologue_ssse3,.Lepilogue_ssse3
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/sha256-avx-x86_64.s b/lib/accelerated/x86/coff/sha256-avx-x86_64.s
new file mode 100644
index 0000000000..2198b8e187
--- /dev/null
+++ b/lib/accelerated/x86/coff/sha256-avx-x86_64.s
@@ -0,0 +1,2645 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.text
+
+
+
+.globl sha256_multi_block
+.def sha256_multi_block; .scl 2; .type 32; .endef
+.p2align 5
+sha256_multi_block:
+ movq %rdi,8(%rsp)
+ movq %rsi,16(%rsp)
+ movq %rsp,%rax
+.LSEH_begin_sha256_multi_block:
+ movq %rcx,%rdi
+ movq %rdx,%rsi
+ movq %r8,%rdx
+
+ movq %rsp,%rax
+ pushq %rbx
+ pushq %rbp
+ leaq -168(%rsp),%rsp
+ movaps %xmm6,(%rsp)
+ movaps %xmm7,16(%rsp)
+ movaps %xmm8,32(%rsp)
+ movaps %xmm9,48(%rsp)
+ movaps %xmm10,-120(%rax)
+ movaps %xmm11,-104(%rax)
+ movaps %xmm12,-88(%rax)
+ movaps %xmm13,-72(%rax)
+ movaps %xmm14,-56(%rax)
+ movaps %xmm15,-40(%rax)
+ subq $288,%rsp
+ andq $-256,%rsp
+ movq %rax,272(%rsp)
+ leaq K256+128(%rip),%rbp
+ leaq 256(%rsp),%rbx
+ leaq 128(%rdi),%rdi
+
+.Loop_grande:
+ movl %edx,280(%rsp)
+ xorl %edx,%edx
+ movq 0(%rsi),%r8
+ movl 8(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,0(%rbx)
+ cmovleq %rbp,%r8
+ movq 16(%rsi),%r9
+ movl 24(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,4(%rbx)
+ cmovleq %rbp,%r9
+ movq 32(%rsi),%r10
+ movl 40(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,8(%rbx)
+ cmovleq %rbp,%r10
+ movq 48(%rsi),%r11
+ movl 56(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,12(%rbx)
+ cmovleq %rbp,%r11
+ testl %edx,%edx
+ jz .Ldone
+
+ movdqu 0-128(%rdi),%xmm8
+ leaq 128(%rsp),%rax
+ movdqu 32-128(%rdi),%xmm9
+ movdqu 64-128(%rdi),%xmm10
+ movdqu 96-128(%rdi),%xmm11
+ movdqu 128-128(%rdi),%xmm12
+ movdqu 160-128(%rdi),%xmm13
+ movdqu 192-128(%rdi),%xmm14
+ movdqu 224-128(%rdi),%xmm15
+ movdqu .Lpbswap(%rip),%xmm6
+ jmp .Loop
+
+.p2align 5
+.Loop:
+ movdqa %xmm10,%xmm4
+ pxor %xmm9,%xmm4
+ movd 0(%r8),%xmm5
+ movd 0(%r9),%xmm0
+ movd 0(%r10),%xmm1
+ movd 0(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,0-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movd 4(%r8),%xmm5
+ movd 4(%r9),%xmm0
+ movd 4(%r10),%xmm1
+ movd 4(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,16-128(%rax)
+ paddd %xmm14,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm5,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm14
+ paddd %xmm7,%xmm14
+ movd 8(%r8),%xmm5
+ movd 8(%r9),%xmm0
+ movd 8(%r10),%xmm1
+ movd 8(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,32-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movd 12(%r8),%xmm5
+ movd 12(%r9),%xmm0
+ movd 12(%r10),%xmm1
+ movd 12(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,48-128(%rax)
+ paddd %xmm12,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm5,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm12
+ paddd %xmm7,%xmm12
+ movd 16(%r8),%xmm5
+ movd 16(%r9),%xmm0
+ movd 16(%r10),%xmm1
+ movd 16(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,64-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movd 20(%r8),%xmm5
+ movd 20(%r9),%xmm0
+ movd 20(%r10),%xmm1
+ movd 20(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,80-128(%rax)
+ paddd %xmm10,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm5,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm10
+ paddd %xmm7,%xmm10
+ movd 24(%r8),%xmm5
+ movd 24(%r9),%xmm0
+ movd 24(%r10),%xmm1
+ movd 24(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,96-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movd 28(%r8),%xmm5
+ movd 28(%r9),%xmm0
+ movd 28(%r10),%xmm1
+ movd 28(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,112-128(%rax)
+ paddd %xmm8,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm5,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movd 32(%r8),%xmm5
+ movd 32(%r9),%xmm0
+ movd 32(%r10),%xmm1
+ movd 32(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,128-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movd 36(%r8),%xmm5
+ movd 36(%r9),%xmm0
+ movd 36(%r10),%xmm1
+ movd 36(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,144-128(%rax)
+ paddd %xmm14,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm5,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm14
+ paddd %xmm7,%xmm14
+ movd 40(%r8),%xmm5
+ movd 40(%r9),%xmm0
+ movd 40(%r10),%xmm1
+ movd 40(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,160-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movd 44(%r8),%xmm5
+ movd 44(%r9),%xmm0
+ movd 44(%r10),%xmm1
+ movd 44(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,176-128(%rax)
+ paddd %xmm12,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm5,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm12
+ paddd %xmm7,%xmm12
+ movd 48(%r8),%xmm5
+ movd 48(%r9),%xmm0
+ movd 48(%r10),%xmm1
+ movd 48(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,192-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movd 52(%r8),%xmm5
+ movd 52(%r9),%xmm0
+ movd 52(%r10),%xmm1
+ movd 52(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,208-128(%rax)
+ paddd %xmm10,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm5,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm10
+ paddd %xmm7,%xmm10
+ movd 56(%r8),%xmm5
+ movd 56(%r9),%xmm0
+ movd 56(%r10),%xmm1
+ movd 56(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,224-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movd 60(%r8),%xmm5
+ leaq 64(%r8),%r8
+ movd 60(%r9),%xmm0
+ leaq 64(%r9),%r9
+ movd 60(%r10),%xmm1
+ leaq 64(%r10),%r10
+ movd 60(%r11),%xmm2
+ leaq 64(%r11),%r11
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,240-128(%rax)
+ paddd %xmm8,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm5,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movdqu 0-128(%rax),%xmm5
+ movl $3,%ecx
+ jmp .Loop_16_xx
+.p2align 5
+.Loop_16_xx:
+ movdqa 16-128(%rax),%xmm6
+ paddd 144-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 224-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,0-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movdqa 32-128(%rax),%xmm5
+ paddd 160-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 240-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,16-128(%rax)
+ paddd %xmm14,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm6,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm14
+ paddd %xmm7,%xmm14
+ movdqa 48-128(%rax),%xmm6
+ paddd 176-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 0-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,32-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movdqa 64-128(%rax),%xmm5
+ paddd 192-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 16-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,48-128(%rax)
+ paddd %xmm12,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm6,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm12
+ paddd %xmm7,%xmm12
+ movdqa 80-128(%rax),%xmm6
+ paddd 208-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 32-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,64-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movdqa 96-128(%rax),%xmm5
+ paddd 224-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 48-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,80-128(%rax)
+ paddd %xmm10,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm6,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm10
+ paddd %xmm7,%xmm10
+ movdqa 112-128(%rax),%xmm6
+ paddd 240-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 64-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,96-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movdqa 128-128(%rax),%xmm5
+ paddd 0-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 80-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,112-128(%rax)
+ paddd %xmm8,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm6,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movdqa 144-128(%rax),%xmm6
+ paddd 16-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 96-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,128-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movdqa 160-128(%rax),%xmm5
+ paddd 32-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 112-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,144-128(%rax)
+ paddd %xmm14,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm6,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm14
+ paddd %xmm7,%xmm14
+ movdqa 176-128(%rax),%xmm6
+ paddd 48-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 128-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,160-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movdqa 192-128(%rax),%xmm5
+ paddd 64-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 144-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,176-128(%rax)
+ paddd %xmm12,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm6,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm12
+ paddd %xmm7,%xmm12
+ movdqa 208-128(%rax),%xmm6
+ paddd 80-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 160-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,192-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movdqa 224-128(%rax),%xmm5
+ paddd 96-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 176-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,208-128(%rax)
+ paddd %xmm10,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm6,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm10
+ paddd %xmm7,%xmm10
+ movdqa 240-128(%rax),%xmm6
+ paddd 112-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 192-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,224-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movdqa 0-128(%rax),%xmm5
+ paddd 128-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 208-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,240-128(%rax)
+ paddd %xmm8,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm6,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ decl %ecx
+ jnz .Loop_16_xx
+
+ movl $1,%ecx
+ leaq K256+128(%rip),%rbp
+
+ movdqa (%rbx),%xmm7
+ cmpl 0(%rbx),%ecx
+ pxor %xmm0,%xmm0
+ cmovgeq %rbp,%r8
+ cmpl 4(%rbx),%ecx
+ movdqa %xmm7,%xmm6
+ cmovgeq %rbp,%r9
+ cmpl 8(%rbx),%ecx
+ pcmpgtd %xmm0,%xmm6
+ cmovgeq %rbp,%r10
+ cmpl 12(%rbx),%ecx
+ paddd %xmm6,%xmm7
+ cmovgeq %rbp,%r11
+
+ movdqu 0-128(%rdi),%xmm0
+ pand %xmm6,%xmm8
+ movdqu 32-128(%rdi),%xmm1
+ pand %xmm6,%xmm9
+ movdqu 64-128(%rdi),%xmm2
+ pand %xmm6,%xmm10
+ movdqu 96-128(%rdi),%xmm5
+ pand %xmm6,%xmm11
+ paddd %xmm0,%xmm8
+ movdqu 128-128(%rdi),%xmm0
+ pand %xmm6,%xmm12
+ paddd %xmm1,%xmm9
+ movdqu 160-128(%rdi),%xmm1
+ pand %xmm6,%xmm13
+ paddd %xmm2,%xmm10
+ movdqu 192-128(%rdi),%xmm2
+ pand %xmm6,%xmm14
+ paddd %xmm5,%xmm11
+ movdqu 224-128(%rdi),%xmm5
+ pand %xmm6,%xmm15
+ paddd %xmm0,%xmm12
+ paddd %xmm1,%xmm13
+ movdqu %xmm8,0-128(%rdi)
+ paddd %xmm2,%xmm14
+ movdqu %xmm9,32-128(%rdi)
+ paddd %xmm5,%xmm15
+ movdqu %xmm10,64-128(%rdi)
+ movdqu %xmm11,96-128(%rdi)
+ movdqu %xmm12,128-128(%rdi)
+ movdqu %xmm13,160-128(%rdi)
+ movdqu %xmm14,192-128(%rdi)
+ movdqu %xmm15,224-128(%rdi)
+
+ movdqa %xmm7,(%rbx)
+ movdqa .Lpbswap(%rip),%xmm6
+ decl %edx
+ jnz .Loop
+
+ movl 280(%rsp),%edx
+ leaq 16(%rdi),%rdi
+ leaq 64(%rsi),%rsi
+ decl %edx
+ jnz .Loop_grande
+
+.Ldone:
+ movq 272(%rsp),%rax
+ movaps -184(%rax),%xmm6
+ movaps -168(%rax),%xmm7
+ movaps -152(%rax),%xmm8
+ movaps -136(%rax),%xmm9
+ movaps -120(%rax),%xmm10
+ movaps -104(%rax),%xmm11
+ movaps -88(%rax),%xmm12
+ movaps -72(%rax),%xmm13
+ movaps -56(%rax),%xmm14
+ movaps -40(%rax),%xmm15
+ movq -16(%rax),%rbp
+ movq -8(%rax),%rbx
+ leaq (%rax),%rsp
+ movq 8(%rsp),%rdi
+ movq 16(%rsp),%rsi
+ .byte 0xf3,0xc3
+.LSEH_end_sha256_multi_block:
+.p2align 8
+K256:
+.long 1116352408,1116352408,1116352408,1116352408
+.long 1116352408,1116352408,1116352408,1116352408
+.long 1899447441,1899447441,1899447441,1899447441
+.long 1899447441,1899447441,1899447441,1899447441
+.long 3049323471,3049323471,3049323471,3049323471
+.long 3049323471,3049323471,3049323471,3049323471
+.long 3921009573,3921009573,3921009573,3921009573
+.long 3921009573,3921009573,3921009573,3921009573
+.long 961987163,961987163,961987163,961987163
+.long 961987163,961987163,961987163,961987163
+.long 1508970993,1508970993,1508970993,1508970993
+.long 1508970993,1508970993,1508970993,1508970993
+.long 2453635748,2453635748,2453635748,2453635748
+.long 2453635748,2453635748,2453635748,2453635748
+.long 2870763221,2870763221,2870763221,2870763221
+.long 2870763221,2870763221,2870763221,2870763221
+.long 3624381080,3624381080,3624381080,3624381080
+.long 3624381080,3624381080,3624381080,3624381080
+.long 310598401,310598401,310598401,310598401
+.long 310598401,310598401,310598401,310598401
+.long 607225278,607225278,607225278,607225278
+.long 607225278,607225278,607225278,607225278
+.long 1426881987,1426881987,1426881987,1426881987
+.long 1426881987,1426881987,1426881987,1426881987
+.long 1925078388,1925078388,1925078388,1925078388
+.long 1925078388,1925078388,1925078388,1925078388
+.long 2162078206,2162078206,2162078206,2162078206
+.long 2162078206,2162078206,2162078206,2162078206
+.long 2614888103,2614888103,2614888103,2614888103
+.long 2614888103,2614888103,2614888103,2614888103
+.long 3248222580,3248222580,3248222580,3248222580
+.long 3248222580,3248222580,3248222580,3248222580
+.long 3835390401,3835390401,3835390401,3835390401
+.long 3835390401,3835390401,3835390401,3835390401
+.long 4022224774,4022224774,4022224774,4022224774
+.long 4022224774,4022224774,4022224774,4022224774
+.long 264347078,264347078,264347078,264347078
+.long 264347078,264347078,264347078,264347078
+.long 604807628,604807628,604807628,604807628
+.long 604807628,604807628,604807628,604807628
+.long 770255983,770255983,770255983,770255983
+.long 770255983,770255983,770255983,770255983
+.long 1249150122,1249150122,1249150122,1249150122
+.long 1249150122,1249150122,1249150122,1249150122
+.long 1555081692,1555081692,1555081692,1555081692
+.long 1555081692,1555081692,1555081692,1555081692
+.long 1996064986,1996064986,1996064986,1996064986
+.long 1996064986,1996064986,1996064986,1996064986
+.long 2554220882,2554220882,2554220882,2554220882
+.long 2554220882,2554220882,2554220882,2554220882
+.long 2821834349,2821834349,2821834349,2821834349
+.long 2821834349,2821834349,2821834349,2821834349
+.long 2952996808,2952996808,2952996808,2952996808
+.long 2952996808,2952996808,2952996808,2952996808
+.long 3210313671,3210313671,3210313671,3210313671
+.long 3210313671,3210313671,3210313671,3210313671
+.long 3336571891,3336571891,3336571891,3336571891
+.long 3336571891,3336571891,3336571891,3336571891
+.long 3584528711,3584528711,3584528711,3584528711
+.long 3584528711,3584528711,3584528711,3584528711
+.long 113926993,113926993,113926993,113926993
+.long 113926993,113926993,113926993,113926993
+.long 338241895,338241895,338241895,338241895
+.long 338241895,338241895,338241895,338241895
+.long 666307205,666307205,666307205,666307205
+.long 666307205,666307205,666307205,666307205
+.long 773529912,773529912,773529912,773529912
+.long 773529912,773529912,773529912,773529912
+.long 1294757372,1294757372,1294757372,1294757372
+.long 1294757372,1294757372,1294757372,1294757372
+.long 1396182291,1396182291,1396182291,1396182291
+.long 1396182291,1396182291,1396182291,1396182291
+.long 1695183700,1695183700,1695183700,1695183700
+.long 1695183700,1695183700,1695183700,1695183700
+.long 1986661051,1986661051,1986661051,1986661051
+.long 1986661051,1986661051,1986661051,1986661051
+.long 2177026350,2177026350,2177026350,2177026350
+.long 2177026350,2177026350,2177026350,2177026350
+.long 2456956037,2456956037,2456956037,2456956037
+.long 2456956037,2456956037,2456956037,2456956037
+.long 2730485921,2730485921,2730485921,2730485921
+.long 2730485921,2730485921,2730485921,2730485921
+.long 2820302411,2820302411,2820302411,2820302411
+.long 2820302411,2820302411,2820302411,2820302411
+.long 3259730800,3259730800,3259730800,3259730800
+.long 3259730800,3259730800,3259730800,3259730800
+.long 3345764771,3345764771,3345764771,3345764771
+.long 3345764771,3345764771,3345764771,3345764771
+.long 3516065817,3516065817,3516065817,3516065817
+.long 3516065817,3516065817,3516065817,3516065817
+.long 3600352804,3600352804,3600352804,3600352804
+.long 3600352804,3600352804,3600352804,3600352804
+.long 4094571909,4094571909,4094571909,4094571909
+.long 4094571909,4094571909,4094571909,4094571909
+.long 275423344,275423344,275423344,275423344
+.long 275423344,275423344,275423344,275423344
+.long 430227734,430227734,430227734,430227734
+.long 430227734,430227734,430227734,430227734
+.long 506948616,506948616,506948616,506948616
+.long 506948616,506948616,506948616,506948616
+.long 659060556,659060556,659060556,659060556
+.long 659060556,659060556,659060556,659060556
+.long 883997877,883997877,883997877,883997877
+.long 883997877,883997877,883997877,883997877
+.long 958139571,958139571,958139571,958139571
+.long 958139571,958139571,958139571,958139571
+.long 1322822218,1322822218,1322822218,1322822218
+.long 1322822218,1322822218,1322822218,1322822218
+.long 1537002063,1537002063,1537002063,1537002063
+.long 1537002063,1537002063,1537002063,1537002063
+.long 1747873779,1747873779,1747873779,1747873779
+.long 1747873779,1747873779,1747873779,1747873779
+.long 1955562222,1955562222,1955562222,1955562222
+.long 1955562222,1955562222,1955562222,1955562222
+.long 2024104815,2024104815,2024104815,2024104815
+.long 2024104815,2024104815,2024104815,2024104815
+.long 2227730452,2227730452,2227730452,2227730452
+.long 2227730452,2227730452,2227730452,2227730452
+.long 2361852424,2361852424,2361852424,2361852424
+.long 2361852424,2361852424,2361852424,2361852424
+.long 2428436474,2428436474,2428436474,2428436474
+.long 2428436474,2428436474,2428436474,2428436474
+.long 2756734187,2756734187,2756734187,2756734187
+.long 2756734187,2756734187,2756734187,2756734187
+.long 3204031479,3204031479,3204031479,3204031479
+.long 3204031479,3204031479,3204031479,3204031479
+.long 3329325298,3329325298,3329325298,3329325298
+.long 3329325298,3329325298,3329325298,3329325298
+.Lpbswap:
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/sha256-ssse3-x86.s b/lib/accelerated/x86/coff/sha256-ssse3-x86.s
new file mode 100644
index 0000000000..6fe27746ce
--- /dev/null
+++ b/lib/accelerated/x86/coff/sha256-ssse3-x86.s
@@ -0,0 +1,3402 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "sha512-586.s"
+.text
+.globl _sha256_block_data_order
+.def _sha256_block_data_order; .scl 2; .type 32; .endef
+.align 16
+_sha256_block_data_order:
+.L_sha256_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call .L000pic_point
+.L000pic_point:
+ popl %ebp
+ leal .L001K256-.L000pic_point(%ebp),%ebp
+ subl $16,%esp
+ andl $-64,%esp
+ shll $6,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ leal __gnutls_x86_cpuid_s-.L001K256(%ebp),%edx
+ movl (%edx),%ecx
+ movl 4(%edx),%ebx
+ testl $1048576,%ecx
+ jnz .L002loop
+ andl $1073741824,%ecx
+ andl $268435968,%ebx
+ orl %ebx,%ecx
+ andl $1342177280,%ecx
+ cmpl $1342177280,%ecx
+ je .L003loop_shrd
+ subl %edi,%eax
+ cmpl $256,%eax
+ jae .L004unrolled
+ jmp .L002loop
+.align 16
+.L002loop:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ bswap %eax
+ movl 12(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ bswap %eax
+ movl 28(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ bswap %eax
+ movl 44(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ bswap %eax
+ movl 60(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ addl $64,%edi
+ leal -36(%esp),%esp
+ movl %edi,104(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edi
+ movl %ebx,8(%esp)
+ xorl %ecx,%ebx
+ movl %ecx,12(%esp)
+ movl %edi,16(%esp)
+ movl %ebx,(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edi
+ movl %ebx,24(%esp)
+ movl %ecx,28(%esp)
+ movl %edi,32(%esp)
+.align 16
+.L00500_15:
+ movl %edx,%ecx
+ movl 24(%esp),%esi
+ rorl $14,%ecx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl 96(%esp),%ebx
+ rorl $5,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ rorl $9,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ rorl $11,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ rorl $2,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3248222580,%esi
+ jne .L00500_15
+ movl 156(%esp),%ecx
+ jmp .L00616_63
+.align 16
+.L00616_63:
+ movl %ecx,%ebx
+ movl 104(%esp),%esi
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 160(%esp),%ebx
+ shrl $10,%edi
+ addl 124(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 24(%esp),%esi
+ rorl $14,%ecx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl %ebx,96(%esp)
+ rorl $5,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ rorl $9,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ rorl $11,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ rorl $2,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ movl 156(%esp),%ecx
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3329325298,%esi
+ jne .L00616_63
+ movl 356(%esp),%esi
+ movl 8(%esp),%ebx
+ movl 16(%esp),%ecx
+ addl (%esi),%eax
+ addl 4(%esi),%ebx
+ addl 8(%esi),%edi
+ addl 12(%esi),%ecx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %edi,8(%esi)
+ movl %ecx,12(%esi)
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ movl 32(%esp),%ecx
+ movl 360(%esp),%edi
+ addl 16(%esi),%edx
+ addl 20(%esi),%eax
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %eax,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ leal 356(%esp),%esp
+ subl $256,%ebp
+ cmpl 8(%esp),%edi
+ jb .L002loop
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 32
+.L003loop_shrd:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ bswap %eax
+ movl 12(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ bswap %eax
+ movl 28(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ bswap %eax
+ movl 44(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ bswap %eax
+ movl 60(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ addl $64,%edi
+ leal -36(%esp),%esp
+ movl %edi,104(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edi
+ movl %ebx,8(%esp)
+ xorl %ecx,%ebx
+ movl %ecx,12(%esp)
+ movl %edi,16(%esp)
+ movl %ebx,(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edi
+ movl %ebx,24(%esp)
+ movl %ecx,28(%esp)
+ movl %edi,32(%esp)
+.align 16
+.L00700_15_shrd:
+ movl %edx,%ecx
+ movl 24(%esp),%esi
+ shrdl $14,%ecx,%ecx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl 96(%esp),%ebx
+ shrdl $5,%ecx,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ shrdl $6,%edx,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ shrdl $9,%ecx,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ shrdl $11,%ecx,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ shrdl $2,%ecx,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3248222580,%esi
+ jne .L00700_15_shrd
+ movl 156(%esp),%ecx
+ jmp .L00816_63_shrd
+.align 16
+.L00816_63_shrd:
+ movl %ecx,%ebx
+ movl 104(%esp),%esi
+ shrdl $11,%ecx,%ecx
+ movl %esi,%edi
+ shrdl $2,%esi,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ shrdl $7,%ecx,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ shrdl $17,%esi,%esi
+ addl 160(%esp),%ebx
+ shrl $10,%edi
+ addl 124(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 24(%esp),%esi
+ shrdl $14,%ecx,%ecx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl %ebx,96(%esp)
+ shrdl $5,%ecx,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ shrdl $6,%edx,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ shrdl $9,%ecx,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ shrdl $11,%ecx,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ shrdl $2,%ecx,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ movl 156(%esp),%ecx
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3329325298,%esi
+ jne .L00816_63_shrd
+ movl 356(%esp),%esi
+ movl 8(%esp),%ebx
+ movl 16(%esp),%ecx
+ addl (%esi),%eax
+ addl 4(%esi),%ebx
+ addl 8(%esi),%edi
+ addl 12(%esi),%ecx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %edi,8(%esi)
+ movl %ecx,12(%esi)
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ movl 32(%esp),%ecx
+ movl 360(%esp),%edi
+ addl 16(%esi),%edx
+ addl 20(%esi),%eax
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %eax,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ leal 356(%esp),%esp
+ subl $256,%ebp
+ cmpl 8(%esp),%edi
+ jb .L003loop_shrd
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L001K256:
+.long 1116352408,1899447441,3049323471,3921009573,961987163,1508970993,2453635748,2870763221,3624381080,310598401,607225278,1426881987,1925078388,2162078206,2614888103,3248222580,3835390401,4022224774,264347078,604807628,770255983,1249150122,1555081692,1996064986,2554220882,2821834349,2952996808,3210313671,3336571891,3584528711,113926993,338241895,666307205,773529912,1294757372,1396182291,1695183700,1986661051,2177026350,2456956037,2730485921,2820302411,3259730800,3345764771,3516065817,3600352804,4094571909,275423344,430227734,506948616,659060556,883997877,958139571,1322822218,1537002063,1747873779,1955562222,2024104815,2227730452,2361852424,2428436474,2756734187,3204031479,3329325298
+.long 66051,67438087,134810123,202182159
+.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
+.align 16
+.L004unrolled:
+ leal -96(%esp),%esp
+ movl (%esi),%eax
+ movl 4(%esi),%ebp
+ movl 8(%esi),%ecx
+ movl 12(%esi),%ebx
+ movl %ebp,4(%esp)
+ xorl %ecx,%ebp
+ movl %ecx,8(%esp)
+ movl %ebx,12(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%esi
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %esi,28(%esp)
+ jmp .L009grand_loop
+.align 16
+.L009grand_loop:
+ movl (%edi),%ebx
+ movl 4(%edi),%ecx
+ bswap %ebx
+ movl 8(%edi),%esi
+ bswap %ecx
+ movl %ebx,32(%esp)
+ bswap %esi
+ movl %ecx,36(%esp)
+ movl %esi,40(%esp)
+ movl 12(%edi),%ebx
+ movl 16(%edi),%ecx
+ bswap %ebx
+ movl 20(%edi),%esi
+ bswap %ecx
+ movl %ebx,44(%esp)
+ bswap %esi
+ movl %ecx,48(%esp)
+ movl %esi,52(%esp)
+ movl 24(%edi),%ebx
+ movl 28(%edi),%ecx
+ bswap %ebx
+ movl 32(%edi),%esi
+ bswap %ecx
+ movl %ebx,56(%esp)
+ bswap %esi
+ movl %ecx,60(%esp)
+ movl %esi,64(%esp)
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ bswap %ebx
+ movl 44(%edi),%esi
+ bswap %ecx
+ movl %ebx,68(%esp)
+ bswap %esi
+ movl %ecx,72(%esp)
+ movl %esi,76(%esp)
+ movl 48(%edi),%ebx
+ movl 52(%edi),%ecx
+ bswap %ebx
+ movl 56(%edi),%esi
+ bswap %ecx
+ movl %ebx,80(%esp)
+ bswap %esi
+ movl %ecx,84(%esp)
+ movl %esi,88(%esp)
+ movl 60(%edi),%ebx
+ addl $64,%edi
+ bswap %ebx
+ movl %edi,100(%esp)
+ movl %ebx,92(%esp)
+ movl %edx,%ecx
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl 32(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1116352408(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl 36(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1899447441(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl 40(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3049323471(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl 44(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3921009573(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl 48(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 961987163(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl 52(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1508970993(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl 56(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2453635748(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl 60(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2870763221(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl 64(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3624381080(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl 68(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 310598401(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl 72(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 607225278(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl 76(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1426881987(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl 80(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1925078388(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl 84(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2162078206(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl 88(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2614888103(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl 92(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3248222580(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 36(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 88(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 32(%esp),%ebx
+ shrl $10,%edi
+ addl 68(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,32(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3835390401(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 40(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 92(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 36(%esp),%ebx
+ shrl $10,%edi
+ addl 72(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,36(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 4022224774(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 44(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 32(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 40(%esp),%ebx
+ shrl $10,%edi
+ addl 76(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,40(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 264347078(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 36(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 44(%esp),%ebx
+ shrl $10,%edi
+ addl 80(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,44(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 604807628(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 52(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 40(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 48(%esp),%ebx
+ shrl $10,%edi
+ addl 84(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,48(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 770255983(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 44(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 52(%esp),%ebx
+ shrl $10,%edi
+ addl 88(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,52(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1249150122(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 60(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 48(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 56(%esp),%ebx
+ shrl $10,%edi
+ addl 92(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,56(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1555081692(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 64(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 52(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 60(%esp),%ebx
+ shrl $10,%edi
+ addl 32(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,60(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1996064986(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 68(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 56(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 64(%esp),%ebx
+ shrl $10,%edi
+ addl 36(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,64(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2554220882(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 72(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 60(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 68(%esp),%ebx
+ shrl $10,%edi
+ addl 40(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,68(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2821834349(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 76(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 64(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 72(%esp),%ebx
+ shrl $10,%edi
+ addl 44(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,72(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2952996808(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 80(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 68(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 76(%esp),%ebx
+ shrl $10,%edi
+ addl 48(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,76(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3210313671(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 84(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 72(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 80(%esp),%ebx
+ shrl $10,%edi
+ addl 52(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,80(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3336571891(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 88(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 76(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 84(%esp),%ebx
+ shrl $10,%edi
+ addl 56(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,84(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3584528711(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 92(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 80(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 88(%esp),%ebx
+ shrl $10,%edi
+ addl 60(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,88(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 113926993(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 84(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 92(%esp),%ebx
+ shrl $10,%edi
+ addl 64(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,92(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 338241895(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 36(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 88(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 32(%esp),%ebx
+ shrl $10,%edi
+ addl 68(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,32(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 666307205(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 40(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 92(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 36(%esp),%ebx
+ shrl $10,%edi
+ addl 72(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,36(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 773529912(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 44(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 32(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 40(%esp),%ebx
+ shrl $10,%edi
+ addl 76(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,40(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1294757372(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 36(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 44(%esp),%ebx
+ shrl $10,%edi
+ addl 80(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,44(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1396182291(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 52(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 40(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 48(%esp),%ebx
+ shrl $10,%edi
+ addl 84(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,48(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1695183700(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 44(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 52(%esp),%ebx
+ shrl $10,%edi
+ addl 88(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,52(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1986661051(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 60(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 48(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 56(%esp),%ebx
+ shrl $10,%edi
+ addl 92(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,56(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2177026350(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 64(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 52(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 60(%esp),%ebx
+ shrl $10,%edi
+ addl 32(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,60(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2456956037(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 68(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 56(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 64(%esp),%ebx
+ shrl $10,%edi
+ addl 36(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,64(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2730485921(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 72(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 60(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 68(%esp),%ebx
+ shrl $10,%edi
+ addl 40(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,68(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2820302411(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 76(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 64(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 72(%esp),%ebx
+ shrl $10,%edi
+ addl 44(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,72(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3259730800(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 80(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 68(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 76(%esp),%ebx
+ shrl $10,%edi
+ addl 48(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,76(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3345764771(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 84(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 72(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 80(%esp),%ebx
+ shrl $10,%edi
+ addl 52(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,80(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3516065817(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 88(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 76(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 84(%esp),%ebx
+ shrl $10,%edi
+ addl 56(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,84(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3600352804(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 92(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 80(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 88(%esp),%ebx
+ shrl $10,%edi
+ addl 60(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,88(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 4094571909(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 84(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 92(%esp),%ebx
+ shrl $10,%edi
+ addl 64(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,92(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 275423344(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 36(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 88(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 32(%esp),%ebx
+ shrl $10,%edi
+ addl 68(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,32(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 430227734(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 40(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 92(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 36(%esp),%ebx
+ shrl $10,%edi
+ addl 72(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,36(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 506948616(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 44(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 32(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 40(%esp),%ebx
+ shrl $10,%edi
+ addl 76(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,40(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 659060556(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 36(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 44(%esp),%ebx
+ shrl $10,%edi
+ addl 80(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,44(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 883997877(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 52(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 40(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 48(%esp),%ebx
+ shrl $10,%edi
+ addl 84(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,48(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 958139571(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 44(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 52(%esp),%ebx
+ shrl $10,%edi
+ addl 88(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,52(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1322822218(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 60(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 48(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 56(%esp),%ebx
+ shrl $10,%edi
+ addl 92(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,56(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1537002063(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 64(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 52(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 60(%esp),%ebx
+ shrl $10,%edi
+ addl 32(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,60(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1747873779(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 68(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 56(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 64(%esp),%ebx
+ shrl $10,%edi
+ addl 36(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,64(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1955562222(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 72(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 60(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 68(%esp),%ebx
+ shrl $10,%edi
+ addl 40(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,68(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2024104815(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 76(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 64(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 72(%esp),%ebx
+ shrl $10,%edi
+ addl 44(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,72(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2227730452(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 80(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 68(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 76(%esp),%ebx
+ shrl $10,%edi
+ addl 48(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,76(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2361852424(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 84(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 72(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 80(%esp),%ebx
+ shrl $10,%edi
+ addl 52(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,80(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2428436474(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 88(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 76(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 84(%esp),%ebx
+ shrl $10,%edi
+ addl 56(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,84(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2756734187(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 92(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 80(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 88(%esp),%ebx
+ shrl $10,%edi
+ addl 60(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3204031479(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 84(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 92(%esp),%ebx
+ shrl $10,%edi
+ addl 64(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3329325298(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
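+# End of the 64 unrolled rounds: reload the hash-state pointer kept at
+# 96(%esp), add the working variables back into the state, refresh the
+# stack copies, and loop while the input pointer at 100(%esp) is still
+# below the end pointer at 104(%esp).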
+ movl 96(%esp),%esi
+ xorl %edi,%ebp
+ movl 12(%esp),%ecx
+ addl (%esi),%eax
+ addl 4(%esi),%ebp
+ addl 8(%esi),%edi
+ addl 12(%esi),%ecx
+ movl %eax,(%esi)
+ movl %ebp,4(%esi)
+ movl %edi,8(%esi)
+ movl %ecx,12(%esi)
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ movl %edi,8(%esp)
+ movl %ecx,12(%esp)
+ movl 20(%esp),%edi
+ movl 24(%esp),%ebx
+ movl 28(%esp),%ecx
+ addl 16(%esi),%edx
+ addl 20(%esi),%edi
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %edi,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ movl %edi,20(%esp)
+ movl 100(%esp),%edi
+ movl %ebx,24(%esp)
+ movl %ecx,28(%esp)
+ cmpl 104(%esp),%edi
+ jb .L009grand_loop
+ movl 108(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.comm __gnutls_x86_cpuid_s,16
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/sha512-ssse3-x86.s b/lib/accelerated/x86/coff/sha512-ssse3-x86.s
new file mode 100644
index 0000000000..79098da5c2
--- /dev/null
+++ b/lib/accelerated/x86/coff/sha512-ssse3-x86.s
@@ -0,0 +1,605 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "sha512-586.s"
+.text
+.globl _sha512_block_data_order
+.def _sha512_block_data_order; .scl 2; .type 32; .endef
+.align 16
+_sha512_block_data_order:
+.L_sha512_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call .L000pic_point
+.L000pic_point:
+ popl %ebp
+ leal .L001K512-.L000pic_point(%ebp),%ebp
+ subl $16,%esp
+ andl $-64,%esp
+ shll $7,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
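+# Plain 32-bit x86 SHA-512 path: the three arguments (context, input,
+# number of 128-byte blocks) are fetched from the caller's stack, the block
+# count is scaled by 128 to form an end pointer, %ebp is pointed at the
+# .L001K512 table via the PIC trick above, and the context, input, end
+# pointer and saved %esp are stored at 0..12(%esp) of the aligned frame.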
+.align 16
+.L002loop_x86:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ movl 28(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ movl 44(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ movl 60(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 64(%edi),%eax
+ movl 68(%edi),%ebx
+ movl 72(%edi),%ecx
+ movl 76(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 80(%edi),%eax
+ movl 84(%edi),%ebx
+ movl 88(%edi),%ecx
+ movl 92(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 96(%edi),%eax
+ movl 100(%edi),%ebx
+ movl 104(%edi),%ecx
+ movl 108(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 112(%edi),%eax
+ movl 116(%edi),%ebx
+ movl 120(%edi),%ecx
+ movl 124(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ addl $128,%edi
+ subl $72,%esp
+ movl %edi,204(%esp)
+ leal 8(%esp),%edi
+ movl $16,%ecx
+.long 2784229001
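+# The data word above encodes "mov %esi,%esi; rep movsd" (bytes 0x89 0xF6
+# 0xF3 0xA5): with %ecx=16 it copies 64 bytes from the context in %esi into
+# the working area at 8(%esp), i.e. the eight 64-bit hash words.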
+.align 16
+.L00300_15_x86:
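+# Rounds 0..15: each 64-bit rotation of the big Sigma functions is built
+# from paired shll/shrl/xorl operations on the two 32-bit halves, Ch and
+# Maj are likewise computed half by half, and the 64-bit additions use
+# addl/adcl.  The loop exits once the low byte of the current round
+# constant is 0x94 (148), which identifies K[15].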
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $148,%dl
+ jne .L00300_15_x86
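+# Rounds 16..79: first extend the message schedule, computing sigma0 and
+# sigma1 of earlier W[] words on their 32-bit halves and adding in W[t-7]
+# and W[t-16], then apply the same round function as above.  The loop ends
+# when the low byte of the round constant reaches 0x17 (23), i.e. K[79].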
+.align 16
+.L00416_79_x86:
+ movl 312(%esp),%ecx
+ movl 316(%esp),%edx
+ movl %ecx,%esi
+ shrl $1,%ecx
+ movl %edx,%edi
+ shrl $1,%edx
+ movl %ecx,%eax
+ shll $24,%esi
+ movl %edx,%ebx
+ shll $24,%edi
+ xorl %esi,%ebx
+ shrl $6,%ecx
+ xorl %edi,%eax
+ shrl $6,%edx
+ xorl %ecx,%eax
+ shll $7,%esi
+ xorl %edx,%ebx
+ shll $1,%edi
+ xorl %esi,%ebx
+ shrl $1,%ecx
+ xorl %edi,%eax
+ shrl $1,%edx
+ xorl %ecx,%eax
+ shll $6,%edi
+ xorl %edx,%ebx
+ xorl %edi,%eax
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl 208(%esp),%ecx
+ movl 212(%esp),%edx
+ movl %ecx,%esi
+ shrl $6,%ecx
+ movl %edx,%edi
+ shrl $6,%edx
+ movl %ecx,%eax
+ shll $3,%esi
+ movl %edx,%ebx
+ shll $3,%edi
+ xorl %esi,%eax
+ shrl $13,%ecx
+ xorl %edi,%ebx
+ shrl $13,%edx
+ xorl %ecx,%eax
+ shll $10,%esi
+ xorl %edx,%ebx
+ shll $10,%edi
+ xorl %esi,%ebx
+ shrl $10,%ecx
+ xorl %edi,%eax
+ shrl $10,%edx
+ xorl %ecx,%ebx
+ shll $13,%edi
+ xorl %edx,%eax
+ xorl %edi,%eax
+ movl 320(%esp),%ecx
+ movl 324(%esp),%edx
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ movl 248(%esp),%esi
+ movl 252(%esp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,192(%esp)
+ movl %ebx,196(%esp)
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $23,%dl
+ jne .L00416_79_x86
+ movl 840(%esp),%esi
+ movl 844(%esp),%edi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ addl 8(%esp),%eax
+ adcl 12(%esp),%ebx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ addl 16(%esp),%ecx
+ adcl 20(%esp),%edx
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ addl 24(%esp),%eax
+ adcl 28(%esp),%ebx
+ movl %eax,16(%esi)
+ movl %ebx,20(%esi)
+ addl 32(%esp),%ecx
+ adcl 36(%esp),%edx
+ movl %ecx,24(%esi)
+ movl %edx,28(%esi)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ addl 40(%esp),%eax
+ adcl 44(%esp),%ebx
+ movl %eax,32(%esi)
+ movl %ebx,36(%esi)
+ addl 48(%esp),%ecx
+ adcl 52(%esp),%edx
+ movl %ecx,40(%esi)
+ movl %edx,44(%esi)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ addl 56(%esp),%eax
+ adcl 60(%esp),%ebx
+ movl %eax,48(%esi)
+ movl %ebx,52(%esi)
+ addl 64(%esp),%ecx
+ adcl 68(%esp),%edx
+ movl %ecx,56(%esi)
+ movl %edx,60(%esi)
+ addl $840,%esp
+ subl $640,%ebp
+ cmpl 8(%esp),%edi
+ jb .L002loop_x86
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L001K512:
+.long 3609767458,1116352408
+.long 602891725,1899447441
+.long 3964484399,3049323471
+.long 2173295548,3921009573
+.long 4081628472,961987163
+.long 3053834265,1508970993
+.long 2937671579,2453635748
+.long 3664609560,2870763221
+.long 2734883394,3624381080
+.long 1164996542,310598401
+.long 1323610764,607225278
+.long 3590304994,1426881987
+.long 4068182383,1925078388
+.long 991336113,2162078206
+.long 633803317,2614888103
+.long 3479774868,3248222580
+.long 2666613458,3835390401
+.long 944711139,4022224774
+.long 2341262773,264347078
+.long 2007800933,604807628
+.long 1495990901,770255983
+.long 1856431235,1249150122
+.long 3175218132,1555081692
+.long 2198950837,1996064986
+.long 3999719339,2554220882
+.long 766784016,2821834349
+.long 2566594879,2952996808
+.long 3203337956,3210313671
+.long 1034457026,3336571891
+.long 2466948901,3584528711
+.long 3758326383,113926993
+.long 168717936,338241895
+.long 1188179964,666307205
+.long 1546045734,773529912
+.long 1522805485,1294757372
+.long 2643833823,1396182291
+.long 2343527390,1695183700
+.long 1014477480,1986661051
+.long 1206759142,2177026350
+.long 344077627,2456956037
+.long 1290863460,2730485921
+.long 3158454273,2820302411
+.long 3505952657,3259730800
+.long 106217008,3345764771
+.long 3606008344,3516065817
+.long 1432725776,3600352804
+.long 1467031594,4094571909
+.long 851169720,275423344
+.long 3100823752,430227734
+.long 1363258195,506948616
+.long 3750685593,659060556
+.long 3785050280,883997877
+.long 3318307427,958139571
+.long 3812723403,1322822218
+.long 2003034995,1537002063
+.long 3602036899,1747873779
+.long 1575990012,1955562222
+.long 1125592928,2024104815
+.long 2716904306,2227730452
+.long 442776044,2361852424
+.long 593698344,2428436474
+.long 3733110249,2756734187
+.long 2999351573,3204031479
+.long 3815920427,3329325298
+.long 3928383900,3391569614
+.long 566280711,3515267271
+.long 3454069534,3940187606
+.long 4000239992,4118630271
+.long 1914138554,116418474
+.long 2731055270,174292421
+.long 3203993006,289380356
+.long 320620315,460393269
+.long 587496836,685471733
+.long 1086792851,852142971
+.long 365543100,1017036298
+.long 2618297676,1126000580
+.long 3409855158,1288033470
+.long 4234509866,1501505948
+.long 987167468,1607167915
+.long 1246189591,1816402316
+.long 67438087,66051
+.long 202182159,134810123
+.byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/coff/sha512-ssse3-x86_64.s b/lib/accelerated/x86/coff/sha512-ssse3-x86_64.s
new file mode 100644
index 0000000000..bbb2661f26
--- /dev/null
+++ b/lib/accelerated/x86/coff/sha512-ssse3-x86_64.s
@@ -0,0 +1,3025 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.text
+
+
+.globl sha256_block_data_order
+.def sha256_block_data_order; .scl 2; .type 32; .endef
+.p2align 4
+sha256_block_data_order:
+ movq %rdi,8(%rsp)
+ movq %rsi,16(%rsp)
+ movq %rsp,%rax
+.LSEH_begin_sha256_block_data_order:
+ movq %rcx,%rdi
+ movq %rdx,%rsi
+ movq %r8,%rdx
+
+ leaq _gnutls_x86_cpuid_s(%rip),%r11
+ movl 0(%r11),%r9d
+ movl 4(%r11),%r10d
+ movl 8(%r11),%r11d
+ testl $512,%r10d
+ jnz .Lssse3_shortcut
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp,%r11
+ shlq $4,%rdx
+ subq $64+32,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+.Lprologue:
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+ jmp .Lloop
+
+.p2align 4
+.Lloop:
+ movl %ebx,%edi
+ leaq K256(%rip),%rbp
+ xorl %ecx,%edi
+ movl 0(%rsi),%r12d
+ movl %r8d,%r13d
+ movl %eax,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,0(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r11d
+ movl 4(%rsi),%r12d
+ movl %edx,%r13d
+ movl %r11d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,4(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r10d
+ movl 8(%rsi),%r12d
+ movl %ecx,%r13d
+ movl %r10d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,8(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r9d
+ movl 12(%rsi),%r12d
+ movl %ebx,%r13d
+ movl %r9d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,12(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%r8d
+ movl 16(%rsi),%r12d
+ movl %eax,%r13d
+ movl %r8d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,16(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%edx
+ movl 20(%rsi),%r12d
+ movl %r11d,%r13d
+ movl %edx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,20(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ecx
+ movl 24(%rsi),%r12d
+ movl %r10d,%r13d
+ movl %ecx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,24(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ebx
+ movl 28(%rsi),%r12d
+ movl %r9d,%r13d
+ movl %ebx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,28(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%eax
+ movl 32(%rsi),%r12d
+ movl %r8d,%r13d
+ movl %eax,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,32(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r11d
+ movl 36(%rsi),%r12d
+ movl %edx,%r13d
+ movl %r11d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,36(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r10d
+ movl 40(%rsi),%r12d
+ movl %ecx,%r13d
+ movl %r10d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,40(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r9d
+ movl 44(%rsi),%r12d
+ movl %ebx,%r13d
+ movl %r9d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,44(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%r8d
+ movl 48(%rsi),%r12d
+ movl %eax,%r13d
+ movl %r8d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,48(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%edx
+ movl 52(%rsi),%r12d
+ movl %r11d,%r13d
+ movl %edx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,52(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ecx
+ movl 56(%rsi),%r12d
+ movl %r10d,%r13d
+ movl %ecx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,56(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ebx
+ movl 60(%rsi),%r12d
+ movl %r9d,%r13d
+ movl %ebx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,60(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ jmp .Lrounds_16_xx
+.p2align 4
+.Lrounds_16_xx:
+ movl 4(%rsp),%r13d
+ movl 56(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%eax
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 36(%rsp),%r12d
+
+ addl 0(%rsp),%r12d
+ movl %r8d,%r13d
+ addl %r15d,%r12d
+ movl %eax,%r14d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,0(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ movl 8(%rsp),%r13d
+ movl 60(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r11d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 40(%rsp),%r12d
+
+ addl 4(%rsp),%r12d
+ movl %edx,%r13d
+ addl %edi,%r12d
+ movl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,4(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ movl 12(%rsp),%r13d
+ movl 0(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r10d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 44(%rsp),%r12d
+
+ addl 8(%rsp),%r12d
+ movl %ecx,%r13d
+ addl %r15d,%r12d
+ movl %r10d,%r14d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,8(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ movl 16(%rsp),%r13d
+ movl 4(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r9d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 48(%rsp),%r12d
+
+ addl 12(%rsp),%r12d
+ movl %ebx,%r13d
+ addl %edi,%r12d
+ movl %r9d,%r14d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,12(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ movl 20(%rsp),%r13d
+ movl 8(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r8d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 52(%rsp),%r12d
+
+ addl 16(%rsp),%r12d
+ movl %eax,%r13d
+ addl %r15d,%r12d
+ movl %r8d,%r14d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,16(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ movl 24(%rsp),%r13d
+ movl 12(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%edx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 56(%rsp),%r12d
+
+ addl 20(%rsp),%r12d
+ movl %r11d,%r13d
+ addl %edi,%r12d
+ movl %edx,%r14d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,20(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ movl 28(%rsp),%r13d
+ movl 16(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ecx
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 60(%rsp),%r12d
+
+ addl 24(%rsp),%r12d
+ movl %r10d,%r13d
+ addl %r15d,%r12d
+ movl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,24(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ movl 32(%rsp),%r13d
+ movl 20(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ebx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 0(%rsp),%r12d
+
+ addl 28(%rsp),%r12d
+ movl %r9d,%r13d
+ addl %edi,%r12d
+ movl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,28(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ movl 36(%rsp),%r13d
+ movl 24(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%eax
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 4(%rsp),%r12d
+
+ addl 32(%rsp),%r12d
+ movl %r8d,%r13d
+ addl %r15d,%r12d
+ movl %eax,%r14d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,32(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ movl 40(%rsp),%r13d
+ movl 28(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r11d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 8(%rsp),%r12d
+
+ addl 36(%rsp),%r12d
+ movl %edx,%r13d
+ addl %edi,%r12d
+ movl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,36(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ movl 44(%rsp),%r13d
+ movl 32(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r10d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 12(%rsp),%r12d
+
+ addl 40(%rsp),%r12d
+ movl %ecx,%r13d
+ addl %r15d,%r12d
+ movl %r10d,%r14d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,40(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ movl 48(%rsp),%r13d
+ movl 36(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r9d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 16(%rsp),%r12d
+
+ addl 44(%rsp),%r12d
+ movl %ebx,%r13d
+ addl %edi,%r12d
+ movl %r9d,%r14d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,44(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ movl 52(%rsp),%r13d
+ movl 40(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r8d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 20(%rsp),%r12d
+
+ addl 48(%rsp),%r12d
+ movl %eax,%r13d
+ addl %r15d,%r12d
+ movl %r8d,%r14d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,48(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ movl 56(%rsp),%r13d
+ movl 44(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%edx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 24(%rsp),%r12d
+
+ addl 52(%rsp),%r12d
+ movl %r11d,%r13d
+ addl %edi,%r12d
+ movl %edx,%r14d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,52(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ movl 60(%rsp),%r13d
+ movl 48(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ecx
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 28(%rsp),%r12d
+
+ addl 56(%rsp),%r12d
+ movl %r10d,%r13d
+ addl %r15d,%r12d
+ movl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,56(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ movl 0(%rsp),%r13d
+ movl 52(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ebx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 32(%rsp),%r12d
+
+ addl 60(%rsp),%r12d
+ movl %r9d,%r13d
+ addl %edi,%r12d
+ movl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,60(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ cmpb $0,3(%rbp)
+ jnz .Lrounds_16_xx
+
+ movq 64+0(%rsp),%rdi
+ addl %r14d,%eax
+ leaq 64(%rsi),%rsi
+
+ addl 0(%rdi),%eax
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb .Lloop
+
+ movq 64+24(%rsp),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Lepilogue:
+ movq 8(%rsp),%rdi
+ movq 16(%rsp),%rsi
+ .byte 0xf3,0xc3
+.LSEH_end_sha256_block_data_order:
+.p2align 6
+
+K256:
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.def sha256_block_data_order_ssse3; .scl 3; .type 32; .endef
+.p2align 6
+sha256_block_data_order_ssse3:
+ movq %rdi,8(%rsp)
+ movq %rsi,16(%rsp)
+ movq %rsp,%rax
+.LSEH_begin_sha256_block_data_order_ssse3:
+ movq %rcx,%rdi
+ movq %rdx,%rsi
+ movq %r8,%rdx
+
+.Lssse3_shortcut:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp,%r11
+ shlq $4,%rdx
+ subq $160,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+ movaps %xmm6,64+32(%rsp)
+ movaps %xmm7,64+48(%rsp)
+ movaps %xmm8,64+64(%rsp)
+ movaps %xmm9,64+80(%rsp)
+.Lprologue_ssse3:
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+
+
+ jmp .Lloop_ssse3
+.p2align 4
+.Lloop_ssse3:
+ movdqa K256+512(%rip),%xmm7
+ movdqu 0(%rsi),%xmm0
+ movdqu 16(%rsi),%xmm1
+ movdqu 32(%rsi),%xmm2
+ movdqu 48(%rsi),%xmm3
+.byte 102,15,56,0,199
+ leaq K256(%rip),%rbp
+.byte 102,15,56,0,207
+ movdqa 0(%rbp),%xmm4
+.byte 102,15,56,0,215
+ movdqa 32(%rbp),%xmm5
+ paddd %xmm0,%xmm4
+ movdqa 64(%rbp),%xmm6
+.byte 102,15,56,0,223
+ movdqa 96(%rbp),%xmm7
+ paddd %xmm1,%xmm5
+ paddd %xmm2,%xmm6
+ paddd %xmm3,%xmm7
+ movdqa %xmm4,0(%rsp)
+ movl %eax,%r14d
+ movdqa %xmm5,16(%rsp)
+ movl %ebx,%edi
+ movdqa %xmm6,32(%rsp)
+ xorl %ecx,%edi
+ movdqa %xmm7,48(%rsp)
+ movl %r8d,%r13d
+ jmp .Lssse3_00_47
+
+.p2align 4
+.Lssse3_00_47:
+ subq $-32*4,%rbp
+ rorl $14,%r13d
+ movdqa %xmm1,%xmm4
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ movdqa %xmm3,%xmm7
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+.byte 102,15,58,15,224,4
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+.byte 102,15,58,15,250,4
+ addl 0(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ paddd %xmm7,%xmm0
+ rorl $2,%r14d
+ addl %r11d,%edx
+ psrld $7,%xmm6
+ addl %edi,%r11d
+ movl %edx,%r13d
+ pshufd $250,%xmm3,%xmm7
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %r11d,%r14d
+ pxor %xmm5,%xmm4
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ pslld $11,%xmm5
+ addl 4(%rsp),%r10d
+ movl %r11d,%edi
+ pxor %xmm6,%xmm4
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ paddd %xmm4,%xmm0
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ psrlq $17,%xmm6
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %ecx,%r13d
+ addl 8(%rsp),%r9d
+ movl %r10d,%r15d
+ psrldq $8,%xmm7
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ paddd %xmm7,%xmm0
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ pshufd $80,%xmm0,%xmm7
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ movdqa %xmm7,%xmm6
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ psrld $10,%xmm7
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ psrlq $2,%xmm6
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 12(%rsp),%r8d
+ pxor %xmm6,%xmm7
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ movdqa 0(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ paddd %xmm7,%xmm0
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ paddd %xmm0,%xmm6
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ movdqa %xmm6,0(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm2,%xmm4
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ movdqa %xmm0,%xmm7
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+.byte 102,15,58,15,225,4
+ andl %eax,%r12d
+ xorl %eax,%r13d
+.byte 102,15,58,15,251,4
+ addl 16(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ paddd %xmm7,%xmm1
+ rorl $2,%r14d
+ addl %edx,%r11d
+ psrld $7,%xmm6
+ addl %edi,%edx
+ movl %r11d,%r13d
+ pshufd $250,%xmm0,%xmm7
+ addl %edx,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%edx
+ movl %eax,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %edx,%r14d
+ pxor %xmm5,%xmm4
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ pslld $11,%xmm5
+ addl 20(%rsp),%ecx
+ movl %edx,%edi
+ pxor %xmm6,%xmm4
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ paddd %xmm4,%xmm1
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ psrlq $17,%xmm6
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %r10d,%r13d
+ addl 24(%rsp),%ebx
+ movl %ecx,%r15d
+ psrldq $8,%xmm7
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ paddd %xmm7,%xmm1
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ pshufd $80,%xmm1,%xmm7
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ movdqa %xmm7,%xmm6
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ psrld $10,%xmm7
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ psrlq $2,%xmm6
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 28(%rsp),%eax
+ pxor %xmm6,%xmm7
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ movdqa 32(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ paddd %xmm7,%xmm1
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ paddd %xmm1,%xmm6
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movdqa %xmm6,16(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm3,%xmm4
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ movdqa %xmm1,%xmm7
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+.byte 102,15,58,15,226,4
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+.byte 102,15,58,15,248,4
+ addl 32(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ paddd %xmm7,%xmm2
+ rorl $2,%r14d
+ addl %r11d,%edx
+ psrld $7,%xmm6
+ addl %edi,%r11d
+ movl %edx,%r13d
+ pshufd $250,%xmm1,%xmm7
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %r11d,%r14d
+ pxor %xmm5,%xmm4
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ pslld $11,%xmm5
+ addl 36(%rsp),%r10d
+ movl %r11d,%edi
+ pxor %xmm6,%xmm4
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ paddd %xmm4,%xmm2
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ psrlq $17,%xmm6
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %ecx,%r13d
+ addl 40(%rsp),%r9d
+ movl %r10d,%r15d
+ psrldq $8,%xmm7
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ paddd %xmm7,%xmm2
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ pshufd $80,%xmm2,%xmm7
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ movdqa %xmm7,%xmm6
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ psrld $10,%xmm7
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ psrlq $2,%xmm6
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 44(%rsp),%r8d
+ pxor %xmm6,%xmm7
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ movdqa 64(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ paddd %xmm7,%xmm2
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ paddd %xmm2,%xmm6
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ movdqa %xmm6,32(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm0,%xmm4
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ movdqa %xmm2,%xmm7
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+.byte 102,15,58,15,227,4
+ andl %eax,%r12d
+ xorl %eax,%r13d
+.byte 102,15,58,15,249,4
+ addl 48(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ paddd %xmm7,%xmm3
+ rorl $2,%r14d
+ addl %edx,%r11d
+ psrld $7,%xmm6
+ addl %edi,%edx
+ movl %r11d,%r13d
+ pshufd $250,%xmm2,%xmm7
+ addl %edx,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%edx
+ movl %eax,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %edx,%r14d
+ pxor %xmm5,%xmm4
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ pslld $11,%xmm5
+ addl 52(%rsp),%ecx
+ movl %edx,%edi
+ pxor %xmm6,%xmm4
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ paddd %xmm4,%xmm3
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ psrlq $17,%xmm6
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %r10d,%r13d
+ addl 56(%rsp),%ebx
+ movl %ecx,%r15d
+ psrldq $8,%xmm7
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ paddd %xmm7,%xmm3
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ pshufd $80,%xmm3,%xmm7
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ movdqa %xmm7,%xmm6
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ psrld $10,%xmm7
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ psrlq $2,%xmm6
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 60(%rsp),%eax
+ pxor %xmm6,%xmm7
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ movdqa 96(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ paddd %xmm7,%xmm3
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ paddd %xmm3,%xmm6
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movdqa %xmm6,48(%rsp)
+ cmpb $0,131(%rbp)
+ jne .Lssse3_00_47
+ rorl $14,%r13d
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+ addl 0(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ rorl $2,%r14d
+ addl %r11d,%edx
+ addl %edi,%r11d
+ movl %edx,%r13d
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ xorl %r11d,%r14d
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ addl 4(%rsp),%r10d
+ movl %r11d,%edi
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ xorl %ecx,%r13d
+ addl 8(%rsp),%r9d
+ movl %r10d,%r15d
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 12(%rsp),%r8d
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+ andl %eax,%r12d
+ xorl %eax,%r13d
+ addl 16(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ rorl $2,%r14d
+ addl %edx,%r11d
+ addl %edi,%edx
+ movl %r11d,%r13d
+ addl %edx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%edx
+ movl %eax,%r12d
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ xorl %edx,%r14d
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ addl 20(%rsp),%ecx
+ movl %edx,%edi
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ xorl %r10d,%r13d
+ addl 24(%rsp),%ebx
+ movl %ecx,%r15d
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 28(%rsp),%eax
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ rorl $14,%r13d
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+ addl 32(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ rorl $2,%r14d
+ addl %r11d,%edx
+ addl %edi,%r11d
+ movl %edx,%r13d
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ xorl %r11d,%r14d
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ addl 36(%rsp),%r10d
+ movl %r11d,%edi
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ xorl %ecx,%r13d
+ addl 40(%rsp),%r9d
+ movl %r10d,%r15d
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 44(%rsp),%r8d
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+ andl %eax,%r12d
+ xorl %eax,%r13d
+ addl 48(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ rorl $2,%r14d
+ addl %edx,%r11d
+ addl %edi,%edx
+ movl %r11d,%r13d
+ addl %edx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%edx
+ movl %eax,%r12d
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ xorl %edx,%r14d
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ addl 52(%rsp),%ecx
+ movl %edx,%edi
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ xorl %r10d,%r13d
+ addl 56(%rsp),%ebx
+ movl %ecx,%r15d
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 60(%rsp),%eax
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movq 64+0(%rsp),%rdi
+ movl %r14d,%eax
+
+ addl 0(%rdi),%eax
+ leaq 64(%rsi),%rsi
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb .Lloop_ssse3
+
+ movq 64+24(%rsp),%rsi
+ movaps 64+32(%rsp),%xmm6
+ movaps 64+48(%rsp),%xmm7
+ movaps 64+64(%rsp),%xmm8
+ movaps 64+80(%rsp),%xmm9
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Lepilogue_ssse3:
+ movq 8(%rsp),%rdi
+ movq 16(%rsp),%rsi
+ .byte 0xf3,0xc3
+.LSEH_end_sha256_block_data_order_ssse3:
+
+.def se_handler; .scl 3; .type 32; .endef
+.p2align 4
+se_handler:
+ pushq %rsi
+ pushq %rdi
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ pushfq
+ subq $64,%rsp
+
+ movq 120(%r8),%rax
+ movq 248(%r8),%rbx
+
+ movq 8(%r9),%rsi
+ movq 56(%r9),%r11
+
+ movl 0(%r11),%r10d
+ leaq (%rsi,%r10,1),%r10
+ cmpq %r10,%rbx
+ jb .Lin_prologue
+
+ movq 152(%r8),%rax
+
+ movl 4(%r11),%r10d
+ leaq (%rsi,%r10,1),%r10
+ cmpq %r10,%rbx
+ jae .Lin_prologue
+ movq %rax,%rsi
+ movq 64+24(%rax),%rax
+ leaq 48(%rax),%rax
+
+ movq -8(%rax),%rbx
+ movq -16(%rax),%rbp
+ movq -24(%rax),%r12
+ movq -32(%rax),%r13
+ movq -40(%rax),%r14
+ movq -48(%rax),%r15
+ movq %rbx,144(%r8)
+ movq %rbp,160(%r8)
+ movq %r12,216(%r8)
+ movq %r13,224(%r8)
+ movq %r14,232(%r8)
+ movq %r15,240(%r8)
+
+ leaq .Lepilogue(%rip),%r10
+ cmpq %r10,%rbx
+ jb .Lin_prologue
+
+ leaq 64+32(%rsi),%rsi
+ leaq 512(%r8),%rdi
+ movl $8,%ecx
+.long 0xa548f3fc
+
+.Lin_prologue:
+ movq 8(%rax),%rdi
+ movq 16(%rax),%rsi
+ movq %rax,152(%r8)
+ movq %rsi,168(%r8)
+ movq %rdi,176(%r8)
+
+ movq 40(%r9),%rdi
+ movq %r8,%rsi
+ movl $154,%ecx
+.long 0xa548f3fc
+
+ movq %r9,%rsi
+ xorq %rcx,%rcx
+ movq 8(%rsi),%rdx
+ movq 0(%rsi),%r8
+ movq 16(%rsi),%r9
+ movq 40(%rsi),%r10
+ leaq 56(%rsi),%r11
+ leaq 24(%rsi),%r12
+ movq %r10,32(%rsp)
+ movq %r11,40(%rsp)
+ movq %r12,48(%rsp)
+ movq %rcx,56(%rsp)
+ call *__imp_RtlVirtualUnwind(%rip)
+
+ movl $1,%eax
+ addq $64,%rsp
+ popfq
+ popq %r15
+ popq %r14
+ popq %r13
+ popq %r12
+ popq %rbp
+ popq %rbx
+ popq %rdi
+ popq %rsi
+ .byte 0xf3,0xc3
+
+
+.section .pdata
+.p2align 2
+.rva .LSEH_begin_sha256_block_data_order
+.rva .LSEH_end_sha256_block_data_order
+.rva .LSEH_info_sha256_block_data_order
+.rva .LSEH_begin_sha256_block_data_order_ssse3
+.rva .LSEH_end_sha256_block_data_order_ssse3
+.rva .LSEH_info_sha256_block_data_order_ssse3
+.section .xdata
+.p2align 3
+.LSEH_info_sha256_block_data_order:
+.byte 9,0,0,0
+.rva se_handler
+.rva .Lprologue,.Lepilogue
+.LSEH_info_sha256_block_data_order_ssse3:
+.byte 9,0,0,0
+.rva se_handler
+.rva .Lprologue_ssse3,.Lepilogue_ssse3
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/elf/appro-aes-x86.s b/lib/accelerated/x86/elf/aesni-x86.s
index 5f0779756c..5f0779756c 100644
--- a/lib/accelerated/x86/elf/appro-aes-x86.s
+++ b/lib/accelerated/x86/elf/aesni-x86.s
diff --git a/lib/accelerated/x86/elf/appro-aes-x86-64.s b/lib/accelerated/x86/elf/aesni-x86_64.s
index d3734a6edd..d3734a6edd 100644
--- a/lib/accelerated/x86/elf/appro-aes-x86-64.s
+++ b/lib/accelerated/x86/elf/aesni-x86_64.s
diff --git a/lib/accelerated/x86/elf/cpuid-x86-64.s b/lib/accelerated/x86/elf/cpuid-x86_64.s
index 41a0061f71..41a0061f71 100644
--- a/lib/accelerated/x86/elf/cpuid-x86-64.s
+++ b/lib/accelerated/x86/elf/cpuid-x86_64.s
diff --git a/lib/accelerated/x86/elf/padlock-x86.s b/lib/accelerated/x86/elf/e_padlock-x86.s
index 2199255efe..2199255efe 100644
--- a/lib/accelerated/x86/elf/padlock-x86.s
+++ b/lib/accelerated/x86/elf/e_padlock-x86.s
diff --git a/lib/accelerated/x86/elf/padlock-x86-64.s b/lib/accelerated/x86/elf/e_padlock-x86_64.s
index 2ac113d72c..2ac113d72c 100644
--- a/lib/accelerated/x86/elf/padlock-x86-64.s
+++ b/lib/accelerated/x86/elf/e_padlock-x86_64.s
diff --git a/lib/accelerated/x86/elf/appro-aes-gcm-x86-64.s b/lib/accelerated/x86/elf/ghash-x86_64.s
index 9755951f7b..9755951f7b 100644
--- a/lib/accelerated/x86/elf/appro-aes-gcm-x86-64.s
+++ b/lib/accelerated/x86/elf/ghash-x86_64.s
diff --git a/lib/accelerated/x86/elf/sha1-ssse3-x86.s b/lib/accelerated/x86/elf/sha1-ssse3-x86.s
new file mode 100644
index 0000000000..e2f22e7c7e
--- /dev/null
+++ b/lib/accelerated/x86/elf/sha1-ssse3-x86.s
@@ -0,0 +1,1421 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "sha1-586.s"
+.text
+.globl sha1_block_data_order
+.type sha1_block_data_order,@function
+.align 16
+sha1_block_data_order:
+.L_sha1_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%ebp
+ movl 24(%esp),%esi
+ movl 28(%esp),%eax
+ subl $76,%esp
+ shll $6,%eax
+ addl %esi,%eax
+ movl %eax,104(%esp)
+ movl 16(%ebp),%edi
+ jmp .L000loop
+.align 16
+.L000loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %edx,28(%esp)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edx,44(%esp)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,48(%esp)
+ movl %ebx,52(%esp)
+ movl %ecx,56(%esp)
+ movl %edx,60(%esp)
+ movl %esi,100(%esp)
+ movl (%ebp),%eax
+ movl 4(%ebp),%ebx
+ movl 8(%ebp),%ecx
+ movl 12(%ebp),%edx
+
+ movl %ecx,%esi
+ movl %eax,%ebp
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ movl (%esp),%edi
+ andl %ebx,%esi
+ rorl $2,%ebx
+ xorl %edx,%esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ movl 4(%esp),%edx
+ andl %eax,%edi
+ rorl $2,%eax
+ xorl %ecx,%edi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ movl 8(%esp),%ecx
+ andl %esi,%edx
+ rorl $2,%esi
+ xorl %ebx,%edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ movl 12(%esp),%ebx
+ andl %edi,%ecx
+ rorl $2,%edi
+ xorl %eax,%ecx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ movl 16(%esp),%eax
+ andl %edx,%ebx
+ rorl $2,%edx
+ xorl %esi,%ebx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ movl 20(%esp),%esi
+ andl %ecx,%eax
+ rorl $2,%ecx
+ xorl %edi,%eax
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ movl 24(%esp),%edi
+ andl %ebx,%esi
+ rorl $2,%ebx
+ xorl %edx,%esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ movl 28(%esp),%edx
+ andl %eax,%edi
+ rorl $2,%eax
+ xorl %ecx,%edi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ movl 32(%esp),%ecx
+ andl %esi,%edx
+ rorl $2,%esi
+ xorl %ebx,%edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ movl 36(%esp),%ebx
+ andl %edi,%ecx
+ rorl $2,%edi
+ xorl %eax,%ecx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ movl 40(%esp),%eax
+ andl %edx,%ebx
+ rorl $2,%edx
+ xorl %esi,%ebx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ movl 44(%esp),%esi
+ andl %ecx,%eax
+ rorl $2,%ecx
+ xorl %edi,%eax
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ movl 48(%esp),%edi
+ andl %ebx,%esi
+ rorl $2,%ebx
+ xorl %edx,%esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ movl 52(%esp),%edx
+ andl %eax,%edi
+ rorl $2,%eax
+ xorl %ecx,%edi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ movl 56(%esp),%ecx
+ andl %esi,%edx
+ rorl $2,%esi
+ xorl %ebx,%edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ movl 60(%esp),%ebx
+ andl %edi,%ecx
+ rorl $2,%edi
+ xorl %eax,%ecx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ movl (%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edi,%ebp
+ xorl 8(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 32(%esp),%ebx
+ andl %edx,%ebp
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ xorl %esi,%ebp
+ addl %ebp,%eax
+ movl %ecx,%ebp
+ rorl $2,%edx
+ movl %ebx,(%esp)
+ roll $5,%ebp
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl 4(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %edx,%ebp
+ xorl 12(%esp),%eax
+ xorl %edi,%ebp
+ xorl 36(%esp),%eax
+ andl %ecx,%ebp
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ xorl %edi,%ebp
+ addl %ebp,%esi
+ movl %ebx,%ebp
+ rorl $2,%ecx
+ movl %eax,4(%esp)
+ roll $5,%ebp
+ leal 1518500249(%eax,%esi,1),%eax
+ movl 8(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 40(%esp),%esi
+ andl %ebx,%ebp
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ xorl %edx,%ebp
+ addl %ebp,%edi
+ movl %eax,%ebp
+ rorl $2,%ebx
+ movl %esi,8(%esp)
+ roll $5,%ebp
+ leal 1518500249(%esi,%edi,1),%esi
+ movl 12(%esp),%edi
+ addl %ebp,%esi
+
+ movl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 44(%esp),%edi
+ andl %eax,%ebp
+ xorl (%esp),%edi
+ roll $1,%edi
+ xorl %ecx,%ebp
+ addl %ebp,%edx
+ movl %esi,%ebp
+ rorl $2,%eax
+ movl %edi,12(%esp)
+ roll $5,%ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ movl 16(%esp),%edx
+ addl %ebp,%edi
+
+ movl %esi,%ebp
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,16(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 20(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %edi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,20(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 24(%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edx,%ebp
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,24(%esp)
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl 28(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %ecx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,28(%esp)
+ leal 1859775393(%eax,%esi,1),%eax
+ movl 32(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ebx,%ebp
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,32(%esp)
+ leal 1859775393(%esi,%edi,1),%esi
+ movl 36(%esp),%edi
+ addl %ebp,%esi
+
+ movl %eax,%ebp
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,36(%esp)
+ leal 1859775393(%edi,%edx,1),%edi
+ movl 40(%esp),%edx
+ addl %ebp,%edi
+
+ movl %esi,%ebp
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,40(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 44(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %edi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,44(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 48(%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edx,%ebp
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,48(%esp)
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl 52(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %ecx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,52(%esp)
+ leal 1859775393(%eax,%esi,1),%eax
+ movl 56(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ebx,%ebp
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,56(%esp)
+ leal 1859775393(%esi,%edi,1),%esi
+ movl 60(%esp),%edi
+ addl %ebp,%esi
+
+ movl %eax,%ebp
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,60(%esp)
+ leal 1859775393(%edi,%edx,1),%edi
+ movl (%esp),%edx
+ addl %ebp,%edi
+
+ movl %esi,%ebp
+ xorl 8(%esp),%edx
+ xorl %eax,%ebp
+ xorl 32(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 4(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %edi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 56(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,4(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 8(%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edx,%ebp
+ xorl 16(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 40(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,8(%esp)
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl 12(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %ecx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edx,%ebp
+ xorl 44(%esp),%eax
+ xorl %edi,%ebp
+ xorl (%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,12(%esp)
+ leal 1859775393(%eax,%esi,1),%eax
+ movl 16(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ebx,%ebp
+ xorl 24(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%esi
+ xorl %edx,%ebp
+ xorl 4(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,16(%esp)
+ leal 1859775393(%esi,%edi,1),%esi
+ movl 20(%esp),%edi
+ addl %ebp,%esi
+
+ movl %eax,%ebp
+ xorl 28(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 8(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,20(%esp)
+ leal 1859775393(%edi,%edx,1),%edi
+ movl 24(%esp),%edx
+ addl %ebp,%edi
+
+ movl %esi,%ebp
+ xorl 32(%esp),%edx
+ xorl %eax,%ebp
+ xorl 56(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 12(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,24(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 28(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %edi,%ebp
+ xorl 36(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 16(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,28(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 32(%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edi,%ebp
+ xorl 40(%esp),%ebx
+ xorl %esi,%ebp
+ xorl (%esp),%ebx
+ andl %edx,%ebp
+ xorl 20(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,32(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 36(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %edx,%ebp
+ xorl 44(%esp),%eax
+ xorl %edi,%ebp
+ xorl 4(%esp),%eax
+ andl %ecx,%ebp
+ xorl 24(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,36(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl 40(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ecx,%ebp
+ xorl 48(%esp),%esi
+ xorl %edx,%ebp
+ xorl 8(%esp),%esi
+ andl %ebx,%ebp
+ xorl 28(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ rorl $2,%ebx
+ movl %eax,%edi
+ roll $5,%edi
+ movl %esi,40(%esp)
+ leal 2400959708(%esi,%ebp,1),%esi
+ movl %ecx,%ebp
+ addl %edi,%esi
+ andl %edx,%ebp
+ movl 44(%esp),%edi
+ addl %ebp,%esi
+
+ movl %ebx,%ebp
+ xorl 52(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 12(%esp),%edi
+ andl %eax,%ebp
+ xorl 32(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ rorl $2,%eax
+ movl %esi,%edx
+ roll $5,%edx
+ movl %edi,44(%esp)
+ leal 2400959708(%edi,%ebp,1),%edi
+ movl %ebx,%ebp
+ addl %edx,%edi
+ andl %ecx,%ebp
+ movl 48(%esp),%edx
+ addl %ebp,%edi
+
+ movl %eax,%ebp
+ xorl 56(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 16(%esp),%edx
+ andl %esi,%ebp
+ xorl 36(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ rorl $2,%esi
+ movl %edi,%ecx
+ roll $5,%ecx
+ movl %edx,48(%esp)
+ leal 2400959708(%edx,%ebp,1),%edx
+ movl %eax,%ebp
+ addl %ecx,%edx
+ andl %ebx,%ebp
+ movl 52(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %esi,%ebp
+ xorl 60(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 20(%esp),%ecx
+ andl %edi,%ebp
+ xorl 40(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ rorl $2,%edi
+ movl %edx,%ebx
+ roll $5,%ebx
+ movl %ecx,52(%esp)
+ leal 2400959708(%ecx,%ebp,1),%ecx
+ movl %esi,%ebp
+ addl %ebx,%ecx
+ andl %eax,%ebp
+ movl 56(%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edi,%ebp
+ xorl (%esp),%ebx
+ xorl %esi,%ebp
+ xorl 24(%esp),%ebx
+ andl %edx,%ebp
+ xorl 44(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,56(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 60(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %edx,%ebp
+ xorl 4(%esp),%eax
+ xorl %edi,%ebp
+ xorl 28(%esp),%eax
+ andl %ecx,%ebp
+ xorl 48(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,60(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl (%esp),%esi
+ addl %ebp,%eax
+
+ movl %ecx,%ebp
+ xorl 8(%esp),%esi
+ xorl %edx,%ebp
+ xorl 32(%esp),%esi
+ andl %ebx,%ebp
+ xorl 52(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ rorl $2,%ebx
+ movl %eax,%edi
+ roll $5,%edi
+ movl %esi,(%esp)
+ leal 2400959708(%esi,%ebp,1),%esi
+ movl %ecx,%ebp
+ addl %edi,%esi
+ andl %edx,%ebp
+ movl 4(%esp),%edi
+ addl %ebp,%esi
+
+ movl %ebx,%ebp
+ xorl 12(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 36(%esp),%edi
+ andl %eax,%ebp
+ xorl 56(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ rorl $2,%eax
+ movl %esi,%edx
+ roll $5,%edx
+ movl %edi,4(%esp)
+ leal 2400959708(%edi,%ebp,1),%edi
+ movl %ebx,%ebp
+ addl %edx,%edi
+ andl %ecx,%ebp
+ movl 8(%esp),%edx
+ addl %ebp,%edi
+
+ movl %eax,%ebp
+ xorl 16(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 40(%esp),%edx
+ andl %esi,%ebp
+ xorl 60(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ rorl $2,%esi
+ movl %edi,%ecx
+ roll $5,%ecx
+ movl %edx,8(%esp)
+ leal 2400959708(%edx,%ebp,1),%edx
+ movl %eax,%ebp
+ addl %ecx,%edx
+ andl %ebx,%ebp
+ movl 12(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %esi,%ebp
+ xorl 20(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 44(%esp),%ecx
+ andl %edi,%ebp
+ xorl (%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ rorl $2,%edi
+ movl %edx,%ebx
+ roll $5,%ebx
+ movl %ecx,12(%esp)
+ leal 2400959708(%ecx,%ebp,1),%ecx
+ movl %esi,%ebp
+ addl %ebx,%ecx
+ andl %eax,%ebp
+ movl 16(%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edi,%ebp
+ xorl 24(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 48(%esp),%ebx
+ andl %edx,%ebp
+ xorl 4(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,16(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 20(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %edx,%ebp
+ xorl 28(%esp),%eax
+ xorl %edi,%ebp
+ xorl 52(%esp),%eax
+ andl %ecx,%ebp
+ xorl 8(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,20(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl 24(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ecx,%ebp
+ xorl 32(%esp),%esi
+ xorl %edx,%ebp
+ xorl 56(%esp),%esi
+ andl %ebx,%ebp
+ xorl 12(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ rorl $2,%ebx
+ movl %eax,%edi
+ roll $5,%edi
+ movl %esi,24(%esp)
+ leal 2400959708(%esi,%ebp,1),%esi
+ movl %ecx,%ebp
+ addl %edi,%esi
+ andl %edx,%ebp
+ movl 28(%esp),%edi
+ addl %ebp,%esi
+
+ movl %ebx,%ebp
+ xorl 36(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 60(%esp),%edi
+ andl %eax,%ebp
+ xorl 16(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ rorl $2,%eax
+ movl %esi,%edx
+ roll $5,%edx
+ movl %edi,28(%esp)
+ leal 2400959708(%edi,%ebp,1),%edi
+ movl %ebx,%ebp
+ addl %edx,%edi
+ andl %ecx,%ebp
+ movl 32(%esp),%edx
+ addl %ebp,%edi
+
+ movl %eax,%ebp
+ xorl 40(%esp),%edx
+ xorl %ebx,%ebp
+ xorl (%esp),%edx
+ andl %esi,%ebp
+ xorl 20(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ rorl $2,%esi
+ movl %edi,%ecx
+ roll $5,%ecx
+ movl %edx,32(%esp)
+ leal 2400959708(%edx,%ebp,1),%edx
+ movl %eax,%ebp
+ addl %ecx,%edx
+ andl %ebx,%ebp
+ movl 36(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %esi,%ebp
+ xorl 44(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 4(%esp),%ecx
+ andl %edi,%ebp
+ xorl 24(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ rorl $2,%edi
+ movl %edx,%ebx
+ roll $5,%ebx
+ movl %ecx,36(%esp)
+ leal 2400959708(%ecx,%ebp,1),%ecx
+ movl %esi,%ebp
+ addl %ebx,%ecx
+ andl %eax,%ebp
+ movl 40(%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edi,%ebp
+ xorl 48(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 8(%esp),%ebx
+ andl %edx,%ebp
+ xorl 28(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,40(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 44(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %edx,%ebp
+ xorl 52(%esp),%eax
+ xorl %edi,%ebp
+ xorl 12(%esp),%eax
+ andl %ecx,%ebp
+ xorl 32(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,44(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl 48(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ebx,%ebp
+ xorl 56(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 36(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,48(%esp)
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 52(%esp),%edi
+ addl %ebp,%esi
+
+ movl %eax,%ebp
+ xorl 60(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,52(%esp)
+ leal 3395469782(%edi,%edx,1),%edi
+ movl 56(%esp),%edx
+ addl %ebp,%edi
+
+ movl %esi,%ebp
+ xorl (%esp),%edx
+ xorl %eax,%ebp
+ xorl 24(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,56(%esp)
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl 60(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %edi,%ebp
+ xorl 4(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 48(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,60(%esp)
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl (%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edx,%ebp
+ xorl 8(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 32(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,(%esp)
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl 4(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %ecx,%ebp
+ xorl 12(%esp),%eax
+ xorl %edx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edi,%ebp
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,4(%esp)
+ leal 3395469782(%eax,%esi,1),%eax
+ movl 8(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ebx,%ebp
+ xorl 16(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%esi
+ xorl %edx,%ebp
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,8(%esp)
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 12(%esp),%edi
+ addl %ebp,%esi
+
+ movl %eax,%ebp
+ xorl 20(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edi
+ xorl %ecx,%ebp
+ xorl (%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,12(%esp)
+ leal 3395469782(%edi,%edx,1),%edi
+ movl 16(%esp),%edx
+ addl %ebp,%edi
+
+ movl %esi,%ebp
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,16(%esp)
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl 20(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %edi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,20(%esp)
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl 24(%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edx,%ebp
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,24(%esp)
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl 28(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %ecx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,28(%esp)
+ leal 3395469782(%eax,%esi,1),%eax
+ movl 32(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ebx,%ebp
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,32(%esp)
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 36(%esp),%edi
+ addl %ebp,%esi
+
+ movl %eax,%ebp
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,36(%esp)
+ leal 3395469782(%edi,%edx,1),%edi
+ movl 40(%esp),%edx
+ addl %ebp,%edi
+
+ movl %esi,%ebp
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,40(%esp)
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl 44(%esp),%ecx
+ addl %ebp,%edx
+
+ movl %edi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,44(%esp)
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl 48(%esp),%ebx
+ addl %ebp,%ecx
+
+ movl %edx,%ebp
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,48(%esp)
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl 52(%esp),%eax
+ addl %ebp,%ebx
+
+ movl %ecx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl 56(%esp),%esi
+ addl %ebp,%eax
+
+ movl %ebx,%ebp
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 60(%esp),%edi
+ addl %ebp,%esi
+
+ movl %eax,%ebp
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ addl %ebp,%edi
+ movl 96(%esp),%ebp
+ movl 100(%esp),%edx
+ addl (%ebp),%edi
+ addl 4(%ebp),%esi
+ addl 8(%ebp),%eax
+ addl 12(%ebp),%ebx
+ addl 16(%ebp),%ecx
+ movl %edi,(%ebp)
+ addl $64,%edx
+ movl %esi,4(%ebp)
+ cmpl 104(%esp),%edx
+ movl %eax,8(%ebp)
+ movl %ecx,%edi
+ movl %ebx,12(%ebp)
+ movl %edx,%esi
+ movl %ecx,16(%ebp)
+ jb .L000loop
+ addl $76,%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size sha1_block_data_order,.-.L_sha1_block_data_order_begin
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
+.byte 102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
+.byte 89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
+.byte 114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/elf/sha1-ssse3-x86_64.s b/lib/accelerated/x86/elf/sha1-ssse3-x86_64.s
new file mode 100644
index 0000000000..149edef796
--- /dev/null
+++ b/lib/accelerated/x86/elf/sha1-ssse3-x86_64.s
@@ -0,0 +1,2515 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.text
+
+
+.globl sha1_block_data_order
+.type sha1_block_data_order,@function
+.align 16
+sha1_block_data_order:
+ movl _gnutls_x86_cpuid_s+0(%rip),%r9d
+ movl _gnutls_x86_cpuid_s+4(%rip),%r8d
+ movl _gnutls_x86_cpuid_s+8(%rip),%r10d
+ testl $512,%r8d
+ jz .Lialu
+ jmp _ssse3_shortcut
+
+.align 16
+.Lialu:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ movq %rsp,%r11
+ movq %rdi,%r8
+ subq $72,%rsp
+ movq %rsi,%r9
+ andq $-64,%rsp
+ movq %rdx,%r10
+ movq %r11,64(%rsp)
+.Lprologue:
+
+ movl 0(%r8),%esi
+ movl 4(%r8),%edi
+ movl 8(%r8),%r11d
+ movl 12(%r8),%r12d
+ movl 16(%r8),%r13d
+ jmp .Lloop
+
+.align 16
+.Lloop:
+ movl 0(%r9),%edx
+ bswapl %edx
+ movl %edx,0(%rsp)
+ movl %r11d,%eax
+ movl 4(%r9),%ebp
+ movl %esi,%ecx
+ xorl %r12d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r13,1),%r13d
+ andl %edi,%eax
+ movl %ebp,4(%rsp)
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl %edi,%eax
+ movl 8(%r9),%edx
+ movl %r13d,%ecx
+ xorl %r11d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r12,1),%r12d
+ andl %esi,%eax
+ movl %edx,8(%rsp)
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl %esi,%eax
+ movl 12(%r9),%ebp
+ movl %r12d,%ecx
+ xorl %edi,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r11,1),%r11d
+ andl %r13d,%eax
+ movl %ebp,12(%rsp)
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl %r13d,%eax
+ movl 16(%r9),%edx
+ movl %r11d,%ecx
+ xorl %esi,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%rdi,1),%edi
+ andl %r12d,%eax
+ movl %edx,16(%rsp)
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl %r12d,%eax
+ movl 20(%r9),%ebp
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%rsi,1),%esi
+ andl %r11d,%eax
+ movl %ebp,20(%rsp)
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl %r11d,%eax
+ movl 24(%r9),%edx
+ movl %esi,%ecx
+ xorl %r12d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r13,1),%r13d
+ andl %edi,%eax
+ movl %edx,24(%rsp)
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl %edi,%eax
+ movl 28(%r9),%ebp
+ movl %r13d,%ecx
+ xorl %r11d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r12,1),%r12d
+ andl %esi,%eax
+ movl %ebp,28(%rsp)
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl %esi,%eax
+ movl 32(%r9),%edx
+ movl %r12d,%ecx
+ xorl %edi,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r11,1),%r11d
+ andl %r13d,%eax
+ movl %edx,32(%rsp)
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl %r13d,%eax
+ movl 36(%r9),%ebp
+ movl %r11d,%ecx
+ xorl %esi,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%rdi,1),%edi
+ andl %r12d,%eax
+ movl %ebp,36(%rsp)
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl %r12d,%eax
+ movl 40(%r9),%edx
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%rsi,1),%esi
+ andl %r11d,%eax
+ movl %edx,40(%rsp)
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl %r11d,%eax
+ movl 44(%r9),%ebp
+ movl %esi,%ecx
+ xorl %r12d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r13,1),%r13d
+ andl %edi,%eax
+ movl %ebp,44(%rsp)
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl %edi,%eax
+ movl 48(%r9),%edx
+ movl %r13d,%ecx
+ xorl %r11d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r12,1),%r12d
+ andl %esi,%eax
+ movl %edx,48(%rsp)
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl %esi,%eax
+ movl 52(%r9),%ebp
+ movl %r12d,%ecx
+ xorl %edi,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r11,1),%r11d
+ andl %r13d,%eax
+ movl %ebp,52(%rsp)
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl %r13d,%eax
+ movl 56(%r9),%edx
+ movl %r11d,%ecx
+ xorl %esi,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%rdi,1),%edi
+ andl %r12d,%eax
+ movl %edx,56(%rsp)
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl %r12d,%eax
+ movl 60(%r9),%ebp
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%rsi,1),%esi
+ andl %r11d,%eax
+ movl %ebp,60(%rsp)
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl 0(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 8(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 32(%rsp),%edx
+ andl %edi,%eax
+ leal 1518500249(%rbp,%r13,1),%r13d
+ xorl 52(%rsp),%edx
+ xorl %r12d,%eax
+ roll $1,%edx
+ addl %ecx,%r13d
+ roll $30,%edi
+ movl %edx,0(%rsp)
+ addl %eax,%r13d
+ movl 4(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 36(%rsp),%ebp
+ andl %esi,%eax
+ leal 1518500249(%rdx,%r12,1),%r12d
+ xorl 56(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $1,%ebp
+ addl %ecx,%r12d
+ roll $30,%esi
+ movl %ebp,4(%rsp)
+ addl %eax,%r12d
+ movl 8(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 40(%rsp),%edx
+ andl %r13d,%eax
+ leal 1518500249(%rbp,%r11,1),%r11d
+ xorl 60(%rsp),%edx
+ xorl %edi,%eax
+ roll $1,%edx
+ addl %ecx,%r11d
+ roll $30,%r13d
+ movl %edx,8(%rsp)
+ addl %eax,%r11d
+ movl 12(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 44(%rsp),%ebp
+ andl %r12d,%eax
+ leal 1518500249(%rdx,%rdi,1),%edi
+ xorl 0(%rsp),%ebp
+ xorl %esi,%eax
+ roll $1,%ebp
+ addl %ecx,%edi
+ roll $30,%r12d
+ movl %ebp,12(%rsp)
+ addl %eax,%edi
+ movl 16(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 48(%rsp),%edx
+ andl %r11d,%eax
+ leal 1518500249(%rbp,%rsi,1),%esi
+ xorl 4(%rsp),%edx
+ xorl %r13d,%eax
+ roll $1,%edx
+ addl %ecx,%esi
+ roll $30,%r11d
+ movl %edx,16(%rsp)
+ addl %eax,%esi
+ movl 20(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r13,1),%r13d
+ xorl 52(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 8(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,20(%rsp)
+ movl 24(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 32(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r12,1),%r12d
+ xorl 56(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 12(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,24(%rsp)
+ movl 28(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r11,1),%r11d
+ xorl 60(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 16(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,28(%rsp)
+ movl 32(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rdi,1),%edi
+ xorl 0(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 20(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,32(%rsp)
+ movl 36(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rsi,1),%esi
+ xorl 4(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 24(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,36(%rsp)
+ movl 40(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 48(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r13,1),%r13d
+ xorl 8(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 28(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,40(%rsp)
+ movl 44(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 52(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r12,1),%r12d
+ xorl 12(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 32(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl %ebp,44(%rsp)
+ movl 48(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 56(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r11,1),%r11d
+ xorl 16(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 36(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl %edx,48(%rsp)
+ movl 52(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rdi,1),%edi
+ xorl 20(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 40(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %ebp,52(%rsp)
+ movl 56(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rsi,1),%esi
+ xorl 24(%rsp),%edx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 44(%rsp),%edx
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ movl %edx,56(%rsp)
+ movl 60(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r13,1),%r13d
+ xorl 28(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 48(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,60(%rsp)
+ movl 0(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r12,1),%r12d
+ xorl 32(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 52(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,0(%rsp)
+ movl 4(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r11,1),%r11d
+ xorl 36(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 56(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,4(%rsp)
+ movl 8(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rdi,1),%edi
+ xorl 40(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 60(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,8(%rsp)
+ movl 12(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rsi,1),%esi
+ xorl 44(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 0(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,12(%rsp)
+ movl 16(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r13,1),%r13d
+ xorl 48(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 4(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,16(%rsp)
+ movl 20(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r12,1),%r12d
+ xorl 52(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 8(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl %ebp,20(%rsp)
+ movl 24(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 32(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r11,1),%r11d
+ xorl 56(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 12(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl %edx,24(%rsp)
+ movl 28(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rdi,1),%edi
+ xorl 60(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 16(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %ebp,28(%rsp)
+ movl 32(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rsi,1),%esi
+ xorl 0(%rsp),%edx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 20(%rsp),%edx
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ movl %edx,32(%rsp)
+ movl 36(%rsp),%ebp
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 44(%rsp),%ebp
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r12d,%ebx
+ leal -1894007588(%rdx,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 24(%rsp),%ebp
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %ebp,36(%rsp)
+ addl %ecx,%r13d
+ movl 40(%rsp),%edx
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 48(%rsp),%edx
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %r11d,%ebx
+ leal -1894007588(%rbp,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 28(%rsp),%edx
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%edx
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %edx,40(%rsp)
+ addl %ecx,%r12d
+ movl 44(%rsp),%ebp
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 52(%rsp),%ebp
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %edi,%ebx
+ leal -1894007588(%rdx,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 32(%rsp),%ebp
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%ebp
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %ebp,44(%rsp)
+ addl %ecx,%r11d
+ movl 48(%rsp),%edx
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 56(%rsp),%edx
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %esi,%ebx
+ leal -1894007588(%rbp,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 36(%rsp),%edx
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%edx
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %edx,48(%rsp)
+ addl %ecx,%edi
+ movl 52(%rsp),%ebp
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 60(%rsp),%ebp
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %r13d,%ebx
+ leal -1894007588(%rdx,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 40(%rsp),%ebp
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%ebp
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %ebp,52(%rsp)
+ addl %ecx,%esi
+ movl 56(%rsp),%edx
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 0(%rsp),%edx
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r12d,%ebx
+ leal -1894007588(%rbp,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 44(%rsp),%edx
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%edx
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %edx,56(%rsp)
+ addl %ecx,%r13d
+ movl 60(%rsp),%ebp
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 4(%rsp),%ebp
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %r11d,%ebx
+ leal -1894007588(%rdx,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 48(%rsp),%ebp
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %ebp,60(%rsp)
+ addl %ecx,%r12d
+ movl 0(%rsp),%edx
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 8(%rsp),%edx
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 32(%rsp),%edx
+ xorl %edi,%ebx
+ leal -1894007588(%rbp,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 52(%rsp),%edx
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%edx
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %edx,0(%rsp)
+ addl %ecx,%r11d
+ movl 4(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 12(%rsp),%ebp
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %esi,%ebx
+ leal -1894007588(%rdx,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 56(%rsp),%ebp
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%ebp
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %ebp,4(%rsp)
+ addl %ecx,%edi
+ movl 8(%rsp),%edx
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 16(%rsp),%edx
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r13d,%ebx
+ leal -1894007588(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 60(%rsp),%edx
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%edx
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %edx,8(%rsp)
+ addl %ecx,%esi
+ movl 12(%rsp),%ebp
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 20(%rsp),%ebp
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r12d,%ebx
+ leal -1894007588(%rdx,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 0(%rsp),%ebp
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %ebp,12(%rsp)
+ addl %ecx,%r13d
+ movl 16(%rsp),%edx
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 24(%rsp),%edx
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 48(%rsp),%edx
+ xorl %r11d,%ebx
+ leal -1894007588(%rbp,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 4(%rsp),%edx
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%edx
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %edx,16(%rsp)
+ addl %ecx,%r12d
+ movl 20(%rsp),%ebp
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 28(%rsp),%ebp
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 52(%rsp),%ebp
+ xorl %edi,%ebx
+ leal -1894007588(%rdx,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 8(%rsp),%ebp
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%ebp
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %ebp,20(%rsp)
+ addl %ecx,%r11d
+ movl 24(%rsp),%edx
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 32(%rsp),%edx
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 56(%rsp),%edx
+ xorl %esi,%ebx
+ leal -1894007588(%rbp,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 12(%rsp),%edx
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%edx
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %edx,24(%rsp)
+ addl %ecx,%edi
+ movl 28(%rsp),%ebp
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 36(%rsp),%ebp
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %r13d,%ebx
+ leal -1894007588(%rdx,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 16(%rsp),%ebp
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%ebp
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %ebp,28(%rsp)
+ addl %ecx,%esi
+ movl 32(%rsp),%edx
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 40(%rsp),%edx
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r12d,%ebx
+ leal -1894007588(%rbp,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 20(%rsp),%edx
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%edx
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %edx,32(%rsp)
+ addl %ecx,%r13d
+ movl 36(%rsp),%ebp
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 44(%rsp),%ebp
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r11d,%ebx
+ leal -1894007588(%rdx,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 24(%rsp),%ebp
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %ebp,36(%rsp)
+ addl %ecx,%r12d
+ movl 40(%rsp),%edx
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 48(%rsp),%edx
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %edi,%ebx
+ leal -1894007588(%rbp,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 28(%rsp),%edx
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%edx
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %edx,40(%rsp)
+ addl %ecx,%r11d
+ movl 44(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 52(%rsp),%ebp
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %esi,%ebx
+ leal -1894007588(%rdx,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 32(%rsp),%ebp
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%ebp
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %ebp,44(%rsp)
+ addl %ecx,%edi
+ movl 48(%rsp),%edx
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 56(%rsp),%edx
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 16(%rsp),%edx
+ xorl %r13d,%ebx
+ leal -1894007588(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 36(%rsp),%edx
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%edx
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %edx,48(%rsp)
+ addl %ecx,%esi
+ movl 52(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r13,1),%r13d
+ xorl 20(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 40(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,52(%rsp)
+ movl 56(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 0(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r12,1),%r12d
+ xorl 24(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 44(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,56(%rsp)
+ movl 60(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r11,1),%r11d
+ xorl 28(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 48(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,60(%rsp)
+ movl 0(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%rdi,1),%edi
+ xorl 32(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 52(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,0(%rsp)
+ movl 4(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rsi,1),%esi
+ xorl 36(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 56(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,4(%rsp)
+ movl 8(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 16(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r13,1),%r13d
+ xorl 40(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 60(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,8(%rsp)
+ movl 12(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r12,1),%r12d
+ xorl 44(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 0(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl %ebp,12(%rsp)
+ movl 16(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r11,1),%r11d
+ xorl 48(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 4(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl %edx,16(%rsp)
+ movl 20(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rdi,1),%edi
+ xorl 52(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 8(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %ebp,20(%rsp)
+ movl 24(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 32(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%rsi,1),%esi
+ xorl 56(%rsp),%edx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 12(%rsp),%edx
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ movl %edx,24(%rsp)
+ movl 28(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r13,1),%r13d
+ xorl 60(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 16(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,28(%rsp)
+ movl 32(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 40(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r12,1),%r12d
+ xorl 0(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 20(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,32(%rsp)
+ movl 36(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r11,1),%r11d
+ xorl 4(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 24(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,36(%rsp)
+ movl 40(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 48(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%rdi,1),%edi
+ xorl 8(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 28(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,40(%rsp)
+ movl 44(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 52(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rsi,1),%esi
+ xorl 12(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 32(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,44(%rsp)
+ movl 48(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 56(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r13,1),%r13d
+ xorl 16(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 36(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,48(%rsp)
+ movl 52(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r12,1),%r12d
+ xorl 20(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 40(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl 56(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r11,1),%r11d
+ xorl 24(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 44(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl 60(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rdi,1),%edi
+ xorl 28(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 48(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl %r11d,%eax
+ leal -899497514(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ addl 0(%r8),%esi
+ addl 4(%r8),%edi
+ addl 8(%r8),%r11d
+ addl 12(%r8),%r12d
+ addl 16(%r8),%r13d
+ movl %esi,0(%r8)
+ movl %edi,4(%r8)
+ movl %r11d,8(%r8)
+ movl %r12d,12(%r8)
+ movl %r13d,16(%r8)
+
+ subq $1,%r10
+ leaq 64(%r9),%r9
+ jnz .Lloop
+
+ movq 64(%rsp),%rsi
+ movq (%rsi),%r13
+ movq 8(%rsi),%r12
+ movq 16(%rsi),%rbp
+ movq 24(%rsi),%rbx
+ leaq 32(%rsi),%rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.size sha1_block_data_order,.-sha1_block_data_order
+.type sha1_block_data_order_ssse3,@function
+.align 16
+sha1_block_data_order_ssse3:
+_ssse3_shortcut:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ leaq -64(%rsp),%rsp
+ movq %rdi,%r8
+ movq %rsi,%r9
+ movq %rdx,%r10
+
+ shlq $6,%r10
+ addq %r9,%r10
+ leaq K_XX_XX+64(%rip),%r11
+
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movl %ebx,%esi
+ movl 16(%r8),%ebp
+ movl %ecx,%edi
+ xorl %edx,%edi
+ andl %edi,%esi
+
+ movdqa 64(%r11),%xmm6
+ movdqa -64(%r11),%xmm9
+ movdqu 0(%r9),%xmm0
+ movdqu 16(%r9),%xmm1
+ movdqu 32(%r9),%xmm2
+ movdqu 48(%r9),%xmm3
+.byte 102,15,56,0,198
+ addq $64,%r9
+.byte 102,15,56,0,206
+.byte 102,15,56,0,214
+.byte 102,15,56,0,222
+ paddd %xmm9,%xmm0
+ paddd %xmm9,%xmm1
+ paddd %xmm9,%xmm2
+ movdqa %xmm0,0(%rsp)
+ psubd %xmm9,%xmm0
+ movdqa %xmm1,16(%rsp)
+ psubd %xmm9,%xmm1
+ movdqa %xmm2,32(%rsp)
+ psubd %xmm9,%xmm2
+ jmp .Loop_ssse3
+.align 16
+.Loop_ssse3:
+ movdqa %xmm1,%xmm4
+ rorl $2,%ebx
+ xorl %edx,%esi
+ movdqa %xmm3,%xmm8
+.byte 102,15,58,15,224,8
+ movl %eax,%edi
+ addl 0(%rsp),%ebp
+ paddd %xmm3,%xmm9
+ xorl %ecx,%ebx
+ roll $5,%eax
+ psrldq $4,%xmm8
+ addl %esi,%ebp
+ andl %ebx,%edi
+ pxor %xmm0,%xmm4
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ pxor %xmm2,%xmm8
+ rorl $7,%eax
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 4(%rsp),%edx
+ pxor %xmm8,%xmm4
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa %xmm9,48(%rsp)
+ addl %edi,%edx
+ andl %eax,%esi
+ movdqa %xmm4,%xmm10
+ movdqa %xmm4,%xmm8
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ xorl %ebx,%esi
+ pslldq $12,%xmm10
+ paddd %xmm4,%xmm4
+ movl %edx,%edi
+ addl 8(%rsp),%ecx
+ xorl %eax,%ebp
+ roll $5,%edx
+ psrld $31,%xmm8
+ addl %esi,%ecx
+ andl %ebp,%edi
+ movdqa %xmm10,%xmm9
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ psrld $30,%xmm10
+ por %xmm8,%xmm4
+ rorl $7,%edx
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 12(%rsp),%ebx
+ pslld $2,%xmm9
+ pxor %xmm10,%xmm4
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa -64(%r11),%xmm10
+ addl %edi,%ebx
+ andl %edx,%esi
+ pxor %xmm9,%xmm4
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ movdqa %xmm2,%xmm5
+ rorl $7,%ecx
+ xorl %ebp,%esi
+ movdqa %xmm4,%xmm9
+.byte 102,15,58,15,233,8
+ movl %ebx,%edi
+ addl 16(%rsp),%eax
+ paddd %xmm4,%xmm10
+ xorl %edx,%ecx
+ roll $5,%ebx
+ psrldq $4,%xmm9
+ addl %esi,%eax
+ andl %ecx,%edi
+ pxor %xmm1,%xmm5
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ pxor %xmm3,%xmm9
+ rorl $7,%ebx
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 20(%rsp),%ebp
+ pxor %xmm9,%xmm5
+ xorl %ecx,%ebx
+ roll $5,%eax
+ movdqa %xmm10,0(%rsp)
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movdqa %xmm5,%xmm8
+ movdqa %xmm5,%xmm9
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ rorl $7,%eax
+ xorl %ecx,%esi
+ pslldq $12,%xmm8
+ paddd %xmm5,%xmm5
+ movl %ebp,%edi
+ addl 24(%rsp),%edx
+ xorl %ebx,%eax
+ roll $5,%ebp
+ psrld $31,%xmm9
+ addl %esi,%edx
+ andl %eax,%edi
+ movdqa %xmm8,%xmm10
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ psrld $30,%xmm8
+ por %xmm9,%xmm5
+ rorl $7,%ebp
+ xorl %ebx,%edi
+ movl %edx,%esi
+ addl 28(%rsp),%ecx
+ pslld $2,%xmm10
+ pxor %xmm8,%xmm5
+ xorl %eax,%ebp
+ roll $5,%edx
+ movdqa -32(%r11),%xmm8
+ addl %edi,%ecx
+ andl %ebp,%esi
+ pxor %xmm10,%xmm5
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ movdqa %xmm3,%xmm6
+ rorl $7,%edx
+ xorl %eax,%esi
+ movdqa %xmm5,%xmm10
+.byte 102,15,58,15,242,8
+ movl %ecx,%edi
+ addl 32(%rsp),%ebx
+ paddd %xmm5,%xmm8
+ xorl %ebp,%edx
+ roll $5,%ecx
+ psrldq $4,%xmm10
+ addl %esi,%ebx
+ andl %edx,%edi
+ pxor %xmm2,%xmm6
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ pxor %xmm4,%xmm10
+ rorl $7,%ecx
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ addl 36(%rsp),%eax
+ pxor %xmm10,%xmm6
+ xorl %edx,%ecx
+ roll $5,%ebx
+ movdqa %xmm8,16(%rsp)
+ addl %edi,%eax
+ andl %ecx,%esi
+ movdqa %xmm6,%xmm9
+ movdqa %xmm6,%xmm10
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ rorl $7,%ebx
+ xorl %edx,%esi
+ pslldq $12,%xmm9
+ paddd %xmm6,%xmm6
+ movl %eax,%edi
+ addl 40(%rsp),%ebp
+ xorl %ecx,%ebx
+ roll $5,%eax
+ psrld $31,%xmm10
+ addl %esi,%ebp
+ andl %ebx,%edi
+ movdqa %xmm9,%xmm8
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ psrld $30,%xmm9
+ por %xmm10,%xmm6
+ rorl $7,%eax
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 44(%rsp),%edx
+ pslld $2,%xmm8
+ pxor %xmm9,%xmm6
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa -32(%r11),%xmm9
+ addl %edi,%edx
+ andl %eax,%esi
+ pxor %xmm8,%xmm6
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ movdqa %xmm4,%xmm7
+ rorl $7,%ebp
+ xorl %ebx,%esi
+ movdqa %xmm6,%xmm8
+.byte 102,15,58,15,251,8
+ movl %edx,%edi
+ addl 48(%rsp),%ecx
+ paddd %xmm6,%xmm9
+ xorl %eax,%ebp
+ roll $5,%edx
+ psrldq $4,%xmm8
+ addl %esi,%ecx
+ andl %ebp,%edi
+ pxor %xmm3,%xmm7
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ pxor %xmm5,%xmm8
+ rorl $7,%edx
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 52(%rsp),%ebx
+ pxor %xmm8,%xmm7
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa %xmm9,32(%rsp)
+ addl %edi,%ebx
+ andl %edx,%esi
+ movdqa %xmm7,%xmm10
+ movdqa %xmm7,%xmm8
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ rorl $7,%ecx
+ xorl %ebp,%esi
+ pslldq $12,%xmm10
+ paddd %xmm7,%xmm7
+ movl %ebx,%edi
+ addl 56(%rsp),%eax
+ xorl %edx,%ecx
+ roll $5,%ebx
+ psrld $31,%xmm8
+ addl %esi,%eax
+ andl %ecx,%edi
+ movdqa %xmm10,%xmm9
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ psrld $30,%xmm10
+ por %xmm8,%xmm7
+ rorl $7,%ebx
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 60(%rsp),%ebp
+ pslld $2,%xmm9
+ pxor %xmm10,%xmm7
+ xorl %ecx,%ebx
+ roll $5,%eax
+ movdqa -32(%r11),%xmm10
+ addl %edi,%ebp
+ andl %ebx,%esi
+ pxor %xmm9,%xmm7
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ movdqa %xmm7,%xmm9
+ rorl $7,%eax
+ pxor %xmm4,%xmm0
+.byte 102,68,15,58,15,206,8
+ xorl %ecx,%esi
+ movl %ebp,%edi
+ addl 0(%rsp),%edx
+ pxor %xmm1,%xmm0
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa %xmm10,%xmm8
+ paddd %xmm7,%xmm10
+ addl %esi,%edx
+ andl %eax,%edi
+ pxor %xmm9,%xmm0
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ xorl %ebx,%edi
+ movdqa %xmm0,%xmm9
+ movdqa %xmm10,48(%rsp)
+ movl %edx,%esi
+ addl 4(%rsp),%ecx
+ xorl %eax,%ebp
+ roll $5,%edx
+ pslld $2,%xmm0
+ addl %edi,%ecx
+ andl %ebp,%esi
+ psrld $30,%xmm9
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ rorl $7,%edx
+ xorl %eax,%esi
+ movl %ecx,%edi
+ addl 8(%rsp),%ebx
+ por %xmm9,%xmm0
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa %xmm0,%xmm10
+ addl %esi,%ebx
+ andl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 12(%rsp),%eax
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 16(%rsp),%ebp
+ pxor %xmm5,%xmm1
+.byte 102,68,15,58,15,215,8
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm2,%xmm1
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm8,%xmm9
+ paddd %xmm0,%xmm8
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pxor %xmm10,%xmm1
+ addl 20(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm1,%xmm10
+ movdqa %xmm8,0(%rsp)
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ pslld $2,%xmm1
+ addl 24(%rsp),%ecx
+ xorl %eax,%esi
+ psrld $30,%xmm10
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ por %xmm10,%xmm1
+ addl 28(%rsp),%ebx
+ xorl %ebp,%edi
+ movdqa %xmm1,%xmm8
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 32(%rsp),%eax
+ pxor %xmm6,%xmm2
+.byte 102,68,15,58,15,192,8
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ pxor %xmm3,%xmm2
+ addl %esi,%eax
+ xorl %edx,%edi
+ movdqa 0(%r11),%xmm10
+ paddd %xmm1,%xmm9
+ rorl $7,%ecx
+ addl %ebx,%eax
+ pxor %xmm8,%xmm2
+ addl 36(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ movdqa %xmm2,%xmm8
+ movdqa %xmm9,16(%rsp)
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pslld $2,%xmm2
+ addl 40(%rsp),%edx
+ xorl %ebx,%esi
+ psrld $30,%xmm8
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ por %xmm8,%xmm2
+ addl 44(%rsp),%ecx
+ xorl %eax,%edi
+ movdqa %xmm2,%xmm9
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 48(%rsp),%ebx
+ pxor %xmm7,%xmm3
+.byte 102,68,15,58,15,201,8
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ pxor %xmm4,%xmm3
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ movdqa %xmm10,%xmm8
+ paddd %xmm2,%xmm10
+ rorl $7,%edx
+ addl %ecx,%ebx
+ pxor %xmm9,%xmm3
+ addl 52(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ movdqa %xmm3,%xmm9
+ movdqa %xmm10,32(%rsp)
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ pslld $2,%xmm3
+ addl 56(%rsp),%ebp
+ xorl %ecx,%esi
+ psrld $30,%xmm9
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ por %xmm9,%xmm3
+ addl 60(%rsp),%edx
+ xorl %ebx,%edi
+ movdqa %xmm3,%xmm10
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 0(%rsp),%ecx
+ pxor %xmm0,%xmm4
+.byte 102,68,15,58,15,210,8
+ xorl %eax,%esi
+ movl %edx,%edi
+ roll $5,%edx
+ pxor %xmm5,%xmm4
+ addl %esi,%ecx
+ xorl %eax,%edi
+ movdqa %xmm8,%xmm9
+ paddd %xmm3,%xmm8
+ rorl $7,%ebp
+ addl %edx,%ecx
+ pxor %xmm10,%xmm4
+ addl 4(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ movdqa %xmm4,%xmm10
+ movdqa %xmm8,48(%rsp)
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ pslld $2,%xmm4
+ addl 8(%rsp),%eax
+ xorl %edx,%esi
+ psrld $30,%xmm10
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ por %xmm10,%xmm4
+ addl 12(%rsp),%ebp
+ xorl %ecx,%edi
+ movdqa %xmm4,%xmm8
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 16(%rsp),%edx
+ pxor %xmm1,%xmm5
+.byte 102,68,15,58,15,195,8
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ pxor %xmm6,%xmm5
+ addl %esi,%edx
+ xorl %ebx,%edi
+ movdqa %xmm9,%xmm10
+ paddd %xmm4,%xmm9
+ rorl $7,%eax
+ addl %ebp,%edx
+ pxor %xmm8,%xmm5
+ addl 20(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ movdqa %xmm5,%xmm8
+ movdqa %xmm9,0(%rsp)
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ pslld $2,%xmm5
+ addl 24(%rsp),%ebx
+ xorl %ebp,%esi
+ psrld $30,%xmm8
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ por %xmm8,%xmm5
+ addl 28(%rsp),%eax
+ movdqa %xmm5,%xmm9
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %ecx,%esi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 32(%rsp),%ebp
+ pxor %xmm2,%xmm6
+.byte 102,68,15,58,15,204,8
+ andl %ecx,%esi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ pxor %xmm7,%xmm6
+ movl %eax,%edi
+ xorl %ecx,%esi
+ movdqa %xmm10,%xmm8
+ paddd %xmm5,%xmm10
+ roll $5,%eax
+ addl %esi,%ebp
+ pxor %xmm9,%xmm6
+ xorl %ebx,%edi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 36(%rsp),%edx
+ movdqa %xmm6,%xmm9
+ movdqa %xmm10,16(%rsp)
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movl %ebp,%esi
+ pslld $2,%xmm6
+ xorl %ebx,%edi
+ roll $5,%ebp
+ psrld $30,%xmm9
+ addl %edi,%edx
+ xorl %eax,%esi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 40(%rsp),%ecx
+ andl %eax,%esi
+ por %xmm9,%xmm6
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movdqa %xmm6,%xmm10
+ movl %edx,%edi
+ xorl %eax,%esi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %ebp,%edi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 44(%rsp),%ebx
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movl %ecx,%esi
+ xorl %ebp,%edi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %edx,%esi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 48(%rsp),%eax
+ pxor %xmm3,%xmm7
+.byte 102,68,15,58,15,213,8
+ andl %edx,%esi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ pxor %xmm0,%xmm7
+ movl %ebx,%edi
+ xorl %edx,%esi
+ movdqa 32(%r11),%xmm9
+ paddd %xmm6,%xmm8
+ roll $5,%ebx
+ addl %esi,%eax
+ pxor %xmm10,%xmm7
+ xorl %ecx,%edi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 52(%rsp),%ebp
+ movdqa %xmm7,%xmm10
+ movdqa %xmm8,32(%rsp)
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movl %eax,%esi
+ pslld $2,%xmm7
+ xorl %ecx,%edi
+ roll $5,%eax
+ psrld $30,%xmm10
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 56(%rsp),%edx
+ andl %ebx,%esi
+ por %xmm10,%xmm7
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movdqa %xmm7,%xmm8
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %eax,%edi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 60(%rsp),%ecx
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movl %edx,%esi
+ xorl %eax,%edi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 0(%rsp),%ebx
+ pxor %xmm4,%xmm0
+.byte 102,68,15,58,15,198,8
+ andl %ebp,%esi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ pxor %xmm1,%xmm0
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ movdqa %xmm9,%xmm10
+ paddd %xmm7,%xmm9
+ roll $5,%ecx
+ addl %esi,%ebx
+ pxor %xmm8,%xmm0
+ xorl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 4(%rsp),%eax
+ movdqa %xmm0,%xmm8
+ movdqa %xmm9,48(%rsp)
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movl %ebx,%esi
+ pslld $2,%xmm0
+ xorl %edx,%edi
+ roll $5,%ebx
+ psrld $30,%xmm8
+ addl %edi,%eax
+ xorl %ecx,%esi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 8(%rsp),%ebp
+ andl %ecx,%esi
+ por %xmm8,%xmm0
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movdqa %xmm0,%xmm9
+ movl %eax,%edi
+ xorl %ecx,%esi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ebx,%edi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 12(%rsp),%edx
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movl %ebp,%esi
+ xorl %ebx,%edi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %eax,%esi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 16(%rsp),%ecx
+ pxor %xmm5,%xmm1
+.byte 102,68,15,58,15,207,8
+ andl %eax,%esi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ pxor %xmm2,%xmm1
+ movl %edx,%edi
+ xorl %eax,%esi
+ movdqa %xmm10,%xmm8
+ paddd %xmm0,%xmm10
+ roll $5,%edx
+ addl %esi,%ecx
+ pxor %xmm9,%xmm1
+ xorl %ebp,%edi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 20(%rsp),%ebx
+ movdqa %xmm1,%xmm9
+ movdqa %xmm10,0(%rsp)
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movl %ecx,%esi
+ pslld $2,%xmm1
+ xorl %ebp,%edi
+ roll $5,%ecx
+ psrld $30,%xmm9
+ addl %edi,%ebx
+ xorl %edx,%esi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 24(%rsp),%eax
+ andl %edx,%esi
+ por %xmm9,%xmm1
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movdqa %xmm1,%xmm10
+ movl %ebx,%edi
+ xorl %edx,%esi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %ecx,%edi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 28(%rsp),%ebp
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movl %eax,%esi
+ xorl %ecx,%edi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 32(%rsp),%edx
+ pxor %xmm6,%xmm2
+.byte 102,68,15,58,15,208,8
+ andl %ebx,%esi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ pxor %xmm3,%xmm2
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ movdqa %xmm8,%xmm9
+ paddd %xmm1,%xmm8
+ roll $5,%ebp
+ addl %esi,%edx
+ pxor %xmm10,%xmm2
+ xorl %eax,%edi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 36(%rsp),%ecx
+ movdqa %xmm2,%xmm10
+ movdqa %xmm8,16(%rsp)
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movl %edx,%esi
+ pslld $2,%xmm2
+ xorl %eax,%edi
+ roll $5,%edx
+ psrld $30,%xmm10
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 40(%rsp),%ebx
+ andl %ebp,%esi
+ por %xmm10,%xmm2
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movdqa %xmm2,%xmm8
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 44(%rsp),%eax
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ addl %ebx,%eax
+ addl 48(%rsp),%ebp
+ pxor %xmm7,%xmm3
+.byte 102,68,15,58,15,193,8
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm4,%xmm3
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm9,%xmm10
+ paddd %xmm2,%xmm9
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pxor %xmm8,%xmm3
+ addl 52(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm3,%xmm8
+ movdqa %xmm9,32(%rsp)
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ pslld $2,%xmm3
+ addl 56(%rsp),%ecx
+ xorl %eax,%esi
+ psrld $30,%xmm8
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ por %xmm8,%xmm3
+ addl 60(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 0(%rsp),%eax
+ paddd %xmm3,%xmm10
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ movdqa %xmm10,48(%rsp)
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 4(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 8(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 12(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ cmpq %r10,%r9
+ je .Ldone_ssse3
+ movdqa 64(%r11),%xmm6
+ movdqa -64(%r11),%xmm9
+ movdqu 0(%r9),%xmm0
+ movdqu 16(%r9),%xmm1
+ movdqu 32(%r9),%xmm2
+ movdqu 48(%r9),%xmm3
+.byte 102,15,56,0,198
+ addq $64,%r9
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+.byte 102,15,56,0,206
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ paddd %xmm9,%xmm0
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ movdqa %xmm0,0(%rsp)
+ xorl %edx,%edi
+ movl %ebx,%esi
+ psubd %xmm9,%xmm0
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+.byte 102,15,56,0,214
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ paddd %xmm9,%xmm1
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ movdqa %xmm1,16(%rsp)
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ psubd %xmm9,%xmm1
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+.byte 102,15,56,0,222
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ paddd %xmm9,%xmm2
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ movdqa %xmm2,32(%rsp)
+ xorl %eax,%edi
+ movl %edx,%esi
+ psubd %xmm9,%xmm2
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 0(%r8),%eax
+ addl 4(%r8),%esi
+ addl 8(%r8),%ecx
+ addl 12(%r8),%edx
+ movl %eax,0(%r8)
+ addl 16(%r8),%ebp
+ movl %esi,4(%r8)
+ movl %esi,%ebx
+ movl %ecx,8(%r8)
+ movl %ecx,%edi
+ movl %edx,12(%r8)
+ xorl %edx,%edi
+ movl %ebp,16(%r8)
+ andl %edi,%esi
+ jmp .Loop_ssse3
+
+.align 16
+.Ldone_ssse3:
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 0(%r8),%eax
+ addl 4(%r8),%esi
+ addl 8(%r8),%ecx
+ movl %eax,0(%r8)
+ addl 12(%r8),%edx
+ movl %esi,4(%r8)
+ addl 16(%r8),%ebp
+ movl %ecx,8(%r8)
+ movl %edx,12(%r8)
+ movl %ebp,16(%r8)
+ leaq 64(%rsp),%rsi
+ movq 0(%rsi),%r12
+ movq 8(%rsi),%rbp
+ movq 16(%rsi),%rbx
+ leaq 24(%rsi),%rsp
+.Lepilogue_ssse3:
+ .byte 0xf3,0xc3
+.size sha1_block_data_order_ssse3,.-sha1_block_data_order_ssse3
+.align 64
+K_XX_XX:
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/elf/sha256-avx-x86_64.s b/lib/accelerated/x86/elf/sha256-avx-x86_64.s
new file mode 100644
index 0000000000..bb9236ea84
--- /dev/null
+++ b/lib/accelerated/x86/elf/sha256-avx-x86_64.s
@@ -0,0 +1,2614 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.text
+
+
+
+.globl sha256_multi_block
+.type sha256_multi_block,@function
+.align 32
+sha256_multi_block:
+ movq %rsp,%rax
+ pushq %rbx
+ pushq %rbp
+ subq $288,%rsp
+ andq $-256,%rsp
+ movq %rax,272(%rsp)
+ leaq K256+128(%rip),%rbp
+ leaq 256(%rsp),%rbx
+ leaq 128(%rdi),%rdi
+
+.Loop_grande:
+ movl %edx,280(%rsp)
+ xorl %edx,%edx
+ movq 0(%rsi),%r8
+ movl 8(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,0(%rbx)
+ cmovleq %rbp,%r8
+ movq 16(%rsi),%r9
+ movl 24(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,4(%rbx)
+ cmovleq %rbp,%r9
+ movq 32(%rsi),%r10
+ movl 40(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,8(%rbx)
+ cmovleq %rbp,%r10
+ movq 48(%rsi),%r11
+ movl 56(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,12(%rbx)
+ cmovleq %rbp,%r11
+ testl %edx,%edx
+ jz .Ldone
+
+ movdqu 0-128(%rdi),%xmm8
+ leaq 128(%rsp),%rax
+ movdqu 32-128(%rdi),%xmm9
+ movdqu 64-128(%rdi),%xmm10
+ movdqu 96-128(%rdi),%xmm11
+ movdqu 128-128(%rdi),%xmm12
+ movdqu 160-128(%rdi),%xmm13
+ movdqu 192-128(%rdi),%xmm14
+ movdqu 224-128(%rdi),%xmm15
+ movdqu .Lpbswap(%rip),%xmm6
+ jmp .Loop
+
+.align 32
+.Loop:
+ movdqa %xmm10,%xmm4
+ pxor %xmm9,%xmm4
+ movd 0(%r8),%xmm5
+ movd 0(%r9),%xmm0
+ movd 0(%r10),%xmm1
+ movd 0(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,0-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movd 4(%r8),%xmm5
+ movd 4(%r9),%xmm0
+ movd 4(%r10),%xmm1
+ movd 4(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,16-128(%rax)
+ paddd %xmm14,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm5,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm14
+ paddd %xmm7,%xmm14
+ movd 8(%r8),%xmm5
+ movd 8(%r9),%xmm0
+ movd 8(%r10),%xmm1
+ movd 8(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,32-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movd 12(%r8),%xmm5
+ movd 12(%r9),%xmm0
+ movd 12(%r10),%xmm1
+ movd 12(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,48-128(%rax)
+ paddd %xmm12,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm5,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm12
+ paddd %xmm7,%xmm12
+ movd 16(%r8),%xmm5
+ movd 16(%r9),%xmm0
+ movd 16(%r10),%xmm1
+ movd 16(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,64-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movd 20(%r8),%xmm5
+ movd 20(%r9),%xmm0
+ movd 20(%r10),%xmm1
+ movd 20(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,80-128(%rax)
+ paddd %xmm10,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm5,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm10
+ paddd %xmm7,%xmm10
+ movd 24(%r8),%xmm5
+ movd 24(%r9),%xmm0
+ movd 24(%r10),%xmm1
+ movd 24(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,96-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movd 28(%r8),%xmm5
+ movd 28(%r9),%xmm0
+ movd 28(%r10),%xmm1
+ movd 28(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,112-128(%rax)
+ paddd %xmm8,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm5,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movd 32(%r8),%xmm5
+ movd 32(%r9),%xmm0
+ movd 32(%r10),%xmm1
+ movd 32(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,128-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movd 36(%r8),%xmm5
+ movd 36(%r9),%xmm0
+ movd 36(%r10),%xmm1
+ movd 36(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,144-128(%rax)
+ paddd %xmm14,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm5,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm14
+ paddd %xmm7,%xmm14
+ movd 40(%r8),%xmm5
+ movd 40(%r9),%xmm0
+ movd 40(%r10),%xmm1
+ movd 40(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,160-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movd 44(%r8),%xmm5
+ movd 44(%r9),%xmm0
+ movd 44(%r10),%xmm1
+ movd 44(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,176-128(%rax)
+ paddd %xmm12,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm5,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm12
+ paddd %xmm7,%xmm12
+ movd 48(%r8),%xmm5
+ movd 48(%r9),%xmm0
+ movd 48(%r10),%xmm1
+ movd 48(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,192-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movd 52(%r8),%xmm5
+ movd 52(%r9),%xmm0
+ movd 52(%r10),%xmm1
+ movd 52(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,208-128(%rax)
+ paddd %xmm10,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm5,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm10
+ paddd %xmm7,%xmm10
+ movd 56(%r8),%xmm5
+ movd 56(%r9),%xmm0
+ movd 56(%r10),%xmm1
+ movd 56(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,224-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movd 60(%r8),%xmm5
+ leaq 64(%r8),%r8
+ movd 60(%r9),%xmm0
+ leaq 64(%r9),%r9
+ movd 60(%r10),%xmm1
+ leaq 64(%r10),%r10
+ movd 60(%r11),%xmm2
+ leaq 64(%r11),%r11
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,240-128(%rax)
+ paddd %xmm8,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm5,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movdqu 0-128(%rax),%xmm5
+ movl $3,%ecx
+ jmp .Loop_16_xx
+.align 32
+.Loop_16_xx:
+ movdqa 16-128(%rax),%xmm6
+ paddd 144-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 224-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,0-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movdqa 32-128(%rax),%xmm5
+ paddd 160-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 240-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,16-128(%rax)
+ paddd %xmm14,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm6,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm14
+ paddd %xmm7,%xmm14
+ movdqa 48-128(%rax),%xmm6
+ paddd 176-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 0-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,32-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movdqa 64-128(%rax),%xmm5
+ paddd 192-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 16-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,48-128(%rax)
+ paddd %xmm12,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm6,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm12
+ paddd %xmm7,%xmm12
+ movdqa 80-128(%rax),%xmm6
+ paddd 208-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 32-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,64-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movdqa 96-128(%rax),%xmm5
+ paddd 224-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 48-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,80-128(%rax)
+ paddd %xmm10,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm6,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm10
+ paddd %xmm7,%xmm10
+ movdqa 112-128(%rax),%xmm6
+ paddd 240-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 64-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,96-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movdqa 128-128(%rax),%xmm5
+ paddd 0-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 80-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,112-128(%rax)
+ paddd %xmm8,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm6,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movdqa 144-128(%rax),%xmm6
+ paddd 16-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 96-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,128-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movdqa 160-128(%rax),%xmm5
+ paddd 32-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 112-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,144-128(%rax)
+ paddd %xmm14,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm6,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm14
+ paddd %xmm7,%xmm14
+ movdqa 176-128(%rax),%xmm6
+ paddd 48-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 128-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,160-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movdqa 192-128(%rax),%xmm5
+ paddd 64-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 144-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,176-128(%rax)
+ paddd %xmm12,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm6,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm12
+ paddd %xmm7,%xmm12
+ movdqa 208-128(%rax),%xmm6
+ paddd 80-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 160-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,192-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movdqa 224-128(%rax),%xmm5
+ paddd 96-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 176-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,208-128(%rax)
+ paddd %xmm10,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm6,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm10
+ paddd %xmm7,%xmm10
+ movdqa 240-128(%rax),%xmm6
+ paddd 112-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 192-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,224-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movdqa 0-128(%rax),%xmm5
+ paddd 128-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 208-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,240-128(%rax)
+ paddd %xmm8,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm6,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ decl %ecx
+ jnz .Loop_16_xx
+
+ movl $1,%ecx
+ leaq K256+128(%rip),%rbp
+
+ movdqa (%rbx),%xmm7
+ cmpl 0(%rbx),%ecx
+ pxor %xmm0,%xmm0
+ cmovgeq %rbp,%r8
+ cmpl 4(%rbx),%ecx
+ movdqa %xmm7,%xmm6
+ cmovgeq %rbp,%r9
+ cmpl 8(%rbx),%ecx
+ pcmpgtd %xmm0,%xmm6
+ cmovgeq %rbp,%r10
+ cmpl 12(%rbx),%ecx
+ paddd %xmm6,%xmm7
+ cmovgeq %rbp,%r11
+
+ movdqu 0-128(%rdi),%xmm0
+ pand %xmm6,%xmm8
+ movdqu 32-128(%rdi),%xmm1
+ pand %xmm6,%xmm9
+ movdqu 64-128(%rdi),%xmm2
+ pand %xmm6,%xmm10
+ movdqu 96-128(%rdi),%xmm5
+ pand %xmm6,%xmm11
+ paddd %xmm0,%xmm8
+ movdqu 128-128(%rdi),%xmm0
+ pand %xmm6,%xmm12
+ paddd %xmm1,%xmm9
+ movdqu 160-128(%rdi),%xmm1
+ pand %xmm6,%xmm13
+ paddd %xmm2,%xmm10
+ movdqu 192-128(%rdi),%xmm2
+ pand %xmm6,%xmm14
+ paddd %xmm5,%xmm11
+ movdqu 224-128(%rdi),%xmm5
+ pand %xmm6,%xmm15
+ paddd %xmm0,%xmm12
+ paddd %xmm1,%xmm13
+ movdqu %xmm8,0-128(%rdi)
+ paddd %xmm2,%xmm14
+ movdqu %xmm9,32-128(%rdi)
+ paddd %xmm5,%xmm15
+ movdqu %xmm10,64-128(%rdi)
+ movdqu %xmm11,96-128(%rdi)
+ movdqu %xmm12,128-128(%rdi)
+ movdqu %xmm13,160-128(%rdi)
+ movdqu %xmm14,192-128(%rdi)
+ movdqu %xmm15,224-128(%rdi)
+
+ movdqa %xmm7,(%rbx)
+ movdqa .Lpbswap(%rip),%xmm6
+ decl %edx
+ jnz .Loop
+
+ movl 280(%rsp),%edx
+ leaq 16(%rdi),%rdi
+ leaq 64(%rsi),%rsi
+ decl %edx
+ jnz .Loop_grande
+
+.Ldone:
+ movq 272(%rsp),%rax
+ movq -16(%rax),%rbp
+ movq -8(%rax),%rbx
+ leaq (%rax),%rsp
+ .byte 0xf3,0xc3
+.size sha256_multi_block,.-sha256_multi_block
+.align 256
+K256:
+.long 1116352408,1116352408,1116352408,1116352408
+.long 1116352408,1116352408,1116352408,1116352408
+.long 1899447441,1899447441,1899447441,1899447441
+.long 1899447441,1899447441,1899447441,1899447441
+.long 3049323471,3049323471,3049323471,3049323471
+.long 3049323471,3049323471,3049323471,3049323471
+.long 3921009573,3921009573,3921009573,3921009573
+.long 3921009573,3921009573,3921009573,3921009573
+.long 961987163,961987163,961987163,961987163
+.long 961987163,961987163,961987163,961987163
+.long 1508970993,1508970993,1508970993,1508970993
+.long 1508970993,1508970993,1508970993,1508970993
+.long 2453635748,2453635748,2453635748,2453635748
+.long 2453635748,2453635748,2453635748,2453635748
+.long 2870763221,2870763221,2870763221,2870763221
+.long 2870763221,2870763221,2870763221,2870763221
+.long 3624381080,3624381080,3624381080,3624381080
+.long 3624381080,3624381080,3624381080,3624381080
+.long 310598401,310598401,310598401,310598401
+.long 310598401,310598401,310598401,310598401
+.long 607225278,607225278,607225278,607225278
+.long 607225278,607225278,607225278,607225278
+.long 1426881987,1426881987,1426881987,1426881987
+.long 1426881987,1426881987,1426881987,1426881987
+.long 1925078388,1925078388,1925078388,1925078388
+.long 1925078388,1925078388,1925078388,1925078388
+.long 2162078206,2162078206,2162078206,2162078206
+.long 2162078206,2162078206,2162078206,2162078206
+.long 2614888103,2614888103,2614888103,2614888103
+.long 2614888103,2614888103,2614888103,2614888103
+.long 3248222580,3248222580,3248222580,3248222580
+.long 3248222580,3248222580,3248222580,3248222580
+.long 3835390401,3835390401,3835390401,3835390401
+.long 3835390401,3835390401,3835390401,3835390401
+.long 4022224774,4022224774,4022224774,4022224774
+.long 4022224774,4022224774,4022224774,4022224774
+.long 264347078,264347078,264347078,264347078
+.long 264347078,264347078,264347078,264347078
+.long 604807628,604807628,604807628,604807628
+.long 604807628,604807628,604807628,604807628
+.long 770255983,770255983,770255983,770255983
+.long 770255983,770255983,770255983,770255983
+.long 1249150122,1249150122,1249150122,1249150122
+.long 1249150122,1249150122,1249150122,1249150122
+.long 1555081692,1555081692,1555081692,1555081692
+.long 1555081692,1555081692,1555081692,1555081692
+.long 1996064986,1996064986,1996064986,1996064986
+.long 1996064986,1996064986,1996064986,1996064986
+.long 2554220882,2554220882,2554220882,2554220882
+.long 2554220882,2554220882,2554220882,2554220882
+.long 2821834349,2821834349,2821834349,2821834349
+.long 2821834349,2821834349,2821834349,2821834349
+.long 2952996808,2952996808,2952996808,2952996808
+.long 2952996808,2952996808,2952996808,2952996808
+.long 3210313671,3210313671,3210313671,3210313671
+.long 3210313671,3210313671,3210313671,3210313671
+.long 3336571891,3336571891,3336571891,3336571891
+.long 3336571891,3336571891,3336571891,3336571891
+.long 3584528711,3584528711,3584528711,3584528711
+.long 3584528711,3584528711,3584528711,3584528711
+.long 113926993,113926993,113926993,113926993
+.long 113926993,113926993,113926993,113926993
+.long 338241895,338241895,338241895,338241895
+.long 338241895,338241895,338241895,338241895
+.long 666307205,666307205,666307205,666307205
+.long 666307205,666307205,666307205,666307205
+.long 773529912,773529912,773529912,773529912
+.long 773529912,773529912,773529912,773529912
+.long 1294757372,1294757372,1294757372,1294757372
+.long 1294757372,1294757372,1294757372,1294757372
+.long 1396182291,1396182291,1396182291,1396182291
+.long 1396182291,1396182291,1396182291,1396182291
+.long 1695183700,1695183700,1695183700,1695183700
+.long 1695183700,1695183700,1695183700,1695183700
+.long 1986661051,1986661051,1986661051,1986661051
+.long 1986661051,1986661051,1986661051,1986661051
+.long 2177026350,2177026350,2177026350,2177026350
+.long 2177026350,2177026350,2177026350,2177026350
+.long 2456956037,2456956037,2456956037,2456956037
+.long 2456956037,2456956037,2456956037,2456956037
+.long 2730485921,2730485921,2730485921,2730485921
+.long 2730485921,2730485921,2730485921,2730485921
+.long 2820302411,2820302411,2820302411,2820302411
+.long 2820302411,2820302411,2820302411,2820302411
+.long 3259730800,3259730800,3259730800,3259730800
+.long 3259730800,3259730800,3259730800,3259730800
+.long 3345764771,3345764771,3345764771,3345764771
+.long 3345764771,3345764771,3345764771,3345764771
+.long 3516065817,3516065817,3516065817,3516065817
+.long 3516065817,3516065817,3516065817,3516065817
+.long 3600352804,3600352804,3600352804,3600352804
+.long 3600352804,3600352804,3600352804,3600352804
+.long 4094571909,4094571909,4094571909,4094571909
+.long 4094571909,4094571909,4094571909,4094571909
+.long 275423344,275423344,275423344,275423344
+.long 275423344,275423344,275423344,275423344
+.long 430227734,430227734,430227734,430227734
+.long 430227734,430227734,430227734,430227734
+.long 506948616,506948616,506948616,506948616
+.long 506948616,506948616,506948616,506948616
+.long 659060556,659060556,659060556,659060556
+.long 659060556,659060556,659060556,659060556
+.long 883997877,883997877,883997877,883997877
+.long 883997877,883997877,883997877,883997877
+.long 958139571,958139571,958139571,958139571
+.long 958139571,958139571,958139571,958139571
+.long 1322822218,1322822218,1322822218,1322822218
+.long 1322822218,1322822218,1322822218,1322822218
+.long 1537002063,1537002063,1537002063,1537002063
+.long 1537002063,1537002063,1537002063,1537002063
+.long 1747873779,1747873779,1747873779,1747873779
+.long 1747873779,1747873779,1747873779,1747873779
+.long 1955562222,1955562222,1955562222,1955562222
+.long 1955562222,1955562222,1955562222,1955562222
+.long 2024104815,2024104815,2024104815,2024104815
+.long 2024104815,2024104815,2024104815,2024104815
+.long 2227730452,2227730452,2227730452,2227730452
+.long 2227730452,2227730452,2227730452,2227730452
+.long 2361852424,2361852424,2361852424,2361852424
+.long 2361852424,2361852424,2361852424,2361852424
+.long 2428436474,2428436474,2428436474,2428436474
+.long 2428436474,2428436474,2428436474,2428436474
+.long 2756734187,2756734187,2756734187,2756734187
+.long 2756734187,2756734187,2756734187,2756734187
+.long 3204031479,3204031479,3204031479,3204031479
+.long 3204031479,3204031479,3204031479,3204031479
+.long 3329325298,3329325298,3329325298,3329325298
+.long 3329325298,3329325298,3329325298,3329325298
+.Lpbswap:
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/elf/sha256-ssse3-x86.s b/lib/accelerated/x86/elf/sha256-ssse3-x86.s
new file mode 100644
index 0000000000..81470f510d
--- /dev/null
+++ b/lib/accelerated/x86/elf/sha256-ssse3-x86.s
@@ -0,0 +1,3403 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "sha512-586.s"
+.text
+.globl sha256_block_data_order
+.type sha256_block_data_order,@function
+.align 16
+sha256_block_data_order:
+.L_sha256_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call .L000pic_point
+.L000pic_point:
+ popl %ebp
+ leal .L001K256-.L000pic_point(%ebp),%ebp
+ subl $16,%esp
+ andl $-64,%esp
+ shll $6,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ leal _gnutls_x86_cpuid_s-.L001K256(%ebp),%edx
+ movl (%edx),%ecx
+ movl 4(%edx),%ebx
+ testl $1048576,%ecx
+ jnz .L002loop
+ andl $1073741824,%ecx
+ andl $268435968,%ebx
+ orl %ebx,%ecx
+ andl $1342177280,%ecx
+ cmpl $1342177280,%ecx
+ je .L003loop_shrd
+ subl %edi,%eax
+ cmpl $256,%eax
+ jae .L004unrolled
+ jmp .L002loop
+.align 16
+.L002loop:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ bswap %eax
+ movl 12(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ bswap %eax
+ movl 28(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ bswap %eax
+ movl 44(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ bswap %eax
+ movl 60(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ addl $64,%edi
+ leal -36(%esp),%esp
+ movl %edi,104(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edi
+ movl %ebx,8(%esp)
+ xorl %ecx,%ebx
+ movl %ecx,12(%esp)
+ movl %edi,16(%esp)
+ movl %ebx,(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edi
+ movl %ebx,24(%esp)
+ movl %ecx,28(%esp)
+ movl %edi,32(%esp)
+.align 16
+.L00500_15:
+ movl %edx,%ecx
+ movl 24(%esp),%esi
+ rorl $14,%ecx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl 96(%esp),%ebx
+ rorl $5,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ rorl $9,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ rorl $11,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ rorl $2,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3248222580,%esi
+ jne .L00500_15
+ movl 156(%esp),%ecx
+ jmp .L00616_63
+.align 16
+.L00616_63:
+ movl %ecx,%ebx
+ movl 104(%esp),%esi
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 160(%esp),%ebx
+ shrl $10,%edi
+ addl 124(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 24(%esp),%esi
+ rorl $14,%ecx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl %ebx,96(%esp)
+ rorl $5,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ rorl $9,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ rorl $11,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ rorl $2,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ movl 156(%esp),%ecx
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3329325298,%esi
+ jne .L00616_63
+ movl 356(%esp),%esi
+ movl 8(%esp),%ebx
+ movl 16(%esp),%ecx
+ addl (%esi),%eax
+ addl 4(%esi),%ebx
+ addl 8(%esi),%edi
+ addl 12(%esi),%ecx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %edi,8(%esi)
+ movl %ecx,12(%esi)
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ movl 32(%esp),%ecx
+ movl 360(%esp),%edi
+ addl 16(%esi),%edx
+ addl 20(%esi),%eax
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %eax,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ leal 356(%esp),%esp
+ subl $256,%ebp
+ cmpl 8(%esp),%edi
+ jb .L002loop
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 32
+.L003loop_shrd:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ bswap %eax
+ movl 12(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ bswap %eax
+ movl 28(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ bswap %eax
+ movl 44(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ bswap %eax
+ movl 60(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ addl $64,%edi
+ leal -36(%esp),%esp
+ movl %edi,104(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edi
+ movl %ebx,8(%esp)
+ xorl %ecx,%ebx
+ movl %ecx,12(%esp)
+ movl %edi,16(%esp)
+ movl %ebx,(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edi
+ movl %ebx,24(%esp)
+ movl %ecx,28(%esp)
+ movl %edi,32(%esp)
+.align 16
+.L00700_15_shrd:
+ movl %edx,%ecx
+ movl 24(%esp),%esi
+ shrdl $14,%ecx,%ecx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl 96(%esp),%ebx
+ shrdl $5,%ecx,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ shrdl $6,%edx,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ shrdl $9,%ecx,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ shrdl $11,%ecx,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ shrdl $2,%ecx,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3248222580,%esi
+ jne .L00700_15_shrd
+ movl 156(%esp),%ecx
+ jmp .L00816_63_shrd
+.align 16
+.L00816_63_shrd:
+ movl %ecx,%ebx
+ movl 104(%esp),%esi
+ shrdl $11,%ecx,%ecx
+ movl %esi,%edi
+ shrdl $2,%esi,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ shrdl $7,%ecx,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ shrdl $17,%esi,%esi
+ addl 160(%esp),%ebx
+ shrl $10,%edi
+ addl 124(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 24(%esp),%esi
+ shrdl $14,%ecx,%ecx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl %ebx,96(%esp)
+ shrdl $5,%ecx,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ shrdl $6,%edx,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ shrdl $9,%ecx,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ shrdl $11,%ecx,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ shrdl $2,%ecx,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ movl 156(%esp),%ecx
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3329325298,%esi
+ jne .L00816_63_shrd
+ movl 356(%esp),%esi
+ movl 8(%esp),%ebx
+ movl 16(%esp),%ecx
+ addl (%esi),%eax
+ addl 4(%esi),%ebx
+ addl 8(%esi),%edi
+ addl 12(%esi),%ecx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %edi,8(%esi)
+ movl %ecx,12(%esi)
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ movl 32(%esp),%ecx
+ movl 360(%esp),%edi
+ addl 16(%esi),%edx
+ addl 20(%esi),%eax
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %eax,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ leal 356(%esp),%esp
+ subl $256,%ebp
+ cmpl 8(%esp),%edi
+ jb .L003loop_shrd
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L001K256:
+.long 1116352408,1899447441,3049323471,3921009573,961987163,1508970993,2453635748,2870763221,3624381080,310598401,607225278,1426881987,1925078388,2162078206,2614888103,3248222580,3835390401,4022224774,264347078,604807628,770255983,1249150122,1555081692,1996064986,2554220882,2821834349,2952996808,3210313671,3336571891,3584528711,113926993,338241895,666307205,773529912,1294757372,1396182291,1695183700,1986661051,2177026350,2456956037,2730485921,2820302411,3259730800,3345764771,3516065817,3600352804,4094571909,275423344,430227734,506948616,659060556,883997877,958139571,1322822218,1537002063,1747873779,1955562222,2024104815,2227730452,2361852424,2428436474,2756734187,3204031479,3329325298
+.long 66051,67438087,134810123,202182159
+.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
+.align 16
+.L004unrolled:
+ leal -96(%esp),%esp
+ movl (%esi),%eax
+ movl 4(%esi),%ebp
+ movl 8(%esi),%ecx
+ movl 12(%esi),%ebx
+ movl %ebp,4(%esp)
+ xorl %ecx,%ebp
+ movl %ecx,8(%esp)
+ movl %ebx,12(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%esi
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %esi,28(%esp)
+ jmp .L009grand_loop
+.align 16
+.L009grand_loop:
+ movl (%edi),%ebx
+ movl 4(%edi),%ecx
+ bswap %ebx
+ movl 8(%edi),%esi
+ bswap %ecx
+ movl %ebx,32(%esp)
+ bswap %esi
+ movl %ecx,36(%esp)
+ movl %esi,40(%esp)
+ movl 12(%edi),%ebx
+ movl 16(%edi),%ecx
+ bswap %ebx
+ movl 20(%edi),%esi
+ bswap %ecx
+ movl %ebx,44(%esp)
+ bswap %esi
+ movl %ecx,48(%esp)
+ movl %esi,52(%esp)
+ movl 24(%edi),%ebx
+ movl 28(%edi),%ecx
+ bswap %ebx
+ movl 32(%edi),%esi
+ bswap %ecx
+ movl %ebx,56(%esp)
+ bswap %esi
+ movl %ecx,60(%esp)
+ movl %esi,64(%esp)
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ bswap %ebx
+ movl 44(%edi),%esi
+ bswap %ecx
+ movl %ebx,68(%esp)
+ bswap %esi
+ movl %ecx,72(%esp)
+ movl %esi,76(%esp)
+ movl 48(%edi),%ebx
+ movl 52(%edi),%ecx
+ bswap %ebx
+ movl 56(%edi),%esi
+ bswap %ecx
+ movl %ebx,80(%esp)
+ bswap %esi
+ movl %ecx,84(%esp)
+ movl %esi,88(%esp)
+ movl 60(%edi),%ebx
+ addl $64,%edi
+ bswap %ebx
+ movl %edi,100(%esp)
+ movl %ebx,92(%esp)
+ movl %edx,%ecx
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl 32(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1116352408(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl 36(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1899447441(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl 40(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3049323471(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl 44(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3921009573(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl 48(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 961987163(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl 52(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1508970993(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl 56(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2453635748(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl 60(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2870763221(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl 64(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3624381080(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl 68(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 310598401(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl 72(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 607225278(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl 76(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1426881987(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl 80(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1925078388(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl 84(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2162078206(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl 88(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2614888103(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl 92(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3248222580(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 36(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 88(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 32(%esp),%ebx
+ shrl $10,%edi
+ addl 68(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,32(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3835390401(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 40(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 92(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 36(%esp),%ebx
+ shrl $10,%edi
+ addl 72(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,36(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 4022224774(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 44(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 32(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 40(%esp),%ebx
+ shrl $10,%edi
+ addl 76(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,40(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 264347078(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 36(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 44(%esp),%ebx
+ shrl $10,%edi
+ addl 80(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,44(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 604807628(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 52(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 40(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 48(%esp),%ebx
+ shrl $10,%edi
+ addl 84(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,48(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 770255983(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 44(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 52(%esp),%ebx
+ shrl $10,%edi
+ addl 88(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,52(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1249150122(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 60(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 48(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 56(%esp),%ebx
+ shrl $10,%edi
+ addl 92(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,56(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1555081692(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 64(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 52(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 60(%esp),%ebx
+ shrl $10,%edi
+ addl 32(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,60(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1996064986(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 68(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 56(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 64(%esp),%ebx
+ shrl $10,%edi
+ addl 36(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,64(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2554220882(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 72(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 60(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 68(%esp),%ebx
+ shrl $10,%edi
+ addl 40(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,68(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2821834349(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 76(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 64(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 72(%esp),%ebx
+ shrl $10,%edi
+ addl 44(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,72(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2952996808(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 80(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 68(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 76(%esp),%ebx
+ shrl $10,%edi
+ addl 48(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,76(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3210313671(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 84(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 72(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 80(%esp),%ebx
+ shrl $10,%edi
+ addl 52(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,80(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3336571891(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 88(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 76(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 84(%esp),%ebx
+ shrl $10,%edi
+ addl 56(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,84(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3584528711(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 92(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 80(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 88(%esp),%ebx
+ shrl $10,%edi
+ addl 60(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,88(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 113926993(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 84(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 92(%esp),%ebx
+ shrl $10,%edi
+ addl 64(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,92(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 338241895(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 36(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 88(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 32(%esp),%ebx
+ shrl $10,%edi
+ addl 68(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,32(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 666307205(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 40(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 92(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 36(%esp),%ebx
+ shrl $10,%edi
+ addl 72(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,36(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 773529912(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 44(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 32(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 40(%esp),%ebx
+ shrl $10,%edi
+ addl 76(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,40(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1294757372(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 36(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 44(%esp),%ebx
+ shrl $10,%edi
+ addl 80(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,44(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1396182291(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 52(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 40(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 48(%esp),%ebx
+ shrl $10,%edi
+ addl 84(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,48(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1695183700(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 44(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 52(%esp),%ebx
+ shrl $10,%edi
+ addl 88(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,52(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1986661051(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 60(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 48(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 56(%esp),%ebx
+ shrl $10,%edi
+ addl 92(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,56(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2177026350(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 64(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 52(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 60(%esp),%ebx
+ shrl $10,%edi
+ addl 32(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,60(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2456956037(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 68(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 56(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 64(%esp),%ebx
+ shrl $10,%edi
+ addl 36(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,64(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2730485921(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 72(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 60(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 68(%esp),%ebx
+ shrl $10,%edi
+ addl 40(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,68(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2820302411(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 76(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 64(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 72(%esp),%ebx
+ shrl $10,%edi
+ addl 44(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,72(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3259730800(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 80(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 68(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 76(%esp),%ebx
+ shrl $10,%edi
+ addl 48(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,76(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3345764771(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 84(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 72(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 80(%esp),%ebx
+ shrl $10,%edi
+ addl 52(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,80(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3516065817(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 88(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 76(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 84(%esp),%ebx
+ shrl $10,%edi
+ addl 56(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,84(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3600352804(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 92(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 80(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 88(%esp),%ebx
+ shrl $10,%edi
+ addl 60(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,88(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 4094571909(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 84(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 92(%esp),%ebx
+ shrl $10,%edi
+ addl 64(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,92(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 275423344(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 36(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 88(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 32(%esp),%ebx
+ shrl $10,%edi
+ addl 68(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,32(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 430227734(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 40(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 92(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 36(%esp),%ebx
+ shrl $10,%edi
+ addl 72(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,36(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 506948616(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 44(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 32(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 40(%esp),%ebx
+ shrl $10,%edi
+ addl 76(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,40(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 659060556(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 36(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 44(%esp),%ebx
+ shrl $10,%edi
+ addl 80(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,44(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 883997877(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 52(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 40(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 48(%esp),%ebx
+ shrl $10,%edi
+ addl 84(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,48(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 958139571(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 44(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 52(%esp),%ebx
+ shrl $10,%edi
+ addl 88(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,52(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1322822218(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 60(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 48(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 56(%esp),%ebx
+ shrl $10,%edi
+ addl 92(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,56(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1537002063(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 64(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 52(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 60(%esp),%ebx
+ shrl $10,%edi
+ addl 32(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,60(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1747873779(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 68(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 56(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 64(%esp),%ebx
+ shrl $10,%edi
+ addl 36(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,64(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1955562222(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 72(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 60(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 68(%esp),%ebx
+ shrl $10,%edi
+ addl 40(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,68(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2024104815(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 76(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 64(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 72(%esp),%ebx
+ shrl $10,%edi
+ addl 44(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,72(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2227730452(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 80(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 68(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 76(%esp),%ebx
+ shrl $10,%edi
+ addl 48(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,76(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2361852424(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 84(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 72(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 80(%esp),%ebx
+ shrl $10,%edi
+ addl 52(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,80(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2428436474(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 88(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 76(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 84(%esp),%ebx
+ shrl $10,%edi
+ addl 56(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,84(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2756734187(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 92(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 80(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 88(%esp),%ebx
+ shrl $10,%edi
+ addl 60(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3204031479(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 84(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 92(%esp),%ebx
+ shrl $10,%edi
+ addl 64(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3329325298(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 96(%esp),%esi
+ xorl %edi,%ebp
+ movl 12(%esp),%ecx
+ addl (%esi),%eax
+ addl 4(%esi),%ebp
+ addl 8(%esi),%edi
+ addl 12(%esi),%ecx
+ movl %eax,(%esi)
+ movl %ebp,4(%esi)
+ movl %edi,8(%esi)
+ movl %ecx,12(%esi)
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ movl %edi,8(%esp)
+ movl %ecx,12(%esp)
+ movl 20(%esp),%edi
+ movl 24(%esp),%ebx
+ movl 28(%esp),%ecx
+ addl 16(%esi),%edx
+ addl 20(%esi),%edi
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %edi,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ movl %edi,20(%esp)
+ movl 100(%esp),%edi
+ movl %ebx,24(%esp)
+ movl %ecx,28(%esp)
+ cmpl 104(%esp),%edi
+ jb .L009grand_loop
+ movl 108(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size sha256_block_data_order,.-.L_sha256_block_data_order_begin
+.comm _gnutls_x86_cpuid_s,16,4
+
+.section .note.GNU-stack,"",%progbits
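For readers tracing the .L009grand_loop rounds above: the long unrolled runs of rorl/shrl/xorl are the plain 32-bit x86 path of the SHA-256 compression function, and the immediates in the leal instructions (338241895, 666307205, ...) are the FIPS 180-4 round constants K[t]. The following C sketch is illustrative only, it is not part of the patch and the helper names are invented for this note; it shows the functions those chained rotates evaluate.

#include <stdint.h>

/* Illustrative sketch, not part of this patch: the FIPS 180-4 SHA-256
   round functions that the hand-scheduled assembly above computes with
   chained rorl/shrl/xorl instructions. */
static inline uint32_t ror32(uint32_t x, unsigned n)   /* n in 1..31 */
{ return (x >> n) | (x << (32 - n)); }

static inline uint32_t Sigma0(uint32_t x) { return ror32(x, 2) ^ ror32(x, 13) ^ ror32(x, 22); }
static inline uint32_t Sigma1(uint32_t x) { return ror32(x, 6) ^ ror32(x, 11) ^ ror32(x, 25); }
static inline uint32_t sigma0(uint32_t x) { return ror32(x, 7) ^ ror32(x, 18) ^ (x >> 3); }   /* message schedule */
static inline uint32_t sigma1(uint32_t x) { return ror32(x, 17) ^ ror32(x, 19) ^ (x >> 10); } /* message schedule */

/* One compression round on the working variables a..h (s[0]..s[7]); the
   assembly interleaves this with the sigma0/sigma1 schedule update and keeps
   the variables split between registers and the stack frame. */
static void sha256_round(uint32_t s[8], uint32_t k, uint32_t w)
{
    uint32_t ch  = (s[4] & s[5]) ^ (~s[4] & s[6]);
    uint32_t maj = (s[0] & s[1]) ^ (s[0] & s[2]) ^ (s[1] & s[2]);
    uint32_t t1  = s[7] + Sigma1(s[4]) + ch + k + w;
    uint32_t t2  = Sigma0(s[0]) + maj;
    s[7] = s[6]; s[6] = s[5]; s[5] = s[4]; s[4] = s[3] + t1;
    s[3] = s[2]; s[2] = s[1]; s[1] = s[0]; s[0] = t1 + t2;
}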
diff --git a/lib/accelerated/x86/elf/sha512-ssse3-x86.s b/lib/accelerated/x86/elf/sha512-ssse3-x86.s
new file mode 100644
index 0000000000..088a0fa54a
--- /dev/null
+++ b/lib/accelerated/x86/elf/sha512-ssse3-x86.s
@@ -0,0 +1,606 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "sha512-586.s"
+.text
+.globl sha512_block_data_order
+.type sha512_block_data_order,@function
+.align 16
+sha512_block_data_order:
+.L_sha512_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call .L000pic_point
+.L000pic_point:
+ popl %ebp
+ leal .L001K512-.L000pic_point(%ebp),%ebp
+ subl $16,%esp
+ andl $-64,%esp
+ shll $7,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+.align 16
+.L002loop_x86:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ movl 28(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ movl 44(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ movl 60(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 64(%edi),%eax
+ movl 68(%edi),%ebx
+ movl 72(%edi),%ecx
+ movl 76(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 80(%edi),%eax
+ movl 84(%edi),%ebx
+ movl 88(%edi),%ecx
+ movl 92(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 96(%edi),%eax
+ movl 100(%edi),%ebx
+ movl 104(%edi),%ecx
+ movl 108(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 112(%edi),%eax
+ movl 116(%edi),%ebx
+ movl 120(%edi),%ecx
+ movl 124(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ addl $128,%edi
+ subl $72,%esp
+ movl %edi,204(%esp)
+ leal 8(%esp),%edi
+ movl $16,%ecx
+.long 2784229001
+.align 16
+.L00300_15_x86:
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $148,%dl
+ jne .L00300_15_x86
+.align 16
+.L00416_79_x86:
+ movl 312(%esp),%ecx
+ movl 316(%esp),%edx
+ movl %ecx,%esi
+ shrl $1,%ecx
+ movl %edx,%edi
+ shrl $1,%edx
+ movl %ecx,%eax
+ shll $24,%esi
+ movl %edx,%ebx
+ shll $24,%edi
+ xorl %esi,%ebx
+ shrl $6,%ecx
+ xorl %edi,%eax
+ shrl $6,%edx
+ xorl %ecx,%eax
+ shll $7,%esi
+ xorl %edx,%ebx
+ shll $1,%edi
+ xorl %esi,%ebx
+ shrl $1,%ecx
+ xorl %edi,%eax
+ shrl $1,%edx
+ xorl %ecx,%eax
+ shll $6,%edi
+ xorl %edx,%ebx
+ xorl %edi,%eax
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl 208(%esp),%ecx
+ movl 212(%esp),%edx
+ movl %ecx,%esi
+ shrl $6,%ecx
+ movl %edx,%edi
+ shrl $6,%edx
+ movl %ecx,%eax
+ shll $3,%esi
+ movl %edx,%ebx
+ shll $3,%edi
+ xorl %esi,%eax
+ shrl $13,%ecx
+ xorl %edi,%ebx
+ shrl $13,%edx
+ xorl %ecx,%eax
+ shll $10,%esi
+ xorl %edx,%ebx
+ shll $10,%edi
+ xorl %esi,%ebx
+ shrl $10,%ecx
+ xorl %edi,%eax
+ shrl $10,%edx
+ xorl %ecx,%ebx
+ shll $13,%edi
+ xorl %edx,%eax
+ xorl %edi,%eax
+ movl 320(%esp),%ecx
+ movl 324(%esp),%edx
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ movl 248(%esp),%esi
+ movl 252(%esp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,192(%esp)
+ movl %ebx,196(%esp)
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $23,%dl
+ jne .L00416_79_x86
+ movl 840(%esp),%esi
+ movl 844(%esp),%edi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ addl 8(%esp),%eax
+ adcl 12(%esp),%ebx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ addl 16(%esp),%ecx
+ adcl 20(%esp),%edx
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ addl 24(%esp),%eax
+ adcl 28(%esp),%ebx
+ movl %eax,16(%esi)
+ movl %ebx,20(%esi)
+ addl 32(%esp),%ecx
+ adcl 36(%esp),%edx
+ movl %ecx,24(%esi)
+ movl %edx,28(%esi)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ addl 40(%esp),%eax
+ adcl 44(%esp),%ebx
+ movl %eax,32(%esi)
+ movl %ebx,36(%esi)
+ addl 48(%esp),%ecx
+ adcl 52(%esp),%edx
+ movl %ecx,40(%esi)
+ movl %edx,44(%esi)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ addl 56(%esp),%eax
+ adcl 60(%esp),%ebx
+ movl %eax,48(%esi)
+ movl %ebx,52(%esi)
+ addl 64(%esp),%ecx
+ adcl 68(%esp),%edx
+ movl %ecx,56(%esi)
+ movl %edx,60(%esi)
+ addl $840,%esp
+ subl $640,%ebp
+ cmpl 8(%esp),%edi
+ jb .L002loop_x86
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L001K512:
+.long 3609767458,1116352408
+.long 602891725,1899447441
+.long 3964484399,3049323471
+.long 2173295548,3921009573
+.long 4081628472,961987163
+.long 3053834265,1508970993
+.long 2937671579,2453635748
+.long 3664609560,2870763221
+.long 2734883394,3624381080
+.long 1164996542,310598401
+.long 1323610764,607225278
+.long 3590304994,1426881987
+.long 4068182383,1925078388
+.long 991336113,2162078206
+.long 633803317,2614888103
+.long 3479774868,3248222580
+.long 2666613458,3835390401
+.long 944711139,4022224774
+.long 2341262773,264347078
+.long 2007800933,604807628
+.long 1495990901,770255983
+.long 1856431235,1249150122
+.long 3175218132,1555081692
+.long 2198950837,1996064986
+.long 3999719339,2554220882
+.long 766784016,2821834349
+.long 2566594879,2952996808
+.long 3203337956,3210313671
+.long 1034457026,3336571891
+.long 2466948901,3584528711
+.long 3758326383,113926993
+.long 168717936,338241895
+.long 1188179964,666307205
+.long 1546045734,773529912
+.long 1522805485,1294757372
+.long 2643833823,1396182291
+.long 2343527390,1695183700
+.long 1014477480,1986661051
+.long 1206759142,2177026350
+.long 344077627,2456956037
+.long 1290863460,2730485921
+.long 3158454273,2820302411
+.long 3505952657,3259730800
+.long 106217008,3345764771
+.long 3606008344,3516065817
+.long 1432725776,3600352804
+.long 1467031594,4094571909
+.long 851169720,275423344
+.long 3100823752,430227734
+.long 1363258195,506948616
+.long 3750685593,659060556
+.long 3785050280,883997877
+.long 3318307427,958139571
+.long 3812723403,1322822218
+.long 2003034995,1537002063
+.long 3602036899,1747873779
+.long 1575990012,1955562222
+.long 1125592928,2024104815
+.long 2716904306,2227730452
+.long 442776044,2361852424
+.long 593698344,2428436474
+.long 3733110249,2756734187
+.long 2999351573,3204031479
+.long 3815920427,3329325298
+.long 3928383900,3391569614
+.long 566280711,3515267271
+.long 3454069534,3940187606
+.long 4000239992,4118630271
+.long 1914138554,116418474
+.long 2731055270,174292421
+.long 3203993006,289380356
+.long 320620315,460393269
+.long 587496836,685471733
+.long 1086792851,852142971
+.long 365543100,1017036298
+.long 2618297676,1126000580
+.long 3409855158,1288033470
+.long 4234509866,1501505948
+.long 987167468,1607167915
+.long 1246189591,1816402316
+.long 67438087,66051
+.long 202182159,134810123
+.size sha512_block_data_order,.-.L_sha512_block_data_order_begin
+.byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
+
+.section .note.GNU-stack,"",%progbits
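The .L002loop_x86 code above is the pure 32-bit integer path of sha512_block_data_order: each 64-bit word is held as a high/low pair of 32-bit halves, and every 64-bit rotate in the SHA-512 Sigma/sigma functions is assembled from paired shll/shrl and xorl on those halves. A minimal C sketch of what those shift pairs compute follows; it is illustrative only, not part of the patch, and the helper names are invented for this note.

#include <stdint.h>

/* Reference forms of the FIPS 180-4 SHA-512 functions; n is always 1..63 here. */
static inline uint64_t ror64(uint64_t x, unsigned n)
{ return (x >> n) | (x << (64 - n)); }

static inline uint64_t Sigma0_512(uint64_t x) { return ror64(x, 28) ^ ror64(x, 34) ^ ror64(x, 39); }
static inline uint64_t Sigma1_512(uint64_t x) { return ror64(x, 14) ^ ror64(x, 18) ^ ror64(x, 41); }
static inline uint64_t sigma0_512(uint64_t x) { return ror64(x, 1) ^ ror64(x, 8) ^ (x >> 7); }
static inline uint64_t sigma1_512(uint64_t x) { return ror64(x, 19) ^ ror64(x, 61) ^ (x >> 6); }

/* On a 32-bit target the same rotate is built from the two word halves,
   which is what the interleaved shll/shrl/xorl sequences in the loop above do. */
static void ror64_halves(uint32_t hi, uint32_t lo, unsigned n, /* n in 1..63 */
                         uint32_t *out_hi, uint32_t *out_lo)
{
    uint64_t x = ((uint64_t)hi << 32) | lo;
    uint64_t r = (x >> n) | (x << (64 - n));
    *out_hi = (uint32_t)(r >> 32);
    *out_lo = (uint32_t)r;
}

Correspondingly, the .L001K512 table stores each 64-bit round constant as two .long values with the least-significant half first (for example 3609767458,1116352408 is K[0] = 0x428a2f98d728ae22).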
diff --git a/lib/accelerated/x86/elf/sha512-ssse3-x86_64.s b/lib/accelerated/x86/elf/sha512-ssse3-x86_64.s
new file mode 100644
index 0000000000..ea1915ded0
--- /dev/null
+++ b/lib/accelerated/x86/elf/sha512-ssse3-x86_64.s
@@ -0,0 +1,2881 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.text
+
+
+.globl sha256_block_data_order
+.type sha256_block_data_order,@function
+.align 16
+sha256_block_data_order:
+ leaq _gnutls_x86_cpuid_s(%rip),%r11
+ movl 0(%r11),%r9d
+ movl 4(%r11),%r10d
+ movl 8(%r11),%r11d
+ testl $512,%r10d
+ jnz .Lssse3_shortcut
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp,%r11
+ shlq $4,%rdx
+ subq $64+32,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+.Lprologue:
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+ jmp .Lloop
+
+.align 16
+.Lloop:
+ movl %ebx,%edi
+ leaq K256(%rip),%rbp
+ xorl %ecx,%edi
+ movl 0(%rsi),%r12d
+ movl %r8d,%r13d
+ movl %eax,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,0(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r11d
+ movl 4(%rsi),%r12d
+ movl %edx,%r13d
+ movl %r11d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,4(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r10d
+ movl 8(%rsi),%r12d
+ movl %ecx,%r13d
+ movl %r10d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,8(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r9d
+ movl 12(%rsi),%r12d
+ movl %ebx,%r13d
+ movl %r9d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,12(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%r8d
+ movl 16(%rsi),%r12d
+ movl %eax,%r13d
+ movl %r8d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,16(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%edx
+ movl 20(%rsi),%r12d
+ movl %r11d,%r13d
+ movl %edx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,20(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ecx
+ movl 24(%rsi),%r12d
+ movl %r10d,%r13d
+ movl %ecx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,24(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ebx
+ movl 28(%rsi),%r12d
+ movl %r9d,%r13d
+ movl %ebx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,28(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%eax
+ movl 32(%rsi),%r12d
+ movl %r8d,%r13d
+ movl %eax,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,32(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r11d
+ movl 36(%rsi),%r12d
+ movl %edx,%r13d
+ movl %r11d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,36(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r10d
+ movl 40(%rsi),%r12d
+ movl %ecx,%r13d
+ movl %r10d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,40(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r9d
+ movl 44(%rsi),%r12d
+ movl %ebx,%r13d
+ movl %r9d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,44(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%r8d
+ movl 48(%rsi),%r12d
+ movl %eax,%r13d
+ movl %r8d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,48(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%edx
+ movl 52(%rsi),%r12d
+ movl %r11d,%r13d
+ movl %edx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,52(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ecx
+ movl 56(%rsi),%r12d
+ movl %r10d,%r13d
+ movl %ecx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,56(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ebx
+ movl 60(%rsi),%r12d
+ movl %r9d,%r13d
+ movl %ebx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,60(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ jmp .Lrounds_16_xx
+.align 16
+.Lrounds_16_xx:
+ movl 4(%rsp),%r13d
+ movl 56(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%eax
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 36(%rsp),%r12d
+
+ addl 0(%rsp),%r12d
+ movl %r8d,%r13d
+ addl %r15d,%r12d
+ movl %eax,%r14d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,0(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ movl 8(%rsp),%r13d
+ movl 60(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r11d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 40(%rsp),%r12d
+
+ addl 4(%rsp),%r12d
+ movl %edx,%r13d
+ addl %edi,%r12d
+ movl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,4(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ movl 12(%rsp),%r13d
+ movl 0(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r10d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 44(%rsp),%r12d
+
+ addl 8(%rsp),%r12d
+ movl %ecx,%r13d
+ addl %r15d,%r12d
+ movl %r10d,%r14d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,8(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ movl 16(%rsp),%r13d
+ movl 4(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r9d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 48(%rsp),%r12d
+
+ addl 12(%rsp),%r12d
+ movl %ebx,%r13d
+ addl %edi,%r12d
+ movl %r9d,%r14d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,12(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ movl 20(%rsp),%r13d
+ movl 8(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r8d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 52(%rsp),%r12d
+
+ addl 16(%rsp),%r12d
+ movl %eax,%r13d
+ addl %r15d,%r12d
+ movl %r8d,%r14d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,16(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ movl 24(%rsp),%r13d
+ movl 12(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%edx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 56(%rsp),%r12d
+
+ addl 20(%rsp),%r12d
+ movl %r11d,%r13d
+ addl %edi,%r12d
+ movl %edx,%r14d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,20(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ movl 28(%rsp),%r13d
+ movl 16(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ecx
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 60(%rsp),%r12d
+
+ addl 24(%rsp),%r12d
+ movl %r10d,%r13d
+ addl %r15d,%r12d
+ movl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,24(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ movl 32(%rsp),%r13d
+ movl 20(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ebx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 0(%rsp),%r12d
+
+ addl 28(%rsp),%r12d
+ movl %r9d,%r13d
+ addl %edi,%r12d
+ movl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,28(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ movl 36(%rsp),%r13d
+ movl 24(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%eax
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 4(%rsp),%r12d
+
+ addl 32(%rsp),%r12d
+ movl %r8d,%r13d
+ addl %r15d,%r12d
+ movl %eax,%r14d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,32(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ movl 40(%rsp),%r13d
+ movl 28(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r11d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 8(%rsp),%r12d
+
+ addl 36(%rsp),%r12d
+ movl %edx,%r13d
+ addl %edi,%r12d
+ movl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,36(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ movl 44(%rsp),%r13d
+ movl 32(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r10d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 12(%rsp),%r12d
+
+ addl 40(%rsp),%r12d
+ movl %ecx,%r13d
+ addl %r15d,%r12d
+ movl %r10d,%r14d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,40(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ movl 48(%rsp),%r13d
+ movl 36(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r9d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 16(%rsp),%r12d
+
+ addl 44(%rsp),%r12d
+ movl %ebx,%r13d
+ addl %edi,%r12d
+ movl %r9d,%r14d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,44(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ movl 52(%rsp),%r13d
+ movl 40(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r8d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 20(%rsp),%r12d
+
+ addl 48(%rsp),%r12d
+ movl %eax,%r13d
+ addl %r15d,%r12d
+ movl %r8d,%r14d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,48(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ movl 56(%rsp),%r13d
+ movl 44(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%edx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 24(%rsp),%r12d
+
+ addl 52(%rsp),%r12d
+ movl %r11d,%r13d
+ addl %edi,%r12d
+ movl %edx,%r14d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,52(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ movl 60(%rsp),%r13d
+ movl 48(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ecx
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 28(%rsp),%r12d
+
+ addl 56(%rsp),%r12d
+ movl %r10d,%r13d
+ addl %r15d,%r12d
+ movl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,56(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ movl 0(%rsp),%r13d
+ movl 52(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ebx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 32(%rsp),%r12d
+
+ addl 60(%rsp),%r12d
+ movl %r9d,%r13d
+ addl %edi,%r12d
+ movl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,60(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ cmpb $0,3(%rbp)
+ jnz .Lrounds_16_xx
+
+ movq 64+0(%rsp),%rdi
+ addl %r14d,%eax
+ leaq 64(%rsi),%rsi
+
+ addl 0(%rdi),%eax
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb .Lloop
+
+ movq 64+24(%rsp),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.size sha256_block_data_order,.-sha256_block_data_order
+.align 64
+.type K256,@object
+K256:
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.type sha256_block_data_order_ssse3,@function
+.align 64
+sha256_block_data_order_ssse3:
+.Lssse3_shortcut:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp,%r11
+ shlq $4,%rdx
+ subq $96,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+.Lprologue_ssse3:
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+
+
+ jmp .Lloop_ssse3
+.align 16
+.Lloop_ssse3:
+ movdqa K256+512(%rip),%xmm7
+ movdqu 0(%rsi),%xmm0
+ movdqu 16(%rsi),%xmm1
+ movdqu 32(%rsi),%xmm2
+ movdqu 48(%rsi),%xmm3
+.byte 102,15,56,0,199
+ leaq K256(%rip),%rbp
+.byte 102,15,56,0,207
+ movdqa 0(%rbp),%xmm4
+.byte 102,15,56,0,215
+ movdqa 32(%rbp),%xmm5
+ paddd %xmm0,%xmm4
+ movdqa 64(%rbp),%xmm6
+.byte 102,15,56,0,223
+ movdqa 96(%rbp),%xmm7
+ paddd %xmm1,%xmm5
+ paddd %xmm2,%xmm6
+ paddd %xmm3,%xmm7
+ movdqa %xmm4,0(%rsp)
+ movl %eax,%r14d
+ movdqa %xmm5,16(%rsp)
+ movl %ebx,%edi
+ movdqa %xmm6,32(%rsp)
+ xorl %ecx,%edi
+ movdqa %xmm7,48(%rsp)
+ movl %r8d,%r13d
+ jmp .Lssse3_00_47
+
+.align 16
+.Lssse3_00_47:
+ subq $-32*4,%rbp
+ rorl $14,%r13d
+ movdqa %xmm1,%xmm4
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ movdqa %xmm3,%xmm7
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+.byte 102,15,58,15,224,4
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+.byte 102,15,58,15,250,4
+ addl 0(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ paddd %xmm7,%xmm0
+ rorl $2,%r14d
+ addl %r11d,%edx
+ psrld $7,%xmm6
+ addl %edi,%r11d
+ movl %edx,%r13d
+ pshufd $250,%xmm3,%xmm7
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %r11d,%r14d
+ pxor %xmm5,%xmm4
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ pslld $11,%xmm5
+ addl 4(%rsp),%r10d
+ movl %r11d,%edi
+ pxor %xmm6,%xmm4
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ paddd %xmm4,%xmm0
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ psrlq $17,%xmm6
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %ecx,%r13d
+ addl 8(%rsp),%r9d
+ movl %r10d,%r15d
+ psrldq $8,%xmm7
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ paddd %xmm7,%xmm0
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ pshufd $80,%xmm0,%xmm7
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ movdqa %xmm7,%xmm6
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ psrld $10,%xmm7
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ psrlq $2,%xmm6
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 12(%rsp),%r8d
+ pxor %xmm6,%xmm7
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ movdqa 0(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ paddd %xmm7,%xmm0
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ paddd %xmm0,%xmm6
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ movdqa %xmm6,0(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm2,%xmm4
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ movdqa %xmm0,%xmm7
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+.byte 102,15,58,15,225,4
+ andl %eax,%r12d
+ xorl %eax,%r13d
+.byte 102,15,58,15,251,4
+ addl 16(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ paddd %xmm7,%xmm1
+ rorl $2,%r14d
+ addl %edx,%r11d
+ psrld $7,%xmm6
+ addl %edi,%edx
+ movl %r11d,%r13d
+ pshufd $250,%xmm0,%xmm7
+ addl %edx,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%edx
+ movl %eax,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %edx,%r14d
+ pxor %xmm5,%xmm4
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ pslld $11,%xmm5
+ addl 20(%rsp),%ecx
+ movl %edx,%edi
+ pxor %xmm6,%xmm4
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ paddd %xmm4,%xmm1
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ psrlq $17,%xmm6
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %r10d,%r13d
+ addl 24(%rsp),%ebx
+ movl %ecx,%r15d
+ psrldq $8,%xmm7
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ paddd %xmm7,%xmm1
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ pshufd $80,%xmm1,%xmm7
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ movdqa %xmm7,%xmm6
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ psrld $10,%xmm7
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ psrlq $2,%xmm6
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 28(%rsp),%eax
+ pxor %xmm6,%xmm7
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ movdqa 32(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ paddd %xmm7,%xmm1
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ paddd %xmm1,%xmm6
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movdqa %xmm6,16(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm3,%xmm4
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ movdqa %xmm1,%xmm7
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+.byte 102,15,58,15,226,4
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+.byte 102,15,58,15,248,4
+ addl 32(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ paddd %xmm7,%xmm2
+ rorl $2,%r14d
+ addl %r11d,%edx
+ psrld $7,%xmm6
+ addl %edi,%r11d
+ movl %edx,%r13d
+ pshufd $250,%xmm1,%xmm7
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %r11d,%r14d
+ pxor %xmm5,%xmm4
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ pslld $11,%xmm5
+ addl 36(%rsp),%r10d
+ movl %r11d,%edi
+ pxor %xmm6,%xmm4
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ paddd %xmm4,%xmm2
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ psrlq $17,%xmm6
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %ecx,%r13d
+ addl 40(%rsp),%r9d
+ movl %r10d,%r15d
+ psrldq $8,%xmm7
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ paddd %xmm7,%xmm2
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ pshufd $80,%xmm2,%xmm7
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ movdqa %xmm7,%xmm6
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ psrld $10,%xmm7
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ psrlq $2,%xmm6
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 44(%rsp),%r8d
+ pxor %xmm6,%xmm7
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ movdqa 64(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ paddd %xmm7,%xmm2
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ paddd %xmm2,%xmm6
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ movdqa %xmm6,32(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm0,%xmm4
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ movdqa %xmm2,%xmm7
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+.byte 102,15,58,15,227,4
+ andl %eax,%r12d
+ xorl %eax,%r13d
+.byte 102,15,58,15,249,4
+ addl 48(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ paddd %xmm7,%xmm3
+ rorl $2,%r14d
+ addl %edx,%r11d
+ psrld $7,%xmm6
+ addl %edi,%edx
+ movl %r11d,%r13d
+ pshufd $250,%xmm2,%xmm7
+ addl %edx,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%edx
+ movl %eax,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %edx,%r14d
+ pxor %xmm5,%xmm4
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ pslld $11,%xmm5
+ addl 52(%rsp),%ecx
+ movl %edx,%edi
+ pxor %xmm6,%xmm4
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ paddd %xmm4,%xmm3
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ psrlq $17,%xmm6
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %r10d,%r13d
+ addl 56(%rsp),%ebx
+ movl %ecx,%r15d
+ psrldq $8,%xmm7
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ paddd %xmm7,%xmm3
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ pshufd $80,%xmm3,%xmm7
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ movdqa %xmm7,%xmm6
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ psrld $10,%xmm7
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ psrlq $2,%xmm6
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 60(%rsp),%eax
+ pxor %xmm6,%xmm7
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ movdqa 96(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ paddd %xmm7,%xmm3
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ paddd %xmm3,%xmm6
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movdqa %xmm6,48(%rsp)
+ cmpb $0,131(%rbp)
+ jne .Lssse3_00_47
+ rorl $14,%r13d
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+ addl 0(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ rorl $2,%r14d
+ addl %r11d,%edx
+ addl %edi,%r11d
+ movl %edx,%r13d
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ xorl %r11d,%r14d
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ addl 4(%rsp),%r10d
+ movl %r11d,%edi
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ xorl %ecx,%r13d
+ addl 8(%rsp),%r9d
+ movl %r10d,%r15d
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 12(%rsp),%r8d
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+ andl %eax,%r12d
+ xorl %eax,%r13d
+ addl 16(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ rorl $2,%r14d
+ addl %edx,%r11d
+ addl %edi,%edx
+ movl %r11d,%r13d
+ addl %edx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%edx
+ movl %eax,%r12d
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ xorl %edx,%r14d
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ addl 20(%rsp),%ecx
+ movl %edx,%edi
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ xorl %r10d,%r13d
+ addl 24(%rsp),%ebx
+ movl %ecx,%r15d
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 28(%rsp),%eax
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ rorl $14,%r13d
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+ addl 32(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ rorl $2,%r14d
+ addl %r11d,%edx
+ addl %edi,%r11d
+ movl %edx,%r13d
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ xorl %r11d,%r14d
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ addl 36(%rsp),%r10d
+ movl %r11d,%edi
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ xorl %ecx,%r13d
+ addl 40(%rsp),%r9d
+ movl %r10d,%r15d
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 44(%rsp),%r8d
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+ andl %eax,%r12d
+ xorl %eax,%r13d
+ addl 48(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ rorl $2,%r14d
+ addl %edx,%r11d
+ addl %edi,%edx
+ movl %r11d,%r13d
+ addl %edx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%edx
+ movl %eax,%r12d
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ xorl %edx,%r14d
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ addl 52(%rsp),%ecx
+ movl %edx,%edi
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ xorl %r10d,%r13d
+ addl 56(%rsp),%ebx
+ movl %ecx,%r15d
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 60(%rsp),%eax
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movq 64+0(%rsp),%rdi
+ movl %r14d,%eax
+
+ addl 0(%rdi),%eax
+ leaq 64(%rsi),%rsi
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb .Lloop_ssse3
+
+ movq 64+24(%rsp),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Lepilogue_ssse3:
+ .byte 0xf3,0xc3
+.size sha256_block_data_order_ssse3,.-sha256_block_data_order_ssse3
+
+.section .note.GNU-stack,"",%progbits
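For orientation: both sha256_block_data_order above and the sha256_block_data_order_ssse3 variant implement the FIPS 180-4 SHA-256 compression function. The ror $14/$5/$6 and ror $9/$11/$2 chains compose into the Sigma1(e) and Sigma0(a) rotations, the ror/shr pairs applied to the stacked message words compute the sigma0/sigma1 schedule functions, K256 holds the 64 round constants walked by %rbp, and the trailing .long rows are the pshufb masks used to byte-swap the input words. As a reference sketch only (plain C, not part of the patch), one round of the scalar loop corresponds to:

/* Reference only -- not part of the patch.  One round of the scalar loop
 * above, in plain C with FIPS 180-4 names.  Kt is the constant fetched
 * through %rbp from K256, Wt is the message-schedule word kept at (%rsp),
 * and s[0..7] are a..h (held in %eax,%ebx,%ecx,%edx,%r8d-%r11d, whose
 * roles rotate as the rounds unroll). */
#include <stdint.h>

#define ROR32(x, n) (((x) >> (n)) | ((x) << (32 - (n))))

static inline void
sha256_round(uint32_t s[8], uint32_t Kt, uint32_t Wt)
{
	uint32_t a = s[0], b = s[1], c = s[2], d = s[3];
	uint32_t e = s[4], f = s[5], g = s[6], h = s[7];

	/* ror $14/$5/$6 on %r13d composes into Sigma1(e) = ROR6^ROR11^ROR25;
	 * ror $9/$11/$2 on %r14d composes into Sigma0(a) = ROR2^ROR13^ROR22. */
	uint32_t S1 = ROR32(e, 6) ^ ROR32(e, 11) ^ ROR32(e, 25);
	uint32_t S0 = ROR32(a, 2) ^ ROR32(a, 13) ^ ROR32(a, 22);
	uint32_t ch = (e & f) ^ (~e & g);
	uint32_t maj = (a & b) ^ (a & c) ^ (b & c);

	uint32_t T1 = h + S1 + ch + Kt + Wt;
	uint32_t T2 = S0 + maj;

	s[7] = g; s[6] = f; s[5] = e; s[4] = d + T1;
	s[3] = c; s[2] = b; s[1] = a; s[0] = T1 + T2;
}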
diff --git a/lib/accelerated/x86/files.mk b/lib/accelerated/x86/files.mk
new file mode 100644
index 0000000000..c22d758387
--- /dev/null
+++ b/lib/accelerated/x86/files.mk
@@ -0,0 +1,6 @@
+X86_FILES_ELF=elf/aesni-x86.s elf/cpuid-x86.s elf/e_padlock-x86.s elf/sha1-ssse3-x86.s elf/sha256-ssse3-x86.s elf/sha512-ssse3-x86.s
+X86_FILES_COFF=coff/aesni-x86.s coff/cpuid-x86.s coff/e_padlock-x86.s coff/sha1-ssse3-x86.s coff/sha256-ssse3-x86.s coff/sha512-ssse3-x86.s
+X86_FILES_MACOSX=macosx/aesni-x86.s macosx/cpuid-x86.s macosx/e_padlock-x86.s macosx/sha1-ssse3-x86.s macosx/sha256-ssse3-x86.s macosx/sha512-ssse3-x86.s
+X86_64_FILES_ELF=elf/aesni-x86_64.s elf/cpuid-x86_64.s elf/e_padlock-x86_64.s elf/ghash-x86_64.s elf/sha1-ssse3-x86_64.s elf/sha512-ssse3-x86_64.s
+X86_64_FILES_COFF=coff/aesni-x86_64.s coff/cpuid-x86_64.s coff/e_padlock-x86_64.s coff/ghash-x86_64.s coff/sha1-ssse3-x86_64.s coff/sha512-ssse3-x86_64.s
+X86_64_FILES_MACOSX=macosx/aesni-x86_64.s macosx/cpuid-x86_64.s macosx/e_padlock-x86_64.s macosx/ghash-x86_64.s macosx/sha1-ssse3-x86_64.s macosx/sha512-ssse3-x86_64.s
diff --git a/lib/accelerated/x86/hmac-x86.c b/lib/accelerated/x86/hmac-x86.c
new file mode 100644
index 0000000000..73b21cc25d
--- /dev/null
+++ b/lib/accelerated/x86/hmac-x86.c
@@ -0,0 +1,300 @@
+/*
+ * Copyright (C) 2008, 2010-2012 Free Software Foundation, Inc.
+ *
+ * Author: Nikos Mavrogiannopoulos
+ *
+ * This file is part of GNUTLS.
+ *
+ * The GNUTLS library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>
+ *
+ */
+
+/* This file provides the backend HMAC implementation for the
+ * SSSE3-optimized x86 SHA routines.
+ */
+
+#include <gnutls_int.h>
+#include <gnutls_hash_int.h>
+#include <gnutls_errors.h>
+#include <nettle/sha.h>
+#include <nettle/hmac.h>
+#include <nettle/macros.h>
+#include <aes-x86.h>
+#include <sha-x86.h>
+#include <algorithms.h>
+
+#ifdef HAVE_LIBNETTLE
+
+typedef void (*update_func) (void *, unsigned, const uint8_t *);
+typedef void (*digest_func) (void *, unsigned, uint8_t *);
+typedef void (*set_key_func) (void *, unsigned, const uint8_t *);
+
+struct x86_hmac_ctx {
+ union {
+ struct hmac_sha1_ctx sha1;
+ struct hmac_sha224_ctx sha224;
+ struct hmac_sha256_ctx sha256;
+#ifdef ENABLE_SHA512
+ struct hmac_sha384_ctx sha384;
+ struct hmac_sha512_ctx sha512;
+#endif
+ } ctx;
+
+ void *ctx_ptr;
+ gnutls_mac_algorithm_t algo;
+ size_t length;
+ update_func update;
+ digest_func digest;
+ set_key_func setkey;
+};
+
+static void
+x86_hmac_sha1_set_key(struct hmac_sha1_ctx *ctx,
+ unsigned key_length, const uint8_t * key)
+{
+ HMAC_SET_KEY(ctx, &x86_sha1, key_length, key);
+}
+
+static void
+x86_hmac_sha1_update(struct hmac_sha1_ctx *ctx,
+ unsigned length, const uint8_t * data)
+{
+ x86_sha1_update(&ctx->state, length, data);
+}
+
+static void
+x86_hmac_sha1_digest(struct hmac_sha1_ctx *ctx,
+ unsigned length, uint8_t * digest)
+{
+ HMAC_DIGEST(ctx, &x86_sha1, length, digest);
+}
+
+static void
+x86_hmac_sha256_set_key(struct hmac_sha256_ctx *ctx,
+ unsigned key_length, const uint8_t * key)
+{
+ HMAC_SET_KEY(ctx, &x86_sha256, key_length, key);
+}
+
+static void
+x86_hmac_sha256_update(struct hmac_sha256_ctx *ctx,
+ unsigned length, const uint8_t * data)
+{
+ x86_sha256_update(&ctx->state, length, data);
+}
+
+static void
+x86_hmac_sha256_digest(struct hmac_sha256_ctx *ctx,
+ unsigned length, uint8_t * digest)
+{
+ HMAC_DIGEST(ctx, &x86_sha256, length, digest);
+}
+
+static void
+x86_hmac_sha224_set_key(struct hmac_sha224_ctx *ctx,
+ unsigned key_length, const uint8_t * key)
+{
+ HMAC_SET_KEY(ctx, &x86_sha224, key_length, key);
+}
+
+static void
+x86_hmac_sha224_digest(struct hmac_sha224_ctx *ctx,
+ unsigned length, uint8_t * digest)
+{
+ HMAC_DIGEST(ctx, &x86_sha224, length, digest);
+}
+
+#ifdef ENABLE_SHA512
+static void
+x86_hmac_sha384_set_key(struct hmac_sha384_ctx *ctx,
+ unsigned key_length, const uint8_t * key)
+{
+ HMAC_SET_KEY(ctx, &x86_sha384, key_length, key);
+}
+
+static void
+x86_hmac_sha384_digest(struct hmac_sha384_ctx *ctx,
+ unsigned length, uint8_t * digest)
+{
+ HMAC_DIGEST(ctx, &x86_sha384, length, digest);
+}
+
+static void
+x86_hmac_sha512_set_key(struct hmac_sha512_ctx *ctx,
+ unsigned key_length, const uint8_t * key)
+{
+ HMAC_SET_KEY(ctx, &x86_sha512, key_length, key);
+}
+
+static void
+x86_hmac_sha512_update(struct hmac_sha512_ctx *ctx,
+ unsigned length, const uint8_t * data)
+{
+ x86_sha512_update(&ctx->state, length, data);
+}
+
+static void
+x86_hmac_sha512_digest(struct hmac_sha512_ctx *ctx,
+ unsigned length, uint8_t * digest)
+{
+ HMAC_DIGEST(ctx, &x86_sha512, length, digest);
+}
+#endif
+
+static int
+_hmac_ctx_init(gnutls_mac_algorithm_t algo, struct x86_hmac_ctx *ctx)
+{
+ switch (algo) {
+ case GNUTLS_MAC_SHA1:
+ ctx->update = (update_func) x86_hmac_sha1_update;
+ ctx->digest = (digest_func) x86_hmac_sha1_digest;
+ ctx->setkey = (set_key_func) x86_hmac_sha1_set_key;
+ ctx->ctx_ptr = &ctx->ctx.sha1;
+ ctx->length = SHA1_DIGEST_SIZE;
+ break;
+ case GNUTLS_MAC_SHA224:
+ ctx->update = (update_func) x86_hmac_sha256_update;
+ ctx->digest = (digest_func) x86_hmac_sha224_digest;
+ ctx->setkey = (set_key_func) x86_hmac_sha224_set_key;
+ ctx->ctx_ptr = &ctx->ctx.sha224;
+ ctx->length = SHA224_DIGEST_SIZE;
+ break;
+ case GNUTLS_MAC_SHA256:
+ ctx->update = (update_func) x86_hmac_sha256_update;
+ ctx->digest = (digest_func) x86_hmac_sha256_digest;
+ ctx->setkey = (set_key_func) x86_hmac_sha256_set_key;
+ ctx->ctx_ptr = &ctx->ctx.sha256;
+ ctx->length = SHA256_DIGEST_SIZE;
+ break;
+#ifdef ENABLE_SHA512
+ case GNUTLS_MAC_SHA384:
+ ctx->update = (update_func) x86_hmac_sha512_update;
+ ctx->digest = (digest_func) x86_hmac_sha384_digest;
+ ctx->setkey = (set_key_func) x86_hmac_sha384_set_key;
+ ctx->ctx_ptr = &ctx->ctx.sha384;
+ ctx->length = SHA384_DIGEST_SIZE;
+ break;
+ case GNUTLS_MAC_SHA512:
+ ctx->update = (update_func) x86_hmac_sha512_update;
+ ctx->digest = (digest_func) x86_hmac_sha512_digest;
+ ctx->setkey = (set_key_func) x86_hmac_sha512_set_key;
+ ctx->ctx_ptr = &ctx->ctx.sha512;
+ ctx->length = SHA512_DIGEST_SIZE;
+ break;
+#endif
+ default:
+ gnutls_assert();
+ return GNUTLS_E_INVALID_REQUEST;
+ }
+
+ return 0;
+}
+
+
+static int wrap_x86_hmac_init(gnutls_mac_algorithm_t algo, void **_ctx)
+{
+ struct x86_hmac_ctx *ctx;
+ int ret;
+
+ ctx = gnutls_calloc(1, sizeof(struct x86_hmac_ctx));
+ if (ctx == NULL) {
+ gnutls_assert();
+ return GNUTLS_E_MEMORY_ERROR;
+ }
+
+ ctx->algo = algo;
+
+ ret = _hmac_ctx_init(algo, ctx);
+ if (ret < 0)
+ return gnutls_assert_val(ret);
+
+ *_ctx = ctx;
+
+ return 0;
+}
+
+static int
+wrap_x86_hmac_setkey(void *_ctx, const void *key, size_t keylen)
+{
+ struct x86_hmac_ctx *ctx = _ctx;
+
+ ctx->setkey(ctx->ctx_ptr, keylen, key);
+
+ return GNUTLS_E_SUCCESS;
+}
+
+static int
+wrap_x86_hmac_update(void *_ctx, const void *text, size_t textsize)
+{
+ struct x86_hmac_ctx *ctx = _ctx;
+
+ ctx->update(ctx->ctx_ptr, textsize, text);
+
+ return GNUTLS_E_SUCCESS;
+}
+
+static int
+wrap_x86_hmac_output(void *src_ctx, void *digest, size_t digestsize)
+{
+ struct x86_hmac_ctx *ctx;
+ ctx = src_ctx;
+
+ if (digestsize < ctx->length) {
+ gnutls_assert();
+ return GNUTLS_E_SHORT_MEMORY_BUFFER;
+ }
+
+ ctx->digest(ctx->ctx_ptr, digestsize, digest);
+
+ return 0;
+}
+
+static void wrap_x86_hmac_deinit(void *hd)
+{
+ gnutls_free(hd);
+}
+
+static int wrap_x86_hmac_fast(gnutls_mac_algorithm_t algo,
+ const void *nonce, size_t nonce_size,
+ const void *key, size_t key_size,
+ const void *text, size_t text_size,
+ void *digest)
+{
+ struct x86_hmac_ctx ctx;
+ int ret;
+
+ ret = _hmac_ctx_init(algo, &ctx);
+ if (ret < 0)
+ return gnutls_assert_val(ret);
+
+ ctx.setkey(&ctx, key_size, key);
+ ctx.update(&ctx, text_size, text);
+ ctx.digest(&ctx, ctx.length, digest);
+
+ zeroize_temp_key(&ctx, sizeof(ctx));
+
+ return 0;
+}
+
+const gnutls_crypto_mac_st hmac_sha_x86_struct = {
+ .init = wrap_x86_hmac_init,
+ .setkey = wrap_x86_hmac_setkey,
+ .setnonce = NULL,
+ .hash = wrap_x86_hmac_update,
+ .output = wrap_x86_hmac_output,
+ .deinit = wrap_x86_hmac_deinit,
+ .fast = wrap_x86_hmac_fast,
+};
+
+#endif /* HAVE_LIBNETTLE */
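The hmac_sha_x86_struct vtable above is what plugs these wrappers into GnuTLS's generic MAC layer. As a hypothetical sketch only (the demo function and the direct use of the struct are illustrative, not part of the patch, and assume the declarations from this file are in scope), a caller would drive it like this to compute an HMAC-SHA256 tag:

/* Illustration only: exercising the vtable the way the crypto layer would. */
static int hmac_sha256_demo(const uint8_t *key, size_t keylen,
			    const uint8_t *msg, size_t msglen,
			    uint8_t out[32])
{
	void *h;
	int ret;

	ret = hmac_sha_x86_struct.init(GNUTLS_MAC_SHA256, &h);
	if (ret < 0)
		return ret;

	hmac_sha_x86_struct.setkey(h, key, keylen);	/* derive inner/outer pads */
	hmac_sha_x86_struct.hash(h, msg, msglen);	/* SSSE3 sha256 update */
	ret = hmac_sha_x86_struct.output(h, out, 32);	/* finalize 32-byte tag */
	hmac_sha_x86_struct.deinit(h);

	return ret;
}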
diff --git a/lib/accelerated/x86/macosx/appro-aes-x86-macosx.s b/lib/accelerated/x86/macosx/aesni-x86.s
index 92313599bd..4cb2d98af9 100644
--- a/lib/accelerated/x86/macosx/appro-aes-x86-macosx.s
+++ b/lib/accelerated/x86/macosx/aesni-x86.s
@@ -1,4 +1,3 @@
-/*
# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
# All rights reserved.
#
@@ -38,7 +37,6 @@
#
# *** This file is auto-generated ***
#
-*/
.file "devel/perlasm/aesni-x86.s"
.text
.globl _aesni_encrypt
@@ -2146,3 +2144,5 @@ L100dec_key_ret:
.byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
.byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
.byte 115,108,46,111,114,103,62,0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/appro-aes-x86-64-macosx.s b/lib/accelerated/x86/macosx/aesni-x86_64.s
index e2cfa17951..85f26e05d4 100644
--- a/lib/accelerated/x86/macosx/appro-aes-x86-64-macosx.s
+++ b/lib/accelerated/x86/macosx/aesni-x86_64.s
@@ -1,4 +1,3 @@
-/*
# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
# All rights reserved.
#
@@ -38,7 +37,6 @@
#
# *** This file is auto-generated ***
#
-*/
.text
.globl _aesni_encrypt
@@ -2973,3 +2971,5 @@ L$increment1:
.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.p2align 6
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/cpuid-x86-64-macosx.s b/lib/accelerated/x86/macosx/cpuid-x86-64-macosx.s
deleted file mode 100644
index 26adc5b445..0000000000
--- a/lib/accelerated/x86/macosx/cpuid-x86-64-macosx.s
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
-# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-#
-# * Redistributions of source code must retain copyright notices,
-# this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following
-# disclaimer in the documentation and/or other materials
-# provided with the distribution.
-#
-# * Neither the name of the Andy Polyakov nor the names of its
-# copyright holder and contributors may be used to endorse or
-# promote products derived from this software without specific
-# prior written permission.
-#
-# ALTERNATIVELY, provided that this notice is retained in full, this
-# product may be distributed under the terms of the GNU General Public
-# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
-# those given above.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
-# *** This file is auto-generated ***
-#
-*/
-.text
-.globl _gnutls_cpuid
-
-.p2align 4
-_gnutls_cpuid:
- pushq %rbp
- movq %rsp,%rbp
- pushq %rbx
- movl %edi,-12(%rbp)
- movq %rsi,-24(%rbp)
- movq %rdx,-32(%rbp)
- movq %rcx,-40(%rbp)
- movq %r8,-48(%rbp)
- movl -12(%rbp),%eax
- movl %eax,-60(%rbp)
- movl -60(%rbp),%eax
- cpuid
- movl %edx,-56(%rbp)
- movl %ecx,%esi
- movl %eax,-52(%rbp)
- movq -24(%rbp),%rax
- movl -52(%rbp),%edx
- movl %edx,(%rax)
- movq -32(%rbp),%rax
- movl %ebx,(%rax)
- movq -40(%rbp),%rax
- movl %esi,(%rax)
- movq -48(%rbp),%rax
- movl -56(%rbp),%ecx
- movl %ecx,(%rax)
- popq %rbx
- leave
- .byte 0xf3,0xc3
-
diff --git a/lib/accelerated/x86/macosx/cpuid-x86-macosx.s b/lib/accelerated/x86/macosx/cpuid-x86-macosx.s
deleted file mode 100644
index 0078f3d06f..0000000000
--- a/lib/accelerated/x86/macosx/cpuid-x86-macosx.s
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
-# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-#
-# * Redistributions of source code must retain copyright notices,
-# this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following
-# disclaimer in the documentation and/or other materials
-# provided with the distribution.
-#
-# * Neither the name of the Andy Polyakov nor the names of its
-# copyright holder and contributors may be used to endorse or
-# promote products derived from this software without specific
-# prior written permission.
-#
-# ALTERNATIVELY, provided that this notice is retained in full, this
-# product may be distributed under the terms of the GNU General Public
-# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
-# those given above.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
-# *** This file is auto-generated ***
-#
-*/
-.file "devel/perlasm/cpuid-x86.s"
-.text
-.globl _gnutls_cpuid
-.align 4
-_gnutls_cpuid:
-L_gnutls_cpuid_begin:
- pushl %ebp
- movl %esp,%ebp
- subl $12,%esp
- movl %ebx,(%esp)
- movl 8(%ebp),%eax
- movl %esi,4(%esp)
- movl %edi,8(%esp)
- pushl %ebx
- .byte 0x0f,0xa2
- movl %ebx,%edi
- popl %ebx
- movl %edx,%esi
- movl 12(%ebp),%edx
- movl %eax,(%edx)
- movl 16(%ebp),%eax
- movl %edi,(%eax)
- movl 20(%ebp),%eax
- movl %ecx,(%eax)
- movl 24(%ebp),%eax
- movl %esi,(%eax)
- movl (%esp),%ebx
- movl 4(%esp),%esi
- movl 8(%esp),%edi
- movl %ebp,%esp
- popl %ebp
- ret
-.globl _gnutls_have_cpuid
-.align 4
-_gnutls_have_cpuid:
-L_gnutls_have_cpuid_begin:
- pushfl
- popl %eax
- orl $2097152,%eax
- pushl %eax
- popfl
- pushfl
- popl %eax
- andl $2097152,%eax
- ret
-.byte 67,80,85,73,68,32,102,111,114,32,120,56,54,0
diff --git a/lib/accelerated/x86/macosx/cpuid-x86.s b/lib/accelerated/x86/macosx/cpuid-x86.s
new file mode 100644
index 0000000000..978b232e74
--- /dev/null
+++ b/lib/accelerated/x86/macosx/cpuid-x86.s
@@ -0,0 +1,70 @@
+#
+# Copyright (C) 2011-2012 Free Software Foundation, Inc.
+#
+# Author: Nikos Mavrogiannopoulos
+#
+# This file is part of GnuTLS.
+#
+# The GnuTLS library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public License
+# as published by the Free Software Foundation; either version 2.1 of
+# the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# *** This file is auto-generated ***
+#
+.file "devel/perlasm/cpuid-x86.s"
+.text
+.globl _gnutls_cpuid
+.align 4
+_gnutls_cpuid:
+L_gnutls_cpuid_begin:
+ pushl %ebp
+ movl %esp,%ebp
+ subl $12,%esp
+ movl %ebx,(%esp)
+ movl 8(%ebp),%eax
+ movl %esi,4(%esp)
+ movl %edi,8(%esp)
+ pushl %ebx
+ .byte 0x0f,0xa2
+ movl %ebx,%edi
+ popl %ebx
+ movl %edx,%esi
+ movl 12(%ebp),%edx
+ movl %eax,(%edx)
+ movl 16(%ebp),%eax
+ movl %edi,(%eax)
+ movl 20(%ebp),%eax
+ movl %ecx,(%eax)
+ movl 24(%ebp),%eax
+ movl %esi,(%eax)
+ movl (%esp),%ebx
+ movl 4(%esp),%esi
+ movl 8(%esp),%edi
+ movl %ebp,%esp
+ popl %ebp
+ ret
+.globl _gnutls_have_cpuid
+.align 4
+_gnutls_have_cpuid:
+L_gnutls_have_cpuid_begin:
+ pushfl
+ popl %eax
+ orl $2097152,%eax
+ pushl %eax
+ popfl
+ pushfl
+ popl %eax
+ andl $2097152,%eax
+ ret
+.byte 67,80,85,73,68,32,102,111,114,32,120,56,54,0
+
+.section .note.GNU-stack,"",%progbits
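From C, the two routines above are a plain CPUID wrapper plus a CPUID-availability probe. The prototypes below are assumptions inferred from the assembly (the leading underscore is only the Mach-O symbol decoration for the C names, and GnuTLS declares these in its internal x86 headers); the demo main() is purely illustrative. Bit 9 of ECX from leaf 1 is the architected SSSE3 feature flag, which is what an SSSE3 capability check would examine before enabling the new SHA code paths:

/* Hypothetical illustration, not part of the patch. */
#include <stdio.h>

extern unsigned int gnutls_have_cpuid(void);
extern void gnutls_cpuid(unsigned int func, unsigned int *ax,
			 unsigned int *bx, unsigned int *cx,
			 unsigned int *dx);

int main(void)
{
	unsigned int a, b, c, d;

	if (!gnutls_have_cpuid())	/* 32-bit-only probe of the EFLAGS.ID bit */
		return 1;

	gnutls_cpuid(1, &a, &b, &c, &d);
	/* CPUID leaf 1: ECX bit 9 is the architected SSSE3 feature flag. */
	printf("SSSE3: %s\n", (c & (1u << 9)) ? "yes" : "no");
	return 0;
}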
diff --git a/lib/accelerated/x86/macosx/cpuid-x86_64.s b/lib/accelerated/x86/macosx/cpuid-x86_64.s
new file mode 100644
index 0000000000..cf8fea99a2
--- /dev/null
+++ b/lib/accelerated/x86/macosx/cpuid-x86_64.s
@@ -0,0 +1,58 @@
+#
+# Copyright (C) 2011-2012 Free Software Foundation, Inc.
+#
+# Author: Nikos Mavrogiannopoulos
+#
+# This file is part of GnuTLS.
+#
+# The GnuTLS library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public License
+# as published by the Free Software Foundation; either version 2.1 of
+# the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# *** This file is auto-generated ***
+#
+.text
+.globl _gnutls_cpuid
+
+.p2align 4
+_gnutls_cpuid:
+ pushq %rbp
+ movq %rsp,%rbp
+ pushq %rbx
+ movl %edi,-12(%rbp)
+ movq %rsi,-24(%rbp)
+ movq %rdx,-32(%rbp)
+ movq %rcx,-40(%rbp)
+ movq %r8,-48(%rbp)
+ movl -12(%rbp),%eax
+ movl %eax,-60(%rbp)
+ movl -60(%rbp),%eax
+ cpuid
+ movl %edx,-56(%rbp)
+ movl %ecx,%esi
+ movl %eax,-52(%rbp)
+ movq -24(%rbp),%rax
+ movl -52(%rbp),%edx
+ movl %edx,(%rax)
+ movq -32(%rbp),%rax
+ movl %ebx,(%rax)
+ movq -40(%rbp),%rax
+ movl %esi,(%rax)
+ movq -48(%rbp),%rax
+ movl -56(%rbp),%ecx
+ movl %ecx,(%rax)
+ popq %rbx
+ leave
+ .byte 0xf3,0xc3
+
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/padlock-x86-macosx.s b/lib/accelerated/x86/macosx/e_padlock-x86.s
index 1a2fa9246b..4bdadb99a1 100644
--- a/lib/accelerated/x86/macosx/padlock-x86-macosx.s
+++ b/lib/accelerated/x86/macosx/e_padlock-x86.s
@@ -1,4 +1,3 @@
-/*
# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
# All rights reserved.
#
@@ -38,7 +37,6 @@
#
# *** This file is auto-generated ***
#
-*/
.file "devel/perlasm/e_padlock-x86.s"
.text
.globl _padlock_capability
@@ -1034,3 +1032,5 @@ L_padlock_sha512_blocks_begin:
.align 2,0x90
Lpadlock_saved_context:
.long 0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/padlock-x86-64-macosx.s b/lib/accelerated/x86/macosx/e_padlock-x86_64.s
index 1327e82172..29723b3714 100644
--- a/lib/accelerated/x86/macosx/padlock-x86-64-macosx.s
+++ b/lib/accelerated/x86/macosx/e_padlock-x86_64.s
@@ -1,4 +1,3 @@
-/*
# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
# All rights reserved.
#
@@ -38,7 +37,6 @@
#
# *** This file is auto-generated ***
#
-*/
.text
.globl _padlock_capability
@@ -1065,3 +1063,5 @@ L$ctr32_abort:
.p2align 3
L$padlock_saved_context:
.quad 0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/appro-aes-gcm-x86-64-macosx.s b/lib/accelerated/x86/macosx/ghash-x86_64.s
index eac88aeba1..2f5ac653ab 100644
--- a/lib/accelerated/x86/macosx/appro-aes-gcm-x86-64-macosx.s
+++ b/lib/accelerated/x86/macosx/ghash-x86_64.s
@@ -1,4 +1,3 @@
-/*
# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
# All rights reserved.
#
@@ -38,7 +37,6 @@
#
# *** This file is auto-generated ***
#
-*/
.text
.globl _gcm_gmult_4bit
@@ -1348,3 +1346,5 @@ L$rem_8bit:
.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.p2align 6
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/openssl-cpuid-x86.s b/lib/accelerated/x86/macosx/openssl-cpuid-x86.s
new file mode 100644
index 0000000000..ba4f09c67b
--- /dev/null
+++ b/lib/accelerated/x86/macosx/openssl-cpuid-x86.s
@@ -0,0 +1,399 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "x86cpuid.s"
+.text
+.globl _OPENSSL_ia32_cpuid
+.align 4
+_OPENSSL_ia32_cpuid:
+L_OPENSSL_ia32_cpuid_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ xorl %edx,%edx
+ pushfl
+ popl %eax
+ movl %eax,%ecx
+ xorl $2097152,%eax
+ pushl %eax
+ popfl
+ pushfl
+ popl %eax
+ xorl %eax,%ecx
+ xorl %eax,%eax
+ btl $21,%ecx
+ jnc L000nocpuid
+ movl 20(%esp),%esi
+ movl %eax,8(%esi)
+ .byte 0x0f,0xa2
+ movl %eax,%edi
+ xorl %eax,%eax
+ cmpl $1970169159,%ebx
+ setne %al
+ movl %eax,%ebp
+ cmpl $1231384169,%edx
+ setne %al
+ orl %eax,%ebp
+ cmpl $1818588270,%ecx
+ setne %al
+ orl %eax,%ebp
+ jz L001intel
+ cmpl $1752462657,%ebx
+ setne %al
+ movl %eax,%esi
+ cmpl $1769238117,%edx
+ setne %al
+ orl %eax,%esi
+ cmpl $1145913699,%ecx
+ setne %al
+ orl %eax,%esi
+ jnz L001intel
+ movl $2147483648,%eax
+ .byte 0x0f,0xa2
+ cmpl $2147483649,%eax
+ jb L001intel
+ movl %eax,%esi
+ movl $2147483649,%eax
+ .byte 0x0f,0xa2
+ orl %ecx,%ebp
+ andl $2049,%ebp
+ cmpl $2147483656,%esi
+ jb L001intel
+ movl $2147483656,%eax
+ .byte 0x0f,0xa2
+ movzbl %cl,%esi
+ incl %esi
+ movl $1,%eax
+ xorl %ecx,%ecx
+ .byte 0x0f,0xa2
+ btl $28,%edx
+ jnc L002generic
+ shrl $16,%ebx
+ andl $255,%ebx
+ cmpl %esi,%ebx
+ ja L002generic
+ andl $4026531839,%edx
+ jmp L002generic
+L001intel:
+ cmpl $7,%edi
+ jb L003cacheinfo
+ movl 20(%esp),%esi
+ movl $7,%eax
+ xorl %ecx,%ecx
+ .byte 0x0f,0xa2
+ movl %ebx,8(%esi)
+L003cacheinfo:
+ cmpl $4,%edi
+ movl $-1,%edi
+ jb L004nocacheinfo
+ movl $4,%eax
+ movl $0,%ecx
+ .byte 0x0f,0xa2
+ movl %eax,%edi
+ shrl $14,%edi
+ andl $4095,%edi
+L004nocacheinfo:
+ movl $1,%eax
+ xorl %ecx,%ecx
+ .byte 0x0f,0xa2
+ andl $3220176895,%edx
+ cmpl $0,%ebp
+ jne L005notintel
+ orl $1073741824,%edx
+ andb $15,%ah
+ cmpb $15,%ah
+ jne L005notintel
+ orl $1048576,%edx
+L005notintel:
+ btl $28,%edx
+ jnc L002generic
+ andl $4026531839,%edx
+ cmpl $0,%edi
+ je L002generic
+ orl $268435456,%edx
+ shrl $16,%ebx
+ cmpb $1,%bl
+ ja L002generic
+ andl $4026531839,%edx
+L002generic:
+ andl $2048,%ebp
+ andl $4294965247,%ecx
+ movl %edx,%esi
+ orl %ecx,%ebp
+ btl $27,%ecx
+ jnc L006clear_avx
+ xorl %ecx,%ecx
+.byte 15,1,208
+ andl $6,%eax
+ cmpl $6,%eax
+ je L007done
+ cmpl $2,%eax
+ je L006clear_avx
+L008clear_xmm:
+ andl $4261412861,%ebp
+ andl $4278190079,%esi
+L006clear_avx:
+ andl $4026525695,%ebp
+ movl 20(%esp),%edi
+ andl $4294967263,8(%edi)
+L007done:
+ movl %esi,%eax
+ movl %ebp,%edx
+L000nocpuid:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _OPENSSL_rdtsc
+.align 4
+_OPENSSL_rdtsc:
+L_OPENSSL_rdtsc_begin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ call L009PIC_me_up
+L009PIC_me_up:
+ popl %ecx
+ movl L__gnutls_x86_cpuid_s$non_lazy_ptr-L009PIC_me_up(%ecx),%ecx
+ btl $4,(%ecx)
+ jnc L010notsc
+ .byte 0x0f,0x31
+L010notsc:
+ ret
+.globl _OPENSSL_instrument_halt
+.align 4
+_OPENSSL_instrument_halt:
+L_OPENSSL_instrument_halt_begin:
+ call L011PIC_me_up
+L011PIC_me_up:
+ popl %ecx
+ movl L__gnutls_x86_cpuid_s$non_lazy_ptr-L011PIC_me_up(%ecx),%ecx
+ btl $4,(%ecx)
+ jnc L012nohalt
+.long 2421723150
+ andl $3,%eax
+ jnz L012nohalt
+ pushfl
+ popl %eax
+ btl $9,%eax
+ jnc L012nohalt
+ .byte 0x0f,0x31
+ pushl %edx
+ pushl %eax
+ hlt
+ .byte 0x0f,0x31
+ subl (%esp),%eax
+ sbbl 4(%esp),%edx
+ addl $8,%esp
+ ret
+L012nohalt:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ ret
+.globl _OPENSSL_far_spin
+.align 4
+_OPENSSL_far_spin:
+L_OPENSSL_far_spin_begin:
+ pushfl
+ popl %eax
+ btl $9,%eax
+ jnc L013nospin
+ movl 4(%esp),%eax
+ movl 8(%esp),%ecx
+.long 2430111262
+ xorl %eax,%eax
+ movl (%ecx),%edx
+ jmp L014spin
+.align 4,0x90
+L014spin:
+ incl %eax
+ cmpl (%ecx),%edx
+ je L014spin
+.long 529567888
+ ret
+L013nospin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ ret
+.globl _OPENSSL_wipe_cpu
+.align 4
+_OPENSSL_wipe_cpu:
+L_OPENSSL_wipe_cpu_begin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ call L015PIC_me_up
+L015PIC_me_up:
+ popl %ecx
+ movl L__gnutls_x86_cpuid_s$non_lazy_ptr-L015PIC_me_up(%ecx),%ecx
+ movl (%ecx),%ecx
+ btl $1,(%ecx)
+ jnc L016no_x87
+.long 4007259865,4007259865,4007259865,4007259865,2430851995
+L016no_x87:
+ leal 4(%esp),%eax
+ ret
+.globl _OPENSSL_atomic_add
+.align 4
+_OPENSSL_atomic_add:
+L_OPENSSL_atomic_add_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%ecx
+ pushl %ebx
+ nop
+ movl (%edx),%eax
+L017spin:
+ leal (%eax,%ecx,1),%ebx
+ nop
+.long 447811568
+ jne L017spin
+ movl %ebx,%eax
+ popl %ebx
+ ret
+.globl _OPENSSL_indirect_call
+.align 4
+_OPENSSL_indirect_call:
+L_OPENSSL_indirect_call_begin:
+ pushl %ebp
+ movl %esp,%ebp
+ subl $28,%esp
+ movl 12(%ebp),%ecx
+ movl %ecx,(%esp)
+ movl 16(%ebp),%edx
+ movl %edx,4(%esp)
+ movl 20(%ebp),%eax
+ movl %eax,8(%esp)
+ movl 24(%ebp),%eax
+ movl %eax,12(%esp)
+ movl 28(%ebp),%eax
+ movl %eax,16(%esp)
+ movl 32(%ebp),%eax
+ movl %eax,20(%esp)
+ movl 36(%ebp),%eax
+ movl %eax,24(%esp)
+ call *8(%ebp)
+ movl %ebp,%esp
+ popl %ebp
+ ret
+.globl _OPENSSL_cleanse
+.align 4
+_OPENSSL_cleanse:
+L_OPENSSL_cleanse_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%ecx
+ xorl %eax,%eax
+ cmpl $7,%ecx
+ jae L018lot
+ cmpl $0,%ecx
+ je L019ret
+L020little:
+ movb %al,(%edx)
+ subl $1,%ecx
+ leal 1(%edx),%edx
+ jnz L020little
+L019ret:
+ ret
+.align 4,0x90
+L018lot:
+ testl $3,%edx
+ jz L021aligned
+ movb %al,(%edx)
+ leal -1(%ecx),%ecx
+ leal 1(%edx),%edx
+ jmp L018lot
+L021aligned:
+ movl %eax,(%edx)
+ leal -4(%ecx),%ecx
+ testl $-4,%ecx
+ leal 4(%edx),%edx
+ jnz L021aligned
+ cmpl $0,%ecx
+ jne L020little
+ ret
+.globl _OPENSSL_instrument_bus
+.align 4
+_OPENSSL_instrument_bus:
+L_OPENSSL_instrument_bus_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl $0,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _OPENSSL_instrument_bus2
+.align 4
+_OPENSSL_instrument_bus2:
+L_OPENSSL_instrument_bus2_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl $0,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _OPENSSL_ia32_rdrand
+.align 4
+_OPENSSL_ia32_rdrand:
+L_OPENSSL_ia32_rdrand_begin:
+ movl $8,%ecx
+L022loop:
+.byte 15,199,240
+ jc L023break
+ loop L022loop
+L023break:
+ cmpl $0,%eax
+ cmovel %ecx,%eax
+ ret
+.section __IMPORT,__pointers,non_lazy_symbol_pointers
+L__gnutls_x86_cpuid_s$non_lazy_ptr:
+.indirect_symbol __gnutls_x86_cpuid_s
+.long 0
+.comm __gnutls_x86_cpuid_s,16,2
+.mod_init_func
+.align 2
+.long _OPENSSL_cpuid_setup
+
+.section .note.GNU-stack,"",%progbits
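
(For orientation: the 32-bit _OPENSSL_ia32_cpuid routine above detects the CPU vendor by comparing the EBX/EDX/ECX words returned by CPUID leaf 0 against decimal constants. 1970169159, 1231384169 and 1818588270 are the little-endian words of "GenuineIntel"; 1752462657, 1769238117 and 1145913699 spell "AuthenticAMD". The C below is only an illustrative sketch of that check, with hypothetical helper names; it is not code from this patch.)

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical CPUID wrapper; real C code would typically use
 * __get_cpuid() or compiler intrinsics instead of inline asm. */
static void cpuid(uint32_t leaf, uint32_t *a, uint32_t *b,
                  uint32_t *c, uint32_t *d)
{
    __asm__ volatile ("cpuid"
                      : "=a"(*a), "=b"(*b), "=c"(*c), "=d"(*d)
                      : "a"(leaf), "c"(0));
}

int main(void)
{
    uint32_t a, b, c, d;
    char vendor[13];

    cpuid(0, &a, &b, &c, &d);

    /* Same constants the assembly compares against:
     * 1970169159 = 0x756e6547 "Genu", 1231384169 = 0x49656e69 "ineI",
     * 1818588270 = 0x6c65746e "ntel" (EBX, EDX, ECX order). */
    memcpy(vendor + 0, &b, 4);
    memcpy(vendor + 4, &d, 4);
    memcpy(vendor + 8, &c, 4);
    vendor[12] = '\0';

    printf("max leaf %u, vendor \"%s\"\n", a, vendor);
    printf("GenuineIntel: %d\n",
           b == 1970169159u && d == 1231384169u && c == 1818588270u);
    printf("AuthenticAMD: %d\n",
           b == 1752462657u && d == 1769238117u && c == 1145913699u);
    return 0;
}
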
diff --git a/lib/accelerated/x86/macosx/openssl-cpuid-x86_64.s b/lib/accelerated/x86/macosx/openssl-cpuid-x86_64.s
new file mode 100644
index 0000000000..8e12261242
--- /dev/null
+++ b/lib/accelerated/x86/macosx/openssl-cpuid-x86_64.s
@@ -0,0 +1,365 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+
+.private_extern _OPENSSL_cpuid_setup
+.mod_init_func
+ .p2align 3
+ .quad _OPENSSL_cpuid_setup
+
+.private_extern __gnutls_x86_cpuid_s
+.comm __gnutls_x86_cpuid_s,16,2
+
+.text
+
+.globl _OPENSSL_atomic_add
+
+.p2align 4
+_OPENSSL_atomic_add:
+ movl (%rdi),%eax
+L$spin: leaq (%rsi,%rax,1),%r8
+.byte 0xf0
+ cmpxchgl %r8d,(%rdi)
+ jne L$spin
+ movl %r8d,%eax
+.byte 0x48,0x98
+ .byte 0xf3,0xc3
+
+
+.globl _OPENSSL_rdtsc
+
+.p2align 4
+_OPENSSL_rdtsc:
+ rdtsc
+ shlq $32,%rdx
+ orq %rdx,%rax
+ .byte 0xf3,0xc3
+
+
+.globl _OPENSSL_ia32_cpuid
+
+.p2align 4
+_OPENSSL_ia32_cpuid:
+ movq %rbx,%r8
+
+ xorl %eax,%eax
+ movl %eax,8(%rdi)
+ cpuid
+ movl %eax,%r11d
+
+ xorl %eax,%eax
+ cmpl $1970169159,%ebx
+ setne %al
+ movl %eax,%r9d
+ cmpl $1231384169,%edx
+ setne %al
+ orl %eax,%r9d
+ cmpl $1818588270,%ecx
+ setne %al
+ orl %eax,%r9d
+ jz L$intel
+
+ cmpl $1752462657,%ebx
+ setne %al
+ movl %eax,%r10d
+ cmpl $1769238117,%edx
+ setne %al
+ orl %eax,%r10d
+ cmpl $1145913699,%ecx
+ setne %al
+ orl %eax,%r10d
+ jnz L$intel
+
+
+ movl $2147483648,%eax
+ cpuid
+ cmpl $2147483649,%eax
+ jb L$intel
+ movl %eax,%r10d
+ movl $2147483649,%eax
+ cpuid
+ orl %ecx,%r9d
+ andl $2049,%r9d
+
+ cmpl $2147483656,%r10d
+ jb L$intel
+
+ movl $2147483656,%eax
+ cpuid
+ movzbq %cl,%r10
+ incq %r10
+
+ movl $1,%eax
+ cpuid
+ btl $28,%edx
+ jnc L$generic
+ shrl $16,%ebx
+ cmpb %r10b,%bl
+ ja L$generic
+ andl $4026531839,%edx
+ jmp L$generic
+
+L$intel:
+ cmpl $4,%r11d
+ movl $-1,%r10d
+ jb L$nocacheinfo
+
+ movl $4,%eax
+ movl $0,%ecx
+ cpuid
+ movl %eax,%r10d
+ shrl $14,%r10d
+ andl $4095,%r10d
+
+ cmpl $7,%r11d
+ jb L$nocacheinfo
+
+ movl $7,%eax
+ xorl %ecx,%ecx
+ cpuid
+ movl %ebx,8(%rdi)
+
+L$nocacheinfo:
+ movl $1,%eax
+ cpuid
+ andl $3220176895,%edx
+ cmpl $0,%r9d
+ jne L$notintel
+ orl $1073741824,%edx
+ andb $15,%ah
+ cmpb $15,%ah
+ jne L$notintel
+ orl $1048576,%edx
+L$notintel:
+ btl $28,%edx
+ jnc L$generic
+ andl $4026531839,%edx
+ cmpl $0,%r10d
+ je L$generic
+
+ orl $268435456,%edx
+ shrl $16,%ebx
+ cmpb $1,%bl
+ ja L$generic
+ andl $4026531839,%edx
+L$generic:
+ andl $2048,%r9d
+ andl $4294965247,%ecx
+ orl %ecx,%r9d
+
+ movl %edx,%r10d
+ btl $27,%r9d
+ jnc L$clear_avx
+ xorl %ecx,%ecx
+.byte 0x0f,0x01,0xd0
+ andl $6,%eax
+ cmpl $6,%eax
+ je L$done
+L$clear_avx:
+ movl $4026525695,%eax
+ andl %eax,%r9d
+ andl $4294967263,8(%rdi)
+L$done:
+ shlq $32,%r9
+ movl %r10d,%eax
+ movq %r8,%rbx
+ orq %r9,%rax
+ .byte 0xf3,0xc3
+
+
+.globl _OPENSSL_cleanse
+
+.p2align 4
+_OPENSSL_cleanse:
+ xorq %rax,%rax
+ cmpq $15,%rsi
+ jae L$ot
+ cmpq $0,%rsi
+ je L$ret
+L$ittle:
+ movb %al,(%rdi)
+ subq $1,%rsi
+ leaq 1(%rdi),%rdi
+ jnz L$ittle
+L$ret:
+ .byte 0xf3,0xc3
+.p2align 4
+L$ot:
+ testq $7,%rdi
+ jz L$aligned
+ movb %al,(%rdi)
+ leaq -1(%rsi),%rsi
+ leaq 1(%rdi),%rdi
+ jmp L$ot
+L$aligned:
+ movq %rax,(%rdi)
+ leaq -8(%rsi),%rsi
+ testq $-8,%rsi
+ leaq 8(%rdi),%rdi
+ jnz L$aligned
+ cmpq $0,%rsi
+ jne L$ittle
+ .byte 0xf3,0xc3
+
+.globl _OPENSSL_wipe_cpu
+
+.p2align 4
+_OPENSSL_wipe_cpu:
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ pxor %xmm8,%xmm8
+ pxor %xmm9,%xmm9
+ pxor %xmm10,%xmm10
+ pxor %xmm11,%xmm11
+ pxor %xmm12,%xmm12
+ pxor %xmm13,%xmm13
+ pxor %xmm14,%xmm14
+ pxor %xmm15,%xmm15
+ xorq %rcx,%rcx
+ xorq %rdx,%rdx
+ xorq %rsi,%rsi
+ xorq %rdi,%rdi
+ xorq %r8,%r8
+ xorq %r9,%r9
+ xorq %r10,%r10
+ xorq %r11,%r11
+ leaq 8(%rsp),%rax
+ .byte 0xf3,0xc3
+
+.globl _OPENSSL_instrument_bus
+
+.p2align 4
+_OPENSSL_instrument_bus:
+ movq %rdi,%r10
+ movq %rsi,%rcx
+ movq %rsi,%r11
+
+ rdtsc
+ movl %eax,%r8d
+ movl $0,%r9d
+ clflush (%r10)
+.byte 0xf0
+ addl %r9d,(%r10)
+ jmp L$oop
+.p2align 4
+L$oop: rdtsc
+ movl %eax,%edx
+ subl %r8d,%eax
+ movl %edx,%r8d
+ movl %eax,%r9d
+ clflush (%r10)
+.byte 0xf0
+ addl %eax,(%r10)
+ leaq 4(%r10),%r10
+ subq $1,%rcx
+ jnz L$oop
+
+ movq %r11,%rax
+ .byte 0xf3,0xc3
+
+
+.globl _OPENSSL_instrument_bus2
+
+.p2align 4
+_OPENSSL_instrument_bus2:
+ movq %rdi,%r10
+ movq %rsi,%rcx
+ movq %rdx,%r11
+ movq %rcx,8(%rsp)
+
+ rdtsc
+ movl %eax,%r8d
+ movl $0,%r9d
+
+ clflush (%r10)
+.byte 0xf0
+ addl %r9d,(%r10)
+
+ rdtsc
+ movl %eax,%edx
+ subl %r8d,%eax
+ movl %edx,%r8d
+ movl %eax,%r9d
+L$oop2:
+ clflush (%r10)
+.byte 0xf0
+ addl %eax,(%r10)
+
+ subq $1,%r11
+ jz L$done2
+
+ rdtsc
+ movl %eax,%edx
+ subl %r8d,%eax
+ movl %edx,%r8d
+ cmpl %r9d,%eax
+ movl %eax,%r9d
+ movl $0,%edx
+ setne %dl
+ subq %rdx,%rcx
+ leaq (%r10,%rdx,4),%r10
+ jnz L$oop2
+
+L$done2:
+ movq 8(%rsp),%rax
+ subq %rcx,%rax
+ .byte 0xf3,0xc3
+
+.globl _OPENSSL_ia32_rdrand
+
+.p2align 4
+_OPENSSL_ia32_rdrand:
+ movl $8,%ecx
+L$oop_rdrand:
+.byte 72,15,199,240
+ jc L$break_rdrand
+ loop L$oop_rdrand
+L$break_rdrand:
+ cmpq $0,%rax
+ cmoveq %rcx,%rax
+ .byte 0xf3,0xc3
+
+
+.section .note.GNU-stack,"",%progbits
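
(The 64-bit OPENSSL_cleanse above is a zeroising memset the optimiser cannot remove: lengths under 15 bytes are cleared one byte at a time, longer buffers byte-wise up to an 8-byte boundary, then one quadword per store, then a byte-wise tail. The C below is a rough sketch of that loop structure only, under a hypothetical name; it is not part of the patch.)

#include <stddef.h>
#include <stdint.h>

/* Illustration of the clearing strategy used by the assembly above; the
 * point of doing it in assembly is that a compiler may prove a trailing
 * memset() dead and drop it, but cannot see through this routine. */
static void cleanse_sketch(void *p, size_t len)
{
    volatile unsigned char *q = p;

    if (len < 15) {                       /* small buffer: bytes only */
        while (len--)
            *q++ = 0;
        return;
    }
    while (((uintptr_t)q & 7) != 0) {     /* reach an 8-byte boundary */
        *q++ = 0;
        len--;
    }
    while (len >= 8) {                    /* bulk: one 64-bit store each */
        *(volatile uint64_t *)q = 0;
        q += 8;
        len -= 8;
    }
    while (len--)                         /* byte-wise tail */
        *q++ = 0;
}

int main(void)
{
    unsigned char secret[40];
    for (size_t i = 0; i < sizeof(secret); i++)
        secret[i] = (unsigned char)(i + 1);
    cleanse_sketch(secret, sizeof(secret));
    return secret[0] | secret[39];        /* 0 once the buffer is wiped */
}
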
diff --git a/lib/accelerated/x86/macosx/sha1-ssse3-x86.s b/lib/accelerated/x86/macosx/sha1-ssse3-x86.s
new file mode 100644
index 0000000000..0e0c719af2
--- /dev/null
+++ b/lib/accelerated/x86/macosx/sha1-ssse3-x86.s
@@ -0,0 +1,1419 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "sha1-586.s"
+.text
+.globl _sha1_block_data_order
+.align 4
+_sha1_block_data_order:
+L_sha1_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%ebp
+ movl 24(%esp),%esi
+ movl 28(%esp),%eax
+ subl $76,%esp
+ shll $6,%eax
+ addl %esi,%eax
+ movl %eax,104(%esp)
+ movl 16(%ebp),%edi
+ jmp L000loop
+.align 4,0x90
+L000loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %edx,28(%esp)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edx,44(%esp)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,48(%esp)
+ movl %ebx,52(%esp)
+ movl %ecx,56(%esp)
+ movl %edx,60(%esp)
+ movl %esi,100(%esp)
+ movl (%ebp),%eax
+ movl 4(%ebp),%ebx
+ movl 8(%ebp),%ecx
+ movl 12(%ebp),%edx
+ # 00_15 0
+ movl %ecx,%esi
+ movl %eax,%ebp
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ movl (%esp),%edi
+ andl %ebx,%esi
+ rorl $2,%ebx
+ xorl %edx,%esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+ # 00_15 1
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ movl 4(%esp),%edx
+ andl %eax,%edi
+ rorl $2,%eax
+ xorl %ecx,%edi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+ # 00_15 2
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ movl 8(%esp),%ecx
+ andl %esi,%edx
+ rorl $2,%esi
+ xorl %ebx,%edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+ # 00_15 3
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ movl 12(%esp),%ebx
+ andl %edi,%ecx
+ rorl $2,%edi
+ xorl %eax,%ecx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+ # 00_15 4
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ movl 16(%esp),%eax
+ andl %edx,%ebx
+ rorl $2,%edx
+ xorl %esi,%ebx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+ # 00_15 5
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ movl 20(%esp),%esi
+ andl %ecx,%eax
+ rorl $2,%ecx
+ xorl %edi,%eax
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+ # 00_15 6
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ movl 24(%esp),%edi
+ andl %ebx,%esi
+ rorl $2,%ebx
+ xorl %edx,%esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+ # 00_15 7
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ movl 28(%esp),%edx
+ andl %eax,%edi
+ rorl $2,%eax
+ xorl %ecx,%edi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+ # 00_15 8
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ movl 32(%esp),%ecx
+ andl %esi,%edx
+ rorl $2,%esi
+ xorl %ebx,%edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+ # 00_15 9
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ movl 36(%esp),%ebx
+ andl %edi,%ecx
+ rorl $2,%edi
+ xorl %eax,%ecx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+ # 00_15 10
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ movl 40(%esp),%eax
+ andl %edx,%ebx
+ rorl $2,%edx
+ xorl %esi,%ebx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+ # 00_15 11
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ movl 44(%esp),%esi
+ andl %ecx,%eax
+ rorl $2,%ecx
+ xorl %edi,%eax
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+ # 00_15 12
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ movl 48(%esp),%edi
+ andl %ebx,%esi
+ rorl $2,%ebx
+ xorl %edx,%esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+ # 00_15 13
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ movl 52(%esp),%edx
+ andl %eax,%edi
+ rorl $2,%eax
+ xorl %ecx,%edi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+ # 00_15 14
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ movl 56(%esp),%ecx
+ andl %esi,%edx
+ rorl $2,%esi
+ xorl %ebx,%edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+ # 00_15 15
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ movl 60(%esp),%ebx
+ andl %edi,%ecx
+ rorl $2,%edi
+ xorl %eax,%ecx
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ movl (%esp),%ebx
+ addl %ebp,%ecx
+ # 16_19 16
+ movl %edi,%ebp
+ xorl 8(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 32(%esp),%ebx
+ andl %edx,%ebp
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ xorl %esi,%ebp
+ addl %ebp,%eax
+ movl %ecx,%ebp
+ rorl $2,%edx
+ movl %ebx,(%esp)
+ roll $5,%ebp
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl 4(%esp),%eax
+ addl %ebp,%ebx
+ # 16_19 17
+ movl %edx,%ebp
+ xorl 12(%esp),%eax
+ xorl %edi,%ebp
+ xorl 36(%esp),%eax
+ andl %ecx,%ebp
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ xorl %edi,%ebp
+ addl %ebp,%esi
+ movl %ebx,%ebp
+ rorl $2,%ecx
+ movl %eax,4(%esp)
+ roll $5,%ebp
+ leal 1518500249(%eax,%esi,1),%eax
+ movl 8(%esp),%esi
+ addl %ebp,%eax
+ # 16_19 18
+ movl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 40(%esp),%esi
+ andl %ebx,%ebp
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ xorl %edx,%ebp
+ addl %ebp,%edi
+ movl %eax,%ebp
+ rorl $2,%ebx
+ movl %esi,8(%esp)
+ roll $5,%ebp
+ leal 1518500249(%esi,%edi,1),%esi
+ movl 12(%esp),%edi
+ addl %ebp,%esi
+ # 16_19 19
+ movl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 44(%esp),%edi
+ andl %eax,%ebp
+ xorl (%esp),%edi
+ roll $1,%edi
+ xorl %ecx,%ebp
+ addl %ebp,%edx
+ movl %esi,%ebp
+ rorl $2,%eax
+ movl %edi,12(%esp)
+ roll $5,%ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ movl 16(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 20
+ movl %esi,%ebp
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,16(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 20(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 21
+ movl %edi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,20(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 24(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 22
+ movl %edx,%ebp
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,24(%esp)
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl 28(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 23
+ movl %ecx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,28(%esp)
+ leal 1859775393(%eax,%esi,1),%eax
+ movl 32(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 24
+ movl %ebx,%ebp
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,32(%esp)
+ leal 1859775393(%esi,%edi,1),%esi
+ movl 36(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 25
+ movl %eax,%ebp
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,36(%esp)
+ leal 1859775393(%edi,%edx,1),%edi
+ movl 40(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 26
+ movl %esi,%ebp
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,40(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 44(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 27
+ movl %edi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,44(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 48(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 28
+ movl %edx,%ebp
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,48(%esp)
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl 52(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 29
+ movl %ecx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,52(%esp)
+ leal 1859775393(%eax,%esi,1),%eax
+ movl 56(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 30
+ movl %ebx,%ebp
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,56(%esp)
+ leal 1859775393(%esi,%edi,1),%esi
+ movl 60(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 31
+ movl %eax,%ebp
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,60(%esp)
+ leal 1859775393(%edi,%edx,1),%edi
+ movl (%esp),%edx
+ addl %ebp,%edi
+ # 20_39 32
+ movl %esi,%ebp
+ xorl 8(%esp),%edx
+ xorl %eax,%ebp
+ xorl 32(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 4(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 33
+ movl %edi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 56(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,4(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 8(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 34
+ movl %edx,%ebp
+ xorl 16(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 40(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,8(%esp)
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl 12(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 35
+ movl %ecx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edx,%ebp
+ xorl 44(%esp),%eax
+ xorl %edi,%ebp
+ xorl (%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,12(%esp)
+ leal 1859775393(%eax,%esi,1),%eax
+ movl 16(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 36
+ movl %ebx,%ebp
+ xorl 24(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%esi
+ xorl %edx,%ebp
+ xorl 4(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,16(%esp)
+ leal 1859775393(%esi,%edi,1),%esi
+ movl 20(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 37
+ movl %eax,%ebp
+ xorl 28(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 8(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,20(%esp)
+ leal 1859775393(%edi,%edx,1),%edi
+ movl 24(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 38
+ movl %esi,%ebp
+ xorl 32(%esp),%edx
+ xorl %eax,%ebp
+ xorl 56(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 12(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,24(%esp)
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl 28(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 39
+ movl %edi,%ebp
+ xorl 36(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 16(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,28(%esp)
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl 32(%esp),%ebx
+ addl %ebp,%ecx
+ # 40_59 40
+ movl %edi,%ebp
+ xorl 40(%esp),%ebx
+ xorl %esi,%ebp
+ xorl (%esp),%ebx
+ andl %edx,%ebp
+ xorl 20(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,32(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 36(%esp),%eax
+ addl %ebp,%ebx
+ # 40_59 41
+ movl %edx,%ebp
+ xorl 44(%esp),%eax
+ xorl %edi,%ebp
+ xorl 4(%esp),%eax
+ andl %ecx,%ebp
+ xorl 24(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,36(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl 40(%esp),%esi
+ addl %ebp,%eax
+ # 40_59 42
+ movl %ecx,%ebp
+ xorl 48(%esp),%esi
+ xorl %edx,%ebp
+ xorl 8(%esp),%esi
+ andl %ebx,%ebp
+ xorl 28(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ rorl $2,%ebx
+ movl %eax,%edi
+ roll $5,%edi
+ movl %esi,40(%esp)
+ leal 2400959708(%esi,%ebp,1),%esi
+ movl %ecx,%ebp
+ addl %edi,%esi
+ andl %edx,%ebp
+ movl 44(%esp),%edi
+ addl %ebp,%esi
+ # 40_59 43
+ movl %ebx,%ebp
+ xorl 52(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 12(%esp),%edi
+ andl %eax,%ebp
+ xorl 32(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ rorl $2,%eax
+ movl %esi,%edx
+ roll $5,%edx
+ movl %edi,44(%esp)
+ leal 2400959708(%edi,%ebp,1),%edi
+ movl %ebx,%ebp
+ addl %edx,%edi
+ andl %ecx,%ebp
+ movl 48(%esp),%edx
+ addl %ebp,%edi
+ # 40_59 44
+ movl %eax,%ebp
+ xorl 56(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 16(%esp),%edx
+ andl %esi,%ebp
+ xorl 36(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ rorl $2,%esi
+ movl %edi,%ecx
+ roll $5,%ecx
+ movl %edx,48(%esp)
+ leal 2400959708(%edx,%ebp,1),%edx
+ movl %eax,%ebp
+ addl %ecx,%edx
+ andl %ebx,%ebp
+ movl 52(%esp),%ecx
+ addl %ebp,%edx
+ # 40_59 45
+ movl %esi,%ebp
+ xorl 60(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 20(%esp),%ecx
+ andl %edi,%ebp
+ xorl 40(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ rorl $2,%edi
+ movl %edx,%ebx
+ roll $5,%ebx
+ movl %ecx,52(%esp)
+ leal 2400959708(%ecx,%ebp,1),%ecx
+ movl %esi,%ebp
+ addl %ebx,%ecx
+ andl %eax,%ebp
+ movl 56(%esp),%ebx
+ addl %ebp,%ecx
+ # 40_59 46
+ movl %edi,%ebp
+ xorl (%esp),%ebx
+ xorl %esi,%ebp
+ xorl 24(%esp),%ebx
+ andl %edx,%ebp
+ xorl 44(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,56(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 60(%esp),%eax
+ addl %ebp,%ebx
+ # 40_59 47
+ movl %edx,%ebp
+ xorl 4(%esp),%eax
+ xorl %edi,%ebp
+ xorl 28(%esp),%eax
+ andl %ecx,%ebp
+ xorl 48(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,60(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl (%esp),%esi
+ addl %ebp,%eax
+ # 40_59 48
+ movl %ecx,%ebp
+ xorl 8(%esp),%esi
+ xorl %edx,%ebp
+ xorl 32(%esp),%esi
+ andl %ebx,%ebp
+ xorl 52(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ rorl $2,%ebx
+ movl %eax,%edi
+ roll $5,%edi
+ movl %esi,(%esp)
+ leal 2400959708(%esi,%ebp,1),%esi
+ movl %ecx,%ebp
+ addl %edi,%esi
+ andl %edx,%ebp
+ movl 4(%esp),%edi
+ addl %ebp,%esi
+ # 40_59 49
+ movl %ebx,%ebp
+ xorl 12(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 36(%esp),%edi
+ andl %eax,%ebp
+ xorl 56(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ rorl $2,%eax
+ movl %esi,%edx
+ roll $5,%edx
+ movl %edi,4(%esp)
+ leal 2400959708(%edi,%ebp,1),%edi
+ movl %ebx,%ebp
+ addl %edx,%edi
+ andl %ecx,%ebp
+ movl 8(%esp),%edx
+ addl %ebp,%edi
+ # 40_59 50
+ movl %eax,%ebp
+ xorl 16(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 40(%esp),%edx
+ andl %esi,%ebp
+ xorl 60(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ rorl $2,%esi
+ movl %edi,%ecx
+ roll $5,%ecx
+ movl %edx,8(%esp)
+ leal 2400959708(%edx,%ebp,1),%edx
+ movl %eax,%ebp
+ addl %ecx,%edx
+ andl %ebx,%ebp
+ movl 12(%esp),%ecx
+ addl %ebp,%edx
+ # 40_59 51
+ movl %esi,%ebp
+ xorl 20(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 44(%esp),%ecx
+ andl %edi,%ebp
+ xorl (%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ rorl $2,%edi
+ movl %edx,%ebx
+ roll $5,%ebx
+ movl %ecx,12(%esp)
+ leal 2400959708(%ecx,%ebp,1),%ecx
+ movl %esi,%ebp
+ addl %ebx,%ecx
+ andl %eax,%ebp
+ movl 16(%esp),%ebx
+ addl %ebp,%ecx
+ # 40_59 52
+ movl %edi,%ebp
+ xorl 24(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 48(%esp),%ebx
+ andl %edx,%ebp
+ xorl 4(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,16(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 20(%esp),%eax
+ addl %ebp,%ebx
+ # 40_59 53
+ movl %edx,%ebp
+ xorl 28(%esp),%eax
+ xorl %edi,%ebp
+ xorl 52(%esp),%eax
+ andl %ecx,%ebp
+ xorl 8(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,20(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl 24(%esp),%esi
+ addl %ebp,%eax
+ # 40_59 54
+ movl %ecx,%ebp
+ xorl 32(%esp),%esi
+ xorl %edx,%ebp
+ xorl 56(%esp),%esi
+ andl %ebx,%ebp
+ xorl 12(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ rorl $2,%ebx
+ movl %eax,%edi
+ roll $5,%edi
+ movl %esi,24(%esp)
+ leal 2400959708(%esi,%ebp,1),%esi
+ movl %ecx,%ebp
+ addl %edi,%esi
+ andl %edx,%ebp
+ movl 28(%esp),%edi
+ addl %ebp,%esi
+ # 40_59 55
+ movl %ebx,%ebp
+ xorl 36(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 60(%esp),%edi
+ andl %eax,%ebp
+ xorl 16(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ rorl $2,%eax
+ movl %esi,%edx
+ roll $5,%edx
+ movl %edi,28(%esp)
+ leal 2400959708(%edi,%ebp,1),%edi
+ movl %ebx,%ebp
+ addl %edx,%edi
+ andl %ecx,%ebp
+ movl 32(%esp),%edx
+ addl %ebp,%edi
+ # 40_59 56
+ movl %eax,%ebp
+ xorl 40(%esp),%edx
+ xorl %ebx,%ebp
+ xorl (%esp),%edx
+ andl %esi,%ebp
+ xorl 20(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ rorl $2,%esi
+ movl %edi,%ecx
+ roll $5,%ecx
+ movl %edx,32(%esp)
+ leal 2400959708(%edx,%ebp,1),%edx
+ movl %eax,%ebp
+ addl %ecx,%edx
+ andl %ebx,%ebp
+ movl 36(%esp),%ecx
+ addl %ebp,%edx
+ # 40_59 57
+ movl %esi,%ebp
+ xorl 44(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 4(%esp),%ecx
+ andl %edi,%ebp
+ xorl 24(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ rorl $2,%edi
+ movl %edx,%ebx
+ roll $5,%ebx
+ movl %ecx,36(%esp)
+ leal 2400959708(%ecx,%ebp,1),%ecx
+ movl %esi,%ebp
+ addl %ebx,%ecx
+ andl %eax,%ebp
+ movl 40(%esp),%ebx
+ addl %ebp,%ecx
+ # 40_59 58
+ movl %edi,%ebp
+ xorl 48(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 8(%esp),%ebx
+ andl %edx,%ebp
+ xorl 28(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ rorl $2,%edx
+ movl %ecx,%eax
+ roll $5,%eax
+ movl %ebx,40(%esp)
+ leal 2400959708(%ebx,%ebp,1),%ebx
+ movl %edi,%ebp
+ addl %eax,%ebx
+ andl %esi,%ebp
+ movl 44(%esp),%eax
+ addl %ebp,%ebx
+ # 40_59 59
+ movl %edx,%ebp
+ xorl 52(%esp),%eax
+ xorl %edi,%ebp
+ xorl 12(%esp),%eax
+ andl %ecx,%ebp
+ xorl 32(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ rorl $2,%ecx
+ movl %ebx,%esi
+ roll $5,%esi
+ movl %eax,44(%esp)
+ leal 2400959708(%eax,%ebp,1),%eax
+ movl %edx,%ebp
+ addl %esi,%eax
+ andl %edi,%ebp
+ movl 48(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 60
+ movl %ebx,%ebp
+ xorl 56(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 36(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,48(%esp)
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 52(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 61
+ movl %eax,%ebp
+ xorl 60(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,52(%esp)
+ leal 3395469782(%edi,%edx,1),%edi
+ movl 56(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 62
+ movl %esi,%ebp
+ xorl (%esp),%edx
+ xorl %eax,%ebp
+ xorl 24(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,56(%esp)
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl 60(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 63
+ movl %edi,%ebp
+ xorl 4(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 48(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,60(%esp)
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl (%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 64
+ movl %edx,%ebp
+ xorl 8(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 32(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,(%esp)
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl 4(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 65
+ movl %ecx,%ebp
+ xorl 12(%esp),%eax
+ xorl %edx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edi,%ebp
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,4(%esp)
+ leal 3395469782(%eax,%esi,1),%eax
+ movl 8(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 66
+ movl %ebx,%ebp
+ xorl 16(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%esi
+ xorl %edx,%ebp
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,8(%esp)
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 12(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 67
+ movl %eax,%ebp
+ xorl 20(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edi
+ xorl %ecx,%ebp
+ xorl (%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,12(%esp)
+ leal 3395469782(%edi,%edx,1),%edi
+ movl 16(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 68
+ movl %esi,%ebp
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,16(%esp)
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl 20(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 69
+ movl %edi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,20(%esp)
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl 24(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 70
+ movl %edx,%ebp
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,24(%esp)
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl 28(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 71
+ movl %ecx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ movl %eax,28(%esp)
+ leal 3395469782(%eax,%esi,1),%eax
+ movl 32(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 72
+ movl %ebx,%ebp
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ movl %esi,32(%esp)
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 36(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 73
+ movl %eax,%ebp
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ movl %edi,36(%esp)
+ leal 3395469782(%edi,%edx,1),%edi
+ movl 40(%esp),%edx
+ addl %ebp,%edi
+ # 20_39 74
+ movl %esi,%ebp
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ebp,%ecx
+ rorl $2,%esi
+ movl %edi,%ebp
+ roll $5,%ebp
+ movl %edx,40(%esp)
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl 44(%esp),%ecx
+ addl %ebp,%edx
+ # 20_39 75
+ movl %edi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebp,%ebx
+ rorl $2,%edi
+ movl %edx,%ebp
+ roll $5,%ebp
+ movl %ecx,44(%esp)
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl 48(%esp),%ebx
+ addl %ebp,%ecx
+ # 20_39 76
+ movl %edx,%ebp
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %ebp,%eax
+ rorl $2,%edx
+ movl %ecx,%ebp
+ roll $5,%ebp
+ movl %ebx,48(%esp)
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl 52(%esp),%eax
+ addl %ebp,%ebx
+ # 20_39 77
+ movl %ecx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %ebp,%esi
+ rorl $2,%ecx
+ movl %ebx,%ebp
+ roll $5,%ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl 56(%esp),%esi
+ addl %ebp,%eax
+ # 20_39 78
+ movl %ebx,%ebp
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %ebp,%edi
+ rorl $2,%ebx
+ movl %eax,%ebp
+ roll $5,%ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl 60(%esp),%edi
+ addl %ebp,%esi
+ # 20_39 79
+ movl %eax,%ebp
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %ebp,%edx
+ rorl $2,%eax
+ movl %esi,%ebp
+ roll $5,%ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ addl %ebp,%edi
+ movl 96(%esp),%ebp
+ movl 100(%esp),%edx
+ addl (%ebp),%edi
+ addl 4(%ebp),%esi
+ addl 8(%ebp),%eax
+ addl 12(%ebp),%ebx
+ addl 16(%ebp),%ecx
+ movl %edi,(%ebp)
+ addl $64,%edx
+ movl %esi,4(%ebp)
+ cmpl 104(%esp),%edx
+ movl %eax,8(%ebp)
+ movl %ecx,%edi
+ movl %ebx,12(%ebp)
+ movl %edx,%esi
+ movl %ecx,16(%ebp)
+ jb L000loop
+ addl $76,%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
+.byte 102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
+.byte 89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
+.byte 114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+
+.section .note.GNU-stack,"",%progbits
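
(In the scalar SHA-1 transform above, the decimal literals 1518500249, 1859775393, 2400959708 and 3395469782 are the usual round constants 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC and 0xCA62C1D6, and each leal K(%reg,%reg,1) folds the constant, the expanded message word and a state word into one address calculation. The C fragment below sketches a single Ch round (rounds 0-19) only to make the register shuffling easier to follow; it is not taken from, and does not replace, the generated code.)

#include <stdint.h>
#include <stdio.h>

static uint32_t rol32(uint32_t x, int n) { return (x << n) | (x >> (32 - n)); }

/* One SHA-1 round for t = 0..19: F(b,c,d) = (b & c) | (~b & d),
 * K = 0x5A827999 = 1518500249.  The assembly computes F as
 * ((c ^ d) & b) ^ d (the xor/and/xor sequence) and keeps the state in
 * fixed registers, rotating which register plays a/b/c/d/e from round
 * to round instead of moving the values. */
static void sha1_round_ch(uint32_t a, uint32_t *b, uint32_t c,
                          uint32_t d, uint32_t *e, uint32_t w)
{
    uint32_t f = ((c ^ d) & *b) ^ d;           /* Ch(b, c, d)             */
    *e += rol32(a, 5) + f + w + 0x5A827999u;   /* leal 1518500249(...)    */
    *b  = rol32(*b, 30);                       /* rorl $2 of a 32-bit reg */
}

int main(void)
{
    /* SHA-1 initial state, one round with message word w = 0. */
    uint32_t a = 0x67452301u, b = 0xEFCDAB89u, c = 0x98BADCFEu,
             d = 0x10325476u, e = 0xC3D2E1F0u;
    sha1_round_ch(a, &b, c, d, &e, 0);
    printf("e = %08x, b = %08x\n", e, b);
    return 0;
}
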
diff --git a/lib/accelerated/x86/macosx/sha1-ssse3-x86_64.s b/lib/accelerated/x86/macosx/sha1-ssse3-x86_64.s
new file mode 100644
index 0000000000..88bf435f81
--- /dev/null
+++ b/lib/accelerated/x86/macosx/sha1-ssse3-x86_64.s
@@ -0,0 +1,2515 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.text
+
+
+.globl _sha1_block_data_order
+
+.p2align 4
+_sha1_block_data_order:
+ movl __gnutls_x86_cpuid_s+0(%rip),%r9d
+ movl __gnutls_x86_cpuid_s+4(%rip),%r8d
+ movl __gnutls_x86_cpuid_s+8(%rip),%r10d
+ testl $512,%r8d
+ jz L$ialu
+ jmp _ssse3_shortcut
+
+.p2align 4
+L$ialu:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ movq %rsp,%r11
+ movq %rdi,%r8
+ subq $72,%rsp
+ movq %rsi,%r9
+ andq $-64,%rsp
+ movq %rdx,%r10
+ movq %r11,64(%rsp)
+L$prologue:
+
+ movl 0(%r8),%esi
+ movl 4(%r8),%edi
+ movl 8(%r8),%r11d
+ movl 12(%r8),%r12d
+ movl 16(%r8),%r13d
+ jmp L$loop
+
+.p2align 4
+L$loop:
+ movl 0(%r9),%edx
+ bswapl %edx
+ movl %edx,0(%rsp)
+ movl %r11d,%eax
+ movl 4(%r9),%ebp
+ movl %esi,%ecx
+ xorl %r12d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r13,1),%r13d
+ andl %edi,%eax
+ movl %ebp,4(%rsp)
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl %edi,%eax
+ movl 8(%r9),%edx
+ movl %r13d,%ecx
+ xorl %r11d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r12,1),%r12d
+ andl %esi,%eax
+ movl %edx,8(%rsp)
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl %esi,%eax
+ movl 12(%r9),%ebp
+ movl %r12d,%ecx
+ xorl %edi,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r11,1),%r11d
+ andl %r13d,%eax
+ movl %ebp,12(%rsp)
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl %r13d,%eax
+ movl 16(%r9),%edx
+ movl %r11d,%ecx
+ xorl %esi,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%rdi,1),%edi
+ andl %r12d,%eax
+ movl %edx,16(%rsp)
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl %r12d,%eax
+ movl 20(%r9),%ebp
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%rsi,1),%esi
+ andl %r11d,%eax
+ movl %ebp,20(%rsp)
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl %r11d,%eax
+ movl 24(%r9),%edx
+ movl %esi,%ecx
+ xorl %r12d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r13,1),%r13d
+ andl %edi,%eax
+ movl %edx,24(%rsp)
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl %edi,%eax
+ movl 28(%r9),%ebp
+ movl %r13d,%ecx
+ xorl %r11d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r12,1),%r12d
+ andl %esi,%eax
+ movl %ebp,28(%rsp)
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl %esi,%eax
+ movl 32(%r9),%edx
+ movl %r12d,%ecx
+ xorl %edi,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r11,1),%r11d
+ andl %r13d,%eax
+ movl %edx,32(%rsp)
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl %r13d,%eax
+ movl 36(%r9),%ebp
+ movl %r11d,%ecx
+ xorl %esi,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%rdi,1),%edi
+ andl %r12d,%eax
+ movl %ebp,36(%rsp)
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl %r12d,%eax
+ movl 40(%r9),%edx
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%rsi,1),%esi
+ andl %r11d,%eax
+ movl %edx,40(%rsp)
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl %r11d,%eax
+ movl 44(%r9),%ebp
+ movl %esi,%ecx
+ xorl %r12d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r13,1),%r13d
+ andl %edi,%eax
+ movl %ebp,44(%rsp)
+ addl %ecx,%r13d
+ xorl %r12d,%eax
+ roll $30,%edi
+ addl %eax,%r13d
+ movl %edi,%eax
+ movl 48(%r9),%edx
+ movl %r13d,%ecx
+ xorl %r11d,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%r12,1),%r12d
+ andl %esi,%eax
+ movl %edx,48(%rsp)
+ addl %ecx,%r12d
+ xorl %r11d,%eax
+ roll $30,%esi
+ addl %eax,%r12d
+ movl %esi,%eax
+ movl 52(%r9),%ebp
+ movl %r12d,%ecx
+ xorl %edi,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%r11,1),%r11d
+ andl %r13d,%eax
+ movl %ebp,52(%rsp)
+ addl %ecx,%r11d
+ xorl %edi,%eax
+ roll $30,%r13d
+ addl %eax,%r11d
+ movl %r13d,%eax
+ movl 56(%r9),%edx
+ movl %r11d,%ecx
+ xorl %esi,%eax
+ bswapl %edx
+ roll $5,%ecx
+ leal 1518500249(%rbp,%rdi,1),%edi
+ andl %r12d,%eax
+ movl %edx,56(%rsp)
+ addl %ecx,%edi
+ xorl %esi,%eax
+ roll $30,%r12d
+ addl %eax,%edi
+ movl %r12d,%eax
+ movl 60(%r9),%ebp
+ movl %edi,%ecx
+ xorl %r13d,%eax
+ bswapl %ebp
+ roll $5,%ecx
+ leal 1518500249(%rdx,%rsi,1),%esi
+ andl %r11d,%eax
+ movl %ebp,60(%rsp)
+ addl %ecx,%esi
+ xorl %r13d,%eax
+ roll $30,%r11d
+ addl %eax,%esi
+ movl 0(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 8(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ xorl 32(%rsp),%edx
+ andl %edi,%eax
+ leal 1518500249(%rbp,%r13,1),%r13d
+ xorl 52(%rsp),%edx
+ xorl %r12d,%eax
+ roll $1,%edx
+ addl %ecx,%r13d
+ roll $30,%edi
+ movl %edx,0(%rsp)
+ addl %eax,%r13d
+ movl 4(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ xorl 36(%rsp),%ebp
+ andl %esi,%eax
+ leal 1518500249(%rdx,%r12,1),%r12d
+ xorl 56(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $1,%ebp
+ addl %ecx,%r12d
+ roll $30,%esi
+ movl %ebp,4(%rsp)
+ addl %eax,%r12d
+ movl 8(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ xorl 40(%rsp),%edx
+ andl %r13d,%eax
+ leal 1518500249(%rbp,%r11,1),%r11d
+ xorl 60(%rsp),%edx
+ xorl %edi,%eax
+ roll $1,%edx
+ addl %ecx,%r11d
+ roll $30,%r13d
+ movl %edx,8(%rsp)
+ addl %eax,%r11d
+ movl 12(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ xorl 44(%rsp),%ebp
+ andl %r12d,%eax
+ leal 1518500249(%rdx,%rdi,1),%edi
+ xorl 0(%rsp),%ebp
+ xorl %esi,%eax
+ roll $1,%ebp
+ addl %ecx,%edi
+ roll $30,%r12d
+ movl %ebp,12(%rsp)
+ addl %eax,%edi
+ movl 16(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ xorl 48(%rsp),%edx
+ andl %r11d,%eax
+ leal 1518500249(%rbp,%rsi,1),%esi
+ xorl 4(%rsp),%edx
+ xorl %r13d,%eax
+ roll $1,%edx
+ addl %ecx,%esi
+ roll $30,%r11d
+ movl %edx,16(%rsp)
+ addl %eax,%esi
+ movl 20(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r13,1),%r13d
+ xorl 52(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 8(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,20(%rsp)
+ movl 24(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 32(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r12,1),%r12d
+ xorl 56(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 12(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,24(%rsp)
+ movl 28(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r11,1),%r11d
+ xorl 60(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 16(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,28(%rsp)
+ movl 32(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rdi,1),%edi
+ xorl 0(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 20(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,32(%rsp)
+ movl 36(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rsi,1),%esi
+ xorl 4(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 24(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,36(%rsp)
+ movl 40(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 48(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r13,1),%r13d
+ xorl 8(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 28(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,40(%rsp)
+ movl 44(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 52(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r12,1),%r12d
+ xorl 12(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 32(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl %ebp,44(%rsp)
+ movl 48(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 56(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r11,1),%r11d
+ xorl 16(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 36(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl %edx,48(%rsp)
+ movl 52(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rdi,1),%edi
+ xorl 20(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 40(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %ebp,52(%rsp)
+ movl 56(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rsi,1),%esi
+ xorl 24(%rsp),%edx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 44(%rsp),%edx
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ movl %edx,56(%rsp)
+ movl 60(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r13,1),%r13d
+ xorl 28(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 48(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,60(%rsp)
+ movl 0(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r12,1),%r12d
+ xorl 32(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 52(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,0(%rsp)
+ movl 4(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r11,1),%r11d
+ xorl 36(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 56(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,4(%rsp)
+ movl 8(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rdi,1),%edi
+ xorl 40(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 60(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,8(%rsp)
+ movl 12(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rsi,1),%esi
+ xorl 44(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 0(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,12(%rsp)
+ movl 16(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r13,1),%r13d
+ xorl 48(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 4(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,16(%rsp)
+ movl 20(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%r12,1),%r12d
+ xorl 52(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 8(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl %ebp,20(%rsp)
+ movl 24(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 32(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%r11,1),%r11d
+ xorl 56(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 12(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl %edx,24(%rsp)
+ movl 28(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rdx,%rdi,1),%edi
+ xorl 60(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 16(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %ebp,28(%rsp)
+ movl 32(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal 1859775393(%rbp,%rsi,1),%esi
+ xorl 0(%rsp),%edx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 20(%rsp),%edx
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ movl %edx,32(%rsp)
+ movl 36(%rsp),%ebp
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 44(%rsp),%ebp
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r12d,%ebx
+ leal -1894007588(%rdx,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 24(%rsp),%ebp
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %ebp,36(%rsp)
+ addl %ecx,%r13d
+ movl 40(%rsp),%edx
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 48(%rsp),%edx
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %r11d,%ebx
+ leal -1894007588(%rbp,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 28(%rsp),%edx
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%edx
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %edx,40(%rsp)
+ addl %ecx,%r12d
+ movl 44(%rsp),%ebp
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 52(%rsp),%ebp
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %edi,%ebx
+ leal -1894007588(%rdx,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 32(%rsp),%ebp
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%ebp
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %ebp,44(%rsp)
+ addl %ecx,%r11d
+ movl 48(%rsp),%edx
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 56(%rsp),%edx
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 16(%rsp),%edx
+ xorl %esi,%ebx
+ leal -1894007588(%rbp,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 36(%rsp),%edx
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%edx
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %edx,48(%rsp)
+ addl %ecx,%edi
+ movl 52(%rsp),%ebp
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 60(%rsp),%ebp
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %r13d,%ebx
+ leal -1894007588(%rdx,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 40(%rsp),%ebp
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%ebp
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %ebp,52(%rsp)
+ addl %ecx,%esi
+ movl 56(%rsp),%edx
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 0(%rsp),%edx
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r12d,%ebx
+ leal -1894007588(%rbp,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 44(%rsp),%edx
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%edx
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %edx,56(%rsp)
+ addl %ecx,%r13d
+ movl 60(%rsp),%ebp
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 4(%rsp),%ebp
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %r11d,%ebx
+ leal -1894007588(%rdx,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 48(%rsp),%ebp
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %ebp,60(%rsp)
+ addl %ecx,%r12d
+ movl 0(%rsp),%edx
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 8(%rsp),%edx
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 32(%rsp),%edx
+ xorl %edi,%ebx
+ leal -1894007588(%rbp,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 52(%rsp),%edx
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%edx
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %edx,0(%rsp)
+ addl %ecx,%r11d
+ movl 4(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 12(%rsp),%ebp
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %esi,%ebx
+ leal -1894007588(%rdx,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 56(%rsp),%ebp
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%ebp
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %ebp,4(%rsp)
+ addl %ecx,%edi
+ movl 8(%rsp),%edx
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 16(%rsp),%edx
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 40(%rsp),%edx
+ xorl %r13d,%ebx
+ leal -1894007588(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 60(%rsp),%edx
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%edx
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %edx,8(%rsp)
+ addl %ecx,%esi
+ movl 12(%rsp),%ebp
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 20(%rsp),%ebp
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r12d,%ebx
+ leal -1894007588(%rdx,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 0(%rsp),%ebp
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %ebp,12(%rsp)
+ addl %ecx,%r13d
+ movl 16(%rsp),%edx
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 24(%rsp),%edx
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 48(%rsp),%edx
+ xorl %r11d,%ebx
+ leal -1894007588(%rbp,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 4(%rsp),%edx
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%edx
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %edx,16(%rsp)
+ addl %ecx,%r12d
+ movl 20(%rsp),%ebp
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 28(%rsp),%ebp
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 52(%rsp),%ebp
+ xorl %edi,%ebx
+ leal -1894007588(%rdx,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 8(%rsp),%ebp
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%ebp
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %ebp,20(%rsp)
+ addl %ecx,%r11d
+ movl 24(%rsp),%edx
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 32(%rsp),%edx
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 56(%rsp),%edx
+ xorl %esi,%ebx
+ leal -1894007588(%rbp,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 12(%rsp),%edx
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%edx
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %edx,24(%rsp)
+ addl %ecx,%edi
+ movl 28(%rsp),%ebp
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 36(%rsp),%ebp
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %r13d,%ebx
+ leal -1894007588(%rdx,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 16(%rsp),%ebp
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%ebp
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %ebp,28(%rsp)
+ addl %ecx,%esi
+ movl 32(%rsp),%edx
+ movl %r11d,%eax
+ movl %r11d,%ebx
+ xorl 40(%rsp),%edx
+ andl %r12d,%eax
+ movl %esi,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r12d,%ebx
+ leal -1894007588(%rbp,%r13,1),%r13d
+ roll $5,%ecx
+ xorl 20(%rsp),%edx
+ addl %eax,%r13d
+ andl %edi,%ebx
+ roll $1,%edx
+ addl %ebx,%r13d
+ roll $30,%edi
+ movl %edx,32(%rsp)
+ addl %ecx,%r13d
+ movl 36(%rsp),%ebp
+ movl %edi,%eax
+ movl %edi,%ebx
+ xorl 44(%rsp),%ebp
+ andl %r11d,%eax
+ movl %r13d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r11d,%ebx
+ leal -1894007588(%rdx,%r12,1),%r12d
+ roll $5,%ecx
+ xorl 24(%rsp),%ebp
+ addl %eax,%r12d
+ andl %esi,%ebx
+ roll $1,%ebp
+ addl %ebx,%r12d
+ roll $30,%esi
+ movl %ebp,36(%rsp)
+ addl %ecx,%r12d
+ movl 40(%rsp),%edx
+ movl %esi,%eax
+ movl %esi,%ebx
+ xorl 48(%rsp),%edx
+ andl %edi,%eax
+ movl %r12d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %edi,%ebx
+ leal -1894007588(%rbp,%r11,1),%r11d
+ roll $5,%ecx
+ xorl 28(%rsp),%edx
+ addl %eax,%r11d
+ andl %r13d,%ebx
+ roll $1,%edx
+ addl %ebx,%r11d
+ roll $30,%r13d
+ movl %edx,40(%rsp)
+ addl %ecx,%r11d
+ movl 44(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r13d,%ebx
+ xorl 52(%rsp),%ebp
+ andl %esi,%eax
+ movl %r11d,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %esi,%ebx
+ leal -1894007588(%rdx,%rdi,1),%edi
+ roll $5,%ecx
+ xorl 32(%rsp),%ebp
+ addl %eax,%edi
+ andl %r12d,%ebx
+ roll $1,%ebp
+ addl %ebx,%edi
+ roll $30,%r12d
+ movl %ebp,44(%rsp)
+ addl %ecx,%edi
+ movl 48(%rsp),%edx
+ movl %r12d,%eax
+ movl %r12d,%ebx
+ xorl 56(%rsp),%edx
+ andl %r13d,%eax
+ movl %edi,%ecx
+ xorl 16(%rsp),%edx
+ xorl %r13d,%ebx
+ leal -1894007588(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl 36(%rsp),%edx
+ addl %eax,%esi
+ andl %r11d,%ebx
+ roll $1,%edx
+ addl %ebx,%esi
+ roll $30,%r11d
+ movl %edx,48(%rsp)
+ addl %ecx,%esi
+ movl 52(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r13,1),%r13d
+ xorl 20(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 40(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,52(%rsp)
+ movl 56(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 0(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r12,1),%r12d
+ xorl 24(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 44(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,56(%rsp)
+ movl 60(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r11,1),%r11d
+ xorl 28(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 48(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,60(%rsp)
+ movl 0(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 8(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%rdi,1),%edi
+ xorl 32(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 52(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,0(%rsp)
+ movl 4(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 12(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rsi,1),%esi
+ xorl 36(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 56(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,4(%rsp)
+ movl 8(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 16(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r13,1),%r13d
+ xorl 40(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 60(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,8(%rsp)
+ movl 12(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 20(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r12,1),%r12d
+ xorl 44(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 0(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl %ebp,12(%rsp)
+ movl 16(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 24(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r11,1),%r11d
+ xorl 48(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 4(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl %edx,16(%rsp)
+ movl 20(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 28(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rdi,1),%edi
+ xorl 52(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 8(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %ebp,20(%rsp)
+ movl 24(%rsp),%edx
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 32(%rsp),%edx
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%rsi,1),%esi
+ xorl 56(%rsp),%edx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 12(%rsp),%edx
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%edx
+ movl %edx,24(%rsp)
+ movl 28(%rsp),%ebp
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 36(%rsp),%ebp
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r13,1),%r13d
+ xorl 60(%rsp),%ebp
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 16(%rsp),%ebp
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%ebp
+ movl %ebp,28(%rsp)
+ movl 32(%rsp),%edx
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 40(%rsp),%edx
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r12,1),%r12d
+ xorl 0(%rsp),%edx
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 20(%rsp),%edx
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%edx
+ movl %edx,32(%rsp)
+ movl 36(%rsp),%ebp
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 44(%rsp),%ebp
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r11,1),%r11d
+ xorl 4(%rsp),%ebp
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 24(%rsp),%ebp
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%ebp
+ movl %ebp,36(%rsp)
+ movl 40(%rsp),%edx
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 48(%rsp),%edx
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%rdi,1),%edi
+ xorl 8(%rsp),%edx
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 28(%rsp),%edx
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%edx
+ movl %edx,40(%rsp)
+ movl 44(%rsp),%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl 52(%rsp),%ebp
+ xorl %r11d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rsi,1),%esi
+ xorl 12(%rsp),%ebp
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ xorl 32(%rsp),%ebp
+ roll $30,%r11d
+ addl %eax,%esi
+ roll $1,%ebp
+ movl %ebp,44(%rsp)
+ movl 48(%rsp),%edx
+ movl %r11d,%eax
+ movl %esi,%ecx
+ xorl 56(%rsp),%edx
+ xorl %edi,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r13,1),%r13d
+ xorl 16(%rsp),%edx
+ xorl %r12d,%eax
+ addl %ecx,%r13d
+ xorl 36(%rsp),%edx
+ roll $30,%edi
+ addl %eax,%r13d
+ roll $1,%edx
+ movl %edx,48(%rsp)
+ movl 52(%rsp),%ebp
+ movl %edi,%eax
+ movl %r13d,%ecx
+ xorl 60(%rsp),%ebp
+ xorl %esi,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%r12,1),%r12d
+ xorl 20(%rsp),%ebp
+ xorl %r11d,%eax
+ addl %ecx,%r12d
+ xorl 40(%rsp),%ebp
+ roll $30,%esi
+ addl %eax,%r12d
+ roll $1,%ebp
+ movl 56(%rsp),%edx
+ movl %esi,%eax
+ movl %r12d,%ecx
+ xorl 0(%rsp),%edx
+ xorl %r13d,%eax
+ roll $5,%ecx
+ leal -899497514(%rbp,%r11,1),%r11d
+ xorl 24(%rsp),%edx
+ xorl %edi,%eax
+ addl %ecx,%r11d
+ xorl 44(%rsp),%edx
+ roll $30,%r13d
+ addl %eax,%r11d
+ roll $1,%edx
+ movl 60(%rsp),%ebp
+ movl %r13d,%eax
+ movl %r11d,%ecx
+ xorl 4(%rsp),%ebp
+ xorl %r12d,%eax
+ roll $5,%ecx
+ leal -899497514(%rdx,%rdi,1),%edi
+ xorl 28(%rsp),%ebp
+ xorl %esi,%eax
+ addl %ecx,%edi
+ xorl 48(%rsp),%ebp
+ roll $30,%r12d
+ addl %eax,%edi
+ roll $1,%ebp
+ movl %r12d,%eax
+ movl %edi,%ecx
+ xorl %r11d,%eax
+ leal -899497514(%rbp,%rsi,1),%esi
+ roll $5,%ecx
+ xorl %r13d,%eax
+ addl %ecx,%esi
+ roll $30,%r11d
+ addl %eax,%esi
+ addl 0(%r8),%esi
+ addl 4(%r8),%edi
+ addl 8(%r8),%r11d
+ addl 12(%r8),%r12d
+ addl 16(%r8),%r13d
+ movl %esi,0(%r8)
+ movl %edi,4(%r8)
+ movl %r11d,8(%r8)
+ movl %r12d,12(%r8)
+ movl %r13d,16(%r8)
+
+ subq $1,%r10
+ leaq 64(%r9),%r9
+ jnz L$loop
+
+ movq 64(%rsp),%rsi
+ movq (%rsi),%r13
+ movq 8(%rsi),%r12
+ movq 16(%rsi),%rbp
+ movq 24(%rsi),%rbx
+ leaq 32(%rsi),%rsp
+L$epilogue:
+ .byte 0xf3,0xc3
+
+
+.p2align 4
+sha1_block_data_order_ssse3:
+_ssse3_shortcut:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ leaq -64(%rsp),%rsp
+ movq %rdi,%r8
+ movq %rsi,%r9
+ movq %rdx,%r10
+
+ shlq $6,%r10
+ addq %r9,%r10
+ leaq K_XX_XX+64(%rip),%r11
+
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movl %ebx,%esi
+ movl 16(%r8),%ebp
+ movl %ecx,%edi
+ xorl %edx,%edi
+ andl %edi,%esi
+
+ movdqa 64(%r11),%xmm6
+ movdqa -64(%r11),%xmm9
+ movdqu 0(%r9),%xmm0
+ movdqu 16(%r9),%xmm1
+ movdqu 32(%r9),%xmm2
+ movdqu 48(%r9),%xmm3
+.byte 102,15,56,0,198
+ addq $64,%r9
+.byte 102,15,56,0,206
+.byte 102,15,56,0,214
+.byte 102,15,56,0,222
+ paddd %xmm9,%xmm0
+ paddd %xmm9,%xmm1
+ paddd %xmm9,%xmm2
+ movdqa %xmm0,0(%rsp)
+ psubd %xmm9,%xmm0
+ movdqa %xmm1,16(%rsp)
+ psubd %xmm9,%xmm1
+ movdqa %xmm2,32(%rsp)
+ psubd %xmm9,%xmm2
+ jmp L$oop_ssse3
+.p2align 4
+L$oop_ssse3:
+ movdqa %xmm1,%xmm4
+ rorl $2,%ebx
+ xorl %edx,%esi
+ movdqa %xmm3,%xmm8
+.byte 102,15,58,15,224,8
+ movl %eax,%edi
+ addl 0(%rsp),%ebp
+ paddd %xmm3,%xmm9
+ xorl %ecx,%ebx
+ roll $5,%eax
+ psrldq $4,%xmm8
+ addl %esi,%ebp
+ andl %ebx,%edi
+ pxor %xmm0,%xmm4
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ pxor %xmm2,%xmm8
+ rorl $7,%eax
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 4(%rsp),%edx
+ pxor %xmm8,%xmm4
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa %xmm9,48(%rsp)
+ addl %edi,%edx
+ andl %eax,%esi
+ movdqa %xmm4,%xmm10
+ movdqa %xmm4,%xmm8
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ xorl %ebx,%esi
+ pslldq $12,%xmm10
+ paddd %xmm4,%xmm4
+ movl %edx,%edi
+ addl 8(%rsp),%ecx
+ xorl %eax,%ebp
+ roll $5,%edx
+ psrld $31,%xmm8
+ addl %esi,%ecx
+ andl %ebp,%edi
+ movdqa %xmm10,%xmm9
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ psrld $30,%xmm10
+ por %xmm8,%xmm4
+ rorl $7,%edx
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 12(%rsp),%ebx
+ pslld $2,%xmm9
+ pxor %xmm10,%xmm4
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa -64(%r11),%xmm10
+ addl %edi,%ebx
+ andl %edx,%esi
+ pxor %xmm9,%xmm4
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ movdqa %xmm2,%xmm5
+ rorl $7,%ecx
+ xorl %ebp,%esi
+ movdqa %xmm4,%xmm9
+.byte 102,15,58,15,233,8
+ movl %ebx,%edi
+ addl 16(%rsp),%eax
+ paddd %xmm4,%xmm10
+ xorl %edx,%ecx
+ roll $5,%ebx
+ psrldq $4,%xmm9
+ addl %esi,%eax
+ andl %ecx,%edi
+ pxor %xmm1,%xmm5
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ pxor %xmm3,%xmm9
+ rorl $7,%ebx
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 20(%rsp),%ebp
+ pxor %xmm9,%xmm5
+ xorl %ecx,%ebx
+ roll $5,%eax
+ movdqa %xmm10,0(%rsp)
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movdqa %xmm5,%xmm8
+ movdqa %xmm5,%xmm9
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ rorl $7,%eax
+ xorl %ecx,%esi
+ pslldq $12,%xmm8
+ paddd %xmm5,%xmm5
+ movl %ebp,%edi
+ addl 24(%rsp),%edx
+ xorl %ebx,%eax
+ roll $5,%ebp
+ psrld $31,%xmm9
+ addl %esi,%edx
+ andl %eax,%edi
+ movdqa %xmm8,%xmm10
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ psrld $30,%xmm8
+ por %xmm9,%xmm5
+ rorl $7,%ebp
+ xorl %ebx,%edi
+ movl %edx,%esi
+ addl 28(%rsp),%ecx
+ pslld $2,%xmm10
+ pxor %xmm8,%xmm5
+ xorl %eax,%ebp
+ roll $5,%edx
+ movdqa -32(%r11),%xmm8
+ addl %edi,%ecx
+ andl %ebp,%esi
+ pxor %xmm10,%xmm5
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ movdqa %xmm3,%xmm6
+ rorl $7,%edx
+ xorl %eax,%esi
+ movdqa %xmm5,%xmm10
+.byte 102,15,58,15,242,8
+ movl %ecx,%edi
+ addl 32(%rsp),%ebx
+ paddd %xmm5,%xmm8
+ xorl %ebp,%edx
+ roll $5,%ecx
+ psrldq $4,%xmm10
+ addl %esi,%ebx
+ andl %edx,%edi
+ pxor %xmm2,%xmm6
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ pxor %xmm4,%xmm10
+ rorl $7,%ecx
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ addl 36(%rsp),%eax
+ pxor %xmm10,%xmm6
+ xorl %edx,%ecx
+ roll $5,%ebx
+ movdqa %xmm8,16(%rsp)
+ addl %edi,%eax
+ andl %ecx,%esi
+ movdqa %xmm6,%xmm9
+ movdqa %xmm6,%xmm10
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ rorl $7,%ebx
+ xorl %edx,%esi
+ pslldq $12,%xmm9
+ paddd %xmm6,%xmm6
+ movl %eax,%edi
+ addl 40(%rsp),%ebp
+ xorl %ecx,%ebx
+ roll $5,%eax
+ psrld $31,%xmm10
+ addl %esi,%ebp
+ andl %ebx,%edi
+ movdqa %xmm9,%xmm8
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ psrld $30,%xmm9
+ por %xmm10,%xmm6
+ rorl $7,%eax
+ xorl %ecx,%edi
+ movl %ebp,%esi
+ addl 44(%rsp),%edx
+ pslld $2,%xmm8
+ pxor %xmm9,%xmm6
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa -32(%r11),%xmm9
+ addl %edi,%edx
+ andl %eax,%esi
+ pxor %xmm8,%xmm6
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ movdqa %xmm4,%xmm7
+ rorl $7,%ebp
+ xorl %ebx,%esi
+ movdqa %xmm6,%xmm8
+.byte 102,15,58,15,251,8
+ movl %edx,%edi
+ addl 48(%rsp),%ecx
+ paddd %xmm6,%xmm9
+ xorl %eax,%ebp
+ roll $5,%edx
+ psrldq $4,%xmm8
+ addl %esi,%ecx
+ andl %ebp,%edi
+ pxor %xmm3,%xmm7
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ pxor %xmm5,%xmm8
+ rorl $7,%edx
+ xorl %eax,%edi
+ movl %ecx,%esi
+ addl 52(%rsp),%ebx
+ pxor %xmm8,%xmm7
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa %xmm9,32(%rsp)
+ addl %edi,%ebx
+ andl %edx,%esi
+ movdqa %xmm7,%xmm10
+ movdqa %xmm7,%xmm8
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ rorl $7,%ecx
+ xorl %ebp,%esi
+ pslldq $12,%xmm10
+ paddd %xmm7,%xmm7
+ movl %ebx,%edi
+ addl 56(%rsp),%eax
+ xorl %edx,%ecx
+ roll $5,%ebx
+ psrld $31,%xmm8
+ addl %esi,%eax
+ andl %ecx,%edi
+ movdqa %xmm10,%xmm9
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ psrld $30,%xmm10
+ por %xmm8,%xmm7
+ rorl $7,%ebx
+ xorl %edx,%edi
+ movl %eax,%esi
+ addl 60(%rsp),%ebp
+ pslld $2,%xmm9
+ pxor %xmm10,%xmm7
+ xorl %ecx,%ebx
+ roll $5,%eax
+ movdqa -32(%r11),%xmm10
+ addl %edi,%ebp
+ andl %ebx,%esi
+ pxor %xmm9,%xmm7
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ movdqa %xmm7,%xmm9
+ rorl $7,%eax
+ pxor %xmm4,%xmm0
+.byte 102,68,15,58,15,206,8
+ xorl %ecx,%esi
+ movl %ebp,%edi
+ addl 0(%rsp),%edx
+ pxor %xmm1,%xmm0
+ xorl %ebx,%eax
+ roll $5,%ebp
+ movdqa %xmm10,%xmm8
+ paddd %xmm7,%xmm10
+ addl %esi,%edx
+ andl %eax,%edi
+ pxor %xmm9,%xmm0
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ rorl $7,%ebp
+ xorl %ebx,%edi
+ movdqa %xmm0,%xmm9
+ movdqa %xmm10,48(%rsp)
+ movl %edx,%esi
+ addl 4(%rsp),%ecx
+ xorl %eax,%ebp
+ roll $5,%edx
+ pslld $2,%xmm0
+ addl %edi,%ecx
+ andl %ebp,%esi
+ psrld $30,%xmm9
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ rorl $7,%edx
+ xorl %eax,%esi
+ movl %ecx,%edi
+ addl 8(%rsp),%ebx
+ por %xmm9,%xmm0
+ xorl %ebp,%edx
+ roll $5,%ecx
+ movdqa %xmm0,%xmm10
+ addl %esi,%ebx
+ andl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 12(%rsp),%eax
+ xorl %ebp,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 16(%rsp),%ebp
+ pxor %xmm5,%xmm1
+.byte 102,68,15,58,15,215,8
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm2,%xmm1
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm8,%xmm9
+ paddd %xmm0,%xmm8
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pxor %xmm10,%xmm1
+ addl 20(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm1,%xmm10
+ movdqa %xmm8,0(%rsp)
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ pslld $2,%xmm1
+ addl 24(%rsp),%ecx
+ xorl %eax,%esi
+ psrld $30,%xmm10
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ por %xmm10,%xmm1
+ addl 28(%rsp),%ebx
+ xorl %ebp,%edi
+ movdqa %xmm1,%xmm8
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 32(%rsp),%eax
+ pxor %xmm6,%xmm2
+.byte 102,68,15,58,15,192,8
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ pxor %xmm3,%xmm2
+ addl %esi,%eax
+ xorl %edx,%edi
+ movdqa 0(%r11),%xmm10
+ paddd %xmm1,%xmm9
+ rorl $7,%ecx
+ addl %ebx,%eax
+ pxor %xmm8,%xmm2
+ addl 36(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ movdqa %xmm2,%xmm8
+ movdqa %xmm9,16(%rsp)
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pslld $2,%xmm2
+ addl 40(%rsp),%edx
+ xorl %ebx,%esi
+ psrld $30,%xmm8
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ por %xmm8,%xmm2
+ addl 44(%rsp),%ecx
+ xorl %eax,%edi
+ movdqa %xmm2,%xmm9
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 48(%rsp),%ebx
+ pxor %xmm7,%xmm3
+.byte 102,68,15,58,15,201,8
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ pxor %xmm4,%xmm3
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ movdqa %xmm10,%xmm8
+ paddd %xmm2,%xmm10
+ rorl $7,%edx
+ addl %ecx,%ebx
+ pxor %xmm9,%xmm3
+ addl 52(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ movdqa %xmm3,%xmm9
+ movdqa %xmm10,32(%rsp)
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ pslld $2,%xmm3
+ addl 56(%rsp),%ebp
+ xorl %ecx,%esi
+ psrld $30,%xmm9
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ por %xmm9,%xmm3
+ addl 60(%rsp),%edx
+ xorl %ebx,%edi
+ movdqa %xmm3,%xmm10
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 0(%rsp),%ecx
+ pxor %xmm0,%xmm4
+.byte 102,68,15,58,15,210,8
+ xorl %eax,%esi
+ movl %edx,%edi
+ roll $5,%edx
+ pxor %xmm5,%xmm4
+ addl %esi,%ecx
+ xorl %eax,%edi
+ movdqa %xmm8,%xmm9
+ paddd %xmm3,%xmm8
+ rorl $7,%ebp
+ addl %edx,%ecx
+ pxor %xmm10,%xmm4
+ addl 4(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ movdqa %xmm4,%xmm10
+ movdqa %xmm8,48(%rsp)
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ pslld $2,%xmm4
+ addl 8(%rsp),%eax
+ xorl %edx,%esi
+ psrld $30,%xmm10
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ por %xmm10,%xmm4
+ addl 12(%rsp),%ebp
+ xorl %ecx,%edi
+ movdqa %xmm4,%xmm8
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 16(%rsp),%edx
+ pxor %xmm1,%xmm5
+.byte 102,68,15,58,15,195,8
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ pxor %xmm6,%xmm5
+ addl %esi,%edx
+ xorl %ebx,%edi
+ movdqa %xmm9,%xmm10
+ paddd %xmm4,%xmm9
+ rorl $7,%eax
+ addl %ebp,%edx
+ pxor %xmm8,%xmm5
+ addl 20(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ movdqa %xmm5,%xmm8
+ movdqa %xmm9,0(%rsp)
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ pslld $2,%xmm5
+ addl 24(%rsp),%ebx
+ xorl %ebp,%esi
+ psrld $30,%xmm8
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ por %xmm8,%xmm5
+ addl 28(%rsp),%eax
+ movdqa %xmm5,%xmm9
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %ecx,%esi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 32(%rsp),%ebp
+ pxor %xmm2,%xmm6
+.byte 102,68,15,58,15,204,8
+ andl %ecx,%esi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ pxor %xmm7,%xmm6
+ movl %eax,%edi
+ xorl %ecx,%esi
+ movdqa %xmm10,%xmm8
+ paddd %xmm5,%xmm10
+ roll $5,%eax
+ addl %esi,%ebp
+ pxor %xmm9,%xmm6
+ xorl %ebx,%edi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 36(%rsp),%edx
+ movdqa %xmm6,%xmm9
+ movdqa %xmm10,16(%rsp)
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movl %ebp,%esi
+ pslld $2,%xmm6
+ xorl %ebx,%edi
+ roll $5,%ebp
+ psrld $30,%xmm9
+ addl %edi,%edx
+ xorl %eax,%esi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 40(%rsp),%ecx
+ andl %eax,%esi
+ por %xmm9,%xmm6
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movdqa %xmm6,%xmm10
+ movl %edx,%edi
+ xorl %eax,%esi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %ebp,%edi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 44(%rsp),%ebx
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movl %ecx,%esi
+ xorl %ebp,%edi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %edx,%esi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 48(%rsp),%eax
+ pxor %xmm3,%xmm7
+.byte 102,68,15,58,15,213,8
+ andl %edx,%esi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ pxor %xmm0,%xmm7
+ movl %ebx,%edi
+ xorl %edx,%esi
+ movdqa 32(%r11),%xmm9
+ paddd %xmm6,%xmm8
+ roll $5,%ebx
+ addl %esi,%eax
+ pxor %xmm10,%xmm7
+ xorl %ecx,%edi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 52(%rsp),%ebp
+ movdqa %xmm7,%xmm10
+ movdqa %xmm8,32(%rsp)
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movl %eax,%esi
+ pslld $2,%xmm7
+ xorl %ecx,%edi
+ roll $5,%eax
+ psrld $30,%xmm10
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 56(%rsp),%edx
+ andl %ebx,%esi
+ por %xmm10,%xmm7
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movdqa %xmm7,%xmm8
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %eax,%edi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 60(%rsp),%ecx
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movl %edx,%esi
+ xorl %eax,%edi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 0(%rsp),%ebx
+ pxor %xmm4,%xmm0
+.byte 102,68,15,58,15,198,8
+ andl %ebp,%esi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ pxor %xmm1,%xmm0
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ movdqa %xmm9,%xmm10
+ paddd %xmm7,%xmm9
+ roll $5,%ecx
+ addl %esi,%ebx
+ pxor %xmm8,%xmm0
+ xorl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 4(%rsp),%eax
+ movdqa %xmm0,%xmm8
+ movdqa %xmm9,48(%rsp)
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movl %ebx,%esi
+ pslld $2,%xmm0
+ xorl %edx,%edi
+ roll $5,%ebx
+ psrld $30,%xmm8
+ addl %edi,%eax
+ xorl %ecx,%esi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 8(%rsp),%ebp
+ andl %ecx,%esi
+ por %xmm8,%xmm0
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movdqa %xmm0,%xmm9
+ movl %eax,%edi
+ xorl %ecx,%esi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ebx,%edi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 12(%rsp),%edx
+ andl %ebx,%edi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ movl %ebp,%esi
+ xorl %ebx,%edi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %eax,%esi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 16(%rsp),%ecx
+ pxor %xmm5,%xmm1
+.byte 102,68,15,58,15,207,8
+ andl %eax,%esi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ pxor %xmm2,%xmm1
+ movl %edx,%edi
+ xorl %eax,%esi
+ movdqa %xmm10,%xmm8
+ paddd %xmm0,%xmm10
+ roll $5,%edx
+ addl %esi,%ecx
+ pxor %xmm9,%xmm1
+ xorl %ebp,%edi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 20(%rsp),%ebx
+ movdqa %xmm1,%xmm9
+ movdqa %xmm10,0(%rsp)
+ andl %ebp,%edi
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movl %ecx,%esi
+ pslld $2,%xmm1
+ xorl %ebp,%edi
+ roll $5,%ecx
+ psrld $30,%xmm9
+ addl %edi,%ebx
+ xorl %edx,%esi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 24(%rsp),%eax
+ andl %edx,%esi
+ por %xmm9,%xmm1
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movdqa %xmm1,%xmm10
+ movl %ebx,%edi
+ xorl %edx,%esi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %ecx,%edi
+ xorl %edx,%ecx
+ addl %ebx,%eax
+ addl 28(%rsp),%ebp
+ andl %ecx,%edi
+ xorl %edx,%ecx
+ rorl $7,%ebx
+ movl %eax,%esi
+ xorl %ecx,%edi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ebx,%esi
+ xorl %ecx,%ebx
+ addl %eax,%ebp
+ addl 32(%rsp),%edx
+ pxor %xmm6,%xmm2
+.byte 102,68,15,58,15,208,8
+ andl %ebx,%esi
+ xorl %ecx,%ebx
+ rorl $7,%eax
+ pxor %xmm3,%xmm2
+ movl %ebp,%edi
+ xorl %ebx,%esi
+ movdqa %xmm8,%xmm9
+ paddd %xmm1,%xmm8
+ roll $5,%ebp
+ addl %esi,%edx
+ pxor %xmm10,%xmm2
+ xorl %eax,%edi
+ xorl %ebx,%eax
+ addl %ebp,%edx
+ addl 36(%rsp),%ecx
+ movdqa %xmm2,%xmm10
+ movdqa %xmm8,16(%rsp)
+ andl %eax,%edi
+ xorl %ebx,%eax
+ rorl $7,%ebp
+ movl %edx,%esi
+ pslld $2,%xmm2
+ xorl %eax,%edi
+ roll $5,%edx
+ psrld $30,%xmm10
+ addl %edi,%ecx
+ xorl %ebp,%esi
+ xorl %eax,%ebp
+ addl %edx,%ecx
+ addl 40(%rsp),%ebx
+ andl %ebp,%esi
+ por %xmm10,%xmm2
+ xorl %eax,%ebp
+ rorl $7,%edx
+ movdqa %xmm2,%xmm8
+ movl %ecx,%edi
+ xorl %ebp,%esi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %edx,%edi
+ xorl %ebp,%edx
+ addl %ecx,%ebx
+ addl 44(%rsp),%eax
+ andl %edx,%edi
+ xorl %ebp,%edx
+ rorl $7,%ecx
+ movl %ebx,%esi
+ xorl %edx,%edi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ addl %ebx,%eax
+ addl 48(%rsp),%ebp
+ pxor %xmm7,%xmm3
+.byte 102,68,15,58,15,193,8
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ pxor %xmm4,%xmm3
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ movdqa %xmm9,%xmm10
+ paddd %xmm2,%xmm9
+ rorl $7,%ebx
+ addl %eax,%ebp
+ pxor %xmm8,%xmm3
+ addl 52(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ movdqa %xmm3,%xmm8
+ movdqa %xmm9,32(%rsp)
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ pslld $2,%xmm3
+ addl 56(%rsp),%ecx
+ xorl %eax,%esi
+ psrld $30,%xmm8
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ por %xmm8,%xmm3
+ addl 60(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 0(%rsp),%eax
+ paddd %xmm3,%xmm10
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ movdqa %xmm10,48(%rsp)
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 4(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 8(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 12(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ cmpq %r10,%r9
+ je L$done_ssse3
+ movdqa 64(%r11),%xmm6
+ movdqa -64(%r11),%xmm9
+ movdqu 0(%r9),%xmm0
+ movdqu 16(%r9),%xmm1
+ movdqu 32(%r9),%xmm2
+ movdqu 48(%r9),%xmm3
+.byte 102,15,56,0,198
+ addq $64,%r9
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+.byte 102,15,56,0,206
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ paddd %xmm9,%xmm0
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ movdqa %xmm0,0(%rsp)
+ xorl %edx,%edi
+ movl %ebx,%esi
+ psubd %xmm9,%xmm0
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+.byte 102,15,56,0,214
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ paddd %xmm9,%xmm1
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ movdqa %xmm1,16(%rsp)
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ psubd %xmm9,%xmm1
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+.byte 102,15,56,0,222
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ paddd %xmm9,%xmm2
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ movdqa %xmm2,32(%rsp)
+ xorl %eax,%edi
+ movl %edx,%esi
+ psubd %xmm9,%xmm2
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 0(%r8),%eax
+ addl 4(%r8),%esi
+ addl 8(%r8),%ecx
+ addl 12(%r8),%edx
+ movl %eax,0(%r8)
+ addl 16(%r8),%ebp
+ movl %esi,4(%r8)
+ movl %esi,%ebx
+ movl %ecx,8(%r8)
+ movl %ecx,%edi
+ movl %edx,12(%r8)
+ xorl %edx,%edi
+ movl %ebp,16(%r8)
+ andl %edi,%esi
+ jmp L$oop_ssse3
+
+.p2align 4
+L$done_ssse3:
+ addl 16(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 20(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ xorl %edx,%esi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 24(%rsp),%ebp
+ xorl %ecx,%esi
+ movl %eax,%edi
+ roll $5,%eax
+ addl %esi,%ebp
+ xorl %ecx,%edi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 28(%rsp),%edx
+ xorl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ addl %edi,%edx
+ xorl %ebx,%esi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 32(%rsp),%ecx
+ xorl %eax,%esi
+ movl %edx,%edi
+ roll $5,%edx
+ addl %esi,%ecx
+ xorl %eax,%edi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 36(%rsp),%ebx
+ xorl %ebp,%edi
+ movl %ecx,%esi
+ roll $5,%ecx
+ addl %edi,%ebx
+ xorl %ebp,%esi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 40(%rsp),%eax
+ xorl %edx,%esi
+ movl %ebx,%edi
+ roll $5,%ebx
+ addl %esi,%eax
+ xorl %edx,%edi
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 44(%rsp),%ebp
+ xorl %ecx,%edi
+ movl %eax,%esi
+ roll $5,%eax
+ addl %edi,%ebp
+ xorl %ecx,%esi
+ rorl $7,%ebx
+ addl %eax,%ebp
+ addl 48(%rsp),%edx
+ xorl %ebx,%esi
+ movl %ebp,%edi
+ roll $5,%ebp
+ addl %esi,%edx
+ xorl %ebx,%edi
+ rorl $7,%eax
+ addl %ebp,%edx
+ addl 52(%rsp),%ecx
+ xorl %eax,%edi
+ movl %edx,%esi
+ roll $5,%edx
+ addl %edi,%ecx
+ xorl %eax,%esi
+ rorl $7,%ebp
+ addl %edx,%ecx
+ addl 56(%rsp),%ebx
+ xorl %ebp,%esi
+ movl %ecx,%edi
+ roll $5,%ecx
+ addl %esi,%ebx
+ xorl %ebp,%edi
+ rorl $7,%edx
+ addl %ecx,%ebx
+ addl 60(%rsp),%eax
+ xorl %edx,%edi
+ movl %ebx,%esi
+ roll $5,%ebx
+ addl %edi,%eax
+ rorl $7,%ecx
+ addl %ebx,%eax
+ addl 0(%r8),%eax
+ addl 4(%r8),%esi
+ addl 8(%r8),%ecx
+ movl %eax,0(%r8)
+ addl 12(%r8),%edx
+ movl %esi,4(%r8)
+ addl 16(%r8),%ebp
+ movl %ecx,8(%r8)
+ movl %edx,12(%r8)
+ movl %ebp,16(%r8)
+ leaq 64(%rsp),%rsi
+ movq 0(%rsi),%r12
+ movq 8(%rsi),%rbp
+ movq 16(%rsi),%rbx
+ leaq 24(%rsi),%rsp
+L$epilogue_ssse3:
+ .byte 0xf3,0xc3
+
+.p2align 6
+K_XX_XX:
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.p2align 6
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/sha256-avx-x86_64.s b/lib/accelerated/x86/macosx/sha256-avx-x86_64.s
new file mode 100644
index 0000000000..bd53138cf6
--- /dev/null
+++ b/lib/accelerated/x86/macosx/sha256-avx-x86_64.s
@@ -0,0 +1,2614 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.text
+
+
+
+.globl _sha256_multi_block
+
+.p2align 5
+_sha256_multi_block:
+ movq %rsp,%rax
+ pushq %rbx
+ pushq %rbp
+ subq $288,%rsp
+ andq $-256,%rsp
+ movq %rax,272(%rsp)
+ leaq K256+128(%rip),%rbp
+ leaq 256(%rsp),%rbx
+ leaq 128(%rdi),%rdi
+
+L$oop_grande:
+ movl %edx,280(%rsp)
+ xorl %edx,%edx
+ movq 0(%rsi),%r8
+ movl 8(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,0(%rbx)
+ cmovleq %rbp,%r8
+ movq 16(%rsi),%r9
+ movl 24(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,4(%rbx)
+ cmovleq %rbp,%r9
+ movq 32(%rsi),%r10
+ movl 40(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,8(%rbx)
+ cmovleq %rbp,%r10
+ movq 48(%rsi),%r11
+ movl 56(%rsi),%ecx
+ cmpl %edx,%ecx
+ cmovgl %ecx,%edx
+ testl %ecx,%ecx
+ movl %ecx,12(%rbx)
+ cmovleq %rbp,%r11
+ testl %edx,%edx
+ jz L$done
+
+ movdqu 0-128(%rdi),%xmm8
+ leaq 128(%rsp),%rax
+ movdqu 32-128(%rdi),%xmm9
+ movdqu 64-128(%rdi),%xmm10
+ movdqu 96-128(%rdi),%xmm11
+ movdqu 128-128(%rdi),%xmm12
+ movdqu 160-128(%rdi),%xmm13
+ movdqu 192-128(%rdi),%xmm14
+ movdqu 224-128(%rdi),%xmm15
+ movdqu L$pbswap(%rip),%xmm6
+ jmp L$oop
+
+.p2align 5
+L$oop:
+ movdqa %xmm10,%xmm4
+ pxor %xmm9,%xmm4
+ movd 0(%r8),%xmm5
+ movd 0(%r9),%xmm0
+ movd 0(%r10),%xmm1
+ movd 0(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,0-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movd 4(%r8),%xmm5
+ movd 4(%r9),%xmm0
+ movd 4(%r10),%xmm1
+ movd 4(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,16-128(%rax)
+ paddd %xmm14,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm5,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm14
+ paddd %xmm7,%xmm14
+ movd 8(%r8),%xmm5
+ movd 8(%r9),%xmm0
+ movd 8(%r10),%xmm1
+ movd 8(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,32-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movd 12(%r8),%xmm5
+ movd 12(%r9),%xmm0
+ movd 12(%r10),%xmm1
+ movd 12(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,48-128(%rax)
+ paddd %xmm12,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm5,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm12
+ paddd %xmm7,%xmm12
+ movd 16(%r8),%xmm5
+ movd 16(%r9),%xmm0
+ movd 16(%r10),%xmm1
+ movd 16(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,64-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movd 20(%r8),%xmm5
+ movd 20(%r9),%xmm0
+ movd 20(%r10),%xmm1
+ movd 20(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,80-128(%rax)
+ paddd %xmm10,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm5,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm10
+ paddd %xmm7,%xmm10
+ movd 24(%r8),%xmm5
+ movd 24(%r9),%xmm0
+ movd 24(%r10),%xmm1
+ movd 24(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,96-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movd 28(%r8),%xmm5
+ movd 28(%r9),%xmm0
+ movd 28(%r10),%xmm1
+ movd 28(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,112-128(%rax)
+ paddd %xmm8,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm5,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movd 32(%r8),%xmm5
+ movd 32(%r9),%xmm0
+ movd 32(%r10),%xmm1
+ movd 32(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,128-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movd 36(%r8),%xmm5
+ movd 36(%r9),%xmm0
+ movd 36(%r10),%xmm1
+ movd 36(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,144-128(%rax)
+ paddd %xmm14,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm5,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm14
+ paddd %xmm7,%xmm14
+ movd 40(%r8),%xmm5
+ movd 40(%r9),%xmm0
+ movd 40(%r10),%xmm1
+ movd 40(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,160-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movd 44(%r8),%xmm5
+ movd 44(%r9),%xmm0
+ movd 44(%r10),%xmm1
+ movd 44(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,176-128(%rax)
+ paddd %xmm12,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm5,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm12
+ paddd %xmm7,%xmm12
+ movd 48(%r8),%xmm5
+ movd 48(%r9),%xmm0
+ movd 48(%r10),%xmm1
+ movd 48(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,192-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movd 52(%r8),%xmm5
+ movd 52(%r9),%xmm0
+ movd 52(%r10),%xmm1
+ movd 52(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,208-128(%rax)
+ paddd %xmm10,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm5,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm10
+ paddd %xmm7,%xmm10
+ movd 56(%r8),%xmm5
+ movd 56(%r9),%xmm0
+ movd 56(%r10),%xmm1
+ movd 56(%r11),%xmm2
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,224-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movd 60(%r8),%xmm5
+ leaq 64(%r8),%r8
+ movd 60(%r9),%xmm0
+ leaq 64(%r9),%r9
+ movd 60(%r10),%xmm1
+ leaq 64(%r10),%r10
+ movd 60(%r11),%xmm2
+ leaq 64(%r11),%r11
+ punpckldq %xmm1,%xmm5
+ punpckldq %xmm2,%xmm0
+ punpckldq %xmm0,%xmm5
+.byte 102,15,56,0,238
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,240-128(%rax)
+ paddd %xmm8,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm5,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movdqu 0-128(%rax),%xmm5
+ movl $3,%ecx
+ jmp L$oop_16_xx
+.p2align 5
+L$oop_16_xx:
+ movdqa 16-128(%rax),%xmm6
+ paddd 144-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 224-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,0-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movdqa 32-128(%rax),%xmm5
+ paddd 160-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 240-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,16-128(%rax)
+ paddd %xmm14,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm6,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm14
+ paddd %xmm7,%xmm14
+ movdqa 48-128(%rax),%xmm6
+ paddd 176-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 0-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,32-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movdqa 64-128(%rax),%xmm5
+ paddd 192-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 16-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,48-128(%rax)
+ paddd %xmm12,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm6,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm12
+ paddd %xmm7,%xmm12
+ movdqa 80-128(%rax),%xmm6
+ paddd 208-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 32-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,64-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movdqa 96-128(%rax),%xmm5
+ paddd 224-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 48-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,80-128(%rax)
+ paddd %xmm10,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm6,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm10
+ paddd %xmm7,%xmm10
+ movdqa 112-128(%rax),%xmm6
+ paddd 240-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 64-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,96-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movdqa 128-128(%rax),%xmm5
+ paddd 0-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 80-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,112-128(%rax)
+ paddd %xmm8,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm6,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ movdqa 144-128(%rax),%xmm6
+ paddd 16-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 96-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm12,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm12,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,128-128(%rax)
+ paddd %xmm15,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -128(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm12,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm14,%xmm0
+ pand %xmm13,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm8,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm9,%xmm3
+ movdqa %xmm8,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm8,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm9,%xmm15
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm15
+ paddd %xmm5,%xmm11
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm15
+ paddd %xmm7,%xmm15
+ movdqa 160-128(%rax),%xmm5
+ paddd 32-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 112-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm11,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm11,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,144-128(%rax)
+ paddd %xmm14,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm11,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm13,%xmm0
+ pand %xmm12,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm15,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm8,%xmm4
+ movdqa %xmm15,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm15,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm8,%xmm14
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm14
+ paddd %xmm6,%xmm10
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm14
+ paddd %xmm7,%xmm14
+ movdqa 176-128(%rax),%xmm6
+ paddd 48-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 128-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm10,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm10,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,160-128(%rax)
+ paddd %xmm13,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm10,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm12,%xmm0
+ pand %xmm11,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm14,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm15,%xmm3
+ movdqa %xmm14,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm14,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm15,%xmm13
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm13
+ paddd %xmm5,%xmm9
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm13
+ paddd %xmm7,%xmm13
+ movdqa 192-128(%rax),%xmm5
+ paddd 64-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 144-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm9,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm9,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,176-128(%rax)
+ paddd %xmm12,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd -32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm9,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm11,%xmm0
+ pand %xmm10,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm13,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm14,%xmm4
+ movdqa %xmm13,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm13,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm14,%xmm12
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm12
+ paddd %xmm6,%xmm8
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm12
+ paddd %xmm7,%xmm12
+ movdqa 208-128(%rax),%xmm6
+ paddd 80-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 160-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm8,%xmm7
+ movdqa %xmm8,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm8,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,192-128(%rax)
+ paddd %xmm11,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 0(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm8,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm8,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm10,%xmm0
+ pand %xmm9,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm12,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm12,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm13,%xmm3
+ movdqa %xmm12,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm12,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm13,%xmm11
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm11
+ paddd %xmm5,%xmm15
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm11
+ paddd %xmm7,%xmm11
+ movdqa 224-128(%rax),%xmm5
+ paddd 96-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 176-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm15,%xmm7
+ movdqa %xmm15,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm15,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,208-128(%rax)
+ paddd %xmm10,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 32(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm15,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm15,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm9,%xmm0
+ pand %xmm8,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm11,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm11,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm12,%xmm4
+ movdqa %xmm11,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm11,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm12,%xmm10
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm10
+ paddd %xmm6,%xmm14
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm10
+ paddd %xmm7,%xmm10
+ movdqa 240-128(%rax),%xmm6
+ paddd 112-128(%rax),%xmm5
+
+ movdqa %xmm6,%xmm7
+ movdqa %xmm6,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm6,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 192-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm3,%xmm1
+
+ psrld $17,%xmm3
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ psrld $19-17,%xmm3
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm3,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm5
+ movdqa %xmm14,%xmm7
+ movdqa %xmm14,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm14,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm5,224-128(%rax)
+ paddd %xmm9,%xmm5
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 64(%rbp),%xmm5
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm14,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm14,%xmm3
+ pslld $26-21,%xmm2
+ pandn %xmm8,%xmm0
+ pand %xmm15,%xmm3
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm10,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm10,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm5
+ pxor %xmm3,%xmm0
+ movdqa %xmm11,%xmm3
+ movdqa %xmm10,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm10,%xmm3
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm5
+ pslld $19-10,%xmm2
+ pand %xmm3,%xmm4
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm11,%xmm9
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm4,%xmm9
+ paddd %xmm5,%xmm13
+ pxor %xmm2,%xmm7
+
+ paddd %xmm5,%xmm9
+ paddd %xmm7,%xmm9
+ movdqa 0-128(%rax),%xmm5
+ paddd 128-128(%rax),%xmm6
+
+ movdqa %xmm5,%xmm7
+ movdqa %xmm5,%xmm1
+ psrld $3,%xmm7
+ movdqa %xmm5,%xmm2
+
+ psrld $7,%xmm1
+ movdqa 208-128(%rax),%xmm0
+ pslld $14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $18-7,%xmm1
+ movdqa %xmm0,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $25-14,%xmm2
+ pxor %xmm1,%xmm7
+ psrld $10,%xmm0
+ movdqa %xmm4,%xmm1
+
+ psrld $17,%xmm4
+ pxor %xmm2,%xmm7
+ pslld $13,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ psrld $19-17,%xmm4
+ pxor %xmm1,%xmm0
+ pslld $15-13,%xmm1
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm0
+ paddd %xmm0,%xmm6
+ movdqa %xmm13,%xmm7
+ movdqa %xmm13,%xmm2
+ psrld $6,%xmm7
+ movdqa %xmm13,%xmm1
+ pslld $7,%xmm2
+ movdqa %xmm6,240-128(%rax)
+ paddd %xmm8,%xmm6
+
+ psrld $11,%xmm1
+ pxor %xmm2,%xmm7
+ pslld $21-7,%xmm2
+ paddd 96(%rbp),%xmm6
+ pxor %xmm1,%xmm7
+
+ psrld $25-11,%xmm1
+ movdqa %xmm13,%xmm0
+ pxor %xmm2,%xmm7
+ movdqa %xmm13,%xmm4
+ pslld $26-21,%xmm2
+ pandn %xmm15,%xmm0
+ pand %xmm14,%xmm4
+ pxor %xmm1,%xmm7
+
+ movdqa %xmm9,%xmm1
+ pxor %xmm2,%xmm7
+ movdqa %xmm9,%xmm2
+ psrld $2,%xmm1
+ paddd %xmm7,%xmm6
+ pxor %xmm4,%xmm0
+ movdqa %xmm10,%xmm4
+ movdqa %xmm9,%xmm7
+ pslld $10,%xmm2
+ pxor %xmm9,%xmm4
+
+ psrld $13,%xmm7
+ pxor %xmm2,%xmm1
+ paddd %xmm0,%xmm6
+ pslld $19-10,%xmm2
+ pand %xmm4,%xmm3
+ pxor %xmm7,%xmm1
+
+ psrld $22-13,%xmm7
+ pxor %xmm2,%xmm1
+ movdqa %xmm10,%xmm8
+ pslld $30-19,%xmm2
+ pxor %xmm1,%xmm7
+ pxor %xmm3,%xmm8
+ paddd %xmm6,%xmm12
+ pxor %xmm2,%xmm7
+
+ paddd %xmm6,%xmm8
+ paddd %xmm7,%xmm8
+ leaq 256(%rbp),%rbp
+ decl %ecx
+ jnz L$oop_16_xx
+
+ movl $1,%ecx
+ leaq K256+128(%rip),%rbp
+
+ movdqa (%rbx),%xmm7
+ cmpl 0(%rbx),%ecx
+ pxor %xmm0,%xmm0
+ cmovgeq %rbp,%r8
+ cmpl 4(%rbx),%ecx
+ movdqa %xmm7,%xmm6
+ cmovgeq %rbp,%r9
+ cmpl 8(%rbx),%ecx
+ pcmpgtd %xmm0,%xmm6
+ cmovgeq %rbp,%r10
+ cmpl 12(%rbx),%ecx
+ paddd %xmm6,%xmm7
+ cmovgeq %rbp,%r11
+
+ movdqu 0-128(%rdi),%xmm0
+ pand %xmm6,%xmm8
+ movdqu 32-128(%rdi),%xmm1
+ pand %xmm6,%xmm9
+ movdqu 64-128(%rdi),%xmm2
+ pand %xmm6,%xmm10
+ movdqu 96-128(%rdi),%xmm5
+ pand %xmm6,%xmm11
+ paddd %xmm0,%xmm8
+ movdqu 128-128(%rdi),%xmm0
+ pand %xmm6,%xmm12
+ paddd %xmm1,%xmm9
+ movdqu 160-128(%rdi),%xmm1
+ pand %xmm6,%xmm13
+ paddd %xmm2,%xmm10
+ movdqu 192-128(%rdi),%xmm2
+ pand %xmm6,%xmm14
+ paddd %xmm5,%xmm11
+ movdqu 224-128(%rdi),%xmm5
+ pand %xmm6,%xmm15
+ paddd %xmm0,%xmm12
+ paddd %xmm1,%xmm13
+ movdqu %xmm8,0-128(%rdi)
+ paddd %xmm2,%xmm14
+ movdqu %xmm9,32-128(%rdi)
+ paddd %xmm5,%xmm15
+ movdqu %xmm10,64-128(%rdi)
+ movdqu %xmm11,96-128(%rdi)
+ movdqu %xmm12,128-128(%rdi)
+ movdqu %xmm13,160-128(%rdi)
+ movdqu %xmm14,192-128(%rdi)
+ movdqu %xmm15,224-128(%rdi)
+
+ movdqa %xmm7,(%rbx)
+ movdqa L$pbswap(%rip),%xmm6
+ decl %edx
+ jnz L$oop
+
+ movl 280(%rsp),%edx
+ leaq 16(%rdi),%rdi
+ leaq 64(%rsi),%rsi
+ decl %edx
+ jnz L$oop_grande
+
+L$done:
+ movq 272(%rsp),%rax
+ movq -16(%rax),%rbp
+ movq -8(%rax),%rbx
+ leaq (%rax),%rsp
+ .byte 0xf3,0xc3
+
+.p2align 8
+K256:
+.long 1116352408,1116352408,1116352408,1116352408
+.long 1116352408,1116352408,1116352408,1116352408
+.long 1899447441,1899447441,1899447441,1899447441
+.long 1899447441,1899447441,1899447441,1899447441
+.long 3049323471,3049323471,3049323471,3049323471
+.long 3049323471,3049323471,3049323471,3049323471
+.long 3921009573,3921009573,3921009573,3921009573
+.long 3921009573,3921009573,3921009573,3921009573
+.long 961987163,961987163,961987163,961987163
+.long 961987163,961987163,961987163,961987163
+.long 1508970993,1508970993,1508970993,1508970993
+.long 1508970993,1508970993,1508970993,1508970993
+.long 2453635748,2453635748,2453635748,2453635748
+.long 2453635748,2453635748,2453635748,2453635748
+.long 2870763221,2870763221,2870763221,2870763221
+.long 2870763221,2870763221,2870763221,2870763221
+.long 3624381080,3624381080,3624381080,3624381080
+.long 3624381080,3624381080,3624381080,3624381080
+.long 310598401,310598401,310598401,310598401
+.long 310598401,310598401,310598401,310598401
+.long 607225278,607225278,607225278,607225278
+.long 607225278,607225278,607225278,607225278
+.long 1426881987,1426881987,1426881987,1426881987
+.long 1426881987,1426881987,1426881987,1426881987
+.long 1925078388,1925078388,1925078388,1925078388
+.long 1925078388,1925078388,1925078388,1925078388
+.long 2162078206,2162078206,2162078206,2162078206
+.long 2162078206,2162078206,2162078206,2162078206
+.long 2614888103,2614888103,2614888103,2614888103
+.long 2614888103,2614888103,2614888103,2614888103
+.long 3248222580,3248222580,3248222580,3248222580
+.long 3248222580,3248222580,3248222580,3248222580
+.long 3835390401,3835390401,3835390401,3835390401
+.long 3835390401,3835390401,3835390401,3835390401
+.long 4022224774,4022224774,4022224774,4022224774
+.long 4022224774,4022224774,4022224774,4022224774
+.long 264347078,264347078,264347078,264347078
+.long 264347078,264347078,264347078,264347078
+.long 604807628,604807628,604807628,604807628
+.long 604807628,604807628,604807628,604807628
+.long 770255983,770255983,770255983,770255983
+.long 770255983,770255983,770255983,770255983
+.long 1249150122,1249150122,1249150122,1249150122
+.long 1249150122,1249150122,1249150122,1249150122
+.long 1555081692,1555081692,1555081692,1555081692
+.long 1555081692,1555081692,1555081692,1555081692
+.long 1996064986,1996064986,1996064986,1996064986
+.long 1996064986,1996064986,1996064986,1996064986
+.long 2554220882,2554220882,2554220882,2554220882
+.long 2554220882,2554220882,2554220882,2554220882
+.long 2821834349,2821834349,2821834349,2821834349
+.long 2821834349,2821834349,2821834349,2821834349
+.long 2952996808,2952996808,2952996808,2952996808
+.long 2952996808,2952996808,2952996808,2952996808
+.long 3210313671,3210313671,3210313671,3210313671
+.long 3210313671,3210313671,3210313671,3210313671
+.long 3336571891,3336571891,3336571891,3336571891
+.long 3336571891,3336571891,3336571891,3336571891
+.long 3584528711,3584528711,3584528711,3584528711
+.long 3584528711,3584528711,3584528711,3584528711
+.long 113926993,113926993,113926993,113926993
+.long 113926993,113926993,113926993,113926993
+.long 338241895,338241895,338241895,338241895
+.long 338241895,338241895,338241895,338241895
+.long 666307205,666307205,666307205,666307205
+.long 666307205,666307205,666307205,666307205
+.long 773529912,773529912,773529912,773529912
+.long 773529912,773529912,773529912,773529912
+.long 1294757372,1294757372,1294757372,1294757372
+.long 1294757372,1294757372,1294757372,1294757372
+.long 1396182291,1396182291,1396182291,1396182291
+.long 1396182291,1396182291,1396182291,1396182291
+.long 1695183700,1695183700,1695183700,1695183700
+.long 1695183700,1695183700,1695183700,1695183700
+.long 1986661051,1986661051,1986661051,1986661051
+.long 1986661051,1986661051,1986661051,1986661051
+.long 2177026350,2177026350,2177026350,2177026350
+.long 2177026350,2177026350,2177026350,2177026350
+.long 2456956037,2456956037,2456956037,2456956037
+.long 2456956037,2456956037,2456956037,2456956037
+.long 2730485921,2730485921,2730485921,2730485921
+.long 2730485921,2730485921,2730485921,2730485921
+.long 2820302411,2820302411,2820302411,2820302411
+.long 2820302411,2820302411,2820302411,2820302411
+.long 3259730800,3259730800,3259730800,3259730800
+.long 3259730800,3259730800,3259730800,3259730800
+.long 3345764771,3345764771,3345764771,3345764771
+.long 3345764771,3345764771,3345764771,3345764771
+.long 3516065817,3516065817,3516065817,3516065817
+.long 3516065817,3516065817,3516065817,3516065817
+.long 3600352804,3600352804,3600352804,3600352804
+.long 3600352804,3600352804,3600352804,3600352804
+.long 4094571909,4094571909,4094571909,4094571909
+.long 4094571909,4094571909,4094571909,4094571909
+.long 275423344,275423344,275423344,275423344
+.long 275423344,275423344,275423344,275423344
+.long 430227734,430227734,430227734,430227734
+.long 430227734,430227734,430227734,430227734
+.long 506948616,506948616,506948616,506948616
+.long 506948616,506948616,506948616,506948616
+.long 659060556,659060556,659060556,659060556
+.long 659060556,659060556,659060556,659060556
+.long 883997877,883997877,883997877,883997877
+.long 883997877,883997877,883997877,883997877
+.long 958139571,958139571,958139571,958139571
+.long 958139571,958139571,958139571,958139571
+.long 1322822218,1322822218,1322822218,1322822218
+.long 1322822218,1322822218,1322822218,1322822218
+.long 1537002063,1537002063,1537002063,1537002063
+.long 1537002063,1537002063,1537002063,1537002063
+.long 1747873779,1747873779,1747873779,1747873779
+.long 1747873779,1747873779,1747873779,1747873779
+.long 1955562222,1955562222,1955562222,1955562222
+.long 1955562222,1955562222,1955562222,1955562222
+.long 2024104815,2024104815,2024104815,2024104815
+.long 2024104815,2024104815,2024104815,2024104815
+.long 2227730452,2227730452,2227730452,2227730452
+.long 2227730452,2227730452,2227730452,2227730452
+.long 2361852424,2361852424,2361852424,2361852424
+.long 2361852424,2361852424,2361852424,2361852424
+.long 2428436474,2428436474,2428436474,2428436474
+.long 2428436474,2428436474,2428436474,2428436474
+.long 2756734187,2756734187,2756734187,2756734187
+.long 2756734187,2756734187,2756734187,2756734187
+.long 3204031479,3204031479,3204031479,3204031479
+.long 3204031479,3204031479,3204031479,3204031479
+.long 3329325298,3329325298,3329325298,3329325298
+.long 3329325298,3329325298,3329325298,3329325298
+L$pbswap:
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/sha256-ssse3-x86.s b/lib/accelerated/x86/macosx/sha256-ssse3-x86.s
new file mode 100644
index 0000000000..d6cf6cb2a0
--- /dev/null
+++ b/lib/accelerated/x86/macosx/sha256-ssse3-x86.s
@@ -0,0 +1,3405 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of the Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "sha512-586.s"
+.text
+.globl _sha256_block_data_order
+.align 4
+_sha256_block_data_order:
+L_sha256_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call L000pic_point
+L000pic_point:
+ popl %ebp
+ leal L001K256-L000pic_point(%ebp),%ebp
+ subl $16,%esp
+ andl $-64,%esp
+ shll $6,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ movl L__gnutls_x86_cpuid_s$non_lazy_ptr-L001K256(%ebp),%edx
+ movl (%edx),%ecx
+ movl 4(%edx),%ebx
+ testl $1048576,%ecx
+ jnz L002loop
+ andl $1073741824,%ecx
+ andl $268435968,%ebx
+ orl %ebx,%ecx
+ andl $1342177280,%ecx
+ cmpl $1342177280,%ecx
+ je L003loop_shrd
+ subl %edi,%eax
+ cmpl $256,%eax
+ jae L004unrolled
+ jmp L002loop
+.align 4,0x90
+L002loop:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ bswap %eax
+ movl 12(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ bswap %eax
+ movl 28(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ bswap %eax
+ movl 44(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ bswap %eax
+ movl 60(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ addl $64,%edi
+ leal -36(%esp),%esp
+ movl %edi,104(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edi
+ movl %ebx,8(%esp)
+ xorl %ecx,%ebx
+ movl %ecx,12(%esp)
+ movl %edi,16(%esp)
+ movl %ebx,(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edi
+ movl %ebx,24(%esp)
+ movl %ecx,28(%esp)
+ movl %edi,32(%esp)
+.align 4,0x90
+L00500_15:
+ movl %edx,%ecx
+ movl 24(%esp),%esi
+ rorl $14,%ecx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl 96(%esp),%ebx
+ rorl $5,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ rorl $9,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ rorl $11,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ rorl $2,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3248222580,%esi
+ jne L00500_15
+ movl 156(%esp),%ecx
+ jmp L00616_63
+.align 4,0x90
+L00616_63:
+ movl %ecx,%ebx
+ movl 104(%esp),%esi
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 160(%esp),%ebx
+ shrl $10,%edi
+ addl 124(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 24(%esp),%esi
+ rorl $14,%ecx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl %ebx,96(%esp)
+ rorl $5,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ rorl $9,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ rorl $11,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ rorl $2,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ movl 156(%esp),%ecx
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3329325298,%esi
+ jne L00616_63
+ movl 356(%esp),%esi
+ movl 8(%esp),%ebx
+ movl 16(%esp),%ecx
+ addl (%esi),%eax
+ addl 4(%esi),%ebx
+ addl 8(%esi),%edi
+ addl 12(%esi),%ecx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %edi,8(%esi)
+ movl %ecx,12(%esi)
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ movl 32(%esp),%ecx
+ movl 360(%esp),%edi
+ addl 16(%esi),%edx
+ addl 20(%esi),%eax
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %eax,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ leal 356(%esp),%esp
+ subl $256,%ebp
+ cmpl 8(%esp),%edi
+ jb L002loop
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 5,0x90
+L003loop_shrd:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ bswap %eax
+ movl 12(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ bswap %eax
+ movl 28(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ bswap %eax
+ movl 44(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ bswap %eax
+ movl 60(%edi),%edx
+ bswap %ebx
+ pushl %eax
+ bswap %ecx
+ pushl %ebx
+ bswap %edx
+ pushl %ecx
+ pushl %edx
+ addl $64,%edi
+ leal -36(%esp),%esp
+ movl %edi,104(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edi
+ movl %ebx,8(%esp)
+ xorl %ecx,%ebx
+ movl %ecx,12(%esp)
+ movl %edi,16(%esp)
+ movl %ebx,(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edi
+ movl %ebx,24(%esp)
+ movl %ecx,28(%esp)
+ movl %edi,32(%esp)
+.align 4,0x90
+L00700_15_shrd:
+ movl %edx,%ecx
+ movl 24(%esp),%esi
+ shrdl $14,%ecx,%ecx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl 96(%esp),%ebx
+ shrdl $5,%ecx,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ shrdl $6,%edx,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ shrdl $9,%ecx,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ shrdl $11,%ecx,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ shrdl $2,%ecx,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3248222580,%esi
+ jne L00700_15_shrd
+ movl 156(%esp),%ecx
+ jmp L00816_63_shrd
+.align 4,0x90
+L00816_63_shrd:
+ movl %ecx,%ebx
+ movl 104(%esp),%esi
+ shrdl $11,%ecx,%ecx
+ movl %esi,%edi
+ shrdl $2,%esi,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ shrdl $7,%ecx,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ shrdl $17,%esi,%esi
+ addl 160(%esp),%ebx
+ shrl $10,%edi
+ addl 124(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 24(%esp),%esi
+ shrdl $14,%ecx,%ecx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %edx,%ecx
+ xorl %edi,%esi
+ movl %ebx,96(%esp)
+ shrdl $5,%ecx,%ecx
+ andl %edx,%esi
+ movl %edx,20(%esp)
+ xorl %ecx,%edx
+ addl 32(%esp),%ebx
+ xorl %edi,%esi
+ shrdl $6,%edx,%edx
+ movl %eax,%ecx
+ addl %esi,%ebx
+ shrdl $9,%ecx,%ecx
+ addl %edx,%ebx
+ movl 8(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,4(%esp)
+ leal -4(%esp),%esp
+ shrdl $11,%ecx,%ecx
+ movl (%ebp),%esi
+ xorl %eax,%ecx
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ shrdl $2,%ecx,%ecx
+ addl %esi,%ebx
+ movl %eax,(%esp)
+ addl %ebx,%edx
+ andl 4(%esp),%eax
+ addl %ecx,%ebx
+ xorl %edi,%eax
+ movl 156(%esp),%ecx
+ addl $4,%ebp
+ addl %ebx,%eax
+ cmpl $3329325298,%esi
+ jne L00816_63_shrd
+ movl 356(%esp),%esi
+ movl 8(%esp),%ebx
+ movl 16(%esp),%ecx
+ addl (%esi),%eax
+ addl 4(%esi),%ebx
+ addl 8(%esi),%edi
+ addl 12(%esi),%ecx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %edi,8(%esi)
+ movl %ecx,12(%esi)
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ movl 32(%esp),%ecx
+ movl 360(%esp),%edi
+ addl 16(%esi),%edx
+ addl 20(%esi),%eax
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %eax,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ leal 356(%esp),%esp
+ subl $256,%ebp
+ cmpl 8(%esp),%edi
+ jb L003loop_shrd
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
+L001K256:
+.long 1116352408,1899447441,3049323471,3921009573,961987163,1508970993,2453635748,2870763221,3624381080,310598401,607225278,1426881987,1925078388,2162078206,2614888103,3248222580,3835390401,4022224774,264347078,604807628,770255983,1249150122,1555081692,1996064986,2554220882,2821834349,2952996808,3210313671,3336571891,3584528711,113926993,338241895,666307205,773529912,1294757372,1396182291,1695183700,1986661051,2177026350,2456956037,2730485921,2820302411,3259730800,3345764771,3516065817,3600352804,4094571909,275423344,430227734,506948616,659060556,883997877,958139571,1322822218,1537002063,1747873779,1955562222,2024104815,2227730452,2361852424,2428436474,2756734187,3204031479,3329325298
+.long 66051,67438087,134810123,202182159
+.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
+.align 4,0x90
+L004unrolled:
+ leal -96(%esp),%esp
+ movl (%esi),%eax
+ movl 4(%esi),%ebp
+ movl 8(%esi),%ecx
+ movl 12(%esi),%ebx
+ movl %ebp,4(%esp)
+ xorl %ecx,%ebp
+ movl %ecx,8(%esp)
+ movl %ebx,12(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%esi
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %esi,28(%esp)
+ jmp L009grand_loop
+.align 4,0x90
+L009grand_loop:
+ movl (%edi),%ebx
+ movl 4(%edi),%ecx
+ bswap %ebx
+ movl 8(%edi),%esi
+ bswap %ecx
+ movl %ebx,32(%esp)
+ bswap %esi
+ movl %ecx,36(%esp)
+ movl %esi,40(%esp)
+ movl 12(%edi),%ebx
+ movl 16(%edi),%ecx
+ bswap %ebx
+ movl 20(%edi),%esi
+ bswap %ecx
+ movl %ebx,44(%esp)
+ bswap %esi
+ movl %ecx,48(%esp)
+ movl %esi,52(%esp)
+ movl 24(%edi),%ebx
+ movl 28(%edi),%ecx
+ bswap %ebx
+ movl 32(%edi),%esi
+ bswap %ecx
+ movl %ebx,56(%esp)
+ bswap %esi
+ movl %ecx,60(%esp)
+ movl %esi,64(%esp)
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ bswap %ebx
+ movl 44(%edi),%esi
+ bswap %ecx
+ movl %ebx,68(%esp)
+ bswap %esi
+ movl %ecx,72(%esp)
+ movl %esi,76(%esp)
+ movl 48(%edi),%ebx
+ movl 52(%edi),%ecx
+ bswap %ebx
+ movl 56(%edi),%esi
+ bswap %ecx
+ movl %ebx,80(%esp)
+ bswap %esi
+ movl %ecx,84(%esp)
+ movl %esi,88(%esp)
+ movl 60(%edi),%ebx
+ addl $64,%edi
+ bswap %ebx
+ movl %edi,100(%esp)
+ movl %ebx,92(%esp)
+ movl %edx,%ecx
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl 32(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1116352408(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl 36(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1899447441(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl 40(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3049323471(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl 44(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3921009573(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl 48(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 961987163(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl 52(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1508970993(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl 56(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2453635748(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl 60(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2870763221(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl 64(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3624381080(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl 68(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 310598401(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl 72(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 607225278(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl 76(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1426881987(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl 80(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1925078388(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl 84(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2162078206(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl %edx,%ecx
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl 88(%esp),%ebx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2614888103(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl %edx,%esi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl 92(%esp),%ebx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3248222580(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 36(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 88(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 32(%esp),%ebx
+ shrl $10,%edi
+ addl 68(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,32(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3835390401(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 40(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 92(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 36(%esp),%ebx
+ shrl $10,%edi
+ addl 72(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,36(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 4022224774(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 44(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 32(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 40(%esp),%ebx
+ shrl $10,%edi
+ addl 76(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,40(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 264347078(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 36(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 44(%esp),%ebx
+ shrl $10,%edi
+ addl 80(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,44(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 604807628(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 52(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 40(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 48(%esp),%ebx
+ shrl $10,%edi
+ addl 84(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,48(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 770255983(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 44(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 52(%esp),%ebx
+ shrl $10,%edi
+ addl 88(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,52(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1249150122(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 60(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 48(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 56(%esp),%ebx
+ shrl $10,%edi
+ addl 92(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,56(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1555081692(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 64(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 52(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 60(%esp),%ebx
+ shrl $10,%edi
+ addl 32(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,60(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1996064986(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 68(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 56(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 64(%esp),%ebx
+ shrl $10,%edi
+ addl 36(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,64(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2554220882(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 72(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 60(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 68(%esp),%ebx
+ shrl $10,%edi
+ addl 40(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,68(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2821834349(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 76(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 64(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 72(%esp),%ebx
+ shrl $10,%edi
+ addl 44(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,72(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2952996808(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 80(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 68(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 76(%esp),%ebx
+ shrl $10,%edi
+ addl 48(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,76(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3210313671(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 84(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 72(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 80(%esp),%ebx
+ shrl $10,%edi
+ addl 52(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,80(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3336571891(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 88(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 76(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 84(%esp),%ebx
+ shrl $10,%edi
+ addl 56(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,84(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3584528711(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 92(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 80(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 88(%esp),%ebx
+ shrl $10,%edi
+ addl 60(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,88(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 113926993(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 84(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 92(%esp),%ebx
+ shrl $10,%edi
+ addl 64(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,92(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 338241895(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 36(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 88(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 32(%esp),%ebx
+ shrl $10,%edi
+ addl 68(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,32(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 666307205(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 40(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 92(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 36(%esp),%ebx
+ shrl $10,%edi
+ addl 72(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,36(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 773529912(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 44(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 32(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 40(%esp),%ebx
+ shrl $10,%edi
+ addl 76(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,40(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1294757372(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 36(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 44(%esp),%ebx
+ shrl $10,%edi
+ addl 80(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,44(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1396182291(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 52(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 40(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 48(%esp),%ebx
+ shrl $10,%edi
+ addl 84(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,48(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1695183700(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 44(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 52(%esp),%ebx
+ shrl $10,%edi
+ addl 88(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,52(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1986661051(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 60(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 48(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 56(%esp),%ebx
+ shrl $10,%edi
+ addl 92(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,56(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2177026350(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 64(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 52(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 60(%esp),%ebx
+ shrl $10,%edi
+ addl 32(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,60(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2456956037(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 68(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 56(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 64(%esp),%ebx
+ shrl $10,%edi
+ addl 36(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,64(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2730485921(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 72(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 60(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 68(%esp),%ebx
+ shrl $10,%edi
+ addl 40(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,68(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2820302411(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 76(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 64(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 72(%esp),%ebx
+ shrl $10,%edi
+ addl 44(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,72(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3259730800(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 80(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 68(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 76(%esp),%ebx
+ shrl $10,%edi
+ addl 48(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,76(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3345764771(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 84(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 72(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 80(%esp),%ebx
+ shrl $10,%edi
+ addl 52(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,80(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3516065817(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 88(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 76(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 84(%esp),%ebx
+ shrl $10,%edi
+ addl 56(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,84(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3600352804(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 92(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 80(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 88(%esp),%ebx
+ shrl $10,%edi
+ addl 60(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,88(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 4094571909(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 84(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 92(%esp),%ebx
+ shrl $10,%edi
+ addl 64(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,92(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 275423344(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 36(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 88(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 32(%esp),%ebx
+ shrl $10,%edi
+ addl 68(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,32(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 430227734(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 40(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 92(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 36(%esp),%ebx
+ shrl $10,%edi
+ addl 72(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,36(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 506948616(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 44(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 32(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 40(%esp),%ebx
+ shrl $10,%edi
+ addl 76(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,40(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 659060556(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 36(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 44(%esp),%ebx
+ shrl $10,%edi
+ addl 80(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,44(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 883997877(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 52(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 40(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 48(%esp),%ebx
+ shrl $10,%edi
+ addl 84(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,48(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 958139571(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 44(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 52(%esp),%ebx
+ shrl $10,%edi
+ addl 88(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,52(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1322822218(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 60(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 48(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 56(%esp),%ebx
+ shrl $10,%edi
+ addl 92(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,56(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1537002063(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 64(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 52(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 60(%esp),%ebx
+ shrl $10,%edi
+ addl 32(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,60(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 1747873779(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 68(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 56(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 64(%esp),%ebx
+ shrl $10,%edi
+ addl 36(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 20(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 24(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,64(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,16(%esp)
+ xorl %ecx,%edx
+ addl 28(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 4(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 1955562222(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 72(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 12(%esp),%edx
+ addl %ecx,%ebp
+ movl 60(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 68(%esp),%ebx
+ shrl $10,%edi
+ addl 40(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 16(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 20(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,68(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,12(%esp)
+ xorl %esi,%edx
+ addl 24(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl (%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,28(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2024104815(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 76(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 8(%esp),%edx
+ addl %esi,%eax
+ movl 64(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 72(%esp),%ebx
+ shrl $10,%edi
+ addl 44(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 12(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 16(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,72(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,8(%esp)
+ xorl %ecx,%edx
+ addl 20(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 28(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,24(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2227730452(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 80(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 4(%esp),%edx
+ addl %ecx,%ebp
+ movl 68(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 76(%esp),%ebx
+ shrl $10,%edi
+ addl 48(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 8(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 12(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,76(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,4(%esp)
+ xorl %esi,%edx
+ addl 16(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 24(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,20(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2361852424(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 84(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl (%esp),%edx
+ addl %esi,%eax
+ movl 72(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 80(%esp),%ebx
+ shrl $10,%edi
+ addl 52(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 4(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 8(%esp),%edi
+ xorl %ecx,%edx
+ movl %ebx,80(%esp)
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,(%esp)
+ xorl %ecx,%edx
+ addl 12(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 20(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,16(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 2428436474(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 88(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 28(%esp),%edx
+ addl %ecx,%ebp
+ movl 76(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 84(%esp),%ebx
+ shrl $10,%edi
+ addl 56(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl (%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 4(%esp),%edi
+ xorl %esi,%edx
+ movl %ebx,84(%esp)
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,28(%esp)
+ xorl %esi,%edx
+ addl 8(%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 16(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,12(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 2756734187(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ movl 92(%esp),%ecx
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 24(%esp),%edx
+ addl %esi,%eax
+ movl 80(%esp),%esi
+ movl %ecx,%ebx
+ rorl $11,%ecx
+ movl %esi,%edi
+ rorl $2,%esi
+ xorl %ebx,%ecx
+ shrl $3,%ebx
+ rorl $7,%ecx
+ xorl %edi,%esi
+ xorl %ecx,%ebx
+ rorl $17,%esi
+ addl 88(%esp),%ebx
+ shrl $10,%edi
+ addl 60(%esp),%ebx
+ movl %edx,%ecx
+ xorl %esi,%edi
+ movl 28(%esp),%esi
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl (%esp),%edi
+ xorl %ecx,%edx
+ xorl %edi,%esi
+ rorl $5,%edx
+ andl %ecx,%esi
+ movl %ecx,24(%esp)
+ xorl %ecx,%edx
+ addl 4(%esp),%ebx
+ xorl %esi,%edi
+ rorl $6,%edx
+ movl %eax,%ecx
+ addl %edi,%ebx
+ rorl $9,%ecx
+ movl %eax,%esi
+ movl 12(%esp),%edi
+ xorl %eax,%ecx
+ movl %eax,8(%esp)
+ xorl %edi,%eax
+ rorl $11,%ecx
+ andl %eax,%ebp
+ leal 3204031479(%ebx,%edx,1),%edx
+ xorl %esi,%ecx
+ xorl %edi,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ecx
+ addl %edx,%ebp
+ addl 20(%esp),%edx
+ addl %ecx,%ebp
+ movl 84(%esp),%ecx
+ movl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ rorl $2,%ecx
+ xorl %ebx,%esi
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %edi,%ecx
+ xorl %esi,%ebx
+ rorl $17,%ecx
+ addl 92(%esp),%ebx
+ shrl $10,%edi
+ addl 64(%esp),%ebx
+ movl %edx,%esi
+ xorl %ecx,%edi
+ movl 24(%esp),%ecx
+ rorl $14,%edx
+ addl %edi,%ebx
+ movl 28(%esp),%edi
+ xorl %esi,%edx
+ xorl %edi,%ecx
+ rorl $5,%edx
+ andl %esi,%ecx
+ movl %esi,20(%esp)
+ xorl %esi,%edx
+ addl (%esp),%ebx
+ xorl %ecx,%edi
+ rorl $6,%edx
+ movl %ebp,%esi
+ addl %edi,%ebx
+ rorl $9,%esi
+ movl %ebp,%ecx
+ movl 8(%esp),%edi
+ xorl %ebp,%esi
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ rorl $11,%esi
+ andl %ebp,%eax
+ leal 3329325298(%ebx,%edx,1),%edx
+ xorl %ecx,%esi
+ xorl %edi,%eax
+ rorl $2,%esi
+ addl %edx,%eax
+ addl 16(%esp),%edx
+ addl %esi,%eax
+ movl 96(%esp),%esi
+ xorl %edi,%ebp
+ movl 12(%esp),%ecx
+ addl (%esi),%eax
+ addl 4(%esi),%ebp
+ addl 8(%esi),%edi
+ addl 12(%esi),%ecx
+ movl %eax,(%esi)
+ movl %ebp,4(%esi)
+ movl %edi,8(%esi)
+ movl %ecx,12(%esi)
+ movl %ebp,4(%esp)
+ xorl %edi,%ebp
+ movl %edi,8(%esp)
+ movl %ecx,12(%esp)
+ movl 20(%esp),%edi
+ movl 24(%esp),%ebx
+ movl 28(%esp),%ecx
+ addl 16(%esi),%edx
+ addl 20(%esi),%edi
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %edi,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ movl %edi,20(%esp)
+ movl 100(%esp),%edi
+ movl %ebx,24(%esp)
+ movl %ecx,28(%esp)
+ cmpl 104(%esp),%edi
+ jb L009grand_loop
+ movl 108(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.section __IMPORT,__pointers,non_lazy_symbol_pointers
+L__gnutls_x86_cpuid_s$non_lazy_ptr:
+.indirect_symbol __gnutls_x86_cpuid_s
+.long 0
+.comm __gnutls_x86_cpuid_s,16,2
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/sha512-ssse3-x86.s b/lib/accelerated/x86/macosx/sha512-ssse3-x86.s
new file mode 100644
index 0000000000..cb097f16c8
--- /dev/null
+++ b/lib/accelerated/x86/macosx/sha512-ssse3-x86.s
@@ -0,0 +1,604 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.file "sha512-586.s"
+.text
+.globl _sha512_block_data_order
+.align 4
+_sha512_block_data_order:
+L_sha512_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call L000pic_point
+L000pic_point:
+ popl %ebp
+ leal L001K512-L000pic_point(%ebp),%ebp
+ subl $16,%esp
+ andl $-64,%esp
+ shll $7,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+.align 4,0x90
+L002loop_x86:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ movl 28(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ movl 44(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ movl 60(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 64(%edi),%eax
+ movl 68(%edi),%ebx
+ movl 72(%edi),%ecx
+ movl 76(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 80(%edi),%eax
+ movl 84(%edi),%ebx
+ movl 88(%edi),%ecx
+ movl 92(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 96(%edi),%eax
+ movl 100(%edi),%ebx
+ movl 104(%edi),%ecx
+ movl 108(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 112(%edi),%eax
+ movl 116(%edi),%ebx
+ movl 120(%edi),%ecx
+ movl 124(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ addl $128,%edi
+ subl $72,%esp
+ movl %edi,204(%esp)
+ leal 8(%esp),%edi
+ movl $16,%ecx
+.long 2784229001
+.align 4,0x90
+L00300_15_x86:
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $148,%dl
+ jne L00300_15_x86
+.align 4,0x90
+L00416_79_x86:
+ movl 312(%esp),%ecx
+ movl 316(%esp),%edx
+ movl %ecx,%esi
+ shrl $1,%ecx
+ movl %edx,%edi
+ shrl $1,%edx
+ movl %ecx,%eax
+ shll $24,%esi
+ movl %edx,%ebx
+ shll $24,%edi
+ xorl %esi,%ebx
+ shrl $6,%ecx
+ xorl %edi,%eax
+ shrl $6,%edx
+ xorl %ecx,%eax
+ shll $7,%esi
+ xorl %edx,%ebx
+ shll $1,%edi
+ xorl %esi,%ebx
+ shrl $1,%ecx
+ xorl %edi,%eax
+ shrl $1,%edx
+ xorl %ecx,%eax
+ shll $6,%edi
+ xorl %edx,%ebx
+ xorl %edi,%eax
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl 208(%esp),%ecx
+ movl 212(%esp),%edx
+ movl %ecx,%esi
+ shrl $6,%ecx
+ movl %edx,%edi
+ shrl $6,%edx
+ movl %ecx,%eax
+ shll $3,%esi
+ movl %edx,%ebx
+ shll $3,%edi
+ xorl %esi,%eax
+ shrl $13,%ecx
+ xorl %edi,%ebx
+ shrl $13,%edx
+ xorl %ecx,%eax
+ shll $10,%esi
+ xorl %edx,%ebx
+ shll $10,%edi
+ xorl %esi,%ebx
+ shrl $10,%ecx
+ xorl %edi,%eax
+ shrl $10,%edx
+ xorl %ecx,%ebx
+ shll $13,%edi
+ xorl %edx,%eax
+ xorl %edi,%eax
+ movl 320(%esp),%ecx
+ movl 324(%esp),%edx
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ movl 248(%esp),%esi
+ movl 252(%esp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,192(%esp)
+ movl %ebx,196(%esp)
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $23,%dl
+ jne L00416_79_x86
+ movl 840(%esp),%esi
+ movl 844(%esp),%edi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ addl 8(%esp),%eax
+ adcl 12(%esp),%ebx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ addl 16(%esp),%ecx
+ adcl 20(%esp),%edx
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ addl 24(%esp),%eax
+ adcl 28(%esp),%ebx
+ movl %eax,16(%esi)
+ movl %ebx,20(%esi)
+ addl 32(%esp),%ecx
+ adcl 36(%esp),%edx
+ movl %ecx,24(%esi)
+ movl %edx,28(%esi)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ addl 40(%esp),%eax
+ adcl 44(%esp),%ebx
+ movl %eax,32(%esi)
+ movl %ebx,36(%esi)
+ addl 48(%esp),%ecx
+ adcl 52(%esp),%edx
+ movl %ecx,40(%esi)
+ movl %edx,44(%esi)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ addl 56(%esp),%eax
+ adcl 60(%esp),%ebx
+ movl %eax,48(%esi)
+ movl %ebx,52(%esi)
+ addl 64(%esp),%ecx
+ adcl 68(%esp),%edx
+ movl %ecx,56(%esi)
+ movl %edx,60(%esi)
+ addl $840,%esp
+ subl $640,%ebp
+ cmpl 8(%esp),%edi
+ jb L002loop_x86
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
+L001K512:
+.long 3609767458,1116352408
+.long 602891725,1899447441
+.long 3964484399,3049323471
+.long 2173295548,3921009573
+.long 4081628472,961987163
+.long 3053834265,1508970993
+.long 2937671579,2453635748
+.long 3664609560,2870763221
+.long 2734883394,3624381080
+.long 1164996542,310598401
+.long 1323610764,607225278
+.long 3590304994,1426881987
+.long 4068182383,1925078388
+.long 991336113,2162078206
+.long 633803317,2614888103
+.long 3479774868,3248222580
+.long 2666613458,3835390401
+.long 944711139,4022224774
+.long 2341262773,264347078
+.long 2007800933,604807628
+.long 1495990901,770255983
+.long 1856431235,1249150122
+.long 3175218132,1555081692
+.long 2198950837,1996064986
+.long 3999719339,2554220882
+.long 766784016,2821834349
+.long 2566594879,2952996808
+.long 3203337956,3210313671
+.long 1034457026,3336571891
+.long 2466948901,3584528711
+.long 3758326383,113926993
+.long 168717936,338241895
+.long 1188179964,666307205
+.long 1546045734,773529912
+.long 1522805485,1294757372
+.long 2643833823,1396182291
+.long 2343527390,1695183700
+.long 1014477480,1986661051
+.long 1206759142,2177026350
+.long 344077627,2456956037
+.long 1290863460,2730485921
+.long 3158454273,2820302411
+.long 3505952657,3259730800
+.long 106217008,3345764771
+.long 3606008344,3516065817
+.long 1432725776,3600352804
+.long 1467031594,4094571909
+.long 851169720,275423344
+.long 3100823752,430227734
+.long 1363258195,506948616
+.long 3750685593,659060556
+.long 3785050280,883997877
+.long 3318307427,958139571
+.long 3812723403,1322822218
+.long 2003034995,1537002063
+.long 3602036899,1747873779
+.long 1575990012,1955562222
+.long 1125592928,2024104815
+.long 2716904306,2227730452
+.long 442776044,2361852424
+.long 593698344,2428436474
+.long 3733110249,2756734187
+.long 2999351573,3204031479
+.long 3815920427,3329325298
+.long 3928383900,3391569614
+.long 566280711,3515267271
+.long 3454069534,3940187606
+.long 4000239992,4118630271
+.long 1914138554,116418474
+.long 2731055270,174292421
+.long 3203993006,289380356
+.long 320620315,460393269
+.long 587496836,685471733
+.long 1086792851,852142971
+.long 365543100,1017036298
+.long 2618297676,1126000580
+.long 3409855158,1288033470
+.long 4234509866,1501505948
+.long 987167468,1607167915
+.long 1246189591,1816402316
+.long 67438087,66051
+.long 202182159,134810123
+.byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/macosx/sha512-ssse3-x86_64.s b/lib/accelerated/x86/macosx/sha512-ssse3-x86_64.s
new file mode 100644
index 0000000000..a845708030
--- /dev/null
+++ b/lib/accelerated/x86/macosx/sha512-ssse3-x86_64.s
@@ -0,0 +1,2881 @@
+# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain copyright notices,
+# this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials
+# provided with the distribution.
+#
+# * Neither the name of Andy Polyakov nor the names of its
+# copyright holder and contributors may be used to endorse or
+# promote products derived from this software without specific
+# prior written permission.
+#
+# ALTERNATIVELY, provided that this notice is retained in full, this
+# product may be distributed under the terms of the GNU General Public
+# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
+# those given above.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# *** This file is auto-generated ***
+#
+.text
+
+
+.globl _sha256_block_data_order
+
+.p2align 4
+_sha256_block_data_order:
+ leaq __gnutls_x86_cpuid_s(%rip),%r11
+ movl 0(%r11),%r9d
+ movl 4(%r11),%r10d
+ movl 8(%r11),%r11d
+ testl $512,%r10d
+ jnz L$ssse3_shortcut
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp,%r11
+ shlq $4,%rdx
+ subq $64+32,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+L$prologue:
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+ jmp L$loop
+
+.p2align 4
+L$loop:
+ movl %ebx,%edi
+ leaq K256(%rip),%rbp
+ xorl %ecx,%edi
+ movl 0(%rsi),%r12d
+ movl %r8d,%r13d
+ movl %eax,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,0(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r11d
+ movl 4(%rsi),%r12d
+ movl %edx,%r13d
+ movl %r11d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,4(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r10d
+ movl 8(%rsi),%r12d
+ movl %ecx,%r13d
+ movl %r10d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,8(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r9d
+ movl 12(%rsi),%r12d
+ movl %ebx,%r13d
+ movl %r9d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,12(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%r8d
+ movl 16(%rsi),%r12d
+ movl %eax,%r13d
+ movl %r8d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,16(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%edx
+ movl 20(%rsi),%r12d
+ movl %r11d,%r13d
+ movl %edx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,20(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ecx
+ movl 24(%rsi),%r12d
+ movl %r10d,%r13d
+ movl %ecx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,24(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ebx
+ movl 28(%rsi),%r12d
+ movl %r9d,%r13d
+ movl %ebx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,28(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%eax
+ movl 32(%rsi),%r12d
+ movl %r8d,%r13d
+ movl %eax,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,32(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r11d
+ movl 36(%rsi),%r12d
+ movl %edx,%r13d
+ movl %r11d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,36(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r10d
+ movl 40(%rsi),%r12d
+ movl %ecx,%r13d
+ movl %r10d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,40(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%r9d
+ movl 44(%rsi),%r12d
+ movl %ebx,%r13d
+ movl %r9d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,44(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ addl %r14d,%r8d
+ movl 48(%rsi),%r12d
+ movl %eax,%r13d
+ movl %r8d,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,48(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%edx
+ movl 52(%rsi),%r12d
+ movl %r11d,%r13d
+ movl %edx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,52(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ecx
+ movl 56(%rsi),%r12d
+ movl %r10d,%r13d
+ movl %ecx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,56(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ addl %r14d,%ebx
+ movl 60(%rsi),%r12d
+ movl %r9d,%r13d
+ movl %ebx,%r14d
+ bswapl %r12d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,60(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ jmp L$rounds_16_xx
+.p2align 4
+L$rounds_16_xx:
+ movl 4(%rsp),%r13d
+ movl 56(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%eax
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 36(%rsp),%r12d
+
+ addl 0(%rsp),%r12d
+ movl %r8d,%r13d
+ addl %r15d,%r12d
+ movl %eax,%r14d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,0(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ movl 8(%rsp),%r13d
+ movl 60(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r11d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 40(%rsp),%r12d
+
+ addl 4(%rsp),%r12d
+ movl %edx,%r13d
+ addl %edi,%r12d
+ movl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,4(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ movl 12(%rsp),%r13d
+ movl 0(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r10d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 44(%rsp),%r12d
+
+ addl 8(%rsp),%r12d
+ movl %ecx,%r13d
+ addl %r15d,%r12d
+ movl %r10d,%r14d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,8(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ movl 16(%rsp),%r13d
+ movl 4(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r9d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 48(%rsp),%r12d
+
+ addl 12(%rsp),%r12d
+ movl %ebx,%r13d
+ addl %edi,%r12d
+ movl %r9d,%r14d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,12(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ movl 20(%rsp),%r13d
+ movl 8(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r8d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 52(%rsp),%r12d
+
+ addl 16(%rsp),%r12d
+ movl %eax,%r13d
+ addl %r15d,%r12d
+ movl %r8d,%r14d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,16(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ movl 24(%rsp),%r13d
+ movl 12(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%edx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 56(%rsp),%r12d
+
+ addl 20(%rsp),%r12d
+ movl %r11d,%r13d
+ addl %edi,%r12d
+ movl %edx,%r14d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,20(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ movl 28(%rsp),%r13d
+ movl 16(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ecx
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 60(%rsp),%r12d
+
+ addl 24(%rsp),%r12d
+ movl %r10d,%r13d
+ addl %r15d,%r12d
+ movl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,24(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ movl 32(%rsp),%r13d
+ movl 20(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ebx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 0(%rsp),%r12d
+
+ addl 28(%rsp),%r12d
+ movl %r9d,%r13d
+ addl %edi,%r12d
+ movl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,28(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ movl 36(%rsp),%r13d
+ movl 24(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%eax
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 4(%rsp),%r12d
+
+ addl 32(%rsp),%r12d
+ movl %r8d,%r13d
+ addl %r15d,%r12d
+ movl %eax,%r14d
+ rorl $14,%r13d
+ movl %r9d,%r15d
+
+ xorl %r8d,%r13d
+ rorl $9,%r14d
+ xorl %r10d,%r15d
+
+ movl %r12d,32(%rsp)
+ xorl %eax,%r14d
+ andl %r8d,%r15d
+
+ rorl $5,%r13d
+ addl %r11d,%r12d
+ xorl %r10d,%r15d
+
+ rorl $11,%r14d
+ xorl %r8d,%r13d
+ addl %r15d,%r12d
+
+ movl %eax,%r15d
+ addl (%rbp),%r12d
+ xorl %eax,%r14d
+
+ xorl %ebx,%r15d
+ rorl $6,%r13d
+ movl %ebx,%r11d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r11d
+ addl %r12d,%edx
+ addl %r12d,%r11d
+
+ leaq 4(%rbp),%rbp
+ movl 40(%rsp),%r13d
+ movl 28(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r11d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 8(%rsp),%r12d
+
+ addl 36(%rsp),%r12d
+ movl %edx,%r13d
+ addl %edi,%r12d
+ movl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r8d,%edi
+
+ xorl %edx,%r13d
+ rorl $9,%r14d
+ xorl %r9d,%edi
+
+ movl %r12d,36(%rsp)
+ xorl %r11d,%r14d
+ andl %edx,%edi
+
+ rorl $5,%r13d
+ addl %r10d,%r12d
+ xorl %r9d,%edi
+
+ rorl $11,%r14d
+ xorl %edx,%r13d
+ addl %edi,%r12d
+
+ movl %r11d,%edi
+ addl (%rbp),%r12d
+ xorl %r11d,%r14d
+
+ xorl %eax,%edi
+ rorl $6,%r13d
+ movl %eax,%r10d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r10d
+ addl %r12d,%ecx
+ addl %r12d,%r10d
+
+ leaq 4(%rbp),%rbp
+ movl 44(%rsp),%r13d
+ movl 32(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r10d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 12(%rsp),%r12d
+
+ addl 40(%rsp),%r12d
+ movl %ecx,%r13d
+ addl %r15d,%r12d
+ movl %r10d,%r14d
+ rorl $14,%r13d
+ movl %edx,%r15d
+
+ xorl %ecx,%r13d
+ rorl $9,%r14d
+ xorl %r8d,%r15d
+
+ movl %r12d,40(%rsp)
+ xorl %r10d,%r14d
+ andl %ecx,%r15d
+
+ rorl $5,%r13d
+ addl %r9d,%r12d
+ xorl %r8d,%r15d
+
+ rorl $11,%r14d
+ xorl %ecx,%r13d
+ addl %r15d,%r12d
+
+ movl %r10d,%r15d
+ addl (%rbp),%r12d
+ xorl %r10d,%r14d
+
+ xorl %r11d,%r15d
+ rorl $6,%r13d
+ movl %r11d,%r9d
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%r9d
+ addl %r12d,%ebx
+ addl %r12d,%r9d
+
+ leaq 4(%rbp),%rbp
+ movl 48(%rsp),%r13d
+ movl 36(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r9d
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 16(%rsp),%r12d
+
+ addl 44(%rsp),%r12d
+ movl %ebx,%r13d
+ addl %edi,%r12d
+ movl %r9d,%r14d
+ rorl $14,%r13d
+ movl %ecx,%edi
+
+ xorl %ebx,%r13d
+ rorl $9,%r14d
+ xorl %edx,%edi
+
+ movl %r12d,44(%rsp)
+ xorl %r9d,%r14d
+ andl %ebx,%edi
+
+ rorl $5,%r13d
+ addl %r8d,%r12d
+ xorl %edx,%edi
+
+ rorl $11,%r14d
+ xorl %ebx,%r13d
+ addl %edi,%r12d
+
+ movl %r9d,%edi
+ addl (%rbp),%r12d
+ xorl %r9d,%r14d
+
+ xorl %r10d,%edi
+ rorl $6,%r13d
+ movl %r10d,%r8d
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%r8d
+ addl %r12d,%eax
+ addl %r12d,%r8d
+
+ leaq 20(%rbp),%rbp
+ movl 52(%rsp),%r13d
+ movl 40(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%r8d
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 20(%rsp),%r12d
+
+ addl 48(%rsp),%r12d
+ movl %eax,%r13d
+ addl %r15d,%r12d
+ movl %r8d,%r14d
+ rorl $14,%r13d
+ movl %ebx,%r15d
+
+ xorl %eax,%r13d
+ rorl $9,%r14d
+ xorl %ecx,%r15d
+
+ movl %r12d,48(%rsp)
+ xorl %r8d,%r14d
+ andl %eax,%r15d
+
+ rorl $5,%r13d
+ addl %edx,%r12d
+ xorl %ecx,%r15d
+
+ rorl $11,%r14d
+ xorl %eax,%r13d
+ addl %r15d,%r12d
+
+ movl %r8d,%r15d
+ addl (%rbp),%r12d
+ xorl %r8d,%r14d
+
+ xorl %r9d,%r15d
+ rorl $6,%r13d
+ movl %r9d,%edx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%edx
+ addl %r12d,%r11d
+ addl %r12d,%edx
+
+ leaq 4(%rbp),%rbp
+ movl 56(%rsp),%r13d
+ movl 44(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%edx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 24(%rsp),%r12d
+
+ addl 52(%rsp),%r12d
+ movl %r11d,%r13d
+ addl %edi,%r12d
+ movl %edx,%r14d
+ rorl $14,%r13d
+ movl %eax,%edi
+
+ xorl %r11d,%r13d
+ rorl $9,%r14d
+ xorl %ebx,%edi
+
+ movl %r12d,52(%rsp)
+ xorl %edx,%r14d
+ andl %r11d,%edi
+
+ rorl $5,%r13d
+ addl %ecx,%r12d
+ xorl %ebx,%edi
+
+ rorl $11,%r14d
+ xorl %r11d,%r13d
+ addl %edi,%r12d
+
+ movl %edx,%edi
+ addl (%rbp),%r12d
+ xorl %edx,%r14d
+
+ xorl %r8d,%edi
+ rorl $6,%r13d
+ movl %r8d,%ecx
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%ecx
+ addl %r12d,%r10d
+ addl %r12d,%ecx
+
+ leaq 4(%rbp),%rbp
+ movl 60(%rsp),%r13d
+ movl 48(%rsp),%r15d
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ecx
+ movl %r15d,%r14d
+ rorl $2,%r15d
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%r15d
+ shrl $10,%r14d
+
+ rorl $17,%r15d
+ xorl %r13d,%r12d
+ xorl %r14d,%r15d
+ addl 28(%rsp),%r12d
+
+ addl 56(%rsp),%r12d
+ movl %r10d,%r13d
+ addl %r15d,%r12d
+ movl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r11d,%r15d
+
+ xorl %r10d,%r13d
+ rorl $9,%r14d
+ xorl %eax,%r15d
+
+ movl %r12d,56(%rsp)
+ xorl %ecx,%r14d
+ andl %r10d,%r15d
+
+ rorl $5,%r13d
+ addl %ebx,%r12d
+ xorl %eax,%r15d
+
+ rorl $11,%r14d
+ xorl %r10d,%r13d
+ addl %r15d,%r12d
+
+ movl %ecx,%r15d
+ addl (%rbp),%r12d
+ xorl %ecx,%r14d
+
+ xorl %edx,%r15d
+ rorl $6,%r13d
+ movl %edx,%ebx
+
+ andl %r15d,%edi
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %edi,%ebx
+ addl %r12d,%r9d
+ addl %r12d,%ebx
+
+ leaq 4(%rbp),%rbp
+ movl 0(%rsp),%r13d
+ movl 52(%rsp),%edi
+
+ movl %r13d,%r12d
+ rorl $11,%r13d
+ addl %r14d,%ebx
+ movl %edi,%r14d
+ rorl $2,%edi
+
+ xorl %r12d,%r13d
+ shrl $3,%r12d
+ rorl $7,%r13d
+ xorl %r14d,%edi
+ shrl $10,%r14d
+
+ rorl $17,%edi
+ xorl %r13d,%r12d
+ xorl %r14d,%edi
+ addl 32(%rsp),%r12d
+
+ addl 60(%rsp),%r12d
+ movl %r9d,%r13d
+ addl %edi,%r12d
+ movl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r10d,%edi
+
+ xorl %r9d,%r13d
+ rorl $9,%r14d
+ xorl %r11d,%edi
+
+ movl %r12d,60(%rsp)
+ xorl %ebx,%r14d
+ andl %r9d,%edi
+
+ rorl $5,%r13d
+ addl %eax,%r12d
+ xorl %r11d,%edi
+
+ rorl $11,%r14d
+ xorl %r9d,%r13d
+ addl %edi,%r12d
+
+ movl %ebx,%edi
+ addl (%rbp),%r12d
+ xorl %ebx,%r14d
+
+ xorl %ecx,%edi
+ rorl $6,%r13d
+ movl %ecx,%eax
+
+ andl %edi,%r15d
+ rorl $2,%r14d
+ addl %r13d,%r12d
+
+ xorl %r15d,%eax
+ addl %r12d,%r8d
+ addl %r12d,%eax
+
+ leaq 20(%rbp),%rbp
+ cmpb $0,3(%rbp)
+ jnz L$rounds_16_xx
+
+ movq 64+0(%rsp),%rdi
+ addl %r14d,%eax
+ leaq 64(%rsi),%rsi
+
+ addl 0(%rdi),%eax
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb L$loop
+
+ movq 64+24(%rsp),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+L$epilogue:
+ .byte 0xf3,0xc3
+
+.p2align 6
+
+K256:
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
+
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
+.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
+.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
+.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+
+.p2align 6
+sha256_block_data_order_ssse3:
+L$ssse3_shortcut:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp,%r11
+ shlq $4,%rdx
+ subq $96,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+L$prologue_ssse3:
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+
+
+ jmp L$loop_ssse3
+.p2align 4
+L$loop_ssse3:
+ movdqa K256+512(%rip),%xmm7
+ movdqu 0(%rsi),%xmm0
+ movdqu 16(%rsi),%xmm1
+ movdqu 32(%rsi),%xmm2
+ movdqu 48(%rsi),%xmm3
+.byte 102,15,56,0,199
+ leaq K256(%rip),%rbp
+.byte 102,15,56,0,207
+ movdqa 0(%rbp),%xmm4
+.byte 102,15,56,0,215
+ movdqa 32(%rbp),%xmm5
+ paddd %xmm0,%xmm4
+ movdqa 64(%rbp),%xmm6
+.byte 102,15,56,0,223
+ movdqa 96(%rbp),%xmm7
+ paddd %xmm1,%xmm5
+ paddd %xmm2,%xmm6
+ paddd %xmm3,%xmm7
+ movdqa %xmm4,0(%rsp)
+ movl %eax,%r14d
+ movdqa %xmm5,16(%rsp)
+ movl %ebx,%edi
+ movdqa %xmm6,32(%rsp)
+ xorl %ecx,%edi
+ movdqa %xmm7,48(%rsp)
+ movl %r8d,%r13d
+ jmp L$ssse3_00_47
+
+.p2align 4
+L$ssse3_00_47:
+ subq $-32*4,%rbp
+ rorl $14,%r13d
+ movdqa %xmm1,%xmm4
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ movdqa %xmm3,%xmm7
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+.byte 102,15,58,15,224,4
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+.byte 102,15,58,15,250,4
+ addl 0(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ paddd %xmm7,%xmm0
+ rorl $2,%r14d
+ addl %r11d,%edx
+ psrld $7,%xmm6
+ addl %edi,%r11d
+ movl %edx,%r13d
+ pshufd $250,%xmm3,%xmm7
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %r11d,%r14d
+ pxor %xmm5,%xmm4
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ pslld $11,%xmm5
+ addl 4(%rsp),%r10d
+ movl %r11d,%edi
+ pxor %xmm6,%xmm4
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ paddd %xmm4,%xmm0
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ psrlq $17,%xmm6
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %ecx,%r13d
+ addl 8(%rsp),%r9d
+ movl %r10d,%r15d
+ psrldq $8,%xmm7
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ paddd %xmm7,%xmm0
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ pshufd $80,%xmm0,%xmm7
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ movdqa %xmm7,%xmm6
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ psrld $10,%xmm7
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ psrlq $2,%xmm6
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 12(%rsp),%r8d
+ pxor %xmm6,%xmm7
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ movdqa 0(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ paddd %xmm7,%xmm0
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ paddd %xmm0,%xmm6
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ movdqa %xmm6,0(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm2,%xmm4
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ movdqa %xmm0,%xmm7
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+.byte 102,15,58,15,225,4
+ andl %eax,%r12d
+ xorl %eax,%r13d
+.byte 102,15,58,15,251,4
+ addl 16(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ paddd %xmm7,%xmm1
+ rorl $2,%r14d
+ addl %edx,%r11d
+ psrld $7,%xmm6
+ addl %edi,%edx
+ movl %r11d,%r13d
+ pshufd $250,%xmm0,%xmm7
+ addl %edx,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%edx
+ movl %eax,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %edx,%r14d
+ pxor %xmm5,%xmm4
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ pslld $11,%xmm5
+ addl 20(%rsp),%ecx
+ movl %edx,%edi
+ pxor %xmm6,%xmm4
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ paddd %xmm4,%xmm1
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ psrlq $17,%xmm6
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %r10d,%r13d
+ addl 24(%rsp),%ebx
+ movl %ecx,%r15d
+ psrldq $8,%xmm7
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ paddd %xmm7,%xmm1
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ pshufd $80,%xmm1,%xmm7
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ movdqa %xmm7,%xmm6
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ psrld $10,%xmm7
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ psrlq $2,%xmm6
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 28(%rsp),%eax
+ pxor %xmm6,%xmm7
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ movdqa 32(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ paddd %xmm7,%xmm1
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ paddd %xmm1,%xmm6
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movdqa %xmm6,16(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm3,%xmm4
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ movdqa %xmm1,%xmm7
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+.byte 102,15,58,15,226,4
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+.byte 102,15,58,15,248,4
+ addl 32(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ paddd %xmm7,%xmm2
+ rorl $2,%r14d
+ addl %r11d,%edx
+ psrld $7,%xmm6
+ addl %edi,%r11d
+ movl %edx,%r13d
+ pshufd $250,%xmm1,%xmm7
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %r11d,%r14d
+ pxor %xmm5,%xmm4
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ pslld $11,%xmm5
+ addl 36(%rsp),%r10d
+ movl %r11d,%edi
+ pxor %xmm6,%xmm4
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ paddd %xmm4,%xmm2
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ psrlq $17,%xmm6
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %ecx,%r13d
+ addl 40(%rsp),%r9d
+ movl %r10d,%r15d
+ psrldq $8,%xmm7
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ paddd %xmm7,%xmm2
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ pshufd $80,%xmm2,%xmm7
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ movdqa %xmm7,%xmm6
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ psrld $10,%xmm7
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ psrlq $2,%xmm6
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 44(%rsp),%r8d
+ pxor %xmm6,%xmm7
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ movdqa 64(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ paddd %xmm7,%xmm2
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ paddd %xmm2,%xmm6
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ movdqa %xmm6,32(%rsp)
+ rorl $14,%r13d
+ movdqa %xmm0,%xmm4
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ movdqa %xmm2,%xmm7
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+.byte 102,15,58,15,227,4
+ andl %eax,%r12d
+ xorl %eax,%r13d
+.byte 102,15,58,15,249,4
+ addl 48(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm4,%xmm5
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ movdqa %xmm4,%xmm6
+ rorl $6,%r13d
+ andl %r15d,%edi
+ psrld $3,%xmm4
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ paddd %xmm7,%xmm3
+ rorl $2,%r14d
+ addl %edx,%r11d
+ psrld $7,%xmm6
+ addl %edi,%edx
+ movl %r11d,%r13d
+ pshufd $250,%xmm2,%xmm7
+ addl %edx,%r14d
+ rorl $14,%r13d
+ pslld $14,%xmm5
+ movl %r14d,%edx
+ movl %eax,%r12d
+ pxor %xmm6,%xmm4
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ psrld $11,%xmm6
+ xorl %edx,%r14d
+ pxor %xmm5,%xmm4
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ pslld $11,%xmm5
+ addl 52(%rsp),%ecx
+ movl %edx,%edi
+ pxor %xmm6,%xmm4
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ movdqa %xmm7,%xmm6
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ pxor %xmm5,%xmm4
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ psrld $10,%xmm7
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ paddd %xmm4,%xmm3
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ psrlq $17,%xmm6
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ pxor %xmm6,%xmm7
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ psrlq $2,%xmm6
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ pxor %xmm6,%xmm7
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ pshufd $128,%xmm7,%xmm7
+ xorl %r10d,%r13d
+ addl 56(%rsp),%ebx
+ movl %ecx,%r15d
+ psrldq $8,%xmm7
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ paddd %xmm7,%xmm3
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ pshufd $80,%xmm3,%xmm7
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ movdqa %xmm7,%xmm6
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ psrld $10,%xmm7
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ psrlq $17,%xmm6
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ pxor %xmm6,%xmm7
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ psrlq $2,%xmm6
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 60(%rsp),%eax
+ pxor %xmm6,%xmm7
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ pshufd $8,%xmm7,%xmm7
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ movdqa 96(%rbp),%xmm6
+ rorl $6,%r13d
+ andl %edi,%r15d
+ pslldq $8,%xmm7
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ paddd %xmm7,%xmm3
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ paddd %xmm3,%xmm6
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movdqa %xmm6,48(%rsp)
+ cmpb $0,131(%rbp)
+ jne L$ssse3_00_47
+ rorl $14,%r13d
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+ addl 0(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ rorl $2,%r14d
+ addl %r11d,%edx
+ addl %edi,%r11d
+ movl %edx,%r13d
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ xorl %r11d,%r14d
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ addl 4(%rsp),%r10d
+ movl %r11d,%edi
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ xorl %ecx,%r13d
+ addl 8(%rsp),%r9d
+ movl %r10d,%r15d
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 12(%rsp),%r8d
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+ andl %eax,%r12d
+ xorl %eax,%r13d
+ addl 16(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ rorl $2,%r14d
+ addl %edx,%r11d
+ addl %edi,%edx
+ movl %r11d,%r13d
+ addl %edx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%edx
+ movl %eax,%r12d
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ xorl %edx,%r14d
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ addl 20(%rsp),%ecx
+ movl %edx,%edi
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ xorl %r10d,%r13d
+ addl 24(%rsp),%ebx
+ movl %ecx,%r15d
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 28(%rsp),%eax
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ rorl $14,%r13d
+ movl %r14d,%eax
+ movl %r9d,%r12d
+ rorl $9,%r14d
+ xorl %r8d,%r13d
+ xorl %r10d,%r12d
+ rorl $5,%r13d
+ xorl %eax,%r14d
+ andl %r8d,%r12d
+ xorl %r8d,%r13d
+ addl 32(%rsp),%r11d
+ movl %eax,%r15d
+ xorl %r10d,%r12d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+ addl %r12d,%r11d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %eax,%r14d
+ addl %r13d,%r11d
+ xorl %ebx,%edi
+ rorl $2,%r14d
+ addl %r11d,%edx
+ addl %edi,%r11d
+ movl %edx,%r13d
+ addl %r11d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r11d
+ movl %r8d,%r12d
+ rorl $9,%r14d
+ xorl %edx,%r13d
+ xorl %r9d,%r12d
+ rorl $5,%r13d
+ xorl %r11d,%r14d
+ andl %edx,%r12d
+ xorl %edx,%r13d
+ addl 36(%rsp),%r10d
+ movl %r11d,%edi
+ xorl %r9d,%r12d
+ rorl $11,%r14d
+ xorl %eax,%edi
+ addl %r12d,%r10d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r11d,%r14d
+ addl %r13d,%r10d
+ xorl %eax,%r15d
+ rorl $2,%r14d
+ addl %r10d,%ecx
+ addl %r15d,%r10d
+ movl %ecx,%r13d
+ addl %r10d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r10d
+ movl %edx,%r12d
+ rorl $9,%r14d
+ xorl %ecx,%r13d
+ xorl %r8d,%r12d
+ rorl $5,%r13d
+ xorl %r10d,%r14d
+ andl %ecx,%r12d
+ xorl %ecx,%r13d
+ addl 40(%rsp),%r9d
+ movl %r10d,%r15d
+ xorl %r8d,%r12d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+ addl %r12d,%r9d
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r10d,%r14d
+ addl %r13d,%r9d
+ xorl %r11d,%edi
+ rorl $2,%r14d
+ addl %r9d,%ebx
+ addl %edi,%r9d
+ movl %ebx,%r13d
+ addl %r9d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r9d
+ movl %ecx,%r12d
+ rorl $9,%r14d
+ xorl %ebx,%r13d
+ xorl %edx,%r12d
+ rorl $5,%r13d
+ xorl %r9d,%r14d
+ andl %ebx,%r12d
+ xorl %ebx,%r13d
+ addl 44(%rsp),%r8d
+ movl %r9d,%edi
+ xorl %edx,%r12d
+ rorl $11,%r14d
+ xorl %r10d,%edi
+ addl %r12d,%r8d
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %r9d,%r14d
+ addl %r13d,%r8d
+ xorl %r10d,%r15d
+ rorl $2,%r14d
+ addl %r8d,%eax
+ addl %r15d,%r8d
+ movl %eax,%r13d
+ addl %r8d,%r14d
+ rorl $14,%r13d
+ movl %r14d,%r8d
+ movl %ebx,%r12d
+ rorl $9,%r14d
+ xorl %eax,%r13d
+ xorl %ecx,%r12d
+ rorl $5,%r13d
+ xorl %r8d,%r14d
+ andl %eax,%r12d
+ xorl %eax,%r13d
+ addl 48(%rsp),%edx
+ movl %r8d,%r15d
+ xorl %ecx,%r12d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+ addl %r12d,%edx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %r8d,%r14d
+ addl %r13d,%edx
+ xorl %r9d,%edi
+ rorl $2,%r14d
+ addl %edx,%r11d
+ addl %edi,%edx
+ movl %r11d,%r13d
+ addl %edx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%edx
+ movl %eax,%r12d
+ rorl $9,%r14d
+ xorl %r11d,%r13d
+ xorl %ebx,%r12d
+ rorl $5,%r13d
+ xorl %edx,%r14d
+ andl %r11d,%r12d
+ xorl %r11d,%r13d
+ addl 52(%rsp),%ecx
+ movl %edx,%edi
+ xorl %ebx,%r12d
+ rorl $11,%r14d
+ xorl %r8d,%edi
+ addl %r12d,%ecx
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %edx,%r14d
+ addl %r13d,%ecx
+ xorl %r8d,%r15d
+ rorl $2,%r14d
+ addl %ecx,%r10d
+ addl %r15d,%ecx
+ movl %r10d,%r13d
+ addl %ecx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ecx
+ movl %r11d,%r12d
+ rorl $9,%r14d
+ xorl %r10d,%r13d
+ xorl %eax,%r12d
+ rorl $5,%r13d
+ xorl %ecx,%r14d
+ andl %r10d,%r12d
+ xorl %r10d,%r13d
+ addl 56(%rsp),%ebx
+ movl %ecx,%r15d
+ xorl %eax,%r12d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+ addl %r12d,%ebx
+ rorl $6,%r13d
+ andl %r15d,%edi
+ xorl %ecx,%r14d
+ addl %r13d,%ebx
+ xorl %edx,%edi
+ rorl $2,%r14d
+ addl %ebx,%r9d
+ addl %edi,%ebx
+ movl %r9d,%r13d
+ addl %ebx,%r14d
+ rorl $14,%r13d
+ movl %r14d,%ebx
+ movl %r10d,%r12d
+ rorl $9,%r14d
+ xorl %r9d,%r13d
+ xorl %r11d,%r12d
+ rorl $5,%r13d
+ xorl %ebx,%r14d
+ andl %r9d,%r12d
+ xorl %r9d,%r13d
+ addl 60(%rsp),%eax
+ movl %ebx,%edi
+ xorl %r11d,%r12d
+ rorl $11,%r14d
+ xorl %ecx,%edi
+ addl %r12d,%eax
+ rorl $6,%r13d
+ andl %edi,%r15d
+ xorl %ebx,%r14d
+ addl %r13d,%eax
+ xorl %ecx,%r15d
+ rorl $2,%r14d
+ addl %eax,%r8d
+ addl %r15d,%eax
+ movl %r8d,%r13d
+ addl %eax,%r14d
+ movq 64+0(%rsp),%rdi
+ movl %r14d,%eax
+
+ addl 0(%rdi),%eax
+ leaq 64(%rsi),%rsi
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb L$loop_ssse3
+
+ movq 64+24(%rsp),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+L$epilogue_ssse3:
+ .byte 0xf3,0xc3
+
+
+.section .note.GNU-stack,"",%progbits
diff --git a/lib/accelerated/x86/sha-padlock.h b/lib/accelerated/x86/sha-padlock.h
index 05af543075..30d3ccec28 100644
--- a/lib/accelerated/x86/sha-padlock.h
+++ b/lib/accelerated/x86/sha-padlock.h
@@ -30,4 +30,7 @@ extern const struct nettle_hash padlock_sha256;
extern const struct nettle_hash padlock_sha384;
extern const struct nettle_hash padlock_sha512;
+extern const gnutls_crypto_mac_st hmac_sha_padlock_nano_struct;
+extern const gnutls_crypto_digest_st sha_padlock_nano_struct;
+
#endif
diff --git a/lib/accelerated/x86/sha-x86.c b/lib/accelerated/x86/sha-x86.c
new file mode 100644
index 0000000000..054dc6ce73
--- /dev/null
+++ b/lib/accelerated/x86/sha-x86.c
@@ -0,0 +1,365 @@
+/*
+ * Copyright (C) 2011-2012 Free Software Foundation, Inc.
+ *
+ * Author: Nikos Mavrogiannopoulos
+ *
+ * This file is part of GnuTLS.
+ *
+ * The GnuTLS is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>
+ *
+ */
+
+#include <gnutls_errors.h>
+#include <gnutls_int.h>
+#include <gnutls/crypto.h>
+#include <aes-x86.h>
+#include <nettle/sha.h>
+#include <nettle/macros.h>
+#include <nettle/nettle-meta.h>
+#include <sha-x86.h>
+#include <x86.h>
+
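+/* Entry points of the generated assembly above; the last argument is the
+ * number of whole input blocks (64 bytes each, 128 bytes for SHA-512). */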
+void sha1_block_data_order(void *c, const void *p, size_t len);
+void sha256_block_data_order(void *c, const void *p, size_t len);
+void sha512_block_data_order(void *c, const void *p, size_t len);
+
+typedef void (*update_func) (void *, unsigned, const uint8_t *);
+typedef void (*digest_func) (void *, unsigned, uint8_t *);
+typedef void (*set_key_func) (void *, unsigned, const uint8_t *);
+typedef void (*init_func) (void *);
+
+struct x86_hash_ctx {
+ union {
+ struct sha1_ctx sha1;
+ struct sha224_ctx sha224;
+ struct sha256_ctx sha256;
+#ifdef ENABLE_SHA512
+ struct sha384_ctx sha384;
+ struct sha512_ctx sha512;
+#endif
+ } ctx;
+ void *ctx_ptr;
+ gnutls_digest_algorithm_t algo;
+ size_t length;
+ update_func update;
+ digest_func digest;
+ init_func init;
+};
+
+static int
+wrap_x86_hash_update(void *_ctx, const void *text, size_t textsize)
+{
+ struct x86_hash_ctx *ctx = _ctx;
+
+ ctx->update(ctx->ctx_ptr, textsize, text);
+
+ return GNUTLS_E_SUCCESS;
+}
+
+static void wrap_x86_hash_deinit(void *hd)
+{
+ gnutls_free(hd);
+}
+
+void x86_sha1_update(struct sha1_ctx *ctx, size_t length,
+ const uint8_t * data)
+{
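+	/* Shadow context with the OpenSSL-style layout that sha1_block_data_order()
+	 * expects; the nettle state is copied in before the assembly runs and back
+	 * out afterwards. */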
+ struct {
+ uint32_t h0, h1, h2, h3, h4;
+ uint32_t Nl, Nh;
+ uint32_t data[16];
+ unsigned int num;
+ } octx;
+ size_t res;
+ unsigned t2, i;
+
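+	/* Drain any partial block already buffered by nettle, hand all whole
+	 * blocks to the assembly, then buffer the remainder. */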
+ if ((res = ctx->index)) {
+ res = SHA1_DATA_SIZE - res;
+ if (length < res)
+ res = length;
+ sha1_update(ctx, res, data);
+ data += res;
+ length -= res;
+ }
+
+ octx.h0 = ctx->state[0];
+ octx.h1 = ctx->state[1];
+ octx.h2 = ctx->state[2];
+ octx.h3 = ctx->state[3];
+ octx.h4 = ctx->state[4];
+
+ memcpy(octx.data, ctx->block, SHA1_DATA_SIZE);
+ octx.num = ctx->index;
+
+ res = length % SHA1_DATA_SIZE;
+ length -= res;
+
+ if (length > 0) {
+
+ t2 = length / SHA1_DATA_SIZE;
+
+ sha1_block_data_order(&octx, data, t2);
+
+ for (i=0;i<t2;i++)
+ MD_INCR(ctx);
+ data += length;
+ }
+
+ ctx->state[0] = octx.h0;
+ ctx->state[1] = octx.h1;
+ ctx->state[2] = octx.h2;
+ ctx->state[3] = octx.h3;
+ ctx->state[4] = octx.h4;
+
+ memcpy(ctx->block, octx.data, octx.num);
+ ctx->index = octx.num;
+
+ if (res > 0) {
+ sha1_update(ctx, res, data);
+ }
+
+}
+
+void x86_sha256_update(struct sha256_ctx *ctx, size_t length,
+ const uint8_t * data)
+{
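+	/* Same scheme as x86_sha1_update(): a shadow context with the layout
+	 * sha256_block_data_order() expects. */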
+ struct {
+ uint32_t h[8];
+ uint32_t Nl, Nh;
+ uint32_t data[16];
+ unsigned int num;
+ unsigned md_len;
+ } octx;
+ size_t res;
+ unsigned t2, i;
+
+ if ((res = ctx->index)) {
+ res = SHA256_DATA_SIZE - res;
+ if (length < res)
+ res = length;
+ sha256_update(ctx, res, data);
+ data += res;
+ length -= res;
+ }
+
+ memcpy(octx.h, ctx->state, sizeof(octx.h));
+ memcpy(octx.data, ctx->block, SHA256_DATA_SIZE);
+ octx.num = ctx->index;
+
+ res = length % SHA256_DATA_SIZE;
+ length -= res;
+
+ if (length > 0) {
+		t2 = length / SHA256_DATA_SIZE;
+ sha256_block_data_order(&octx, data, t2);
+
+ for (i=0;i<t2;i++)
+ MD_INCR(ctx);
+ data += length;
+ }
+
+ memcpy(ctx->state, octx.h, sizeof(octx.h));
+
+ memcpy(ctx->block, octx.data, octx.num);
+ ctx->index = octx.num;
+
+ if (res > 0) {
+ sha256_update(ctx, res, data);
+ }
+}
+
+#ifdef ENABLE_SHA512
+void x86_sha512_update(struct sha512_ctx *ctx, size_t length,
+ const uint8_t * data)
+{
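+	/* Shadow context with the layout sha512_block_data_order() expects
+	 * (64-bit state words, 128-byte blocks). */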
+ struct {
+ uint64_t h[8];
+ uint64_t Nl, Nh;
+ union {
+ uint64_t d[16];
+ uint8_t p[16*8];
+ } u;
+ unsigned int num;
+ unsigned md_len;
+ } octx;
+ size_t res;
+ unsigned t2, i;
+
+ if ((res = ctx->index)) {
+ res = SHA512_DATA_SIZE - res;
+ if (length < res)
+ res = length;
+ sha512_update(ctx, res, data);
+ data += res;
+ length -= res;
+ }
+
+ memcpy(octx.h, ctx->state, sizeof(octx.h));
+ memcpy(octx.u.p, ctx->block, SHA512_DATA_SIZE);
+ octx.num = ctx->index;
+
+ res = length % SHA512_DATA_SIZE;
+ length -= res;
+
+ if (length > 0) {
+ t2 = length / SHA512_DATA_SIZE;
+ sha512_block_data_order(&octx, data, t2);
+
+ for (i=0;i<t2;i++)
+ MD_INCR(ctx);
+ data += length;
+ }
+
+ memcpy(ctx->state, octx.h, sizeof(octx.h));
+
+ memcpy(ctx->block, octx.u.p, octx.num);
+ ctx->index = octx.num;
+
+ if (res > 0) {
+ sha512_update(ctx, res, data);
+ }
+}
+#endif
+
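+/* Select the per-algorithm callbacks; SHA-1/SHA-2 updates go through the
+ * assembly-backed x86_*_update() wrappers, init and digest stay with nettle. */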
+static int _ctx_init(gnutls_digest_algorithm_t algo,
+ struct x86_hash_ctx *ctx)
+{
+ switch (algo) {
+ case GNUTLS_DIG_SHA1:
+ sha1_init(&ctx->ctx.sha1);
+ ctx->update = (update_func) x86_sha1_update;
+ ctx->digest = (digest_func) sha1_digest;
+ ctx->init = (init_func) sha1_init;
+ ctx->ctx_ptr = &ctx->ctx.sha1;
+ ctx->length = SHA1_DIGEST_SIZE;
+ break;
+ case GNUTLS_DIG_SHA224:
+ sha224_init(&ctx->ctx.sha224);
+ ctx->update = (update_func) x86_sha256_update;
+ ctx->digest = (digest_func) sha256_digest;
+ ctx->init = (init_func) sha224_init;
+ ctx->ctx_ptr = &ctx->ctx.sha224;
+ ctx->length = SHA224_DIGEST_SIZE;
+ break;
+ case GNUTLS_DIG_SHA256:
+ sha256_init(&ctx->ctx.sha256);
+ ctx->update = (update_func) x86_sha256_update;
+ ctx->digest = (digest_func) sha256_digest;
+ ctx->init = (init_func) sha256_init;
+ ctx->ctx_ptr = &ctx->ctx.sha256;
+ ctx->length = SHA256_DIGEST_SIZE;
+ break;
+#ifdef ENABLE_SHA512
+ case GNUTLS_DIG_SHA384:
+ sha384_init(&ctx->ctx.sha384);
+ ctx->update = (update_func) x86_sha512_update;
+ ctx->digest = (digest_func) sha512_digest;
+ ctx->init = (init_func) sha384_init;
+ ctx->ctx_ptr = &ctx->ctx.sha384;
+ ctx->length = SHA384_DIGEST_SIZE;
+ break;
+ case GNUTLS_DIG_SHA512:
+ sha512_init(&ctx->ctx.sha512);
+ ctx->update = (update_func) x86_sha512_update;
+ ctx->digest = (digest_func) sha512_digest;
+ ctx->init = (init_func) sha512_init;
+ ctx->ctx_ptr = &ctx->ctx.sha512;
+ ctx->length = SHA512_DIGEST_SIZE;
+ break;
+#endif
+ default:
+ gnutls_assert();
+ return GNUTLS_E_INVALID_REQUEST;
+ }
+
+ return 0;
+}
+
+
+static int wrap_x86_hash_init(gnutls_digest_algorithm_t algo, void **_ctx)
+{
+ struct x86_hash_ctx *ctx;
+ int ret;
+
+ ctx = gnutls_malloc(sizeof(struct x86_hash_ctx));
+ if (ctx == NULL) {
+ gnutls_assert();
+ return GNUTLS_E_MEMORY_ERROR;
+ }
+
+ ctx->algo = algo;
+
+ if ((ret = _ctx_init(algo, ctx)) < 0) {
+		gnutls_assert();
+		gnutls_free(ctx);
+		return ret;
+ }
+
+ *_ctx = ctx;
+
+ return 0;
+}
+
+static int
+wrap_x86_hash_output(void *src_ctx, void *digest, size_t digestsize)
+{
+ struct x86_hash_ctx *ctx;
+ ctx = src_ctx;
+
+ if (digestsize < ctx->length)
+ return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER);
+
+ ctx->digest(ctx->ctx_ptr, digestsize, digest);
+
+ return 0;
+}
+
+static int wrap_x86_hash_fast(gnutls_digest_algorithm_t algo,
+ const void *text, size_t text_size,
+ void *digest)
+{
+ struct x86_hash_ctx ctx;
+ int ret;
+
+ ret = _ctx_init(algo, &ctx);
+ if (ret < 0)
+ return gnutls_assert_val(ret);
+
+	ctx.update(ctx.ctx_ptr, text_size, text);
+	ctx.digest(ctx.ctx_ptr, ctx.length, digest);
+
+ return 0;
+}
+
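+/* nettle-compatible descriptors so the accelerated update functions can also
+ * be used wherever a struct nettle_hash is expected. */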
+const struct nettle_hash x86_sha1 =
+NN_HASH(sha1, x86_sha1_update, sha1_digest, SHA1);
+const struct nettle_hash x86_sha224 =
+NN_HASH(sha224, x86_sha256_update, sha224_digest, SHA224);
+const struct nettle_hash x86_sha256 =
+NN_HASH(sha256, x86_sha256_update, sha256_digest, SHA256);
+
+#ifdef ENABLE_SHA512
+const struct nettle_hash x86_sha384 =
+NN_HASH(sha384, x86_sha512_update, sha384_digest, SHA384);
+const struct nettle_hash x86_sha512 =
+NN_HASH(sha512, x86_sha512_update, sha512_digest, SHA512);
+#endif
+
+const gnutls_crypto_digest_st sha_x86_struct = {
+ .init = wrap_x86_hash_init,
+ .hash = wrap_x86_hash_update,
+ .output = wrap_x86_hash_output,
+ .deinit = wrap_x86_hash_deinit,
+ .fast = wrap_x86_hash_fast,
+};
diff --git a/lib/accelerated/x86/sha-x86.h b/lib/accelerated/x86/sha-x86.h
new file mode 100644
index 0000000000..a288962da5
--- /dev/null
+++ b/lib/accelerated/x86/sha-x86.h
@@ -0,0 +1,22 @@
+#ifndef SHA_X86_H
+#define SHA_X86_H
+
+#include <nettle/sha.h>
+
+/* nettle's SHA512 is faster than openssl's */
+#undef ENABLE_SHA512
+
+extern const struct nettle_hash x86_sha1;
+extern const struct nettle_hash x86_sha224;
+extern const struct nettle_hash x86_sha256;
+extern const struct nettle_hash x86_sha384;
+extern const struct nettle_hash x86_sha512;
+
+void x86_sha1_update(struct sha1_ctx *ctx, size_t length, const uint8_t * data);
+void x86_sha256_update(struct sha256_ctx *ctx, size_t length, const uint8_t * data);
+void x86_sha512_update(struct sha512_ctx *ctx, size_t length, const uint8_t * data);
+
+extern const gnutls_crypto_digest_st sha_x86_struct;
+extern const gnutls_crypto_mac_st hmac_sha_x86_struct;
+
+#endif