Diffstat (limited to 'crypto/des/asm/cx86-cpp.s')
-rw-r--r--	crypto/des/asm/cx86-cpp.s	932
1 file changed, 932 insertions, 0 deletions
diff --git a/crypto/des/asm/cx86-cpp.s b/crypto/des/asm/cx86-cpp.s
new file mode 100644
index 0000000000..e5165fadf9
--- /dev/null
+++ b/crypto/des/asm/cx86-cpp.s
@@ -0,0 +1,932 @@
+	/* Don't even think of reading this code */
+	/* It was automatically generated by crypt.pl */
+	/* Which is a perl program used to generate the x86 assember for */
+	/* any of elf, a.out, Win32, or Solaris */
+	/* It can be found in SSLeay 0.6.5+ or in libdes 3.26+ */
+	/* eric <eay@cryptsoft.com> */
+	/* The inner loop instruction sequence and the IP/FP modifications */
+	/* are from Svend Olaf Mikkelsen <svolaf@inet.uni-c.dk> */
+
+	.file "dx86xxxx.s"
+	.version "01.01"
+gcc2_compiled.:
+.text
+	.align ALIGN
+.globl fcrypt_body
+	TYPE(fcrypt_body,@function)
+fcrypt_body:
+	pushl %ebp
+	pushl %ebx
+	pushl %esi
+	pushl %edi
+
+
+	/* Load the 2 words */
+	xorl %edi, %edi
+	xorl %esi, %esi
+	movl 24(%esp), %ebp
+	movl $25, -8(%esp)
+.align ALIGN
+.L000start:
+
+	/* Round 0 */
+	movl 28(%esp), %eax
+	movl %esi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %esi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl (%ebp), %ebx
+	xorl %ebx, %eax
+	movl 4(%ebp), %ecx
+	xorl %esi, %eax
+	xorl %esi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %edi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %edi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %edi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %edi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %edi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %edi
+
+	/* Round 1 */
+	movl 28(%esp), %eax
+	movl %edi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %edi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 8(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 12(%ebp), %ecx
+	xorl %edi, %eax
+	xorl %edi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %esi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %esi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %esi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %esi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %esi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %esi
+
+	/* Round 2 */
+	movl 28(%esp), %eax
+	movl %esi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %esi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 16(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 20(%ebp), %ecx
+	xorl %esi, %eax
+	xorl %esi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %edi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %edi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %edi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %edi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %edi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %edi
+
+	/* Round 3 */
+	movl 28(%esp), %eax
+	movl %edi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %edi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 24(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 28(%ebp), %ecx
+	xorl %edi, %eax
+	xorl %edi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %esi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %esi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %esi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %esi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %esi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %esi
+
+	/* Round 4 */
+	movl 28(%esp), %eax
+	movl %esi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %esi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 32(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 36(%ebp), %ecx
+	xorl %esi, %eax
+	xorl %esi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %edi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %edi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %edi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %edi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %edi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %edi
+
+	/* Round 5 */
+	movl 28(%esp), %eax
+	movl %edi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %edi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 40(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 44(%ebp), %ecx
+	xorl %edi, %eax
+	xorl %edi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %esi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %esi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %esi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %esi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %esi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %esi
+
+	/* Round 6 */
+	movl 28(%esp), %eax
+	movl %esi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %esi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 48(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 52(%ebp), %ecx
+	xorl %esi, %eax
+	xorl %esi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %edi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %edi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %edi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %edi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %edi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %edi
+
+	/* Round 7 */
+	movl 28(%esp), %eax
+	movl %edi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %edi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 56(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 60(%ebp), %ecx
+	xorl %edi, %eax
+	xorl %edi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %esi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %esi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %esi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %esi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %esi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %esi
+
+	/* Round 8 */
+	movl 28(%esp), %eax
+	movl %esi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %esi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 64(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 68(%ebp), %ecx
+	xorl %esi, %eax
+	xorl %esi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %edi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %edi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %edi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %edi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %edi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %edi
+
+	/* Round 9 */
+	movl 28(%esp), %eax
+	movl %edi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %edi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 72(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 76(%ebp), %ecx
+	xorl %edi, %eax
+	xorl %edi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %esi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %esi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %esi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %esi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %esi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %esi
+
+	/* Round 10 */
+	movl 28(%esp), %eax
+	movl %esi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %esi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 80(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 84(%ebp), %ecx
+	xorl %esi, %eax
+	xorl %esi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %edi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %edi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %edi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %edi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %edi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %edi
+
+	/* Round 11 */
+	movl 28(%esp), %eax
+	movl %edi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %edi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 88(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 92(%ebp), %ecx
+	xorl %edi, %eax
+	xorl %edi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %esi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %esi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %esi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %esi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %esi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %esi
+
+	/* Round 12 */
+	movl 28(%esp), %eax
+	movl %esi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %esi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 96(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 100(%ebp), %ecx
+	xorl %esi, %eax
+	xorl %esi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %edi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %edi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %edi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %edi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %edi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %edi
+
+	/* Round 13 */
+	movl 28(%esp), %eax
+	movl %edi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %edi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 104(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 108(%ebp), %ecx
+	xorl %edi, %eax
+	xorl %edi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %esi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %esi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %esi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %esi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %esi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %esi
+
+	/* Round 14 */
+	movl 28(%esp), %eax
+	movl %esi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %esi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 112(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 116(%ebp), %ecx
+	xorl %esi, %eax
+	xorl %esi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %edi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %edi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %edi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %edi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %edi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %edi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %edi
+
+	/* Round 15 */
+	movl 28(%esp), %eax
+	movl %edi, %edx
+	shrl $16, %edx
+	movl 32(%esp), %ecx
+	xorl %edi, %edx
+	andl %edx, %eax
+	andl %ecx, %edx
+	movl %eax, %ebx
+	sall $16, %ebx
+	movl %edx, %ecx
+	sall $16, %ecx
+	xorl %ebx, %eax
+	xorl %ecx, %edx
+	movl 120(%ebp), %ebx
+	xorl %ebx, %eax
+	movl 124(%ebp), %ecx
+	xorl %edi, %eax
+	xorl %edi, %edx
+	xorl %ecx, %edx
+	andl $0xfcfcfcfc, %eax
+	xorl %ebx, %ebx
+	andl $0xcfcfcfcf, %edx
+	xorl %ecx, %ecx
+	movb %al, %bl
+	movb %ah, %cl
+	rorl $4, %edx
+	movl des_SPtrans(%ebx),%ebp
+	movb %dl, %bl
+	xorl %ebp, %esi
+	movl 0x200+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movb %dh, %cl
+	shrl $16, %eax
+	movl 0x100+des_SPtrans(%ebx),%ebp
+	xorl %ebp, %esi
+	movb %ah, %bl
+	shrl $16, %edx
+	movl 0x300+des_SPtrans(%ecx),%ebp
+	xorl %ebp, %esi
+	movl 24(%esp), %ebp
+	movb %dh, %cl
+	andl $0xff, %eax
+	andl $0xff, %edx
+	movl 0x600+des_SPtrans(%ebx),%ebx
+	xorl %ebx, %esi
+	movl 0x700+des_SPtrans(%ecx),%ebx
+	xorl %ebx, %esi
+	movl 0x400+des_SPtrans(%eax),%ebx
+	xorl %ebx, %esi
+	movl 0x500+des_SPtrans(%edx),%ebx
+	xorl %ebx, %esi
+	movl %edi, %eax
+	decl -8(%esp)
+	movl %esi, %edi
+	movl %eax, %esi
+	jnz .L000start
+
+	/* FP */
+	movl 20(%esp), %edx
+	rorl $1, %edi
+	movl %esi, %eax
+	xorl %edi, %esi
+	andl $0xaaaaaaaa, %esi
+	xorl %esi, %eax
+	xorl %esi, %edi
+
+	roll $23, %eax
+	movl %eax, %esi
+	xorl %edi, %eax
+	andl $0x03fc03fc, %eax
+	xorl %eax, %esi
+	xorl %eax, %edi
+
+	roll $10, %esi
+	movl %esi, %eax
+	xorl %edi, %esi
+	andl $0x33333333, %esi
+	xorl %esi, %eax
+	xorl %esi, %edi
+
+	roll $18, %edi
+	movl %edi, %esi
+	xorl %eax, %edi
+	andl $0xfff0000f, %edi
+	xorl %edi, %esi
+	xorl %edi, %eax
+
+	roll $12, %esi
+	movl %esi, %edi
+	xorl %eax, %esi
+	andl $0xf0f0f0f0, %esi
+	xorl %esi, %edi
+	xorl %esi, %eax
+
+	rorl $4, %eax
+	movl %eax, (%edx)
+	movl %edi, 4(%edx)
+	popl %edi
+	popl %esi
+	popl %ebx
+	popl %ebp
+	ret
+.fcrypt_body_end:
+	SIZE(fcrypt_body,.fcrypt_body_end-fcrypt_body)
+.ident "desasm.pl"
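
Editor's note on the generated code above: each "Round N" block is one DES round of the fcrypt inner loop, fully unrolled; the counter stored with "movl $25" corresponds to the 25 DES iterations of crypt(3), and the "/* FP */" block is the final permutation. The following is a rough, hedged C sketch of what one round computes, for orientation only; it is not part of the commit. The names des_round, E0, E1 and k are hypothetical stand-ins for the salt-derived E-bit masks loaded from 28(%esp)/32(%esp) and the two key-schedule words loaded via %ebp.

	/* Hedged illustration (not the committed source): roughly what each
	 * unrolled round above computes.  L and R live in %edi/%esi, E0/E1
	 * are the salt masks at 28(%esp)/32(%esp), k points at this round's
	 * two key-schedule words, and des_SPtrans is the usual 8 x 64-entry
	 * S/P table; its rows are 0x100 bytes apart, which is why the
	 * assembly can index it directly with bytes masked by 0xfc. */
	typedef unsigned int u32;

	extern const u32 des_SPtrans[8][64];

	static u32 des_round(u32 L, u32 R, u32 E0, u32 E1, const u32 *k)
	{
	    u32 t = R ^ (R >> 16);          /* movl %esi,%edx ; shrl $16 ; xorl */
	    u32 u = E0 & t;                 /* mask from 28(%esp)               */
	    u32 v = E1 & t;                 /* mask from 32(%esp)               */

	    u = (u ^ (u << 16)) ^ R ^ k[0]; /* sall $16 ; xorl ; key word 0     */
	    v = (v ^ (v << 16)) ^ R ^ k[1]; /* sall $16 ; xorl ; key word 1     */
	    v = (v >> 4) | (v << 28);       /* rorl $4,%edx                     */

	    /* eight S/P-box lookups XORed into the other half */
	    L ^= des_SPtrans[0][(u >>  2) & 0x3f] ^
	         des_SPtrans[2][(u >> 10) & 0x3f] ^
	         des_SPtrans[4][(u >> 18) & 0x3f] ^
	         des_SPtrans[6][(u >> 26) & 0x3f] ^
	         des_SPtrans[1][(v >>  2) & 0x3f] ^
	         des_SPtrans[3][(v >> 10) & 0x3f] ^
	         des_SPtrans[5][(v >> 18) & 0x3f] ^
	         des_SPtrans[7][(v >> 26) & 0x3f];
	    return L;
	}

Consecutive rounds alternate which of %esi/%edi plays the role of L, which is why the even- and odd-numbered round blocks in the listing differ only in the registers they read and accumulate into, and in the key-schedule offsets (8*round and 8*round+4).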