Diffstat (limited to 'deps/openssl/config/archs/linux-x86_64/asm/crypto')
21 files changed, 428 insertions, 5474 deletions
diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aes-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aes-x86_64.s
deleted file mode 100644
index f0c1685fb9..0000000000
--- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aes-x86_64.s
+++ /dev/null
@@ -1,2649 +0,0 @@
-.text
-.type _x86_64_AES_encrypt,@function
-.align 16
-_x86_64_AES_encrypt:
- xorl 0(%r15),%eax
- xorl 4(%r15),%ebx
- xorl 8(%r15),%ecx
- xorl 12(%r15),%edx
-
- movl 240(%r15),%r13d
- subl $1,%r13d
- jmp .Lenc_loop
-.align 16
-.Lenc_loop:
-
- movzbl %al,%esi
- movzbl %bl,%edi
- movzbl %cl,%ebp
- movl 0(%r14,%rsi,8),%r10d
- movl 0(%r14,%rdi,8),%r11d
- movl 0(%r14,%rbp,8),%r12d
-
- movzbl %bh,%esi
- movzbl %ch,%edi
- movzbl %dl,%ebp
- xorl 3(%r14,%rsi,8),%r10d
- xorl 3(%r14,%rdi,8),%r11d
- movl 0(%r14,%rbp,8),%r8d
-
- movzbl %dh,%esi
- shrl $16,%ecx
- movzbl %ah,%ebp
- xorl 3(%r14,%rsi,8),%r12d
- shrl $16,%edx
- xorl 3(%r14,%rbp,8),%r8d
-
- shrl $16,%ebx
- leaq 16(%r15),%r15
- shrl $16,%eax
-
- movzbl %cl,%esi
- movzbl %dl,%edi
- movzbl %al,%ebp
- xorl 2(%r14,%rsi,8),%r10d
- xorl 2(%r14,%rdi,8),%r11d
- xorl 2(%r14,%rbp,8),%r12d
-
- movzbl %dh,%esi
- movzbl %ah,%edi
- movzbl %bl,%ebp
- xorl 1(%r14,%rsi,8),%r10d
- xorl 1(%r14,%rdi,8),%r11d
- xorl 2(%r14,%rbp,8),%r8d
-
- movl 12(%r15),%edx
- movzbl %bh,%edi
- movzbl %ch,%ebp
- movl 0(%r15),%eax
- xorl 1(%r14,%rdi,8),%r12d
- xorl 1(%r14,%rbp,8),%r8d
-
- movl 4(%r15),%ebx
- movl 8(%r15),%ecx
- xorl %r10d,%eax
- xorl %r11d,%ebx
- xorl %r12d,%ecx
- xorl %r8d,%edx
- subl $1,%r13d
- jnz .Lenc_loop
- movzbl %al,%esi
- movzbl %bl,%edi
- movzbl %cl,%ebp
- movzbl 2(%r14,%rsi,8),%r10d
- movzbl 2(%r14,%rdi,8),%r11d
- movzbl 2(%r14,%rbp,8),%r12d
-
- movzbl %dl,%esi
- movzbl %bh,%edi
- movzbl %ch,%ebp
- movzbl 2(%r14,%rsi,8),%r8d
- movl 0(%r14,%rdi,8),%edi
- movl 0(%r14,%rbp,8),%ebp
-
- andl $0x0000ff00,%edi
- andl $0x0000ff00,%ebp
-
- xorl %edi,%r10d
- xorl %ebp,%r11d
- shrl $16,%ecx
-
- movzbl %dh,%esi
- movzbl %ah,%edi
- shrl $16,%edx
- movl 0(%r14,%rsi,8),%esi
- movl 0(%r14,%rdi,8),%edi
-
- andl $0x0000ff00,%esi
- andl $0x0000ff00,%edi
- shrl $16,%ebx
- xorl %esi,%r12d
- xorl %edi,%r8d
- shrl $16,%eax
-
- movzbl %cl,%esi
- movzbl %dl,%edi
- movzbl %al,%ebp
- movl 0(%r14,%rsi,8),%esi
- movl 0(%r14,%rdi,8),%edi
- movl 0(%r14,%rbp,8),%ebp
-
- andl $0x00ff0000,%esi
- andl $0x00ff0000,%edi
- andl $0x00ff0000,%ebp
-
- xorl %esi,%r10d
- xorl %edi,%r11d
- xorl %ebp,%r12d
-
- movzbl %bl,%esi
- movzbl %dh,%edi
- movzbl %ah,%ebp
- movl 0(%r14,%rsi,8),%esi
- movl 2(%r14,%rdi,8),%edi
- movl 2(%r14,%rbp,8),%ebp
-
- andl $0x00ff0000,%esi
- andl $0xff000000,%edi
- andl $0xff000000,%ebp
-
- xorl %esi,%r8d
- xorl %edi,%r10d
- xorl %ebp,%r11d
-
- movzbl %bh,%esi
- movzbl %ch,%edi
- movl 16+12(%r15),%edx
- movl 2(%r14,%rsi,8),%esi
- movl 2(%r14,%rdi,8),%edi
- movl 16+0(%r15),%eax
-
- andl $0xff000000,%esi
- andl $0xff000000,%edi
-
- xorl %esi,%r12d
- xorl %edi,%r8d
-
- movl 16+4(%r15),%ebx
- movl 16+8(%r15),%ecx
- xorl %r10d,%eax
- xorl %r11d,%ebx
- xorl %r12d,%ecx
- xorl %r8d,%edx
-.byte 0xf3,0xc3
-.size _x86_64_AES_encrypt,.-_x86_64_AES_encrypt
-.type _x86_64_AES_encrypt_compact,@function
-.align 16
-_x86_64_AES_encrypt_compact:
-.cfi_startproc
- leaq 128(%r14),%r8
- movl 0-128(%r8),%edi
- movl 32-128(%r8),%ebp
- movl 64-128(%r8),%r10d
- movl 96-128(%r8),%r11d
- movl 128-128(%r8),%edi
- movl 160-128(%r8),%ebp
- movl 192-128(%r8),%r10d
- movl 224-128(%r8),%r11d
- jmp .Lenc_loop_compact
-.align 16
-.Lenc_loop_compact:
- xorl 0(%r15),%eax
- xorl 4(%r15),%ebx
- xorl
8(%r15),%ecx - xorl 12(%r15),%edx - leaq 16(%r15),%r15 - movzbl %al,%r10d - movzbl %bl,%r11d - movzbl %cl,%r12d - movzbl %dl,%r8d - movzbl %bh,%esi - movzbl %ch,%edi - shrl $16,%ecx - movzbl %dh,%ebp - movzbl (%r14,%r10,1),%r10d - movzbl (%r14,%r11,1),%r11d - movzbl (%r14,%r12,1),%r12d - movzbl (%r14,%r8,1),%r8d - - movzbl (%r14,%rsi,1),%r9d - movzbl %ah,%esi - movzbl (%r14,%rdi,1),%r13d - movzbl %cl,%edi - movzbl (%r14,%rbp,1),%ebp - movzbl (%r14,%rsi,1),%esi - - shll $8,%r9d - shrl $16,%edx - shll $8,%r13d - xorl %r9d,%r10d - shrl $16,%eax - movzbl %dl,%r9d - shrl $16,%ebx - xorl %r13d,%r11d - shll $8,%ebp - movzbl %al,%r13d - movzbl (%r14,%rdi,1),%edi - xorl %ebp,%r12d - - shll $8,%esi - movzbl %bl,%ebp - shll $16,%edi - xorl %esi,%r8d - movzbl (%r14,%r9,1),%r9d - movzbl %dh,%esi - movzbl (%r14,%r13,1),%r13d - xorl %edi,%r10d - - shrl $8,%ecx - movzbl %ah,%edi - shll $16,%r9d - shrl $8,%ebx - shll $16,%r13d - xorl %r9d,%r11d - movzbl (%r14,%rbp,1),%ebp - movzbl (%r14,%rsi,1),%esi - movzbl (%r14,%rdi,1),%edi - movzbl (%r14,%rcx,1),%edx - movzbl (%r14,%rbx,1),%ecx - - shll $16,%ebp - xorl %r13d,%r12d - shll $24,%esi - xorl %ebp,%r8d - shll $24,%edi - xorl %esi,%r10d - shll $24,%edx - xorl %edi,%r11d - shll $24,%ecx - movl %r10d,%eax - movl %r11d,%ebx - xorl %r12d,%ecx - xorl %r8d,%edx - cmpq 16(%rsp),%r15 - je .Lenc_compact_done - movl $0x80808080,%r10d - movl $0x80808080,%r11d - andl %eax,%r10d - andl %ebx,%r11d - movl %r10d,%esi - movl %r11d,%edi - shrl $7,%r10d - leal (%rax,%rax,1),%r8d - shrl $7,%r11d - leal (%rbx,%rbx,1),%r9d - subl %r10d,%esi - subl %r11d,%edi - andl $0xfefefefe,%r8d - andl $0xfefefefe,%r9d - andl $0x1b1b1b1b,%esi - andl $0x1b1b1b1b,%edi - movl %eax,%r10d - movl %ebx,%r11d - xorl %esi,%r8d - xorl %edi,%r9d - - xorl %r8d,%eax - xorl %r9d,%ebx - movl $0x80808080,%r12d - roll $24,%eax - movl $0x80808080,%ebp - roll $24,%ebx - andl %ecx,%r12d - andl %edx,%ebp - xorl %r8d,%eax - xorl %r9d,%ebx - movl %r12d,%esi - rorl $16,%r10d - movl %ebp,%edi - rorl $16,%r11d - leal (%rcx,%rcx,1),%r8d - shrl $7,%r12d - xorl %r10d,%eax - shrl $7,%ebp - xorl %r11d,%ebx - rorl $8,%r10d - leal (%rdx,%rdx,1),%r9d - rorl $8,%r11d - subl %r12d,%esi - subl %ebp,%edi - xorl %r10d,%eax - xorl %r11d,%ebx - - andl $0xfefefefe,%r8d - andl $0xfefefefe,%r9d - andl $0x1b1b1b1b,%esi - andl $0x1b1b1b1b,%edi - movl %ecx,%r12d - movl %edx,%ebp - xorl %esi,%r8d - xorl %edi,%r9d - - rorl $16,%r12d - xorl %r8d,%ecx - rorl $16,%ebp - xorl %r9d,%edx - roll $24,%ecx - movl 0(%r14),%esi - roll $24,%edx - xorl %r8d,%ecx - movl 64(%r14),%edi - xorl %r9d,%edx - movl 128(%r14),%r8d - xorl %r12d,%ecx - rorl $8,%r12d - xorl %ebp,%edx - rorl $8,%ebp - xorl %r12d,%ecx - movl 192(%r14),%r9d - xorl %ebp,%edx - jmp .Lenc_loop_compact -.align 16 -.Lenc_compact_done: - xorl 0(%r15),%eax - xorl 4(%r15),%ebx - xorl 8(%r15),%ecx - xorl 12(%r15),%edx -.byte 0xf3,0xc3 -.cfi_endproc -.size _x86_64_AES_encrypt_compact,.-_x86_64_AES_encrypt_compact -.globl AES_encrypt -.type AES_encrypt,@function -.align 16 -.globl asm_AES_encrypt -.hidden asm_AES_encrypt -asm_AES_encrypt: -AES_encrypt: -.cfi_startproc - movq %rsp,%rax -.cfi_def_cfa_register %rax - pushq %rbx -.cfi_offset %rbx,-16 - pushq %rbp -.cfi_offset %rbp,-24 - pushq %r12 -.cfi_offset %r12,-32 - pushq %r13 -.cfi_offset %r13,-40 - pushq %r14 -.cfi_offset %r14,-48 - pushq %r15 -.cfi_offset %r15,-56 - - - leaq -63(%rdx),%rcx - andq $-64,%rsp - subq %rsp,%rcx - negq %rcx - andq $0x3c0,%rcx - subq %rcx,%rsp - subq $32,%rsp - - movq %rsi,16(%rsp) - movq %rax,24(%rsp) -.cfi_escape 
0x0f,0x05,0x77,0x18,0x06,0x23,0x08 -.Lenc_prologue: - - movq %rdx,%r15 - movl 240(%r15),%r13d - - movl 0(%rdi),%eax - movl 4(%rdi),%ebx - movl 8(%rdi),%ecx - movl 12(%rdi),%edx - - shll $4,%r13d - leaq (%r15,%r13,1),%rbp - movq %r15,(%rsp) - movq %rbp,8(%rsp) - - - leaq .LAES_Te+2048(%rip),%r14 - leaq 768(%rsp),%rbp - subq %r14,%rbp - andq $0x300,%rbp - leaq (%r14,%rbp,1),%r14 - - call _x86_64_AES_encrypt_compact - - movq 16(%rsp),%r9 - movq 24(%rsp),%rsi -.cfi_def_cfa %rsi,8 - movl %eax,0(%r9) - movl %ebx,4(%r9) - movl %ecx,8(%r9) - movl %edx,12(%r9) - - movq -48(%rsi),%r15 -.cfi_restore %r15 - movq -40(%rsi),%r14 -.cfi_restore %r14 - movq -32(%rsi),%r13 -.cfi_restore %r13 - movq -24(%rsi),%r12 -.cfi_restore %r12 - movq -16(%rsi),%rbp -.cfi_restore %rbp - movq -8(%rsi),%rbx -.cfi_restore %rbx - leaq (%rsi),%rsp -.cfi_def_cfa_register %rsp -.Lenc_epilogue: - .byte 0xf3,0xc3 -.cfi_endproc -.size AES_encrypt,.-AES_encrypt -.type _x86_64_AES_decrypt,@function -.align 16 -_x86_64_AES_decrypt: - xorl 0(%r15),%eax - xorl 4(%r15),%ebx - xorl 8(%r15),%ecx - xorl 12(%r15),%edx - - movl 240(%r15),%r13d - subl $1,%r13d - jmp .Ldec_loop -.align 16 -.Ldec_loop: - - movzbl %al,%esi - movzbl %bl,%edi - movzbl %cl,%ebp - movl 0(%r14,%rsi,8),%r10d - movl 0(%r14,%rdi,8),%r11d - movl 0(%r14,%rbp,8),%r12d - - movzbl %dh,%esi - movzbl %ah,%edi - movzbl %dl,%ebp - xorl 3(%r14,%rsi,8),%r10d - xorl 3(%r14,%rdi,8),%r11d - movl 0(%r14,%rbp,8),%r8d - - movzbl %bh,%esi - shrl $16,%eax - movzbl %ch,%ebp - xorl 3(%r14,%rsi,8),%r12d - shrl $16,%edx - xorl 3(%r14,%rbp,8),%r8d - - shrl $16,%ebx - leaq 16(%r15),%r15 - shrl $16,%ecx - - movzbl %cl,%esi - movzbl %dl,%edi - movzbl %al,%ebp - xorl 2(%r14,%rsi,8),%r10d - xorl 2(%r14,%rdi,8),%r11d - xorl 2(%r14,%rbp,8),%r12d - - movzbl %bh,%esi - movzbl %ch,%edi - movzbl %bl,%ebp - xorl 1(%r14,%rsi,8),%r10d - xorl 1(%r14,%rdi,8),%r11d - xorl 2(%r14,%rbp,8),%r8d - - movzbl %dh,%esi - movl 12(%r15),%edx - movzbl %ah,%ebp - xorl 1(%r14,%rsi,8),%r12d - movl 0(%r15),%eax - xorl 1(%r14,%rbp,8),%r8d - - xorl %r10d,%eax - movl 4(%r15),%ebx - movl 8(%r15),%ecx - xorl %r12d,%ecx - xorl %r11d,%ebx - xorl %r8d,%edx - subl $1,%r13d - jnz .Ldec_loop - leaq 2048(%r14),%r14 - movzbl %al,%esi - movzbl %bl,%edi - movzbl %cl,%ebp - movzbl (%r14,%rsi,1),%r10d - movzbl (%r14,%rdi,1),%r11d - movzbl (%r14,%rbp,1),%r12d - - movzbl %dl,%esi - movzbl %dh,%edi - movzbl %ah,%ebp - movzbl (%r14,%rsi,1),%r8d - movzbl (%r14,%rdi,1),%edi - movzbl (%r14,%rbp,1),%ebp - - shll $8,%edi - shll $8,%ebp - - xorl %edi,%r10d - xorl %ebp,%r11d - shrl $16,%edx - - movzbl %bh,%esi - movzbl %ch,%edi - shrl $16,%eax - movzbl (%r14,%rsi,1),%esi - movzbl (%r14,%rdi,1),%edi - - shll $8,%esi - shll $8,%edi - shrl $16,%ebx - xorl %esi,%r12d - xorl %edi,%r8d - shrl $16,%ecx - - movzbl %cl,%esi - movzbl %dl,%edi - movzbl %al,%ebp - movzbl (%r14,%rsi,1),%esi - movzbl (%r14,%rdi,1),%edi - movzbl (%r14,%rbp,1),%ebp - - shll $16,%esi - shll $16,%edi - shll $16,%ebp - - xorl %esi,%r10d - xorl %edi,%r11d - xorl %ebp,%r12d - - movzbl %bl,%esi - movzbl %bh,%edi - movzbl %ch,%ebp - movzbl (%r14,%rsi,1),%esi - movzbl (%r14,%rdi,1),%edi - movzbl (%r14,%rbp,1),%ebp - - shll $16,%esi - shll $24,%edi - shll $24,%ebp - - xorl %esi,%r8d - xorl %edi,%r10d - xorl %ebp,%r11d - - movzbl %dh,%esi - movzbl %ah,%edi - movl 16+12(%r15),%edx - movzbl (%r14,%rsi,1),%esi - movzbl (%r14,%rdi,1),%edi - movl 16+0(%r15),%eax - - shll $24,%esi - shll $24,%edi - - xorl %esi,%r12d - xorl %edi,%r8d - - movl 16+4(%r15),%ebx - movl 16+8(%r15),%ecx - leaq 
-2048(%r14),%r14 - xorl %r10d,%eax - xorl %r11d,%ebx - xorl %r12d,%ecx - xorl %r8d,%edx -.byte 0xf3,0xc3 -.size _x86_64_AES_decrypt,.-_x86_64_AES_decrypt -.type _x86_64_AES_decrypt_compact,@function -.align 16 -_x86_64_AES_decrypt_compact: -.cfi_startproc - leaq 128(%r14),%r8 - movl 0-128(%r8),%edi - movl 32-128(%r8),%ebp - movl 64-128(%r8),%r10d - movl 96-128(%r8),%r11d - movl 128-128(%r8),%edi - movl 160-128(%r8),%ebp - movl 192-128(%r8),%r10d - movl 224-128(%r8),%r11d - jmp .Ldec_loop_compact - -.align 16 -.Ldec_loop_compact: - xorl 0(%r15),%eax - xorl 4(%r15),%ebx - xorl 8(%r15),%ecx - xorl 12(%r15),%edx - leaq 16(%r15),%r15 - movzbl %al,%r10d - movzbl %bl,%r11d - movzbl %cl,%r12d - movzbl %dl,%r8d - movzbl %dh,%esi - movzbl %ah,%edi - shrl $16,%edx - movzbl %bh,%ebp - movzbl (%r14,%r10,1),%r10d - movzbl (%r14,%r11,1),%r11d - movzbl (%r14,%r12,1),%r12d - movzbl (%r14,%r8,1),%r8d - - movzbl (%r14,%rsi,1),%r9d - movzbl %ch,%esi - movzbl (%r14,%rdi,1),%r13d - movzbl (%r14,%rbp,1),%ebp - movzbl (%r14,%rsi,1),%esi - - shrl $16,%ecx - shll $8,%r13d - shll $8,%r9d - movzbl %cl,%edi - shrl $16,%eax - xorl %r9d,%r10d - shrl $16,%ebx - movzbl %dl,%r9d - - shll $8,%ebp - xorl %r13d,%r11d - shll $8,%esi - movzbl %al,%r13d - movzbl (%r14,%rdi,1),%edi - xorl %ebp,%r12d - movzbl %bl,%ebp - - shll $16,%edi - xorl %esi,%r8d - movzbl (%r14,%r9,1),%r9d - movzbl %bh,%esi - movzbl (%r14,%rbp,1),%ebp - xorl %edi,%r10d - movzbl (%r14,%r13,1),%r13d - movzbl %ch,%edi - - shll $16,%ebp - shll $16,%r9d - shll $16,%r13d - xorl %ebp,%r8d - movzbl %dh,%ebp - xorl %r9d,%r11d - shrl $8,%eax - xorl %r13d,%r12d - - movzbl (%r14,%rsi,1),%esi - movzbl (%r14,%rdi,1),%ebx - movzbl (%r14,%rbp,1),%ecx - movzbl (%r14,%rax,1),%edx - - movl %r10d,%eax - shll $24,%esi - shll $24,%ebx - shll $24,%ecx - xorl %esi,%eax - shll $24,%edx - xorl %r11d,%ebx - xorl %r12d,%ecx - xorl %r8d,%edx - cmpq 16(%rsp),%r15 - je .Ldec_compact_done - - movq 256+0(%r14),%rsi - shlq $32,%rbx - shlq $32,%rdx - movq 256+8(%r14),%rdi - orq %rbx,%rax - orq %rdx,%rcx - movq 256+16(%r14),%rbp - movq %rsi,%r9 - movq %rsi,%r12 - andq %rax,%r9 - andq %rcx,%r12 - movq %r9,%rbx - movq %r12,%rdx - shrq $7,%r9 - leaq (%rax,%rax,1),%r8 - shrq $7,%r12 - leaq (%rcx,%rcx,1),%r11 - subq %r9,%rbx - subq %r12,%rdx - andq %rdi,%r8 - andq %rdi,%r11 - andq %rbp,%rbx - andq %rbp,%rdx - xorq %rbx,%r8 - xorq %rdx,%r11 - movq %rsi,%r10 - movq %rsi,%r13 - - andq %r8,%r10 - andq %r11,%r13 - movq %r10,%rbx - movq %r13,%rdx - shrq $7,%r10 - leaq (%r8,%r8,1),%r9 - shrq $7,%r13 - leaq (%r11,%r11,1),%r12 - subq %r10,%rbx - subq %r13,%rdx - andq %rdi,%r9 - andq %rdi,%r12 - andq %rbp,%rbx - andq %rbp,%rdx - xorq %rbx,%r9 - xorq %rdx,%r12 - movq %rsi,%r10 - movq %rsi,%r13 - - andq %r9,%r10 - andq %r12,%r13 - movq %r10,%rbx - movq %r13,%rdx - shrq $7,%r10 - xorq %rax,%r8 - shrq $7,%r13 - xorq %rcx,%r11 - subq %r10,%rbx - subq %r13,%rdx - leaq (%r9,%r9,1),%r10 - leaq (%r12,%r12,1),%r13 - xorq %rax,%r9 - xorq %rcx,%r12 - andq %rdi,%r10 - andq %rdi,%r13 - andq %rbp,%rbx - andq %rbp,%rdx - xorq %rbx,%r10 - xorq %rdx,%r13 - - xorq %r10,%rax - xorq %r13,%rcx - xorq %r10,%r8 - xorq %r13,%r11 - movq %rax,%rbx - movq %rcx,%rdx - xorq %r10,%r9 - shrq $32,%rbx - xorq %r13,%r12 - shrq $32,%rdx - xorq %r8,%r10 - roll $8,%eax - xorq %r11,%r13 - roll $8,%ecx - xorq %r9,%r10 - roll $8,%ebx - xorq %r12,%r13 - - roll $8,%edx - xorl %r10d,%eax - shrq $32,%r10 - xorl %r13d,%ecx - shrq $32,%r13 - xorl %r10d,%ebx - xorl %r13d,%edx - - movq %r8,%r10 - roll $24,%r8d - movq %r11,%r13 - roll $24,%r11d - shrq 
$32,%r10 - xorl %r8d,%eax - shrq $32,%r13 - xorl %r11d,%ecx - roll $24,%r10d - movq %r9,%r8 - roll $24,%r13d - movq %r12,%r11 - shrq $32,%r8 - xorl %r10d,%ebx - shrq $32,%r11 - xorl %r13d,%edx - - movq 0(%r14),%rsi - roll $16,%r9d - movq 64(%r14),%rdi - roll $16,%r12d - movq 128(%r14),%rbp - roll $16,%r8d - movq 192(%r14),%r10 - xorl %r9d,%eax - roll $16,%r11d - xorl %r12d,%ecx - movq 256(%r14),%r13 - xorl %r8d,%ebx - xorl %r11d,%edx - jmp .Ldec_loop_compact -.align 16 -.Ldec_compact_done: - xorl 0(%r15),%eax - xorl 4(%r15),%ebx - xorl 8(%r15),%ecx - xorl 12(%r15),%edx -.byte 0xf3,0xc3 -.cfi_endproc -.size _x86_64_AES_decrypt_compact,.-_x86_64_AES_decrypt_compact -.globl AES_decrypt -.type AES_decrypt,@function -.align 16 -.globl asm_AES_decrypt -.hidden asm_AES_decrypt -asm_AES_decrypt: -AES_decrypt: -.cfi_startproc - movq %rsp,%rax -.cfi_def_cfa_register %rax - pushq %rbx -.cfi_offset %rbx,-16 - pushq %rbp -.cfi_offset %rbp,-24 - pushq %r12 -.cfi_offset %r12,-32 - pushq %r13 -.cfi_offset %r13,-40 - pushq %r14 -.cfi_offset %r14,-48 - pushq %r15 -.cfi_offset %r15,-56 - - - leaq -63(%rdx),%rcx - andq $-64,%rsp - subq %rsp,%rcx - negq %rcx - andq $0x3c0,%rcx - subq %rcx,%rsp - subq $32,%rsp - - movq %rsi,16(%rsp) - movq %rax,24(%rsp) -.cfi_escape 0x0f,0x05,0x77,0x18,0x06,0x23,0x08 -.Ldec_prologue: - - movq %rdx,%r15 - movl 240(%r15),%r13d - - movl 0(%rdi),%eax - movl 4(%rdi),%ebx - movl 8(%rdi),%ecx - movl 12(%rdi),%edx - - shll $4,%r13d - leaq (%r15,%r13,1),%rbp - movq %r15,(%rsp) - movq %rbp,8(%rsp) - - - leaq .LAES_Td+2048(%rip),%r14 - leaq 768(%rsp),%rbp - subq %r14,%rbp - andq $0x300,%rbp - leaq (%r14,%rbp,1),%r14 - shrq $3,%rbp - addq %rbp,%r14 - - call _x86_64_AES_decrypt_compact - - movq 16(%rsp),%r9 - movq 24(%rsp),%rsi -.cfi_def_cfa %rsi,8 - movl %eax,0(%r9) - movl %ebx,4(%r9) - movl %ecx,8(%r9) - movl %edx,12(%r9) - - movq -48(%rsi),%r15 -.cfi_restore %r15 - movq -40(%rsi),%r14 -.cfi_restore %r14 - movq -32(%rsi),%r13 -.cfi_restore %r13 - movq -24(%rsi),%r12 -.cfi_restore %r12 - movq -16(%rsi),%rbp -.cfi_restore %rbp - movq -8(%rsi),%rbx -.cfi_restore %rbx - leaq (%rsi),%rsp -.cfi_def_cfa_register %rsp -.Ldec_epilogue: - .byte 0xf3,0xc3 -.cfi_endproc -.size AES_decrypt,.-AES_decrypt -.globl AES_set_encrypt_key -.type AES_set_encrypt_key,@function -.align 16 -AES_set_encrypt_key: -.cfi_startproc - pushq %rbx -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbx,-16 - pushq %rbp -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbp,-24 - pushq %r12 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r12,-32 - pushq %r13 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r13,-40 - pushq %r14 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r14,-48 - pushq %r15 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r15,-56 - subq $8,%rsp -.cfi_adjust_cfa_offset 8 -.Lenc_key_prologue: - - call _x86_64_AES_set_encrypt_key - - movq 40(%rsp),%rbp -.cfi_restore %rbp - movq 48(%rsp),%rbx -.cfi_restore %rbx - addq $56,%rsp -.cfi_adjust_cfa_offset -56 -.Lenc_key_epilogue: - .byte 0xf3,0xc3 -.cfi_endproc -.size AES_set_encrypt_key,.-AES_set_encrypt_key - -.type _x86_64_AES_set_encrypt_key,@function -.align 16 -_x86_64_AES_set_encrypt_key: -.cfi_startproc - movl %esi,%ecx - movq %rdi,%rsi - movq %rdx,%rdi - - testq $-1,%rsi - jz .Lbadpointer - testq $-1,%rdi - jz .Lbadpointer - - leaq .LAES_Te(%rip),%rbp - leaq 2048+128(%rbp),%rbp - - - movl 0-128(%rbp),%eax - movl 32-128(%rbp),%ebx - movl 64-128(%rbp),%r8d - movl 96-128(%rbp),%edx - movl 128-128(%rbp),%eax - movl 160-128(%rbp),%ebx - movl 192-128(%rbp),%r8d - movl 224-128(%rbp),%edx - - cmpl $128,%ecx - je 
.L10rounds - cmpl $192,%ecx - je .L12rounds - cmpl $256,%ecx - je .L14rounds - movq $-2,%rax - jmp .Lexit - -.L10rounds: - movq 0(%rsi),%rax - movq 8(%rsi),%rdx - movq %rax,0(%rdi) - movq %rdx,8(%rdi) - - shrq $32,%rdx - xorl %ecx,%ecx - jmp .L10shortcut -.align 4 -.L10loop: - movl 0(%rdi),%eax - movl 12(%rdi),%edx -.L10shortcut: - movzbl %dl,%esi - movzbl -128(%rbp,%rsi,1),%ebx - movzbl %dh,%esi - shll $24,%ebx - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - shrl $16,%edx - movzbl %dl,%esi - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - movzbl %dh,%esi - shll $8,%ebx - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - shll $16,%ebx - xorl %ebx,%eax - - xorl 1024-128(%rbp,%rcx,4),%eax - movl %eax,16(%rdi) - xorl 4(%rdi),%eax - movl %eax,20(%rdi) - xorl 8(%rdi),%eax - movl %eax,24(%rdi) - xorl 12(%rdi),%eax - movl %eax,28(%rdi) - addl $1,%ecx - leaq 16(%rdi),%rdi - cmpl $10,%ecx - jl .L10loop - - movl $10,80(%rdi) - xorq %rax,%rax - jmp .Lexit - -.L12rounds: - movq 0(%rsi),%rax - movq 8(%rsi),%rbx - movq 16(%rsi),%rdx - movq %rax,0(%rdi) - movq %rbx,8(%rdi) - movq %rdx,16(%rdi) - - shrq $32,%rdx - xorl %ecx,%ecx - jmp .L12shortcut -.align 4 -.L12loop: - movl 0(%rdi),%eax - movl 20(%rdi),%edx -.L12shortcut: - movzbl %dl,%esi - movzbl -128(%rbp,%rsi,1),%ebx - movzbl %dh,%esi - shll $24,%ebx - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - shrl $16,%edx - movzbl %dl,%esi - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - movzbl %dh,%esi - shll $8,%ebx - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - shll $16,%ebx - xorl %ebx,%eax - - xorl 1024-128(%rbp,%rcx,4),%eax - movl %eax,24(%rdi) - xorl 4(%rdi),%eax - movl %eax,28(%rdi) - xorl 8(%rdi),%eax - movl %eax,32(%rdi) - xorl 12(%rdi),%eax - movl %eax,36(%rdi) - - cmpl $7,%ecx - je .L12break - addl $1,%ecx - - xorl 16(%rdi),%eax - movl %eax,40(%rdi) - xorl 20(%rdi),%eax - movl %eax,44(%rdi) - - leaq 24(%rdi),%rdi - jmp .L12loop -.L12break: - movl $12,72(%rdi) - xorq %rax,%rax - jmp .Lexit - -.L14rounds: - movq 0(%rsi),%rax - movq 8(%rsi),%rbx - movq 16(%rsi),%rcx - movq 24(%rsi),%rdx - movq %rax,0(%rdi) - movq %rbx,8(%rdi) - movq %rcx,16(%rdi) - movq %rdx,24(%rdi) - - shrq $32,%rdx - xorl %ecx,%ecx - jmp .L14shortcut -.align 4 -.L14loop: - movl 0(%rdi),%eax - movl 28(%rdi),%edx -.L14shortcut: - movzbl %dl,%esi - movzbl -128(%rbp,%rsi,1),%ebx - movzbl %dh,%esi - shll $24,%ebx - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - shrl $16,%edx - movzbl %dl,%esi - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - movzbl %dh,%esi - shll $8,%ebx - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - shll $16,%ebx - xorl %ebx,%eax - - xorl 1024-128(%rbp,%rcx,4),%eax - movl %eax,32(%rdi) - xorl 4(%rdi),%eax - movl %eax,36(%rdi) - xorl 8(%rdi),%eax - movl %eax,40(%rdi) - xorl 12(%rdi),%eax - movl %eax,44(%rdi) - - cmpl $6,%ecx - je .L14break - addl $1,%ecx - - movl %eax,%edx - movl 16(%rdi),%eax - movzbl %dl,%esi - movzbl -128(%rbp,%rsi,1),%ebx - movzbl %dh,%esi - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - shrl $16,%edx - shll $8,%ebx - movzbl %dl,%esi - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - movzbl %dh,%esi - shll $16,%ebx - xorl %ebx,%eax - - movzbl -128(%rbp,%rsi,1),%ebx - shll $24,%ebx - xorl %ebx,%eax - - movl %eax,48(%rdi) - xorl 20(%rdi),%eax - movl %eax,52(%rdi) - xorl 24(%rdi),%eax - movl %eax,56(%rdi) - xorl 28(%rdi),%eax - movl %eax,60(%rdi) - - leaq 32(%rdi),%rdi - jmp .L14loop -.L14break: - movl $14,48(%rdi) - xorq %rax,%rax - jmp .Lexit - -.Lbadpointer: - movq $-1,%rax -.Lexit: -.byte 0xf3,0xc3 
-.cfi_endproc -.size _x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key -.globl AES_set_decrypt_key -.type AES_set_decrypt_key,@function -.align 16 -AES_set_decrypt_key: -.cfi_startproc - pushq %rbx -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbx,-16 - pushq %rbp -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbp,-24 - pushq %r12 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r12,-32 - pushq %r13 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r13,-40 - pushq %r14 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r14,-48 - pushq %r15 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r15,-56 - pushq %rdx -.cfi_adjust_cfa_offset 8 -.Ldec_key_prologue: - - call _x86_64_AES_set_encrypt_key - movq (%rsp),%r8 - cmpl $0,%eax - jne .Labort - - movl 240(%r8),%r14d - xorq %rdi,%rdi - leaq (%rdi,%r14,4),%rcx - movq %r8,%rsi - leaq (%r8,%rcx,4),%rdi -.align 4 -.Linvert: - movq 0(%rsi),%rax - movq 8(%rsi),%rbx - movq 0(%rdi),%rcx - movq 8(%rdi),%rdx - movq %rax,0(%rdi) - movq %rbx,8(%rdi) - movq %rcx,0(%rsi) - movq %rdx,8(%rsi) - leaq 16(%rsi),%rsi - leaq -16(%rdi),%rdi - cmpq %rsi,%rdi - jne .Linvert - - leaq .LAES_Te+2048+1024(%rip),%rax - - movq 40(%rax),%rsi - movq 48(%rax),%rdi - movq 56(%rax),%rbp - - movq %r8,%r15 - subl $1,%r14d -.align 4 -.Lpermute: - leaq 16(%r15),%r15 - movq 0(%r15),%rax - movq 8(%r15),%rcx - movq %rsi,%r9 - movq %rsi,%r12 - andq %rax,%r9 - andq %rcx,%r12 - movq %r9,%rbx - movq %r12,%rdx - shrq $7,%r9 - leaq (%rax,%rax,1),%r8 - shrq $7,%r12 - leaq (%rcx,%rcx,1),%r11 - subq %r9,%rbx - subq %r12,%rdx - andq %rdi,%r8 - andq %rdi,%r11 - andq %rbp,%rbx - andq %rbp,%rdx - xorq %rbx,%r8 - xorq %rdx,%r11 - movq %rsi,%r10 - movq %rsi,%r13 - - andq %r8,%r10 - andq %r11,%r13 - movq %r10,%rbx - movq %r13,%rdx - shrq $7,%r10 - leaq (%r8,%r8,1),%r9 - shrq $7,%r13 - leaq (%r11,%r11,1),%r12 - subq %r10,%rbx - subq %r13,%rdx - andq %rdi,%r9 - andq %rdi,%r12 - andq %rbp,%rbx - andq %rbp,%rdx - xorq %rbx,%r9 - xorq %rdx,%r12 - movq %rsi,%r10 - movq %rsi,%r13 - - andq %r9,%r10 - andq %r12,%r13 - movq %r10,%rbx - movq %r13,%rdx - shrq $7,%r10 - xorq %rax,%r8 - shrq $7,%r13 - xorq %rcx,%r11 - subq %r10,%rbx - subq %r13,%rdx - leaq (%r9,%r9,1),%r10 - leaq (%r12,%r12,1),%r13 - xorq %rax,%r9 - xorq %rcx,%r12 - andq %rdi,%r10 - andq %rdi,%r13 - andq %rbp,%rbx - andq %rbp,%rdx - xorq %rbx,%r10 - xorq %rdx,%r13 - - xorq %r10,%rax - xorq %r13,%rcx - xorq %r10,%r8 - xorq %r13,%r11 - movq %rax,%rbx - movq %rcx,%rdx - xorq %r10,%r9 - shrq $32,%rbx - xorq %r13,%r12 - shrq $32,%rdx - xorq %r8,%r10 - roll $8,%eax - xorq %r11,%r13 - roll $8,%ecx - xorq %r9,%r10 - roll $8,%ebx - xorq %r12,%r13 - - roll $8,%edx - xorl %r10d,%eax - shrq $32,%r10 - xorl %r13d,%ecx - shrq $32,%r13 - xorl %r10d,%ebx - xorl %r13d,%edx - - movq %r8,%r10 - roll $24,%r8d - movq %r11,%r13 - roll $24,%r11d - shrq $32,%r10 - xorl %r8d,%eax - shrq $32,%r13 - xorl %r11d,%ecx - roll $24,%r10d - movq %r9,%r8 - roll $24,%r13d - movq %r12,%r11 - shrq $32,%r8 - xorl %r10d,%ebx - shrq $32,%r11 - xorl %r13d,%edx - - - roll $16,%r9d - - roll $16,%r12d - - roll $16,%r8d - - xorl %r9d,%eax - roll $16,%r11d - xorl %r12d,%ecx - - xorl %r8d,%ebx - xorl %r11d,%edx - movl %eax,0(%r15) - movl %ebx,4(%r15) - movl %ecx,8(%r15) - movl %edx,12(%r15) - subl $1,%r14d - jnz .Lpermute - - xorq %rax,%rax -.Labort: - movq 8(%rsp),%r15 -.cfi_restore %r15 - movq 16(%rsp),%r14 -.cfi_restore %r14 - movq 24(%rsp),%r13 -.cfi_restore %r13 - movq 32(%rsp),%r12 -.cfi_restore %r12 - movq 40(%rsp),%rbp -.cfi_restore %rbp - movq 48(%rsp),%rbx -.cfi_restore %rbx - addq $56,%rsp -.cfi_adjust_cfa_offset -56 
-.Ldec_key_epilogue: - .byte 0xf3,0xc3 -.cfi_endproc -.size AES_set_decrypt_key,.-AES_set_decrypt_key -.globl AES_cbc_encrypt -.type AES_cbc_encrypt,@function -.align 16 - -.globl asm_AES_cbc_encrypt -.hidden asm_AES_cbc_encrypt -asm_AES_cbc_encrypt: -AES_cbc_encrypt: -.cfi_startproc - cmpq $0,%rdx - je .Lcbc_epilogue - pushfq - - -.cfi_adjust_cfa_offset 8 - pushq %rbx -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbx,-24 - pushq %rbp -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbp,-32 - pushq %r12 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r12,-40 - pushq %r13 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r13,-48 - pushq %r14 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r14,-56 - pushq %r15 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r15,-64 -.Lcbc_prologue: - - cld - movl %r9d,%r9d - - leaq .LAES_Te(%rip),%r14 - leaq .LAES_Td(%rip),%r10 - cmpq $0,%r9 - cmoveq %r10,%r14 - -.cfi_remember_state - movl OPENSSL_ia32cap_P(%rip),%r10d - cmpq $512,%rdx - jb .Lcbc_slow_prologue - testq $15,%rdx - jnz .Lcbc_slow_prologue - btl $28,%r10d - jc .Lcbc_slow_prologue - - - leaq -88-248(%rsp),%r15 - andq $-64,%r15 - - - movq %r14,%r10 - leaq 2304(%r14),%r11 - movq %r15,%r12 - andq $0xFFF,%r10 - andq $0xFFF,%r11 - andq $0xFFF,%r12 - - cmpq %r11,%r12 - jb .Lcbc_te_break_out - subq %r11,%r12 - subq %r12,%r15 - jmp .Lcbc_te_ok -.Lcbc_te_break_out: - subq %r10,%r12 - andq $0xFFF,%r12 - addq $320,%r12 - subq %r12,%r15 -.align 4 -.Lcbc_te_ok: - - xchgq %rsp,%r15 -.cfi_def_cfa_register %r15 - - movq %r15,16(%rsp) -.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x40 -.Lcbc_fast_body: - movq %rdi,24(%rsp) - movq %rsi,32(%rsp) - movq %rdx,40(%rsp) - movq %rcx,48(%rsp) - movq %r8,56(%rsp) - movl $0,80+240(%rsp) - movq %r8,%rbp - movq %r9,%rbx - movq %rsi,%r9 - movq %rdi,%r8 - movq %rcx,%r15 - - movl 240(%r15),%eax - - movq %r15,%r10 - subq %r14,%r10 - andq $0xfff,%r10 - cmpq $2304,%r10 - jb .Lcbc_do_ecopy - cmpq $4096-248,%r10 - jb .Lcbc_skip_ecopy -.align 4 -.Lcbc_do_ecopy: - movq %r15,%rsi - leaq 80(%rsp),%rdi - leaq 80(%rsp),%r15 - movl $30,%ecx -.long 0x90A548F3 - movl %eax,(%rdi) -.Lcbc_skip_ecopy: - movq %r15,0(%rsp) - - movl $18,%ecx -.align 4 -.Lcbc_prefetch_te: - movq 0(%r14),%r10 - movq 32(%r14),%r11 - movq 64(%r14),%r12 - movq 96(%r14),%r13 - leaq 128(%r14),%r14 - subl $1,%ecx - jnz .Lcbc_prefetch_te - leaq -2304(%r14),%r14 - - cmpq $0,%rbx - je .LFAST_DECRYPT - - - movl 0(%rbp),%eax - movl 4(%rbp),%ebx - movl 8(%rbp),%ecx - movl 12(%rbp),%edx - -.align 4 -.Lcbc_fast_enc_loop: - xorl 0(%r8),%eax - xorl 4(%r8),%ebx - xorl 8(%r8),%ecx - xorl 12(%r8),%edx - movq 0(%rsp),%r15 - movq %r8,24(%rsp) - - call _x86_64_AES_encrypt - - movq 24(%rsp),%r8 - movq 40(%rsp),%r10 - movl %eax,0(%r9) - movl %ebx,4(%r9) - movl %ecx,8(%r9) - movl %edx,12(%r9) - - leaq 16(%r8),%r8 - leaq 16(%r9),%r9 - subq $16,%r10 - testq $-16,%r10 - movq %r10,40(%rsp) - jnz .Lcbc_fast_enc_loop - movq 56(%rsp),%rbp - movl %eax,0(%rbp) - movl %ebx,4(%rbp) - movl %ecx,8(%rbp) - movl %edx,12(%rbp) - - jmp .Lcbc_fast_cleanup - - -.align 16 -.LFAST_DECRYPT: - cmpq %r8,%r9 - je .Lcbc_fast_dec_in_place - - movq %rbp,64(%rsp) -.align 4 -.Lcbc_fast_dec_loop: - movl 0(%r8),%eax - movl 4(%r8),%ebx - movl 8(%r8),%ecx - movl 12(%r8),%edx - movq 0(%rsp),%r15 - movq %r8,24(%rsp) - - call _x86_64_AES_decrypt - - movq 64(%rsp),%rbp - movq 24(%rsp),%r8 - movq 40(%rsp),%r10 - xorl 0(%rbp),%eax - xorl 4(%rbp),%ebx - xorl 8(%rbp),%ecx - xorl 12(%rbp),%edx - movq %r8,%rbp - - subq $16,%r10 - movq %r10,40(%rsp) - movq %rbp,64(%rsp) - - movl %eax,0(%r9) - movl %ebx,4(%r9) - movl %ecx,8(%r9) - movl 
%edx,12(%r9) - - leaq 16(%r8),%r8 - leaq 16(%r9),%r9 - jnz .Lcbc_fast_dec_loop - movq 56(%rsp),%r12 - movq 0(%rbp),%r10 - movq 8(%rbp),%r11 - movq %r10,0(%r12) - movq %r11,8(%r12) - jmp .Lcbc_fast_cleanup - -.align 16 -.Lcbc_fast_dec_in_place: - movq 0(%rbp),%r10 - movq 8(%rbp),%r11 - movq %r10,0+64(%rsp) - movq %r11,8+64(%rsp) -.align 4 -.Lcbc_fast_dec_in_place_loop: - movl 0(%r8),%eax - movl 4(%r8),%ebx - movl 8(%r8),%ecx - movl 12(%r8),%edx - movq 0(%rsp),%r15 - movq %r8,24(%rsp) - - call _x86_64_AES_decrypt - - movq 24(%rsp),%r8 - movq 40(%rsp),%r10 - xorl 0+64(%rsp),%eax - xorl 4+64(%rsp),%ebx - xorl 8+64(%rsp),%ecx - xorl 12+64(%rsp),%edx - - movq 0(%r8),%r11 - movq 8(%r8),%r12 - subq $16,%r10 - jz .Lcbc_fast_dec_in_place_done - - movq %r11,0+64(%rsp) - movq %r12,8+64(%rsp) - - movl %eax,0(%r9) - movl %ebx,4(%r9) - movl %ecx,8(%r9) - movl %edx,12(%r9) - - leaq 16(%r8),%r8 - leaq 16(%r9),%r9 - movq %r10,40(%rsp) - jmp .Lcbc_fast_dec_in_place_loop -.Lcbc_fast_dec_in_place_done: - movq 56(%rsp),%rdi - movq %r11,0(%rdi) - movq %r12,8(%rdi) - - movl %eax,0(%r9) - movl %ebx,4(%r9) - movl %ecx,8(%r9) - movl %edx,12(%r9) - -.align 4 -.Lcbc_fast_cleanup: - cmpl $0,80+240(%rsp) - leaq 80(%rsp),%rdi - je .Lcbc_exit - movl $30,%ecx - xorq %rax,%rax -.long 0x90AB48F3 - - jmp .Lcbc_exit - - -.align 16 -.Lcbc_slow_prologue: -.cfi_restore_state - - leaq -88(%rsp),%rbp - andq $-64,%rbp - - leaq -88-63(%rcx),%r10 - subq %rbp,%r10 - negq %r10 - andq $0x3c0,%r10 - subq %r10,%rbp - - xchgq %rsp,%rbp -.cfi_def_cfa_register %rbp - - movq %rbp,16(%rsp) -.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x40 -.Lcbc_slow_body: - - - - - movq %r8,56(%rsp) - movq %r8,%rbp - movq %r9,%rbx - movq %rsi,%r9 - movq %rdi,%r8 - movq %rcx,%r15 - movq %rdx,%r10 - - movl 240(%r15),%eax - movq %r15,0(%rsp) - shll $4,%eax - leaq (%r15,%rax,1),%rax - movq %rax,8(%rsp) - - - leaq 2048(%r14),%r14 - leaq 768-8(%rsp),%rax - subq %r14,%rax - andq $0x300,%rax - leaq (%r14,%rax,1),%r14 - - cmpq $0,%rbx - je .LSLOW_DECRYPT - - - testq $-16,%r10 - movl 0(%rbp),%eax - movl 4(%rbp),%ebx - movl 8(%rbp),%ecx - movl 12(%rbp),%edx - jz .Lcbc_slow_enc_tail - -.align 4 -.Lcbc_slow_enc_loop: - xorl 0(%r8),%eax - xorl 4(%r8),%ebx - xorl 8(%r8),%ecx - xorl 12(%r8),%edx - movq 0(%rsp),%r15 - movq %r8,24(%rsp) - movq %r9,32(%rsp) - movq %r10,40(%rsp) - - call _x86_64_AES_encrypt_compact - - movq 24(%rsp),%r8 - movq 32(%rsp),%r9 - movq 40(%rsp),%r10 - movl %eax,0(%r9) - movl %ebx,4(%r9) - movl %ecx,8(%r9) - movl %edx,12(%r9) - - leaq 16(%r8),%r8 - leaq 16(%r9),%r9 - subq $16,%r10 - testq $-16,%r10 - jnz .Lcbc_slow_enc_loop - testq $15,%r10 - jnz .Lcbc_slow_enc_tail - movq 56(%rsp),%rbp - movl %eax,0(%rbp) - movl %ebx,4(%rbp) - movl %ecx,8(%rbp) - movl %edx,12(%rbp) - - jmp .Lcbc_exit - -.align 4 -.Lcbc_slow_enc_tail: - movq %rax,%r11 - movq %rcx,%r12 - movq %r10,%rcx - movq %r8,%rsi - movq %r9,%rdi -.long 0x9066A4F3 - movq $16,%rcx - subq %r10,%rcx - xorq %rax,%rax -.long 0x9066AAF3 - movq %r9,%r8 - movq $16,%r10 - movq %r11,%rax - movq %r12,%rcx - jmp .Lcbc_slow_enc_loop - -.align 16 -.LSLOW_DECRYPT: - shrq $3,%rax - addq %rax,%r14 - - movq 0(%rbp),%r11 - movq 8(%rbp),%r12 - movq %r11,0+64(%rsp) - movq %r12,8+64(%rsp) - -.align 4 -.Lcbc_slow_dec_loop: - movl 0(%r8),%eax - movl 4(%r8),%ebx - movl 8(%r8),%ecx - movl 12(%r8),%edx - movq 0(%rsp),%r15 - movq %r8,24(%rsp) - movq %r9,32(%rsp) - movq %r10,40(%rsp) - - call _x86_64_AES_decrypt_compact - - movq 24(%rsp),%r8 - movq 32(%rsp),%r9 - movq 40(%rsp),%r10 - xorl 0+64(%rsp),%eax - xorl 4+64(%rsp),%ebx - 
xorl 8+64(%rsp),%ecx - xorl 12+64(%rsp),%edx - - movq 0(%r8),%r11 - movq 8(%r8),%r12 - subq $16,%r10 - jc .Lcbc_slow_dec_partial - jz .Lcbc_slow_dec_done - - movq %r11,0+64(%rsp) - movq %r12,8+64(%rsp) - - movl %eax,0(%r9) - movl %ebx,4(%r9) - movl %ecx,8(%r9) - movl %edx,12(%r9) - - leaq 16(%r8),%r8 - leaq 16(%r9),%r9 - jmp .Lcbc_slow_dec_loop -.Lcbc_slow_dec_done: - movq 56(%rsp),%rdi - movq %r11,0(%rdi) - movq %r12,8(%rdi) - - movl %eax,0(%r9) - movl %ebx,4(%r9) - movl %ecx,8(%r9) - movl %edx,12(%r9) - - jmp .Lcbc_exit - -.align 4 -.Lcbc_slow_dec_partial: - movq 56(%rsp),%rdi - movq %r11,0(%rdi) - movq %r12,8(%rdi) - - movl %eax,0+64(%rsp) - movl %ebx,4+64(%rsp) - movl %ecx,8+64(%rsp) - movl %edx,12+64(%rsp) - - movq %r9,%rdi - leaq 64(%rsp),%rsi - leaq 16(%r10),%rcx -.long 0x9066A4F3 - jmp .Lcbc_exit - -.align 16 -.Lcbc_exit: - movq 16(%rsp),%rsi -.cfi_def_cfa %rsi,64 - movq (%rsi),%r15 -.cfi_restore %r15 - movq 8(%rsi),%r14 -.cfi_restore %r14 - movq 16(%rsi),%r13 -.cfi_restore %r13 - movq 24(%rsi),%r12 -.cfi_restore %r12 - movq 32(%rsi),%rbp -.cfi_restore %rbp - movq 40(%rsi),%rbx -.cfi_restore %rbx - leaq 48(%rsi),%rsp -.cfi_def_cfa %rsp,16 -.Lcbc_popfq: - popfq - - -.cfi_adjust_cfa_offset -8 -.Lcbc_epilogue: - .byte 0xf3,0xc3 -.cfi_endproc -.size AES_cbc_encrypt,.-AES_cbc_encrypt -.align 64 -.LAES_Te: -.long 0xa56363c6,0xa56363c6 -.long 0x847c7cf8,0x847c7cf8 -.long 0x997777ee,0x997777ee -.long 0x8d7b7bf6,0x8d7b7bf6 -.long 0x0df2f2ff,0x0df2f2ff -.long 0xbd6b6bd6,0xbd6b6bd6 -.long 0xb16f6fde,0xb16f6fde -.long 0x54c5c591,0x54c5c591 -.long 0x50303060,0x50303060 -.long 0x03010102,0x03010102 -.long 0xa96767ce,0xa96767ce -.long 0x7d2b2b56,0x7d2b2b56 -.long 0x19fefee7,0x19fefee7 -.long 0x62d7d7b5,0x62d7d7b5 -.long 0xe6abab4d,0xe6abab4d -.long 0x9a7676ec,0x9a7676ec -.long 0x45caca8f,0x45caca8f -.long 0x9d82821f,0x9d82821f -.long 0x40c9c989,0x40c9c989 -.long 0x877d7dfa,0x877d7dfa -.long 0x15fafaef,0x15fafaef -.long 0xeb5959b2,0xeb5959b2 -.long 0xc947478e,0xc947478e -.long 0x0bf0f0fb,0x0bf0f0fb -.long 0xecadad41,0xecadad41 -.long 0x67d4d4b3,0x67d4d4b3 -.long 0xfda2a25f,0xfda2a25f -.long 0xeaafaf45,0xeaafaf45 -.long 0xbf9c9c23,0xbf9c9c23 -.long 0xf7a4a453,0xf7a4a453 -.long 0x967272e4,0x967272e4 -.long 0x5bc0c09b,0x5bc0c09b -.long 0xc2b7b775,0xc2b7b775 -.long 0x1cfdfde1,0x1cfdfde1 -.long 0xae93933d,0xae93933d -.long 0x6a26264c,0x6a26264c -.long 0x5a36366c,0x5a36366c -.long 0x413f3f7e,0x413f3f7e -.long 0x02f7f7f5,0x02f7f7f5 -.long 0x4fcccc83,0x4fcccc83 -.long 0x5c343468,0x5c343468 -.long 0xf4a5a551,0xf4a5a551 -.long 0x34e5e5d1,0x34e5e5d1 -.long 0x08f1f1f9,0x08f1f1f9 -.long 0x937171e2,0x937171e2 -.long 0x73d8d8ab,0x73d8d8ab -.long 0x53313162,0x53313162 -.long 0x3f15152a,0x3f15152a -.long 0x0c040408,0x0c040408 -.long 0x52c7c795,0x52c7c795 -.long 0x65232346,0x65232346 -.long 0x5ec3c39d,0x5ec3c39d -.long 0x28181830,0x28181830 -.long 0xa1969637,0xa1969637 -.long 0x0f05050a,0x0f05050a -.long 0xb59a9a2f,0xb59a9a2f -.long 0x0907070e,0x0907070e -.long 0x36121224,0x36121224 -.long 0x9b80801b,0x9b80801b -.long 0x3de2e2df,0x3de2e2df -.long 0x26ebebcd,0x26ebebcd -.long 0x6927274e,0x6927274e -.long 0xcdb2b27f,0xcdb2b27f -.long 0x9f7575ea,0x9f7575ea -.long 0x1b090912,0x1b090912 -.long 0x9e83831d,0x9e83831d -.long 0x742c2c58,0x742c2c58 -.long 0x2e1a1a34,0x2e1a1a34 -.long 0x2d1b1b36,0x2d1b1b36 -.long 0xb26e6edc,0xb26e6edc -.long 0xee5a5ab4,0xee5a5ab4 -.long 0xfba0a05b,0xfba0a05b -.long 0xf65252a4,0xf65252a4 -.long 0x4d3b3b76,0x4d3b3b76 -.long 0x61d6d6b7,0x61d6d6b7 -.long 0xceb3b37d,0xceb3b37d -.long 
0x7b292952,0x7b292952 -.long 0x3ee3e3dd,0x3ee3e3dd -.long 0x712f2f5e,0x712f2f5e -.long 0x97848413,0x97848413 -.long 0xf55353a6,0xf55353a6 -.long 0x68d1d1b9,0x68d1d1b9 -.long 0x00000000,0x00000000 -.long 0x2cededc1,0x2cededc1 -.long 0x60202040,0x60202040 -.long 0x1ffcfce3,0x1ffcfce3 -.long 0xc8b1b179,0xc8b1b179 -.long 0xed5b5bb6,0xed5b5bb6 -.long 0xbe6a6ad4,0xbe6a6ad4 -.long 0x46cbcb8d,0x46cbcb8d -.long 0xd9bebe67,0xd9bebe67 -.long 0x4b393972,0x4b393972 -.long 0xde4a4a94,0xde4a4a94 -.long 0xd44c4c98,0xd44c4c98 -.long 0xe85858b0,0xe85858b0 -.long 0x4acfcf85,0x4acfcf85 -.long 0x6bd0d0bb,0x6bd0d0bb -.long 0x2aefefc5,0x2aefefc5 -.long 0xe5aaaa4f,0xe5aaaa4f -.long 0x16fbfbed,0x16fbfbed -.long 0xc5434386,0xc5434386 -.long 0xd74d4d9a,0xd74d4d9a -.long 0x55333366,0x55333366 -.long 0x94858511,0x94858511 -.long 0xcf45458a,0xcf45458a -.long 0x10f9f9e9,0x10f9f9e9 -.long 0x06020204,0x06020204 -.long 0x817f7ffe,0x817f7ffe -.long 0xf05050a0,0xf05050a0 -.long 0x443c3c78,0x443c3c78 -.long 0xba9f9f25,0xba9f9f25 -.long 0xe3a8a84b,0xe3a8a84b -.long 0xf35151a2,0xf35151a2 -.long 0xfea3a35d,0xfea3a35d -.long 0xc0404080,0xc0404080 -.long 0x8a8f8f05,0x8a8f8f05 -.long 0xad92923f,0xad92923f -.long 0xbc9d9d21,0xbc9d9d21 -.long 0x48383870,0x48383870 -.long 0x04f5f5f1,0x04f5f5f1 -.long 0xdfbcbc63,0xdfbcbc63 -.long 0xc1b6b677,0xc1b6b677 -.long 0x75dadaaf,0x75dadaaf -.long 0x63212142,0x63212142 -.long 0x30101020,0x30101020 -.long 0x1affffe5,0x1affffe5 -.long 0x0ef3f3fd,0x0ef3f3fd -.long 0x6dd2d2bf,0x6dd2d2bf -.long 0x4ccdcd81,0x4ccdcd81 -.long 0x140c0c18,0x140c0c18 -.long 0x35131326,0x35131326 -.long 0x2fececc3,0x2fececc3 -.long 0xe15f5fbe,0xe15f5fbe -.long 0xa2979735,0xa2979735 -.long 0xcc444488,0xcc444488 -.long 0x3917172e,0x3917172e -.long 0x57c4c493,0x57c4c493 -.long 0xf2a7a755,0xf2a7a755 -.long 0x827e7efc,0x827e7efc -.long 0x473d3d7a,0x473d3d7a -.long 0xac6464c8,0xac6464c8 -.long 0xe75d5dba,0xe75d5dba -.long 0x2b191932,0x2b191932 -.long 0x957373e6,0x957373e6 -.long 0xa06060c0,0xa06060c0 -.long 0x98818119,0x98818119 -.long 0xd14f4f9e,0xd14f4f9e -.long 0x7fdcdca3,0x7fdcdca3 -.long 0x66222244,0x66222244 -.long 0x7e2a2a54,0x7e2a2a54 -.long 0xab90903b,0xab90903b -.long 0x8388880b,0x8388880b -.long 0xca46468c,0xca46468c -.long 0x29eeeec7,0x29eeeec7 -.long 0xd3b8b86b,0xd3b8b86b -.long 0x3c141428,0x3c141428 -.long 0x79dedea7,0x79dedea7 -.long 0xe25e5ebc,0xe25e5ebc -.long 0x1d0b0b16,0x1d0b0b16 -.long 0x76dbdbad,0x76dbdbad -.long 0x3be0e0db,0x3be0e0db -.long 0x56323264,0x56323264 -.long 0x4e3a3a74,0x4e3a3a74 -.long 0x1e0a0a14,0x1e0a0a14 -.long 0xdb494992,0xdb494992 -.long 0x0a06060c,0x0a06060c -.long 0x6c242448,0x6c242448 -.long 0xe45c5cb8,0xe45c5cb8 -.long 0x5dc2c29f,0x5dc2c29f -.long 0x6ed3d3bd,0x6ed3d3bd -.long 0xefacac43,0xefacac43 -.long 0xa66262c4,0xa66262c4 -.long 0xa8919139,0xa8919139 -.long 0xa4959531,0xa4959531 -.long 0x37e4e4d3,0x37e4e4d3 -.long 0x8b7979f2,0x8b7979f2 -.long 0x32e7e7d5,0x32e7e7d5 -.long 0x43c8c88b,0x43c8c88b -.long 0x5937376e,0x5937376e -.long 0xb76d6dda,0xb76d6dda -.long 0x8c8d8d01,0x8c8d8d01 -.long 0x64d5d5b1,0x64d5d5b1 -.long 0xd24e4e9c,0xd24e4e9c -.long 0xe0a9a949,0xe0a9a949 -.long 0xb46c6cd8,0xb46c6cd8 -.long 0xfa5656ac,0xfa5656ac -.long 0x07f4f4f3,0x07f4f4f3 -.long 0x25eaeacf,0x25eaeacf -.long 0xaf6565ca,0xaf6565ca -.long 0x8e7a7af4,0x8e7a7af4 -.long 0xe9aeae47,0xe9aeae47 -.long 0x18080810,0x18080810 -.long 0xd5baba6f,0xd5baba6f -.long 0x887878f0,0x887878f0 -.long 0x6f25254a,0x6f25254a -.long 0x722e2e5c,0x722e2e5c -.long 0x241c1c38,0x241c1c38 -.long 0xf1a6a657,0xf1a6a657 -.long 
0xc7b4b473,0xc7b4b473 -.long 0x51c6c697,0x51c6c697 -.long 0x23e8e8cb,0x23e8e8cb -.long 0x7cdddda1,0x7cdddda1 -.long 0x9c7474e8,0x9c7474e8 -.long 0x211f1f3e,0x211f1f3e -.long 0xdd4b4b96,0xdd4b4b96 -.long 0xdcbdbd61,0xdcbdbd61 -.long 0x868b8b0d,0x868b8b0d -.long 0x858a8a0f,0x858a8a0f -.long 0x907070e0,0x907070e0 -.long 0x423e3e7c,0x423e3e7c -.long 0xc4b5b571,0xc4b5b571 -.long 0xaa6666cc,0xaa6666cc -.long 0xd8484890,0xd8484890 -.long 0x05030306,0x05030306 -.long 0x01f6f6f7,0x01f6f6f7 -.long 0x120e0e1c,0x120e0e1c -.long 0xa36161c2,0xa36161c2 -.long 0x5f35356a,0x5f35356a -.long 0xf95757ae,0xf95757ae -.long 0xd0b9b969,0xd0b9b969 -.long 0x91868617,0x91868617 -.long 0x58c1c199,0x58c1c199 -.long 0x271d1d3a,0x271d1d3a -.long 0xb99e9e27,0xb99e9e27 -.long 0x38e1e1d9,0x38e1e1d9 -.long 0x13f8f8eb,0x13f8f8eb -.long 0xb398982b,0xb398982b -.long 0x33111122,0x33111122 -.long 0xbb6969d2,0xbb6969d2 -.long 0x70d9d9a9,0x70d9d9a9 -.long 0x898e8e07,0x898e8e07 -.long 0xa7949433,0xa7949433 -.long 0xb69b9b2d,0xb69b9b2d -.long 0x221e1e3c,0x221e1e3c -.long 0x92878715,0x92878715 -.long 0x20e9e9c9,0x20e9e9c9 -.long 0x49cece87,0x49cece87 -.long 0xff5555aa,0xff5555aa -.long 0x78282850,0x78282850 -.long 0x7adfdfa5,0x7adfdfa5 -.long 0x8f8c8c03,0x8f8c8c03 -.long 0xf8a1a159,0xf8a1a159 -.long 0x80898909,0x80898909 -.long 0x170d0d1a,0x170d0d1a -.long 0xdabfbf65,0xdabfbf65 -.long 0x31e6e6d7,0x31e6e6d7 -.long 0xc6424284,0xc6424284 -.long 0xb86868d0,0xb86868d0 -.long 0xc3414182,0xc3414182 -.long 0xb0999929,0xb0999929 -.long 0x772d2d5a,0x772d2d5a -.long 0x110f0f1e,0x110f0f1e -.long 0xcbb0b07b,0xcbb0b07b -.long 0xfc5454a8,0xfc5454a8 -.long 0xd6bbbb6d,0xd6bbbb6d -.long 0x3a16162c,0x3a16162c -.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5 -.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76 -.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0 -.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0 -.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc -.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15 -.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a -.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75 -.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0 -.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84 -.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b -.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf -.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85 -.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8 -.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5 -.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2 -.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17 -.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73 -.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88 -.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb -.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c -.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79 -.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9 -.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08 -.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6 -.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a -.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e -.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e -.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94 -.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf -.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68 -.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16 -.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5 -.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76 -.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0 -.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0 -.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc -.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15 -.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a -.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75 
-.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0 -.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84 -.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b -.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf -.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85 -.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8 -.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5 -.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2 -.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17 -.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73 -.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88 -.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb -.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c -.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79 -.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9 -.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08 -.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6 -.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a -.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e -.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e -.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94 -.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf -.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68 -.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16 -.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5 -.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76 -.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0 -.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0 -.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc -.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15 -.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a -.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75 -.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0 -.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84 -.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b -.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf -.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85 -.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8 -.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5 -.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2 -.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17 -.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73 -.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88 -.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb -.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c -.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79 -.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9 -.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08 -.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6 -.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a -.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e -.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e -.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94 -.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf -.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68 -.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16 -.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5 -.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76 -.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0 -.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0 -.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc -.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15 -.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a -.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75 -.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0 -.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84 -.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b -.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf -.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85 -.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8 -.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5 -.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2 -.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17 -.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73 -.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88 -.byte 
0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb -.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c -.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79 -.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9 -.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08 -.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6 -.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a -.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e -.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e -.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94 -.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf -.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68 -.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16 -.long 0x00000001, 0x00000002, 0x00000004, 0x00000008 -.long 0x00000010, 0x00000020, 0x00000040, 0x00000080 -.long 0x0000001b, 0x00000036, 0x80808080, 0x80808080 -.long 0xfefefefe, 0xfefefefe, 0x1b1b1b1b, 0x1b1b1b1b -.align 64 -.LAES_Td: -.long 0x50a7f451,0x50a7f451 -.long 0x5365417e,0x5365417e -.long 0xc3a4171a,0xc3a4171a -.long 0x965e273a,0x965e273a -.long 0xcb6bab3b,0xcb6bab3b -.long 0xf1459d1f,0xf1459d1f -.long 0xab58faac,0xab58faac -.long 0x9303e34b,0x9303e34b -.long 0x55fa3020,0x55fa3020 -.long 0xf66d76ad,0xf66d76ad -.long 0x9176cc88,0x9176cc88 -.long 0x254c02f5,0x254c02f5 -.long 0xfcd7e54f,0xfcd7e54f -.long 0xd7cb2ac5,0xd7cb2ac5 -.long 0x80443526,0x80443526 -.long 0x8fa362b5,0x8fa362b5 -.long 0x495ab1de,0x495ab1de -.long 0x671bba25,0x671bba25 -.long 0x980eea45,0x980eea45 -.long 0xe1c0fe5d,0xe1c0fe5d -.long 0x02752fc3,0x02752fc3 -.long 0x12f04c81,0x12f04c81 -.long 0xa397468d,0xa397468d -.long 0xc6f9d36b,0xc6f9d36b -.long 0xe75f8f03,0xe75f8f03 -.long 0x959c9215,0x959c9215 -.long 0xeb7a6dbf,0xeb7a6dbf -.long 0xda595295,0xda595295 -.long 0x2d83bed4,0x2d83bed4 -.long 0xd3217458,0xd3217458 -.long 0x2969e049,0x2969e049 -.long 0x44c8c98e,0x44c8c98e -.long 0x6a89c275,0x6a89c275 -.long 0x78798ef4,0x78798ef4 -.long 0x6b3e5899,0x6b3e5899 -.long 0xdd71b927,0xdd71b927 -.long 0xb64fe1be,0xb64fe1be -.long 0x17ad88f0,0x17ad88f0 -.long 0x66ac20c9,0x66ac20c9 -.long 0xb43ace7d,0xb43ace7d -.long 0x184adf63,0x184adf63 -.long 0x82311ae5,0x82311ae5 -.long 0x60335197,0x60335197 -.long 0x457f5362,0x457f5362 -.long 0xe07764b1,0xe07764b1 -.long 0x84ae6bbb,0x84ae6bbb -.long 0x1ca081fe,0x1ca081fe -.long 0x942b08f9,0x942b08f9 -.long 0x58684870,0x58684870 -.long 0x19fd458f,0x19fd458f -.long 0x876cde94,0x876cde94 -.long 0xb7f87b52,0xb7f87b52 -.long 0x23d373ab,0x23d373ab -.long 0xe2024b72,0xe2024b72 -.long 0x578f1fe3,0x578f1fe3 -.long 0x2aab5566,0x2aab5566 -.long 0x0728ebb2,0x0728ebb2 -.long 0x03c2b52f,0x03c2b52f -.long 0x9a7bc586,0x9a7bc586 -.long 0xa50837d3,0xa50837d3 -.long 0xf2872830,0xf2872830 -.long 0xb2a5bf23,0xb2a5bf23 -.long 0xba6a0302,0xba6a0302 -.long 0x5c8216ed,0x5c8216ed -.long 0x2b1ccf8a,0x2b1ccf8a -.long 0x92b479a7,0x92b479a7 -.long 0xf0f207f3,0xf0f207f3 -.long 0xa1e2694e,0xa1e2694e -.long 0xcdf4da65,0xcdf4da65 -.long 0xd5be0506,0xd5be0506 -.long 0x1f6234d1,0x1f6234d1 -.long 0x8afea6c4,0x8afea6c4 -.long 0x9d532e34,0x9d532e34 -.long 0xa055f3a2,0xa055f3a2 -.long 0x32e18a05,0x32e18a05 -.long 0x75ebf6a4,0x75ebf6a4 -.long 0x39ec830b,0x39ec830b -.long 0xaaef6040,0xaaef6040 -.long 0x069f715e,0x069f715e -.long 0x51106ebd,0x51106ebd -.long 0xf98a213e,0xf98a213e -.long 0x3d06dd96,0x3d06dd96 -.long 0xae053edd,0xae053edd -.long 0x46bde64d,0x46bde64d -.long 0xb58d5491,0xb58d5491 -.long 0x055dc471,0x055dc471 -.long 0x6fd40604,0x6fd40604 -.long 0xff155060,0xff155060 -.long 0x24fb9819,0x24fb9819 -.long 0x97e9bdd6,0x97e9bdd6 -.long 0xcc434089,0xcc434089 -.long 0x779ed967,0x779ed967 -.long 0xbd42e8b0,0xbd42e8b0 -.long 
0x888b8907,0x888b8907 -.long 0x385b19e7,0x385b19e7 -.long 0xdbeec879,0xdbeec879 -.long 0x470a7ca1,0x470a7ca1 -.long 0xe90f427c,0xe90f427c -.long 0xc91e84f8,0xc91e84f8 -.long 0x00000000,0x00000000 -.long 0x83868009,0x83868009 -.long 0x48ed2b32,0x48ed2b32 -.long 0xac70111e,0xac70111e -.long 0x4e725a6c,0x4e725a6c -.long 0xfbff0efd,0xfbff0efd -.long 0x5638850f,0x5638850f -.long 0x1ed5ae3d,0x1ed5ae3d -.long 0x27392d36,0x27392d36 -.long 0x64d90f0a,0x64d90f0a -.long 0x21a65c68,0x21a65c68 -.long 0xd1545b9b,0xd1545b9b -.long 0x3a2e3624,0x3a2e3624 -.long 0xb1670a0c,0xb1670a0c -.long 0x0fe75793,0x0fe75793 -.long 0xd296eeb4,0xd296eeb4 -.long 0x9e919b1b,0x9e919b1b -.long 0x4fc5c080,0x4fc5c080 -.long 0xa220dc61,0xa220dc61 -.long 0x694b775a,0x694b775a -.long 0x161a121c,0x161a121c -.long 0x0aba93e2,0x0aba93e2 -.long 0xe52aa0c0,0xe52aa0c0 -.long 0x43e0223c,0x43e0223c -.long 0x1d171b12,0x1d171b12 -.long 0x0b0d090e,0x0b0d090e -.long 0xadc78bf2,0xadc78bf2 -.long 0xb9a8b62d,0xb9a8b62d -.long 0xc8a91e14,0xc8a91e14 -.long 0x8519f157,0x8519f157 -.long 0x4c0775af,0x4c0775af -.long 0xbbdd99ee,0xbbdd99ee -.long 0xfd607fa3,0xfd607fa3 -.long 0x9f2601f7,0x9f2601f7 -.long 0xbcf5725c,0xbcf5725c -.long 0xc53b6644,0xc53b6644 -.long 0x347efb5b,0x347efb5b -.long 0x7629438b,0x7629438b -.long 0xdcc623cb,0xdcc623cb -.long 0x68fcedb6,0x68fcedb6 -.long 0x63f1e4b8,0x63f1e4b8 -.long 0xcadc31d7,0xcadc31d7 -.long 0x10856342,0x10856342 -.long 0x40229713,0x40229713 -.long 0x2011c684,0x2011c684 -.long 0x7d244a85,0x7d244a85 -.long 0xf83dbbd2,0xf83dbbd2 -.long 0x1132f9ae,0x1132f9ae -.long 0x6da129c7,0x6da129c7 -.long 0x4b2f9e1d,0x4b2f9e1d -.long 0xf330b2dc,0xf330b2dc -.long 0xec52860d,0xec52860d -.long 0xd0e3c177,0xd0e3c177 -.long 0x6c16b32b,0x6c16b32b -.long 0x99b970a9,0x99b970a9 -.long 0xfa489411,0xfa489411 -.long 0x2264e947,0x2264e947 -.long 0xc48cfca8,0xc48cfca8 -.long 0x1a3ff0a0,0x1a3ff0a0 -.long 0xd82c7d56,0xd82c7d56 -.long 0xef903322,0xef903322 -.long 0xc74e4987,0xc74e4987 -.long 0xc1d138d9,0xc1d138d9 -.long 0xfea2ca8c,0xfea2ca8c -.long 0x360bd498,0x360bd498 -.long 0xcf81f5a6,0xcf81f5a6 -.long 0x28de7aa5,0x28de7aa5 -.long 0x268eb7da,0x268eb7da -.long 0xa4bfad3f,0xa4bfad3f -.long 0xe49d3a2c,0xe49d3a2c -.long 0x0d927850,0x0d927850 -.long 0x9bcc5f6a,0x9bcc5f6a -.long 0x62467e54,0x62467e54 -.long 0xc2138df6,0xc2138df6 -.long 0xe8b8d890,0xe8b8d890 -.long 0x5ef7392e,0x5ef7392e -.long 0xf5afc382,0xf5afc382 -.long 0xbe805d9f,0xbe805d9f -.long 0x7c93d069,0x7c93d069 -.long 0xa92dd56f,0xa92dd56f -.long 0xb31225cf,0xb31225cf -.long 0x3b99acc8,0x3b99acc8 -.long 0xa77d1810,0xa77d1810 -.long 0x6e639ce8,0x6e639ce8 -.long 0x7bbb3bdb,0x7bbb3bdb -.long 0x097826cd,0x097826cd -.long 0xf418596e,0xf418596e -.long 0x01b79aec,0x01b79aec -.long 0xa89a4f83,0xa89a4f83 -.long 0x656e95e6,0x656e95e6 -.long 0x7ee6ffaa,0x7ee6ffaa -.long 0x08cfbc21,0x08cfbc21 -.long 0xe6e815ef,0xe6e815ef -.long 0xd99be7ba,0xd99be7ba -.long 0xce366f4a,0xce366f4a -.long 0xd4099fea,0xd4099fea -.long 0xd67cb029,0xd67cb029 -.long 0xafb2a431,0xafb2a431 -.long 0x31233f2a,0x31233f2a -.long 0x3094a5c6,0x3094a5c6 -.long 0xc066a235,0xc066a235 -.long 0x37bc4e74,0x37bc4e74 -.long 0xa6ca82fc,0xa6ca82fc -.long 0xb0d090e0,0xb0d090e0 -.long 0x15d8a733,0x15d8a733 -.long 0x4a9804f1,0x4a9804f1 -.long 0xf7daec41,0xf7daec41 -.long 0x0e50cd7f,0x0e50cd7f -.long 0x2ff69117,0x2ff69117 -.long 0x8dd64d76,0x8dd64d76 -.long 0x4db0ef43,0x4db0ef43 -.long 0x544daacc,0x544daacc -.long 0xdf0496e4,0xdf0496e4 -.long 0xe3b5d19e,0xe3b5d19e -.long 0x1b886a4c,0x1b886a4c -.long 0xb81f2cc1,0xb81f2cc1 -.long 
0x7f516546,0x7f516546 -.long 0x04ea5e9d,0x04ea5e9d -.long 0x5d358c01,0x5d358c01 -.long 0x737487fa,0x737487fa -.long 0x2e410bfb,0x2e410bfb -.long 0x5a1d67b3,0x5a1d67b3 -.long 0x52d2db92,0x52d2db92 -.long 0x335610e9,0x335610e9 -.long 0x1347d66d,0x1347d66d -.long 0x8c61d79a,0x8c61d79a -.long 0x7a0ca137,0x7a0ca137 -.long 0x8e14f859,0x8e14f859 -.long 0x893c13eb,0x893c13eb -.long 0xee27a9ce,0xee27a9ce -.long 0x35c961b7,0x35c961b7 -.long 0xede51ce1,0xede51ce1 -.long 0x3cb1477a,0x3cb1477a -.long 0x59dfd29c,0x59dfd29c -.long 0x3f73f255,0x3f73f255 -.long 0x79ce1418,0x79ce1418 -.long 0xbf37c773,0xbf37c773 -.long 0xeacdf753,0xeacdf753 -.long 0x5baafd5f,0x5baafd5f -.long 0x146f3ddf,0x146f3ddf -.long 0x86db4478,0x86db4478 -.long 0x81f3afca,0x81f3afca -.long 0x3ec468b9,0x3ec468b9 -.long 0x2c342438,0x2c342438 -.long 0x5f40a3c2,0x5f40a3c2 -.long 0x72c31d16,0x72c31d16 -.long 0x0c25e2bc,0x0c25e2bc -.long 0x8b493c28,0x8b493c28 -.long 0x41950dff,0x41950dff -.long 0x7101a839,0x7101a839 -.long 0xdeb30c08,0xdeb30c08 -.long 0x9ce4b4d8,0x9ce4b4d8 -.long 0x90c15664,0x90c15664 -.long 0x6184cb7b,0x6184cb7b -.long 0x70b632d5,0x70b632d5 -.long 0x745c6c48,0x745c6c48 -.long 0x4257b8d0,0x4257b8d0 -.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38 -.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb -.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87 -.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb -.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d -.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e -.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2 -.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25 -.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16 -.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92 -.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda -.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84 -.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a -.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06 -.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02 -.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b -.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea -.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73 -.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85 -.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e -.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89 -.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b -.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20 -.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4 -.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31 -.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f -.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d -.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef -.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0 -.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61 -.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26 -.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d -.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe -.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0 -.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38 -.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb -.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87 -.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb -.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d -.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e -.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2 -.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25 -.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16 -.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92 -.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda -.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84 -.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a -.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06 -.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02 -.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b -.byte 
0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea -.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73 -.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85 -.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e -.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89 -.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b -.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20 -.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4 -.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31 -.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f -.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d -.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef -.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0 -.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61 -.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26 -.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d -.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe -.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0 -.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38 -.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb -.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87 -.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb -.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d -.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e -.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2 -.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25 -.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16 -.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92 -.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda -.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84 -.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a -.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06 -.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02 -.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b -.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea -.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73 -.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85 -.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e -.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89 -.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b -.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20 -.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4 -.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31 -.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f -.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d -.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef -.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0 -.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61 -.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26 -.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d -.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe -.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0 -.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38 -.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb -.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87 -.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb -.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d -.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e -.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2 -.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25 -.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16 -.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92 -.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda -.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84 -.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a -.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06 -.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02 -.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b -.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea -.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73 -.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85 -.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e -.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89 -.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b -.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20 -.byte 
0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4 -.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31 -.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f -.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d -.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef -.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0 -.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61 -.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26 -.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d -.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe -.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0 -.byte 65,69,83,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 -.align 64 diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-sha1-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-sha1-x86_64.s index 4d2dfe4489..978bd2b623 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-sha1-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-sha1-x86_64.s @@ -5,6 +5,7 @@ .type aesni_cbc_sha1_enc,@function .align 32 aesni_cbc_sha1_enc: +.cfi_startproc movl OPENSSL_ia32cap_P+0(%rip),%r10d movq OPENSSL_ia32cap_P+4(%rip),%r11 @@ -17,6 +18,7 @@ aesni_cbc_sha1_enc: je aesni_cbc_sha1_enc_avx jmp aesni_cbc_sha1_enc_ssse3 .byte 0xf3,0xc3 +.cfi_endproc .size aesni_cbc_sha1_enc,.-aesni_cbc_sha1_enc .type aesni_cbc_sha1_enc_ssse3,@function .align 32 @@ -2730,6 +2732,7 @@ K_XX_XX: .type aesni_cbc_sha1_enc_shaext,@function .align 32 aesni_cbc_sha1_enc_shaext: +.cfi_startproc movq 8(%rsp),%r10 movdqu (%r9),%xmm8 movd 16(%r9),%xmm9 @@ -3028,4 +3031,5 @@ aesni_cbc_sha1_enc_shaext: movdqu %xmm8,(%r9) movd %xmm9,16(%r9) .byte 0xf3,0xc3 +.cfi_endproc .size aesni_cbc_sha1_enc_shaext,.-aesni_cbc_sha1_enc_shaext diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-sha256-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-sha256-x86_64.s index 5a47b3ee51..dd09f1b290 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-sha256-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-sha256-x86_64.s @@ -5,6 +5,7 @@ .type aesni_cbc_sha256_enc,@function .align 16 aesni_cbc_sha256_enc: +.cfi_startproc leaq OPENSSL_ia32cap_P(%rip),%r11 movl $1,%eax cmpq $0,%rdi @@ -30,6 +31,7 @@ aesni_cbc_sha256_enc: ud2 .Lprobe: .byte 0xf3,0xc3 +.cfi_endproc .size aesni_cbc_sha256_enc,.-aesni_cbc_sha256_enc .align 64 @@ -2528,7 +2530,15 @@ aesni_cbc_sha256_enc_avx2: vmovdqa %ymm4,0(%rsp) xorl %r14d,%r14d vmovdqa %ymm5,32(%rsp) + + movq 120(%rsp),%rsi +.cfi_def_cfa %rsi,8 leaq -64(%rsp),%rsp + + + + movq %rsi,-8(%rsp) +.cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 movl %ebx,%esi vmovdqa %ymm6,0(%rsp) xorl %ecx,%esi @@ -2542,6 +2552,12 @@ aesni_cbc_sha256_enc_avx2: vmovdqu (%r13),%xmm9 vpinsrq $0,%r13,%xmm15,%xmm15 leaq -64(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x38,0x06,0x23,0x08 + + pushq 64-8(%rsp) +.cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08 + leaq 8(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 vpalignr $4,%ymm0,%ymm1,%ymm4 addl 0+128(%rsp),%r11d andl %r8d,%r12d @@ -2816,6 +2832,12 @@ aesni_cbc_sha256_enc_avx2: movl %r9d,%r12d vmovdqa %ymm6,32(%rsp) leaq -64(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x38,0x06,0x23,0x08 + + pushq 64-8(%rsp) +.cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08 + leaq 8(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 vpalignr $4,%ymm2,%ymm3,%ymm4 addl 0+128(%rsp),%r11d andl %r8d,%r12d @@ -4029,10 +4051,12 @@ 
aesni_cbc_sha256_enc_avx2: jbe .Loop_avx2 leaq (%rsp),%rbp + +.cfi_escape 0x0f,0x06,0x76,0xf8,0x00,0x06,0x23,0x08 + .Ldone_avx2: - leaq (%rbp),%rsp - movq 64+32(%rsp),%r8 - movq 120(%rsp),%rsi + movq 64+32(%rbp),%r8 + movq 64+56(%rbp),%rsi .cfi_def_cfa %rsi,8 vmovdqu %xmm8,(%r8) vzeroall @@ -4057,6 +4081,7 @@ aesni_cbc_sha256_enc_avx2: .type aesni_cbc_sha256_enc_shaext,@function .align 32 aesni_cbc_sha256_enc_shaext: +.cfi_startproc movq 8(%rsp),%r10 leaq K256+128(%rip),%rax movdqu (%r9),%xmm1 @@ -4406,4 +4431,5 @@ aesni_cbc_sha256_enc_shaext: movdqu %xmm1,(%r9) movdqu %xmm2,16(%r9) .byte 0xf3,0xc3 +.cfi_endproc .size aesni_cbc_sha256_enc_shaext,.-aesni_cbc_sha256_enc_shaext diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-x86_64.s index 2986a647c1..c1e791eff5 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/aesni-x86_64.s @@ -861,6 +861,7 @@ aesni_ecb_encrypt: .type aesni_ccm64_encrypt_blocks,@function .align 16 aesni_ccm64_encrypt_blocks: +.cfi_startproc movl 240(%rcx),%eax movdqu (%r8),%xmm6 movdqa .Lincrement64(%rip),%xmm9 @@ -919,11 +920,13 @@ aesni_ccm64_encrypt_blocks: pxor %xmm8,%xmm8 pxor %xmm6,%xmm6 .byte 0xf3,0xc3 +.cfi_endproc .size aesni_ccm64_encrypt_blocks,.-aesni_ccm64_encrypt_blocks .globl aesni_ccm64_decrypt_blocks .type aesni_ccm64_decrypt_blocks,@function .align 16 aesni_ccm64_decrypt_blocks: +.cfi_startproc movl 240(%rcx),%eax movups (%r8),%xmm6 movdqu (%r9),%xmm3 @@ -1016,6 +1019,7 @@ aesni_ccm64_decrypt_blocks: pxor %xmm8,%xmm8 pxor %xmm6,%xmm6 .byte 0xf3,0xc3 +.cfi_endproc .size aesni_ccm64_decrypt_blocks,.-aesni_ccm64_decrypt_blocks .globl aesni_ctr32_encrypt_blocks .type aesni_ctr32_encrypt_blocks,@function @@ -2790,6 +2794,7 @@ aesni_ocb_encrypt: .type __ocb_encrypt6,@function .align 32 __ocb_encrypt6: +.cfi_startproc pxor %xmm9,%xmm15 movdqu (%rbx,%r12,1),%xmm11 movdqa %xmm10,%xmm12 @@ -2887,11 +2892,13 @@ __ocb_encrypt6: .byte 102,65,15,56,221,246 .byte 102,65,15,56,221,255 .byte 0xf3,0xc3 +.cfi_endproc .size __ocb_encrypt6,.-__ocb_encrypt6 .type __ocb_encrypt4,@function .align 32 __ocb_encrypt4: +.cfi_startproc pxor %xmm9,%xmm15 movdqu (%rbx,%r12,1),%xmm11 movdqa %xmm10,%xmm12 @@ -2956,11 +2963,13 @@ __ocb_encrypt4: .byte 102,65,15,56,221,228 .byte 102,65,15,56,221,237 .byte 0xf3,0xc3 +.cfi_endproc .size __ocb_encrypt4,.-__ocb_encrypt4 .type __ocb_encrypt1,@function .align 32 __ocb_encrypt1: +.cfi_startproc pxor %xmm15,%xmm7 pxor %xmm9,%xmm7 pxor %xmm2,%xmm8 @@ -2991,6 +3000,7 @@ __ocb_encrypt1: .byte 102,15,56,221,215 .byte 0xf3,0xc3 +.cfi_endproc .size __ocb_encrypt1,.-__ocb_encrypt1 .globl aesni_ocb_decrypt @@ -3233,6 +3243,7 @@ aesni_ocb_decrypt: .type __ocb_decrypt6,@function .align 32 __ocb_decrypt6: +.cfi_startproc pxor %xmm9,%xmm15 movdqu (%rbx,%r12,1),%xmm11 movdqa %xmm10,%xmm12 @@ -3324,11 +3335,13 @@ __ocb_decrypt6: .byte 102,65,15,56,223,246 .byte 102,65,15,56,223,255 .byte 0xf3,0xc3 +.cfi_endproc .size __ocb_decrypt6,.-__ocb_decrypt6 .type __ocb_decrypt4,@function .align 32 __ocb_decrypt4: +.cfi_startproc pxor %xmm9,%xmm15 movdqu (%rbx,%r12,1),%xmm11 movdqa %xmm10,%xmm12 @@ -3389,11 +3402,13 @@ __ocb_decrypt4: .byte 102,65,15,56,223,228 .byte 102,65,15,56,223,237 .byte 0xf3,0xc3 +.cfi_endproc .size __ocb_decrypt4,.-__ocb_decrypt4 .type __ocb_decrypt1,@function .align 32 __ocb_decrypt1: +.cfi_startproc pxor %xmm15,%xmm7 pxor %xmm9,%xmm7 pxor %xmm7,%xmm2 @@ -3423,6 
+3438,7 @@ __ocb_decrypt1: .byte 102,15,56,223,215 .byte 0xf3,0xc3 +.cfi_endproc .size __ocb_decrypt1,.-__ocb_decrypt1 .globl aesni_cbc_encrypt .type aesni_cbc_encrypt,@function @@ -4361,7 +4377,6 @@ __aesni_set_encrypt_key: addq $8,%rsp .cfi_adjust_cfa_offset -8 .byte 0xf3,0xc3 -.cfi_endproc .LSEH_end_set_encrypt_key: .align 16 @@ -4432,6 +4447,7 @@ __aesni_set_encrypt_key: shufps $170,%xmm1,%xmm1 xorps %xmm1,%xmm2 .byte 0xf3,0xc3 +.cfi_endproc .size aesni_set_encrypt_key,.-aesni_set_encrypt_key .size __aesni_set_encrypt_key,.-__aesni_set_encrypt_key .align 64 diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/bsaes-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/bsaes-x86_64.s deleted file mode 100644 index e1f3abadbc..0000000000 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/aes/bsaes-x86_64.s +++ /dev/null @@ -1,2595 +0,0 @@ -.text - - - - -.type _bsaes_encrypt8,@function -.align 64 -_bsaes_encrypt8: -.cfi_startproc - leaq .LBS0(%rip),%r11 - - movdqa (%rax),%xmm8 - leaq 16(%rax),%rax - movdqa 80(%r11),%xmm7 - pxor %xmm8,%xmm15 - pxor %xmm8,%xmm0 - pxor %xmm8,%xmm1 - pxor %xmm8,%xmm2 -.byte 102,68,15,56,0,255 -.byte 102,15,56,0,199 - pxor %xmm8,%xmm3 - pxor %xmm8,%xmm4 -.byte 102,15,56,0,207 -.byte 102,15,56,0,215 - pxor %xmm8,%xmm5 - pxor %xmm8,%xmm6 -.byte 102,15,56,0,223 -.byte 102,15,56,0,231 -.byte 102,15,56,0,239 -.byte 102,15,56,0,247 -_bsaes_encrypt8_bitslice: - movdqa 0(%r11),%xmm7 - movdqa 16(%r11),%xmm8 - movdqa %xmm5,%xmm9 - psrlq $1,%xmm5 - movdqa %xmm3,%xmm10 - psrlq $1,%xmm3 - pxor %xmm6,%xmm5 - pxor %xmm4,%xmm3 - pand %xmm7,%xmm5 - pand %xmm7,%xmm3 - pxor %xmm5,%xmm6 - psllq $1,%xmm5 - pxor %xmm3,%xmm4 - psllq $1,%xmm3 - pxor %xmm9,%xmm5 - pxor %xmm10,%xmm3 - movdqa %xmm1,%xmm9 - psrlq $1,%xmm1 - movdqa %xmm15,%xmm10 - psrlq $1,%xmm15 - pxor %xmm2,%xmm1 - pxor %xmm0,%xmm15 - pand %xmm7,%xmm1 - pand %xmm7,%xmm15 - pxor %xmm1,%xmm2 - psllq $1,%xmm1 - pxor %xmm15,%xmm0 - psllq $1,%xmm15 - pxor %xmm9,%xmm1 - pxor %xmm10,%xmm15 - movdqa 32(%r11),%xmm7 - movdqa %xmm4,%xmm9 - psrlq $2,%xmm4 - movdqa %xmm3,%xmm10 - psrlq $2,%xmm3 - pxor %xmm6,%xmm4 - pxor %xmm5,%xmm3 - pand %xmm8,%xmm4 - pand %xmm8,%xmm3 - pxor %xmm4,%xmm6 - psllq $2,%xmm4 - pxor %xmm3,%xmm5 - psllq $2,%xmm3 - pxor %xmm9,%xmm4 - pxor %xmm10,%xmm3 - movdqa %xmm0,%xmm9 - psrlq $2,%xmm0 - movdqa %xmm15,%xmm10 - psrlq $2,%xmm15 - pxor %xmm2,%xmm0 - pxor %xmm1,%xmm15 - pand %xmm8,%xmm0 - pand %xmm8,%xmm15 - pxor %xmm0,%xmm2 - psllq $2,%xmm0 - pxor %xmm15,%xmm1 - psllq $2,%xmm15 - pxor %xmm9,%xmm0 - pxor %xmm10,%xmm15 - movdqa %xmm2,%xmm9 - psrlq $4,%xmm2 - movdqa %xmm1,%xmm10 - psrlq $4,%xmm1 - pxor %xmm6,%xmm2 - pxor %xmm5,%xmm1 - pand %xmm7,%xmm2 - pand %xmm7,%xmm1 - pxor %xmm2,%xmm6 - psllq $4,%xmm2 - pxor %xmm1,%xmm5 - psllq $4,%xmm1 - pxor %xmm9,%xmm2 - pxor %xmm10,%xmm1 - movdqa %xmm0,%xmm9 - psrlq $4,%xmm0 - movdqa %xmm15,%xmm10 - psrlq $4,%xmm15 - pxor %xmm4,%xmm0 - pxor %xmm3,%xmm15 - pand %xmm7,%xmm0 - pand %xmm7,%xmm15 - pxor %xmm0,%xmm4 - psllq $4,%xmm0 - pxor %xmm15,%xmm3 - psllq $4,%xmm15 - pxor %xmm9,%xmm0 - pxor %xmm10,%xmm15 - decl %r10d - jmp .Lenc_sbox -.align 16 -.Lenc_loop: - pxor 0(%rax),%xmm15 - pxor 16(%rax),%xmm0 - pxor 32(%rax),%xmm1 - pxor 48(%rax),%xmm2 -.byte 102,68,15,56,0,255 -.byte 102,15,56,0,199 - pxor 64(%rax),%xmm3 - pxor 80(%rax),%xmm4 -.byte 102,15,56,0,207 -.byte 102,15,56,0,215 - pxor 96(%rax),%xmm5 - pxor 112(%rax),%xmm6 -.byte 102,15,56,0,223 -.byte 102,15,56,0,231 -.byte 102,15,56,0,239 -.byte 102,15,56,0,247 - leaq 128(%rax),%rax 
-.Lenc_sbox: - pxor %xmm5,%xmm4 - pxor %xmm0,%xmm1 - pxor %xmm15,%xmm2 - pxor %xmm1,%xmm5 - pxor %xmm15,%xmm4 - - pxor %xmm2,%xmm5 - pxor %xmm6,%xmm2 - pxor %xmm4,%xmm6 - pxor %xmm3,%xmm2 - pxor %xmm4,%xmm3 - pxor %xmm0,%xmm2 - - pxor %xmm6,%xmm1 - pxor %xmm4,%xmm0 - movdqa %xmm6,%xmm10 - movdqa %xmm0,%xmm9 - movdqa %xmm4,%xmm8 - movdqa %xmm1,%xmm12 - movdqa %xmm5,%xmm11 - - pxor %xmm3,%xmm10 - pxor %xmm1,%xmm9 - pxor %xmm2,%xmm8 - movdqa %xmm10,%xmm13 - pxor %xmm3,%xmm12 - movdqa %xmm9,%xmm7 - pxor %xmm15,%xmm11 - movdqa %xmm10,%xmm14 - - por %xmm8,%xmm9 - por %xmm11,%xmm10 - pxor %xmm7,%xmm14 - pand %xmm11,%xmm13 - pxor %xmm8,%xmm11 - pand %xmm8,%xmm7 - pand %xmm11,%xmm14 - movdqa %xmm2,%xmm11 - pxor %xmm15,%xmm11 - pand %xmm11,%xmm12 - pxor %xmm12,%xmm10 - pxor %xmm12,%xmm9 - movdqa %xmm6,%xmm12 - movdqa %xmm4,%xmm11 - pxor %xmm0,%xmm12 - pxor %xmm5,%xmm11 - movdqa %xmm12,%xmm8 - pand %xmm11,%xmm12 - por %xmm11,%xmm8 - pxor %xmm12,%xmm7 - pxor %xmm14,%xmm10 - pxor %xmm13,%xmm9 - pxor %xmm14,%xmm8 - movdqa %xmm1,%xmm11 - pxor %xmm13,%xmm7 - movdqa %xmm3,%xmm12 - pxor %xmm13,%xmm8 - movdqa %xmm0,%xmm13 - pand %xmm2,%xmm11 - movdqa %xmm6,%xmm14 - pand %xmm15,%xmm12 - pand %xmm4,%xmm13 - por %xmm5,%xmm14 - pxor %xmm11,%xmm10 - pxor %xmm12,%xmm9 - pxor %xmm13,%xmm8 - pxor %xmm14,%xmm7 - - - - - - movdqa %xmm10,%xmm11 - pand %xmm8,%xmm10 - pxor %xmm9,%xmm11 - - movdqa %xmm7,%xmm13 - movdqa %xmm11,%xmm14 - pxor %xmm10,%xmm13 - pand %xmm13,%xmm14 - - movdqa %xmm8,%xmm12 - pxor %xmm9,%xmm14 - pxor %xmm7,%xmm12 - - pxor %xmm9,%xmm10 - - pand %xmm10,%xmm12 - - movdqa %xmm13,%xmm9 - pxor %xmm7,%xmm12 - - pxor %xmm12,%xmm9 - pxor %xmm12,%xmm8 - - pand %xmm7,%xmm9 - - pxor %xmm9,%xmm13 - pxor %xmm9,%xmm8 - - pand %xmm14,%xmm13 - - pxor %xmm11,%xmm13 - movdqa %xmm5,%xmm11 - movdqa %xmm4,%xmm7 - movdqa %xmm14,%xmm9 - pxor %xmm13,%xmm9 - pand %xmm5,%xmm9 - pxor %xmm4,%xmm5 - pand %xmm14,%xmm4 - pand %xmm13,%xmm5 - pxor %xmm4,%xmm5 - pxor %xmm9,%xmm4 - pxor %xmm15,%xmm11 - pxor %xmm2,%xmm7 - pxor %xmm12,%xmm14 - pxor %xmm8,%xmm13 - movdqa %xmm14,%xmm10 - movdqa %xmm12,%xmm9 - pxor %xmm13,%xmm10 - pxor %xmm8,%xmm9 - pand %xmm11,%xmm10 - pand %xmm15,%xmm9 - pxor %xmm7,%xmm11 - pxor %xmm2,%xmm15 - pand %xmm14,%xmm7 - pand %xmm12,%xmm2 - pand %xmm13,%xmm11 - pand %xmm8,%xmm15 - pxor %xmm11,%xmm7 - pxor %xmm2,%xmm15 - pxor %xmm10,%xmm11 - pxor %xmm9,%xmm2 - pxor %xmm11,%xmm5 - pxor %xmm11,%xmm15 - pxor %xmm7,%xmm4 - pxor %xmm7,%xmm2 - - movdqa %xmm6,%xmm11 - movdqa %xmm0,%xmm7 - pxor %xmm3,%xmm11 - pxor %xmm1,%xmm7 - movdqa %xmm14,%xmm10 - movdqa %xmm12,%xmm9 - pxor %xmm13,%xmm10 - pxor %xmm8,%xmm9 - pand %xmm11,%xmm10 - pand %xmm3,%xmm9 - pxor %xmm7,%xmm11 - pxor %xmm1,%xmm3 - pand %xmm14,%xmm7 - pand %xmm12,%xmm1 - pand %xmm13,%xmm11 - pand %xmm8,%xmm3 - pxor %xmm11,%xmm7 - pxor %xmm1,%xmm3 - pxor %xmm10,%xmm11 - pxor %xmm9,%xmm1 - pxor %xmm12,%xmm14 - pxor %xmm8,%xmm13 - movdqa %xmm14,%xmm10 - pxor %xmm13,%xmm10 - pand %xmm6,%xmm10 - pxor %xmm0,%xmm6 - pand %xmm14,%xmm0 - pand %xmm13,%xmm6 - pxor %xmm0,%xmm6 - pxor %xmm10,%xmm0 - pxor %xmm11,%xmm6 - pxor %xmm11,%xmm3 - pxor %xmm7,%xmm0 - pxor %xmm7,%xmm1 - pxor %xmm15,%xmm6 - pxor %xmm5,%xmm0 - pxor %xmm6,%xmm3 - pxor %xmm15,%xmm5 - pxor %xmm0,%xmm15 - - pxor %xmm4,%xmm0 - pxor %xmm1,%xmm4 - pxor %xmm2,%xmm1 - pxor %xmm4,%xmm2 - pxor %xmm4,%xmm3 - - pxor %xmm2,%xmm5 - decl %r10d - jl .Lenc_done - pshufd $0x93,%xmm15,%xmm7 - pshufd $0x93,%xmm0,%xmm8 - pxor %xmm7,%xmm15 - pshufd $0x93,%xmm3,%xmm9 - pxor %xmm8,%xmm0 - pshufd $0x93,%xmm5,%xmm10 - pxor 
%xmm9,%xmm3 - pshufd $0x93,%xmm2,%xmm11 - pxor %xmm10,%xmm5 - pshufd $0x93,%xmm6,%xmm12 - pxor %xmm11,%xmm2 - pshufd $0x93,%xmm1,%xmm13 - pxor %xmm12,%xmm6 - pshufd $0x93,%xmm4,%xmm14 - pxor %xmm13,%xmm1 - pxor %xmm14,%xmm4 - - pxor %xmm15,%xmm8 - pxor %xmm4,%xmm7 - pxor %xmm4,%xmm8 - pshufd $0x4E,%xmm15,%xmm15 - pxor %xmm0,%xmm9 - pshufd $0x4E,%xmm0,%xmm0 - pxor %xmm2,%xmm12 - pxor %xmm7,%xmm15 - pxor %xmm6,%xmm13 - pxor %xmm8,%xmm0 - pxor %xmm5,%xmm11 - pshufd $0x4E,%xmm2,%xmm7 - pxor %xmm1,%xmm14 - pshufd $0x4E,%xmm6,%xmm8 - pxor %xmm3,%xmm10 - pshufd $0x4E,%xmm5,%xmm2 - pxor %xmm4,%xmm10 - pshufd $0x4E,%xmm4,%xmm6 - pxor %xmm4,%xmm11 - pshufd $0x4E,%xmm1,%xmm5 - pxor %xmm11,%xmm7 - pshufd $0x4E,%xmm3,%xmm1 - pxor %xmm12,%xmm8 - pxor %xmm10,%xmm2 - pxor %xmm14,%xmm6 - pxor %xmm13,%xmm5 - movdqa %xmm7,%xmm3 - pxor %xmm9,%xmm1 - movdqa %xmm8,%xmm4 - movdqa 48(%r11),%xmm7 - jnz .Lenc_loop - movdqa 64(%r11),%xmm7 - jmp .Lenc_loop -.align 16 -.Lenc_done: - movdqa 0(%r11),%xmm7 - movdqa 16(%r11),%xmm8 - movdqa %xmm1,%xmm9 - psrlq $1,%xmm1 - movdqa %xmm2,%xmm10 - psrlq $1,%xmm2 - pxor %xmm4,%xmm1 - pxor %xmm6,%xmm2 - pand %xmm7,%xmm1 - pand %xmm7,%xmm2 - pxor %xmm1,%xmm4 - psllq $1,%xmm1 - pxor %xmm2,%xmm6 - psllq $1,%xmm2 - pxor %xmm9,%xmm1 - pxor %xmm10,%xmm2 - movdqa %xmm3,%xmm9 - psrlq $1,%xmm3 - movdqa %xmm15,%xmm10 - psrlq $1,%xmm15 - pxor %xmm5,%xmm3 - pxor %xmm0,%xmm15 - pand %xmm7,%xmm3 - pand %xmm7,%xmm15 - pxor %xmm3,%xmm5 - psllq $1,%xmm3 - pxor %xmm15,%xmm0 - psllq $1,%xmm15 - pxor %xmm9,%xmm3 - pxor %xmm10,%xmm15 - movdqa 32(%r11),%xmm7 - movdqa %xmm6,%xmm9 - psrlq $2,%xmm6 - movdqa %xmm2,%xmm10 - psrlq $2,%xmm2 - pxor %xmm4,%xmm6 - pxor %xmm1,%xmm2 - pand %xmm8,%xmm6 - pand %xmm8,%xmm2 - pxor %xmm6,%xmm4 - psllq $2,%xmm6 - pxor %xmm2,%xmm1 - psllq $2,%xmm2 - pxor %xmm9,%xmm6 - pxor %xmm10,%xmm2 - movdqa %xmm0,%xmm9 - psrlq $2,%xmm0 - movdqa %xmm15,%xmm10 - psrlq $2,%xmm15 - pxor %xmm5,%xmm0 - pxor %xmm3,%xmm15 - pand %xmm8,%xmm0 - pand %xmm8,%xmm15 - pxor %xmm0,%xmm5 - psllq $2,%xmm0 - pxor %xmm15,%xmm3 - psllq $2,%xmm15 - pxor %xmm9,%xmm0 - pxor %xmm10,%xmm15 - movdqa %xmm5,%xmm9 - psrlq $4,%xmm5 - movdqa %xmm3,%xmm10 - psrlq $4,%xmm3 - pxor %xmm4,%xmm5 - pxor %xmm1,%xmm3 - pand %xmm7,%xmm5 - pand %xmm7,%xmm3 - pxor %xmm5,%xmm4 - psllq $4,%xmm5 - pxor %xmm3,%xmm1 - psllq $4,%xmm3 - pxor %xmm9,%xmm5 - pxor %xmm10,%xmm3 - movdqa %xmm0,%xmm9 - psrlq $4,%xmm0 - movdqa %xmm15,%xmm10 - psrlq $4,%xmm15 - pxor %xmm6,%xmm0 - pxor %xmm2,%xmm15 - pand %xmm7,%xmm0 - pand %xmm7,%xmm15 - pxor %xmm0,%xmm6 - psllq $4,%xmm0 - pxor %xmm15,%xmm2 - psllq $4,%xmm15 - pxor %xmm9,%xmm0 - pxor %xmm10,%xmm15 - movdqa (%rax),%xmm7 - pxor %xmm7,%xmm3 - pxor %xmm7,%xmm5 - pxor %xmm7,%xmm2 - pxor %xmm7,%xmm6 - pxor %xmm7,%xmm1 - pxor %xmm7,%xmm4 - pxor %xmm7,%xmm15 - pxor %xmm7,%xmm0 - .byte 0xf3,0xc3 -.cfi_endproc -.size _bsaes_encrypt8,.-_bsaes_encrypt8 - -.type _bsaes_decrypt8,@function -.align 64 -_bsaes_decrypt8: -.cfi_startproc - leaq .LBS0(%rip),%r11 - - movdqa (%rax),%xmm8 - leaq 16(%rax),%rax - movdqa -48(%r11),%xmm7 - pxor %xmm8,%xmm15 - pxor %xmm8,%xmm0 - pxor %xmm8,%xmm1 - pxor %xmm8,%xmm2 -.byte 102,68,15,56,0,255 -.byte 102,15,56,0,199 - pxor %xmm8,%xmm3 - pxor %xmm8,%xmm4 -.byte 102,15,56,0,207 -.byte 102,15,56,0,215 - pxor %xmm8,%xmm5 - pxor %xmm8,%xmm6 -.byte 102,15,56,0,223 -.byte 102,15,56,0,231 -.byte 102,15,56,0,239 -.byte 102,15,56,0,247 - movdqa 0(%r11),%xmm7 - movdqa 16(%r11),%xmm8 - movdqa %xmm5,%xmm9 - psrlq $1,%xmm5 - movdqa %xmm3,%xmm10 - psrlq $1,%xmm3 - pxor %xmm6,%xmm5 - pxor 
%xmm4,%xmm3 - pand %xmm7,%xmm5 - pand %xmm7,%xmm3 - pxor %xmm5,%xmm6 - psllq $1,%xmm5 - pxor %xmm3,%xmm4 - psllq $1,%xmm3 - pxor %xmm9,%xmm5 - pxor %xmm10,%xmm3 - movdqa %xmm1,%xmm9 - psrlq $1,%xmm1 - movdqa %xmm15,%xmm10 - psrlq $1,%xmm15 - pxor %xmm2,%xmm1 - pxor %xmm0,%xmm15 - pand %xmm7,%xmm1 - pand %xmm7,%xmm15 - pxor %xmm1,%xmm2 - psllq $1,%xmm1 - pxor %xmm15,%xmm0 - psllq $1,%xmm15 - pxor %xmm9,%xmm1 - pxor %xmm10,%xmm15 - movdqa 32(%r11),%xmm7 - movdqa %xmm4,%xmm9 - psrlq $2,%xmm4 - movdqa %xmm3,%xmm10 - psrlq $2,%xmm3 - pxor %xmm6,%xmm4 - pxor %xmm5,%xmm3 - pand %xmm8,%xmm4 - pand %xmm8,%xmm3 - pxor %xmm4,%xmm6 - psllq $2,%xmm4 - pxor %xmm3,%xmm5 - psllq $2,%xmm3 - pxor %xmm9,%xmm4 - pxor %xmm10,%xmm3 - movdqa %xmm0,%xmm9 - psrlq $2,%xmm0 - movdqa %xmm15,%xmm10 - psrlq $2,%xmm15 - pxor %xmm2,%xmm0 - pxor %xmm1,%xmm15 - pand %xmm8,%xmm0 - pand %xmm8,%xmm15 - pxor %xmm0,%xmm2 - psllq $2,%xmm0 - pxor %xmm15,%xmm1 - psllq $2,%xmm15 - pxor %xmm9,%xmm0 - pxor %xmm10,%xmm15 - movdqa %xmm2,%xmm9 - psrlq $4,%xmm2 - movdqa %xmm1,%xmm10 - psrlq $4,%xmm1 - pxor %xmm6,%xmm2 - pxor %xmm5,%xmm1 - pand %xmm7,%xmm2 - pand %xmm7,%xmm1 - pxor %xmm2,%xmm6 - psllq $4,%xmm2 - pxor %xmm1,%xmm5 - psllq $4,%xmm1 - pxor %xmm9,%xmm2 - pxor %xmm10,%xmm1 - movdqa %xmm0,%xmm9 - psrlq $4,%xmm0 - movdqa %xmm15,%xmm10 - psrlq $4,%xmm15 - pxor %xmm4,%xmm0 - pxor %xmm3,%xmm15 - pand %xmm7,%xmm0 - pand %xmm7,%xmm15 - pxor %xmm0,%xmm4 - psllq $4,%xmm0 - pxor %xmm15,%xmm3 - psllq $4,%xmm15 - pxor %xmm9,%xmm0 - pxor %xmm10,%xmm15 - decl %r10d - jmp .Ldec_sbox -.align 16 -.Ldec_loop: - pxor 0(%rax),%xmm15 - pxor 16(%rax),%xmm0 - pxor 32(%rax),%xmm1 - pxor 48(%rax),%xmm2 -.byte 102,68,15,56,0,255 -.byte 102,15,56,0,199 - pxor 64(%rax),%xmm3 - pxor 80(%rax),%xmm4 -.byte 102,15,56,0,207 -.byte 102,15,56,0,215 - pxor 96(%rax),%xmm5 - pxor 112(%rax),%xmm6 -.byte 102,15,56,0,223 -.byte 102,15,56,0,231 -.byte 102,15,56,0,239 -.byte 102,15,56,0,247 - leaq 128(%rax),%rax -.Ldec_sbox: - pxor %xmm3,%xmm2 - - pxor %xmm6,%xmm3 - pxor %xmm6,%xmm1 - pxor %xmm3,%xmm5 - pxor %xmm5,%xmm6 - pxor %xmm6,%xmm0 - - pxor %xmm0,%xmm15 - pxor %xmm4,%xmm1 - pxor %xmm15,%xmm2 - pxor %xmm15,%xmm4 - pxor %xmm2,%xmm0 - movdqa %xmm2,%xmm10 - movdqa %xmm6,%xmm9 - movdqa %xmm0,%xmm8 - movdqa %xmm3,%xmm12 - movdqa %xmm4,%xmm11 - - pxor %xmm15,%xmm10 - pxor %xmm3,%xmm9 - pxor %xmm5,%xmm8 - movdqa %xmm10,%xmm13 - pxor %xmm15,%xmm12 - movdqa %xmm9,%xmm7 - pxor %xmm1,%xmm11 - movdqa %xmm10,%xmm14 - - por %xmm8,%xmm9 - por %xmm11,%xmm10 - pxor %xmm7,%xmm14 - pand %xmm11,%xmm13 - pxor %xmm8,%xmm11 - pand %xmm8,%xmm7 - pand %xmm11,%xmm14 - movdqa %xmm5,%xmm11 - pxor %xmm1,%xmm11 - pand %xmm11,%xmm12 - pxor %xmm12,%xmm10 - pxor %xmm12,%xmm9 - movdqa %xmm2,%xmm12 - movdqa %xmm0,%xmm11 - pxor %xmm6,%xmm12 - pxor %xmm4,%xmm11 - movdqa %xmm12,%xmm8 - pand %xmm11,%xmm12 - por %xmm11,%xmm8 - pxor %xmm12,%xmm7 - pxor %xmm14,%xmm10 - pxor %xmm13,%xmm9 - pxor %xmm14,%xmm8 - movdqa %xmm3,%xmm11 - pxor %xmm13,%xmm7 - movdqa %xmm15,%xmm12 - pxor %xmm13,%xmm8 - movdqa %xmm6,%xmm13 - pand %xmm5,%xmm11 - movdqa %xmm2,%xmm14 - pand %xmm1,%xmm12 - pand %xmm0,%xmm13 - por %xmm4,%xmm14 - pxor %xmm11,%xmm10 - pxor %xmm12,%xmm9 - pxor %xmm13,%xmm8 - pxor %xmm14,%xmm7 - - - - - - movdqa %xmm10,%xmm11 - pand %xmm8,%xmm10 - pxor %xmm9,%xmm11 - - movdqa %xmm7,%xmm13 - movdqa %xmm11,%xmm14 - pxor %xmm10,%xmm13 - pand %xmm13,%xmm14 - - movdqa %xmm8,%xmm12 - pxor %xmm9,%xmm14 - pxor %xmm7,%xmm12 - - pxor %xmm9,%xmm10 - - pand %xmm10,%xmm12 - - movdqa %xmm13,%xmm9 - pxor %xmm7,%xmm12 - - 
pxor %xmm12,%xmm9 - pxor %xmm12,%xmm8 - - pand %xmm7,%xmm9 - - pxor %xmm9,%xmm13 - pxor %xmm9,%xmm8 - - pand %xmm14,%xmm13 - - pxor %xmm11,%xmm13 - movdqa %xmm4,%xmm11 - movdqa %xmm0,%xmm7 - movdqa %xmm14,%xmm9 - pxor %xmm13,%xmm9 - pand %xmm4,%xmm9 - pxor %xmm0,%xmm4 - pand %xmm14,%xmm0 - pand %xmm13,%xmm4 - pxor %xmm0,%xmm4 - pxor %xmm9,%xmm0 - pxor %xmm1,%xmm11 - pxor %xmm5,%xmm7 - pxor %xmm12,%xmm14 - pxor %xmm8,%xmm13 - movdqa %xmm14,%xmm10 - movdqa %xmm12,%xmm9 - pxor %xmm13,%xmm10 - pxor %xmm8,%xmm9 - pand %xmm11,%xmm10 - pand %xmm1,%xmm9 - pxor %xmm7,%xmm11 - pxor %xmm5,%xmm1 - pand %xmm14,%xmm7 - pand %xmm12,%xmm5 - pand %xmm13,%xmm11 - pand %xmm8,%xmm1 - pxor %xmm11,%xmm7 - pxor %xmm5,%xmm1 - pxor %xmm10,%xmm11 - pxor %xmm9,%xmm5 - pxor %xmm11,%xmm4 - pxor %xmm11,%xmm1 - pxor %xmm7,%xmm0 - pxor %xmm7,%xmm5 - - movdqa %xmm2,%xmm11 - movdqa %xmm6,%xmm7 - pxor %xmm15,%xmm11 - pxor %xmm3,%xmm7 - movdqa %xmm14,%xmm10 - movdqa %xmm12,%xmm9 - pxor %xmm13,%xmm10 - pxor %xmm8,%xmm9 - pand %xmm11,%xmm10 - pand %xmm15,%xmm9 - pxor %xmm7,%xmm11 - pxor %xmm3,%xmm15 - pand %xmm14,%xmm7 - pand %xmm12,%xmm3 - pand %xmm13,%xmm11 - pand %xmm8,%xmm15 - pxor %xmm11,%xmm7 - pxor %xmm3,%xmm15 - pxor %xmm10,%xmm11 - pxor %xmm9,%xmm3 - pxor %xmm12,%xmm14 - pxor %xmm8,%xmm13 - movdqa %xmm14,%xmm10 - pxor %xmm13,%xmm10 - pand %xmm2,%xmm10 - pxor %xmm6,%xmm2 - pand %xmm14,%xmm6 - pand %xmm13,%xmm2 - pxor %xmm6,%xmm2 - pxor %xmm10,%xmm6 - pxor %xmm11,%xmm2 - pxor %xmm11,%xmm15 - pxor %xmm7,%xmm6 - pxor %xmm7,%xmm3 - pxor %xmm6,%xmm0 - pxor %xmm4,%xmm5 - - pxor %xmm0,%xmm3 - pxor %xmm6,%xmm1 - pxor %xmm6,%xmm4 - pxor %xmm1,%xmm3 - pxor %xmm15,%xmm6 - pxor %xmm4,%xmm3 - pxor %xmm5,%xmm2 - pxor %xmm0,%xmm5 - pxor %xmm3,%xmm2 - - pxor %xmm15,%xmm3 - pxor %xmm2,%xmm6 - decl %r10d - jl .Ldec_done - - pshufd $0x4E,%xmm15,%xmm7 - pshufd $0x4E,%xmm2,%xmm13 - pxor %xmm15,%xmm7 - pshufd $0x4E,%xmm4,%xmm14 - pxor %xmm2,%xmm13 - pshufd $0x4E,%xmm0,%xmm8 - pxor %xmm4,%xmm14 - pshufd $0x4E,%xmm5,%xmm9 - pxor %xmm0,%xmm8 - pshufd $0x4E,%xmm3,%xmm10 - pxor %xmm5,%xmm9 - pxor %xmm13,%xmm15 - pxor %xmm13,%xmm0 - pshufd $0x4E,%xmm1,%xmm11 - pxor %xmm3,%xmm10 - pxor %xmm7,%xmm5 - pxor %xmm8,%xmm3 - pshufd $0x4E,%xmm6,%xmm12 - pxor %xmm1,%xmm11 - pxor %xmm14,%xmm0 - pxor %xmm9,%xmm1 - pxor %xmm6,%xmm12 - - pxor %xmm14,%xmm5 - pxor %xmm13,%xmm3 - pxor %xmm13,%xmm1 - pxor %xmm10,%xmm6 - pxor %xmm11,%xmm2 - pxor %xmm14,%xmm1 - pxor %xmm14,%xmm6 - pxor %xmm12,%xmm4 - pshufd $0x93,%xmm15,%xmm7 - pshufd $0x93,%xmm0,%xmm8 - pxor %xmm7,%xmm15 - pshufd $0x93,%xmm5,%xmm9 - pxor %xmm8,%xmm0 - pshufd $0x93,%xmm3,%xmm10 - pxor %xmm9,%xmm5 - pshufd $0x93,%xmm1,%xmm11 - pxor %xmm10,%xmm3 - pshufd $0x93,%xmm6,%xmm12 - pxor %xmm11,%xmm1 - pshufd $0x93,%xmm2,%xmm13 - pxor %xmm12,%xmm6 - pshufd $0x93,%xmm4,%xmm14 - pxor %xmm13,%xmm2 - pxor %xmm14,%xmm4 - - pxor %xmm15,%xmm8 - pxor %xmm4,%xmm7 - pxor %xmm4,%xmm8 - pshufd $0x4E,%xmm15,%xmm15 - pxor %xmm0,%xmm9 - pshufd $0x4E,%xmm0,%xmm0 - pxor %xmm1,%xmm12 - pxor %xmm7,%xmm15 - pxor %xmm6,%xmm13 - pxor %xmm8,%xmm0 - pxor %xmm3,%xmm11 - pshufd $0x4E,%xmm1,%xmm7 - pxor %xmm2,%xmm14 - pshufd $0x4E,%xmm6,%xmm8 - pxor %xmm5,%xmm10 - pshufd $0x4E,%xmm3,%xmm1 - pxor %xmm4,%xmm10 - pshufd $0x4E,%xmm4,%xmm6 - pxor %xmm4,%xmm11 - pshufd $0x4E,%xmm2,%xmm3 - pxor %xmm11,%xmm7 - pshufd $0x4E,%xmm5,%xmm2 - pxor %xmm12,%xmm8 - pxor %xmm1,%xmm10 - pxor %xmm14,%xmm6 - pxor %xmm3,%xmm13 - movdqa %xmm7,%xmm3 - pxor %xmm9,%xmm2 - movdqa %xmm13,%xmm5 - movdqa %xmm8,%xmm4 - movdqa %xmm2,%xmm1 - movdqa %xmm10,%xmm2 - 
movdqa -16(%r11),%xmm7 - jnz .Ldec_loop - movdqa -32(%r11),%xmm7 - jmp .Ldec_loop -.align 16 -.Ldec_done: - movdqa 0(%r11),%xmm7 - movdqa 16(%r11),%xmm8 - movdqa %xmm2,%xmm9 - psrlq $1,%xmm2 - movdqa %xmm1,%xmm10 - psrlq $1,%xmm1 - pxor %xmm4,%xmm2 - pxor %xmm6,%xmm1 - pand %xmm7,%xmm2 - pand %xmm7,%xmm1 - pxor %xmm2,%xmm4 - psllq $1,%xmm2 - pxor %xmm1,%xmm6 - psllq $1,%xmm1 - pxor %xmm9,%xmm2 - pxor %xmm10,%xmm1 - movdqa %xmm5,%xmm9 - psrlq $1,%xmm5 - movdqa %xmm15,%xmm10 - psrlq $1,%xmm15 - pxor %xmm3,%xmm5 - pxor %xmm0,%xmm15 - pand %xmm7,%xmm5 - pand %xmm7,%xmm15 - pxor %xmm5,%xmm3 - psllq $1,%xmm5 - pxor %xmm15,%xmm0 - psllq $1,%xmm15 - pxor %xmm9,%xmm5 - pxor %xmm10,%xmm15 - movdqa 32(%r11),%xmm7 - movdqa %xmm6,%xmm9 - psrlq $2,%xmm6 - movdqa %xmm1,%xmm10 - psrlq $2,%xmm1 - pxor %xmm4,%xmm6 - pxor %xmm2,%xmm1 - pand %xmm8,%xmm6 - pand %xmm8,%xmm1 - pxor %xmm6,%xmm4 - psllq $2,%xmm6 - pxor %xmm1,%xmm2 - psllq $2,%xmm1 - pxor %xmm9,%xmm6 - pxor %xmm10,%xmm1 - movdqa %xmm0,%xmm9 - psrlq $2,%xmm0 - movdqa %xmm15,%xmm10 - psrlq $2,%xmm15 - pxor %xmm3,%xmm0 - pxor %xmm5,%xmm15 - pand %xmm8,%xmm0 - pand %xmm8,%xmm15 - pxor %xmm0,%xmm3 - psllq $2,%xmm0 - pxor %xmm15,%xmm5 - psllq $2,%xmm15 - pxor %xmm9,%xmm0 - pxor %xmm10,%xmm15 - movdqa %xmm3,%xmm9 - psrlq $4,%xmm3 - movdqa %xmm5,%xmm10 - psrlq $4,%xmm5 - pxor %xmm4,%xmm3 - pxor %xmm2,%xmm5 - pand %xmm7,%xmm3 - pand %xmm7,%xmm5 - pxor %xmm3,%xmm4 - psllq $4,%xmm3 - pxor %xmm5,%xmm2 - psllq $4,%xmm5 - pxor %xmm9,%xmm3 - pxor %xmm10,%xmm5 - movdqa %xmm0,%xmm9 - psrlq $4,%xmm0 - movdqa %xmm15,%xmm10 - psrlq $4,%xmm15 - pxor %xmm6,%xmm0 - pxor %xmm1,%xmm15 - pand %xmm7,%xmm0 - pand %xmm7,%xmm15 - pxor %xmm0,%xmm6 - psllq $4,%xmm0 - pxor %xmm15,%xmm1 - psllq $4,%xmm15 - pxor %xmm9,%xmm0 - pxor %xmm10,%xmm15 - movdqa (%rax),%xmm7 - pxor %xmm7,%xmm5 - pxor %xmm7,%xmm3 - pxor %xmm7,%xmm1 - pxor %xmm7,%xmm6 - pxor %xmm7,%xmm2 - pxor %xmm7,%xmm4 - pxor %xmm7,%xmm15 - pxor %xmm7,%xmm0 - .byte 0xf3,0xc3 -.cfi_endproc -.size _bsaes_decrypt8,.-_bsaes_decrypt8 -.type _bsaes_key_convert,@function -.align 16 -_bsaes_key_convert: -.cfi_startproc - leaq .Lmasks(%rip),%r11 - movdqu (%rcx),%xmm7 - leaq 16(%rcx),%rcx - movdqa 0(%r11),%xmm0 - movdqa 16(%r11),%xmm1 - movdqa 32(%r11),%xmm2 - movdqa 48(%r11),%xmm3 - movdqa 64(%r11),%xmm4 - pcmpeqd %xmm5,%xmm5 - - movdqu (%rcx),%xmm6 - movdqa %xmm7,(%rax) - leaq 16(%rax),%rax - decl %r10d - jmp .Lkey_loop -.align 16 -.Lkey_loop: -.byte 102,15,56,0,244 - - movdqa %xmm0,%xmm8 - movdqa %xmm1,%xmm9 - - pand %xmm6,%xmm8 - pand %xmm6,%xmm9 - movdqa %xmm2,%xmm10 - pcmpeqb %xmm0,%xmm8 - psllq $4,%xmm0 - movdqa %xmm3,%xmm11 - pcmpeqb %xmm1,%xmm9 - psllq $4,%xmm1 - - pand %xmm6,%xmm10 - pand %xmm6,%xmm11 - movdqa %xmm0,%xmm12 - pcmpeqb %xmm2,%xmm10 - psllq $4,%xmm2 - movdqa %xmm1,%xmm13 - pcmpeqb %xmm3,%xmm11 - psllq $4,%xmm3 - - movdqa %xmm2,%xmm14 - movdqa %xmm3,%xmm15 - pxor %xmm5,%xmm8 - pxor %xmm5,%xmm9 - - pand %xmm6,%xmm12 - pand %xmm6,%xmm13 - movdqa %xmm8,0(%rax) - pcmpeqb %xmm0,%xmm12 - psrlq $4,%xmm0 - movdqa %xmm9,16(%rax) - pcmpeqb %xmm1,%xmm13 - psrlq $4,%xmm1 - leaq 16(%rcx),%rcx - - pand %xmm6,%xmm14 - pand %xmm6,%xmm15 - movdqa %xmm10,32(%rax) - pcmpeqb %xmm2,%xmm14 - psrlq $4,%xmm2 - movdqa %xmm11,48(%rax) - pcmpeqb %xmm3,%xmm15 - psrlq $4,%xmm3 - movdqu (%rcx),%xmm6 - - pxor %xmm5,%xmm13 - pxor %xmm5,%xmm14 - movdqa %xmm12,64(%rax) - movdqa %xmm13,80(%rax) - movdqa %xmm14,96(%rax) - movdqa %xmm15,112(%rax) - leaq 128(%rax),%rax - decl %r10d - jnz .Lkey_loop - - movdqa 80(%r11),%xmm7 - - .byte 0xf3,0xc3 
-.cfi_endproc -.size _bsaes_key_convert,.-_bsaes_key_convert - -.globl bsaes_cbc_encrypt -.type bsaes_cbc_encrypt,@function -.align 16 -bsaes_cbc_encrypt: -.cfi_startproc - cmpl $0,%r9d - jne asm_AES_cbc_encrypt - cmpq $128,%rdx - jb asm_AES_cbc_encrypt - - movq %rsp,%rax -.Lcbc_dec_prologue: - pushq %rbp -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbp,-16 - pushq %rbx -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbx,-24 - pushq %r12 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r12,-32 - pushq %r13 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r13,-40 - pushq %r14 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r14,-48 - pushq %r15 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r15,-56 - leaq -72(%rsp),%rsp -.cfi_adjust_cfa_offset 0x48 - movq %rsp,%rbp -.cfi_def_cfa_register %rbp - movl 240(%rcx),%eax - movq %rdi,%r12 - movq %rsi,%r13 - movq %rdx,%r14 - movq %rcx,%r15 - movq %r8,%rbx - shrq $4,%r14 - - movl %eax,%edx - shlq $7,%rax - subq $96,%rax - subq %rax,%rsp - - movq %rsp,%rax - movq %r15,%rcx - movl %edx,%r10d - call _bsaes_key_convert - pxor (%rsp),%xmm7 - movdqa %xmm6,(%rax) - movdqa %xmm7,(%rsp) - - movdqu (%rbx),%xmm14 - subq $8,%r14 -.Lcbc_dec_loop: - movdqu 0(%r12),%xmm15 - movdqu 16(%r12),%xmm0 - movdqu 32(%r12),%xmm1 - movdqu 48(%r12),%xmm2 - movdqu 64(%r12),%xmm3 - movdqu 80(%r12),%xmm4 - movq %rsp,%rax - movdqu 96(%r12),%xmm5 - movl %edx,%r10d - movdqu 112(%r12),%xmm6 - movdqa %xmm14,32(%rbp) - - call _bsaes_decrypt8 - - pxor 32(%rbp),%xmm15 - movdqu 0(%r12),%xmm7 - movdqu 16(%r12),%xmm8 - pxor %xmm7,%xmm0 - movdqu 32(%r12),%xmm9 - pxor %xmm8,%xmm5 - movdqu 48(%r12),%xmm10 - pxor %xmm9,%xmm3 - movdqu 64(%r12),%xmm11 - pxor %xmm10,%xmm1 - movdqu 80(%r12),%xmm12 - pxor %xmm11,%xmm6 - movdqu 96(%r12),%xmm13 - pxor %xmm12,%xmm2 - movdqu 112(%r12),%xmm14 - pxor %xmm13,%xmm4 - movdqu %xmm15,0(%r13) - leaq 128(%r12),%r12 - movdqu %xmm0,16(%r13) - movdqu %xmm5,32(%r13) - movdqu %xmm3,48(%r13) - movdqu %xmm1,64(%r13) - movdqu %xmm6,80(%r13) - movdqu %xmm2,96(%r13) - movdqu %xmm4,112(%r13) - leaq 128(%r13),%r13 - subq $8,%r14 - jnc .Lcbc_dec_loop - - addq $8,%r14 - jz .Lcbc_dec_done - - movdqu 0(%r12),%xmm15 - movq %rsp,%rax - movl %edx,%r10d - cmpq $2,%r14 - jb .Lcbc_dec_one - movdqu 16(%r12),%xmm0 - je .Lcbc_dec_two - movdqu 32(%r12),%xmm1 - cmpq $4,%r14 - jb .Lcbc_dec_three - movdqu 48(%r12),%xmm2 - je .Lcbc_dec_four - movdqu 64(%r12),%xmm3 - cmpq $6,%r14 - jb .Lcbc_dec_five - movdqu 80(%r12),%xmm4 - je .Lcbc_dec_six - movdqu 96(%r12),%xmm5 - movdqa %xmm14,32(%rbp) - call _bsaes_decrypt8 - pxor 32(%rbp),%xmm15 - movdqu 0(%r12),%xmm7 - movdqu 16(%r12),%xmm8 - pxor %xmm7,%xmm0 - movdqu 32(%r12),%xmm9 - pxor %xmm8,%xmm5 - movdqu 48(%r12),%xmm10 - pxor %xmm9,%xmm3 - movdqu 64(%r12),%xmm11 - pxor %xmm10,%xmm1 - movdqu 80(%r12),%xmm12 - pxor %xmm11,%xmm6 - movdqu 96(%r12),%xmm14 - pxor %xmm12,%xmm2 - movdqu %xmm15,0(%r13) - movdqu %xmm0,16(%r13) - movdqu %xmm5,32(%r13) - movdqu %xmm3,48(%r13) - movdqu %xmm1,64(%r13) - movdqu %xmm6,80(%r13) - movdqu %xmm2,96(%r13) - jmp .Lcbc_dec_done -.align 16 -.Lcbc_dec_six: - movdqa %xmm14,32(%rbp) - call _bsaes_decrypt8 - pxor 32(%rbp),%xmm15 - movdqu 0(%r12),%xmm7 - movdqu 16(%r12),%xmm8 - pxor %xmm7,%xmm0 - movdqu 32(%r12),%xmm9 - pxor %xmm8,%xmm5 - movdqu 48(%r12),%xmm10 - pxor %xmm9,%xmm3 - movdqu 64(%r12),%xmm11 - pxor %xmm10,%xmm1 - movdqu 80(%r12),%xmm14 - pxor %xmm11,%xmm6 - movdqu %xmm15,0(%r13) - movdqu %xmm0,16(%r13) - movdqu %xmm5,32(%r13) - movdqu %xmm3,48(%r13) - movdqu %xmm1,64(%r13) - movdqu %xmm6,80(%r13) - jmp .Lcbc_dec_done -.align 16 -.Lcbc_dec_five: - movdqa 
%xmm14,32(%rbp) - call _bsaes_decrypt8 - pxor 32(%rbp),%xmm15 - movdqu 0(%r12),%xmm7 - movdqu 16(%r12),%xmm8 - pxor %xmm7,%xmm0 - movdqu 32(%r12),%xmm9 - pxor %xmm8,%xmm5 - movdqu 48(%r12),%xmm10 - pxor %xmm9,%xmm3 - movdqu 64(%r12),%xmm14 - pxor %xmm10,%xmm1 - movdqu %xmm15,0(%r13) - movdqu %xmm0,16(%r13) - movdqu %xmm5,32(%r13) - movdqu %xmm3,48(%r13) - movdqu %xmm1,64(%r13) - jmp .Lcbc_dec_done -.align 16 -.Lcbc_dec_four: - movdqa %xmm14,32(%rbp) - call _bsaes_decrypt8 - pxor 32(%rbp),%xmm15 - movdqu 0(%r12),%xmm7 - movdqu 16(%r12),%xmm8 - pxor %xmm7,%xmm0 - movdqu 32(%r12),%xmm9 - pxor %xmm8,%xmm5 - movdqu 48(%r12),%xmm14 - pxor %xmm9,%xmm3 - movdqu %xmm15,0(%r13) - movdqu %xmm0,16(%r13) - movdqu %xmm5,32(%r13) - movdqu %xmm3,48(%r13) - jmp .Lcbc_dec_done -.align 16 -.Lcbc_dec_three: - movdqa %xmm14,32(%rbp) - call _bsaes_decrypt8 - pxor 32(%rbp),%xmm15 - movdqu 0(%r12),%xmm7 - movdqu 16(%r12),%xmm8 - pxor %xmm7,%xmm0 - movdqu 32(%r12),%xmm14 - pxor %xmm8,%xmm5 - movdqu %xmm15,0(%r13) - movdqu %xmm0,16(%r13) - movdqu %xmm5,32(%r13) - jmp .Lcbc_dec_done -.align 16 -.Lcbc_dec_two: - movdqa %xmm14,32(%rbp) - call _bsaes_decrypt8 - pxor 32(%rbp),%xmm15 - movdqu 0(%r12),%xmm7 - movdqu 16(%r12),%xmm14 - pxor %xmm7,%xmm0 - movdqu %xmm15,0(%r13) - movdqu %xmm0,16(%r13) - jmp .Lcbc_dec_done -.align 16 -.Lcbc_dec_one: - leaq (%r12),%rdi - leaq 32(%rbp),%rsi - leaq (%r15),%rdx - call asm_AES_decrypt - pxor 32(%rbp),%xmm14 - movdqu %xmm14,(%r13) - movdqa %xmm15,%xmm14 - -.Lcbc_dec_done: - movdqu %xmm14,(%rbx) - leaq (%rsp),%rax - pxor %xmm0,%xmm0 -.Lcbc_dec_bzero: - movdqa %xmm0,0(%rax) - movdqa %xmm0,16(%rax) - leaq 32(%rax),%rax - cmpq %rax,%rbp - ja .Lcbc_dec_bzero - - leaq 120(%rbp),%rax -.cfi_def_cfa %rax,8 - movq -48(%rax),%r15 -.cfi_restore %r15 - movq -40(%rax),%r14 -.cfi_restore %r14 - movq -32(%rax),%r13 -.cfi_restore %r13 - movq -24(%rax),%r12 -.cfi_restore %r12 - movq -16(%rax),%rbx -.cfi_restore %rbx - movq -8(%rax),%rbp -.cfi_restore %rbp - leaq (%rax),%rsp -.cfi_def_cfa_register %rsp -.Lcbc_dec_epilogue: - .byte 0xf3,0xc3 -.cfi_endproc -.size bsaes_cbc_encrypt,.-bsaes_cbc_encrypt - -.globl bsaes_ctr32_encrypt_blocks -.type bsaes_ctr32_encrypt_blocks,@function -.align 16 -bsaes_ctr32_encrypt_blocks: -.cfi_startproc - movq %rsp,%rax -.Lctr_enc_prologue: - pushq %rbp -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbp,-16 - pushq %rbx -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbx,-24 - pushq %r12 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r12,-32 - pushq %r13 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r13,-40 - pushq %r14 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r14,-48 - pushq %r15 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r15,-56 - leaq -72(%rsp),%rsp -.cfi_adjust_cfa_offset 0x48 - movq %rsp,%rbp -.cfi_def_cfa_register %rbp - movdqu (%r8),%xmm0 - movl 240(%rcx),%eax - movq %rdi,%r12 - movq %rsi,%r13 - movq %rdx,%r14 - movq %rcx,%r15 - movdqa %xmm0,32(%rbp) - cmpq $8,%rdx - jb .Lctr_enc_short - - movl %eax,%ebx - shlq $7,%rax - subq $96,%rax - subq %rax,%rsp - - movq %rsp,%rax - movq %r15,%rcx - movl %ebx,%r10d - call _bsaes_key_convert - pxor %xmm6,%xmm7 - movdqa %xmm7,(%rax) - - movdqa (%rsp),%xmm8 - leaq .LADD1(%rip),%r11 - movdqa 32(%rbp),%xmm15 - movdqa -32(%r11),%xmm7 -.byte 102,68,15,56,0,199 -.byte 102,68,15,56,0,255 - movdqa %xmm8,(%rsp) - jmp .Lctr_enc_loop -.align 16 -.Lctr_enc_loop: - movdqa %xmm15,32(%rbp) - movdqa %xmm15,%xmm0 - movdqa %xmm15,%xmm1 - paddd 0(%r11),%xmm0 - movdqa %xmm15,%xmm2 - paddd 16(%r11),%xmm1 - movdqa %xmm15,%xmm3 - paddd 32(%r11),%xmm2 - movdqa %xmm15,%xmm4 - paddd 
48(%r11),%xmm3 - movdqa %xmm15,%xmm5 - paddd 64(%r11),%xmm4 - movdqa %xmm15,%xmm6 - paddd 80(%r11),%xmm5 - paddd 96(%r11),%xmm6 - - - - movdqa (%rsp),%xmm8 - leaq 16(%rsp),%rax - movdqa -16(%r11),%xmm7 - pxor %xmm8,%xmm15 - pxor %xmm8,%xmm0 - pxor %xmm8,%xmm1 - pxor %xmm8,%xmm2 -.byte 102,68,15,56,0,255 -.byte 102,15,56,0,199 - pxor %xmm8,%xmm3 - pxor %xmm8,%xmm4 -.byte 102,15,56,0,207 -.byte 102,15,56,0,215 - pxor %xmm8,%xmm5 - pxor %xmm8,%xmm6 -.byte 102,15,56,0,223 -.byte 102,15,56,0,231 -.byte 102,15,56,0,239 -.byte 102,15,56,0,247 - leaq .LBS0(%rip),%r11 - movl %ebx,%r10d - - call _bsaes_encrypt8_bitslice - - subq $8,%r14 - jc .Lctr_enc_loop_done - - movdqu 0(%r12),%xmm7 - movdqu 16(%r12),%xmm8 - movdqu 32(%r12),%xmm9 - movdqu 48(%r12),%xmm10 - movdqu 64(%r12),%xmm11 - movdqu 80(%r12),%xmm12 - movdqu 96(%r12),%xmm13 - movdqu 112(%r12),%xmm14 - leaq 128(%r12),%r12 - pxor %xmm15,%xmm7 - movdqa 32(%rbp),%xmm15 - pxor %xmm8,%xmm0 - movdqu %xmm7,0(%r13) - pxor %xmm9,%xmm3 - movdqu %xmm0,16(%r13) - pxor %xmm10,%xmm5 - movdqu %xmm3,32(%r13) - pxor %xmm11,%xmm2 - movdqu %xmm5,48(%r13) - pxor %xmm12,%xmm6 - movdqu %xmm2,64(%r13) - pxor %xmm13,%xmm1 - movdqu %xmm6,80(%r13) - pxor %xmm14,%xmm4 - movdqu %xmm1,96(%r13) - leaq .LADD1(%rip),%r11 - movdqu %xmm4,112(%r13) - leaq 128(%r13),%r13 - paddd 112(%r11),%xmm15 - jnz .Lctr_enc_loop - - jmp .Lctr_enc_done -.align 16 -.Lctr_enc_loop_done: - addq $8,%r14 - movdqu 0(%r12),%xmm7 - pxor %xmm7,%xmm15 - movdqu %xmm15,0(%r13) - cmpq $2,%r14 - jb .Lctr_enc_done - movdqu 16(%r12),%xmm8 - pxor %xmm8,%xmm0 - movdqu %xmm0,16(%r13) - je .Lctr_enc_done - movdqu 32(%r12),%xmm9 - pxor %xmm9,%xmm3 - movdqu %xmm3,32(%r13) - cmpq $4,%r14 - jb .Lctr_enc_done - movdqu 48(%r12),%xmm10 - pxor %xmm10,%xmm5 - movdqu %xmm5,48(%r13) - je .Lctr_enc_done - movdqu 64(%r12),%xmm11 - pxor %xmm11,%xmm2 - movdqu %xmm2,64(%r13) - cmpq $6,%r14 - jb .Lctr_enc_done - movdqu 80(%r12),%xmm12 - pxor %xmm12,%xmm6 - movdqu %xmm6,80(%r13) - je .Lctr_enc_done - movdqu 96(%r12),%xmm13 - pxor %xmm13,%xmm1 - movdqu %xmm1,96(%r13) - jmp .Lctr_enc_done - -.align 16 -.Lctr_enc_short: - leaq 32(%rbp),%rdi - leaq 48(%rbp),%rsi - leaq (%r15),%rdx - call asm_AES_encrypt - movdqu (%r12),%xmm0 - leaq 16(%r12),%r12 - movl 44(%rbp),%eax - bswapl %eax - pxor 48(%rbp),%xmm0 - incl %eax - movdqu %xmm0,(%r13) - bswapl %eax - leaq 16(%r13),%r13 - movl %eax,44(%rsp) - decq %r14 - jnz .Lctr_enc_short - -.Lctr_enc_done: - leaq (%rsp),%rax - pxor %xmm0,%xmm0 -.Lctr_enc_bzero: - movdqa %xmm0,0(%rax) - movdqa %xmm0,16(%rax) - leaq 32(%rax),%rax - cmpq %rax,%rbp - ja .Lctr_enc_bzero - - leaq 120(%rbp),%rax -.cfi_def_cfa %rax,8 - movq -48(%rax),%r15 -.cfi_restore %r15 - movq -40(%rax),%r14 -.cfi_restore %r14 - movq -32(%rax),%r13 -.cfi_restore %r13 - movq -24(%rax),%r12 -.cfi_restore %r12 - movq -16(%rax),%rbx -.cfi_restore %rbx - movq -8(%rax),%rbp -.cfi_restore %rbp - leaq (%rax),%rsp -.cfi_def_cfa_register %rsp -.Lctr_enc_epilogue: - .byte 0xf3,0xc3 -.cfi_endproc -.size bsaes_ctr32_encrypt_blocks,.-bsaes_ctr32_encrypt_blocks -.globl bsaes_xts_encrypt -.type bsaes_xts_encrypt,@function -.align 16 -bsaes_xts_encrypt: -.cfi_startproc - movq %rsp,%rax -.Lxts_enc_prologue: - pushq %rbp -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbp,-16 - pushq %rbx -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbx,-24 - pushq %r12 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r12,-32 - pushq %r13 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r13,-40 - pushq %r14 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r14,-48 - pushq %r15 -.cfi_adjust_cfa_offset 8 
-.cfi_offset %r15,-56 - leaq -72(%rsp),%rsp -.cfi_adjust_cfa_offset 0x48 - movq %rsp,%rbp -.cfi_def_cfa_register %rbp - movq %rdi,%r12 - movq %rsi,%r13 - movq %rdx,%r14 - movq %rcx,%r15 - - leaq (%r9),%rdi - leaq 32(%rbp),%rsi - leaq (%r8),%rdx - call asm_AES_encrypt - - movl 240(%r15),%eax - movq %r14,%rbx - - movl %eax,%edx - shlq $7,%rax - subq $96,%rax - subq %rax,%rsp - - movq %rsp,%rax - movq %r15,%rcx - movl %edx,%r10d - call _bsaes_key_convert - pxor %xmm6,%xmm7 - movdqa %xmm7,(%rax) - - andq $-16,%r14 - subq $0x80,%rsp - movdqa 32(%rbp),%xmm6 - - pxor %xmm14,%xmm14 - movdqa .Lxts_magic(%rip),%xmm12 - pcmpgtd %xmm6,%xmm14 - - subq $0x80,%r14 - jc .Lxts_enc_short - jmp .Lxts_enc_loop - -.align 16 -.Lxts_enc_loop: - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm15 - movdqa %xmm6,0(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm0 - movdqa %xmm6,16(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 0(%r12),%xmm7 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm1 - movdqa %xmm6,32(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 16(%r12),%xmm8 - pxor %xmm7,%xmm15 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm2 - movdqa %xmm6,48(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 32(%r12),%xmm9 - pxor %xmm8,%xmm0 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm3 - movdqa %xmm6,64(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 48(%r12),%xmm10 - pxor %xmm9,%xmm1 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm4 - movdqa %xmm6,80(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 64(%r12),%xmm11 - pxor %xmm10,%xmm2 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm5 - movdqa %xmm6,96(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 80(%r12),%xmm12 - pxor %xmm11,%xmm3 - movdqu 96(%r12),%xmm13 - pxor %xmm12,%xmm4 - movdqu 112(%r12),%xmm14 - leaq 128(%r12),%r12 - movdqa %xmm6,112(%rsp) - pxor %xmm13,%xmm5 - leaq 128(%rsp),%rax - pxor %xmm14,%xmm6 - movl %edx,%r10d - - call _bsaes_encrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm3 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm5 - movdqu %xmm3,32(%r13) - pxor 64(%rsp),%xmm2 - movdqu %xmm5,48(%r13) - pxor 80(%rsp),%xmm6 - movdqu %xmm2,64(%r13) - pxor 96(%rsp),%xmm1 - movdqu %xmm6,80(%r13) - pxor 112(%rsp),%xmm4 - movdqu %xmm1,96(%r13) - movdqu %xmm4,112(%r13) - leaq 128(%r13),%r13 - - movdqa 112(%rsp),%xmm6 - pxor %xmm14,%xmm14 - movdqa .Lxts_magic(%rip),%xmm12 - pcmpgtd %xmm6,%xmm14 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - - subq $0x80,%r14 - jnc .Lxts_enc_loop - -.Lxts_enc_short: - addq $0x80,%r14 - jz .Lxts_enc_done - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm15 - movdqa %xmm6,0(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm0 - movdqa %xmm6,16(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 
0(%r12),%xmm7 - cmpq $16,%r14 - je .Lxts_enc_1 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm1 - movdqa %xmm6,32(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 16(%r12),%xmm8 - cmpq $32,%r14 - je .Lxts_enc_2 - pxor %xmm7,%xmm15 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm2 - movdqa %xmm6,48(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 32(%r12),%xmm9 - cmpq $48,%r14 - je .Lxts_enc_3 - pxor %xmm8,%xmm0 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm3 - movdqa %xmm6,64(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 48(%r12),%xmm10 - cmpq $64,%r14 - je .Lxts_enc_4 - pxor %xmm9,%xmm1 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm4 - movdqa %xmm6,80(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 64(%r12),%xmm11 - cmpq $80,%r14 - je .Lxts_enc_5 - pxor %xmm10,%xmm2 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm5 - movdqa %xmm6,96(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 80(%r12),%xmm12 - cmpq $96,%r14 - je .Lxts_enc_6 - pxor %xmm11,%xmm3 - movdqu 96(%r12),%xmm13 - pxor %xmm12,%xmm4 - movdqa %xmm6,112(%rsp) - leaq 112(%r12),%r12 - pxor %xmm13,%xmm5 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_encrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm3 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm5 - movdqu %xmm3,32(%r13) - pxor 64(%rsp),%xmm2 - movdqu %xmm5,48(%r13) - pxor 80(%rsp),%xmm6 - movdqu %xmm2,64(%r13) - pxor 96(%rsp),%xmm1 - movdqu %xmm6,80(%r13) - movdqu %xmm1,96(%r13) - leaq 112(%r13),%r13 - - movdqa 112(%rsp),%xmm6 - jmp .Lxts_enc_done -.align 16 -.Lxts_enc_6: - pxor %xmm11,%xmm3 - leaq 96(%r12),%r12 - pxor %xmm12,%xmm4 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_encrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm3 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm5 - movdqu %xmm3,32(%r13) - pxor 64(%rsp),%xmm2 - movdqu %xmm5,48(%r13) - pxor 80(%rsp),%xmm6 - movdqu %xmm2,64(%r13) - movdqu %xmm6,80(%r13) - leaq 96(%r13),%r13 - - movdqa 96(%rsp),%xmm6 - jmp .Lxts_enc_done -.align 16 -.Lxts_enc_5: - pxor %xmm10,%xmm2 - leaq 80(%r12),%r12 - pxor %xmm11,%xmm3 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_encrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm3 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm5 - movdqu %xmm3,32(%r13) - pxor 64(%rsp),%xmm2 - movdqu %xmm5,48(%r13) - movdqu %xmm2,64(%r13) - leaq 80(%r13),%r13 - - movdqa 80(%rsp),%xmm6 - jmp .Lxts_enc_done -.align 16 -.Lxts_enc_4: - pxor %xmm9,%xmm1 - leaq 64(%r12),%r12 - pxor %xmm10,%xmm2 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_encrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm3 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm5 - movdqu %xmm3,32(%r13) - movdqu %xmm5,48(%r13) - leaq 64(%r13),%r13 - - movdqa 64(%rsp),%xmm6 - jmp .Lxts_enc_done -.align 16 -.Lxts_enc_3: - pxor %xmm8,%xmm0 - leaq 48(%r12),%r12 - pxor %xmm9,%xmm1 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_encrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm3 - movdqu %xmm0,16(%r13) - movdqu %xmm3,32(%r13) - leaq 
48(%r13),%r13 - - movdqa 48(%rsp),%xmm6 - jmp .Lxts_enc_done -.align 16 -.Lxts_enc_2: - pxor %xmm7,%xmm15 - leaq 32(%r12),%r12 - pxor %xmm8,%xmm0 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_encrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - movdqu %xmm0,16(%r13) - leaq 32(%r13),%r13 - - movdqa 32(%rsp),%xmm6 - jmp .Lxts_enc_done -.align 16 -.Lxts_enc_1: - pxor %xmm15,%xmm7 - leaq 16(%r12),%r12 - movdqa %xmm7,32(%rbp) - leaq 32(%rbp),%rdi - leaq 32(%rbp),%rsi - leaq (%r15),%rdx - call asm_AES_encrypt - pxor 32(%rbp),%xmm15 - - - - - - movdqu %xmm15,0(%r13) - leaq 16(%r13),%r13 - - movdqa 16(%rsp),%xmm6 - -.Lxts_enc_done: - andl $15,%ebx - jz .Lxts_enc_ret - movq %r13,%rdx - -.Lxts_enc_steal: - movzbl (%r12),%eax - movzbl -16(%rdx),%ecx - leaq 1(%r12),%r12 - movb %al,-16(%rdx) - movb %cl,0(%rdx) - leaq 1(%rdx),%rdx - subl $1,%ebx - jnz .Lxts_enc_steal - - movdqu -16(%r13),%xmm15 - leaq 32(%rbp),%rdi - pxor %xmm6,%xmm15 - leaq 32(%rbp),%rsi - movdqa %xmm15,32(%rbp) - leaq (%r15),%rdx - call asm_AES_encrypt - pxor 32(%rbp),%xmm6 - movdqu %xmm6,-16(%r13) - -.Lxts_enc_ret: - leaq (%rsp),%rax - pxor %xmm0,%xmm0 -.Lxts_enc_bzero: - movdqa %xmm0,0(%rax) - movdqa %xmm0,16(%rax) - leaq 32(%rax),%rax - cmpq %rax,%rbp - ja .Lxts_enc_bzero - - leaq 120(%rbp),%rax -.cfi_def_cfa %rax,8 - movq -48(%rax),%r15 -.cfi_restore %r15 - movq -40(%rax),%r14 -.cfi_restore %r14 - movq -32(%rax),%r13 -.cfi_restore %r13 - movq -24(%rax),%r12 -.cfi_restore %r12 - movq -16(%rax),%rbx -.cfi_restore %rbx - movq -8(%rax),%rbp -.cfi_restore %rbp - leaq (%rax),%rsp -.cfi_def_cfa_register %rsp -.Lxts_enc_epilogue: - .byte 0xf3,0xc3 -.cfi_endproc -.size bsaes_xts_encrypt,.-bsaes_xts_encrypt - -.globl bsaes_xts_decrypt -.type bsaes_xts_decrypt,@function -.align 16 -bsaes_xts_decrypt: -.cfi_startproc - movq %rsp,%rax -.Lxts_dec_prologue: - pushq %rbp -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbp,-16 - pushq %rbx -.cfi_adjust_cfa_offset 8 -.cfi_offset %rbx,-24 - pushq %r12 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r12,-32 - pushq %r13 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r13,-40 - pushq %r14 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r14,-48 - pushq %r15 -.cfi_adjust_cfa_offset 8 -.cfi_offset %r15,-56 - leaq -72(%rsp),%rsp -.cfi_adjust_cfa_offset 0x48 - movq %rsp,%rbp - movq %rdi,%r12 - movq %rsi,%r13 - movq %rdx,%r14 - movq %rcx,%r15 - - leaq (%r9),%rdi - leaq 32(%rbp),%rsi - leaq (%r8),%rdx - call asm_AES_encrypt - - movl 240(%r15),%eax - movq %r14,%rbx - - movl %eax,%edx - shlq $7,%rax - subq $96,%rax - subq %rax,%rsp - - movq %rsp,%rax - movq %r15,%rcx - movl %edx,%r10d - call _bsaes_key_convert - pxor (%rsp),%xmm7 - movdqa %xmm6,(%rax) - movdqa %xmm7,(%rsp) - - xorl %eax,%eax - andq $-16,%r14 - testl $15,%ebx - setnz %al - shlq $4,%rax - subq %rax,%r14 - - subq $0x80,%rsp - movdqa 32(%rbp),%xmm6 - - pxor %xmm14,%xmm14 - movdqa .Lxts_magic(%rip),%xmm12 - pcmpgtd %xmm6,%xmm14 - - subq $0x80,%r14 - jc .Lxts_dec_short - jmp .Lxts_dec_loop - -.align 16 -.Lxts_dec_loop: - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm15 - movdqa %xmm6,0(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm0 - movdqa %xmm6,16(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 0(%r12),%xmm7 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm1 - movdqa %xmm6,32(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd 
%xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 16(%r12),%xmm8 - pxor %xmm7,%xmm15 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm2 - movdqa %xmm6,48(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 32(%r12),%xmm9 - pxor %xmm8,%xmm0 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm3 - movdqa %xmm6,64(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 48(%r12),%xmm10 - pxor %xmm9,%xmm1 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm4 - movdqa %xmm6,80(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 64(%r12),%xmm11 - pxor %xmm10,%xmm2 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm5 - movdqa %xmm6,96(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 80(%r12),%xmm12 - pxor %xmm11,%xmm3 - movdqu 96(%r12),%xmm13 - pxor %xmm12,%xmm4 - movdqu 112(%r12),%xmm14 - leaq 128(%r12),%r12 - movdqa %xmm6,112(%rsp) - pxor %xmm13,%xmm5 - leaq 128(%rsp),%rax - pxor %xmm14,%xmm6 - movl %edx,%r10d - - call _bsaes_decrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm5 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm3 - movdqu %xmm5,32(%r13) - pxor 64(%rsp),%xmm1 - movdqu %xmm3,48(%r13) - pxor 80(%rsp),%xmm6 - movdqu %xmm1,64(%r13) - pxor 96(%rsp),%xmm2 - movdqu %xmm6,80(%r13) - pxor 112(%rsp),%xmm4 - movdqu %xmm2,96(%r13) - movdqu %xmm4,112(%r13) - leaq 128(%r13),%r13 - - movdqa 112(%rsp),%xmm6 - pxor %xmm14,%xmm14 - movdqa .Lxts_magic(%rip),%xmm12 - pcmpgtd %xmm6,%xmm14 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - - subq $0x80,%r14 - jnc .Lxts_dec_loop - -.Lxts_dec_short: - addq $0x80,%r14 - jz .Lxts_dec_done - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm15 - movdqa %xmm6,0(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm0 - movdqa %xmm6,16(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 0(%r12),%xmm7 - cmpq $16,%r14 - je .Lxts_dec_1 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm1 - movdqa %xmm6,32(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 16(%r12),%xmm8 - cmpq $32,%r14 - je .Lxts_dec_2 - pxor %xmm7,%xmm15 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm2 - movdqa %xmm6,48(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 32(%r12),%xmm9 - cmpq $48,%r14 - je .Lxts_dec_3 - pxor %xmm8,%xmm0 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm3 - movdqa %xmm6,64(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 48(%r12),%xmm10 - cmpq $64,%r14 - je .Lxts_dec_4 - pxor %xmm9,%xmm1 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm4 - movdqa %xmm6,80(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 64(%r12),%xmm11 - cmpq $80,%r14 - je .Lxts_dec_5 - pxor %xmm10,%xmm2 - pshufd $0x13,%xmm14,%xmm13 - pxor %xmm14,%xmm14 - movdqa %xmm6,%xmm5 - movdqa %xmm6,96(%rsp) - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - pcmpgtd %xmm6,%xmm14 - pxor %xmm13,%xmm6 - movdqu 
80(%r12),%xmm12 - cmpq $96,%r14 - je .Lxts_dec_6 - pxor %xmm11,%xmm3 - movdqu 96(%r12),%xmm13 - pxor %xmm12,%xmm4 - movdqa %xmm6,112(%rsp) - leaq 112(%r12),%r12 - pxor %xmm13,%xmm5 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_decrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm5 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm3 - movdqu %xmm5,32(%r13) - pxor 64(%rsp),%xmm1 - movdqu %xmm3,48(%r13) - pxor 80(%rsp),%xmm6 - movdqu %xmm1,64(%r13) - pxor 96(%rsp),%xmm2 - movdqu %xmm6,80(%r13) - movdqu %xmm2,96(%r13) - leaq 112(%r13),%r13 - - movdqa 112(%rsp),%xmm6 - jmp .Lxts_dec_done -.align 16 -.Lxts_dec_6: - pxor %xmm11,%xmm3 - leaq 96(%r12),%r12 - pxor %xmm12,%xmm4 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_decrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm5 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm3 - movdqu %xmm5,32(%r13) - pxor 64(%rsp),%xmm1 - movdqu %xmm3,48(%r13) - pxor 80(%rsp),%xmm6 - movdqu %xmm1,64(%r13) - movdqu %xmm6,80(%r13) - leaq 96(%r13),%r13 - - movdqa 96(%rsp),%xmm6 - jmp .Lxts_dec_done -.align 16 -.Lxts_dec_5: - pxor %xmm10,%xmm2 - leaq 80(%r12),%r12 - pxor %xmm11,%xmm3 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_decrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm5 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm3 - movdqu %xmm5,32(%r13) - pxor 64(%rsp),%xmm1 - movdqu %xmm3,48(%r13) - movdqu %xmm1,64(%r13) - leaq 80(%r13),%r13 - - movdqa 80(%rsp),%xmm6 - jmp .Lxts_dec_done -.align 16 -.Lxts_dec_4: - pxor %xmm9,%xmm1 - leaq 64(%r12),%r12 - pxor %xmm10,%xmm2 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_decrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm5 - movdqu %xmm0,16(%r13) - pxor 48(%rsp),%xmm3 - movdqu %xmm5,32(%r13) - movdqu %xmm3,48(%r13) - leaq 64(%r13),%r13 - - movdqa 64(%rsp),%xmm6 - jmp .Lxts_dec_done -.align 16 -.Lxts_dec_3: - pxor %xmm8,%xmm0 - leaq 48(%r12),%r12 - pxor %xmm9,%xmm1 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_decrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - pxor 32(%rsp),%xmm5 - movdqu %xmm0,16(%r13) - movdqu %xmm5,32(%r13) - leaq 48(%r13),%r13 - - movdqa 48(%rsp),%xmm6 - jmp .Lxts_dec_done -.align 16 -.Lxts_dec_2: - pxor %xmm7,%xmm15 - leaq 32(%r12),%r12 - pxor %xmm8,%xmm0 - leaq 128(%rsp),%rax - movl %edx,%r10d - - call _bsaes_decrypt8 - - pxor 0(%rsp),%xmm15 - pxor 16(%rsp),%xmm0 - movdqu %xmm15,0(%r13) - movdqu %xmm0,16(%r13) - leaq 32(%r13),%r13 - - movdqa 32(%rsp),%xmm6 - jmp .Lxts_dec_done -.align 16 -.Lxts_dec_1: - pxor %xmm15,%xmm7 - leaq 16(%r12),%r12 - movdqa %xmm7,32(%rbp) - leaq 32(%rbp),%rdi - leaq 32(%rbp),%rsi - leaq (%r15),%rdx - call asm_AES_decrypt - pxor 32(%rbp),%xmm15 - - - - - - movdqu %xmm15,0(%r13) - leaq 16(%r13),%r13 - - movdqa 16(%rsp),%xmm6 - -.Lxts_dec_done: - andl $15,%ebx - jz .Lxts_dec_ret - - pxor %xmm14,%xmm14 - movdqa .Lxts_magic(%rip),%xmm12 - pcmpgtd %xmm6,%xmm14 - pshufd $0x13,%xmm14,%xmm13 - movdqa %xmm6,%xmm5 - paddq %xmm6,%xmm6 - pand %xmm12,%xmm13 - movdqu (%r12),%xmm15 - pxor %xmm13,%xmm6 - - leaq 32(%rbp),%rdi - pxor %xmm6,%xmm15 - leaq 32(%rbp),%rsi - movdqa %xmm15,32(%rbp) - leaq (%r15),%rdx - call asm_AES_decrypt - pxor 32(%rbp),%xmm6 - movq %r13,%rdx - movdqu %xmm6,(%r13) - -.Lxts_dec_steal: - movzbl 16(%r12),%eax - movzbl (%rdx),%ecx - leaq 1(%r12),%r12 - movb %al,(%rdx) - movb %cl,16(%rdx) - leaq 1(%rdx),%rdx - subl 
$1,%ebx - jnz .Lxts_dec_steal - - movdqu (%r13),%xmm15 - leaq 32(%rbp),%rdi - pxor %xmm5,%xmm15 - leaq 32(%rbp),%rsi - movdqa %xmm15,32(%rbp) - leaq (%r15),%rdx - call asm_AES_decrypt - pxor 32(%rbp),%xmm5 - movdqu %xmm5,(%r13) - -.Lxts_dec_ret: - leaq (%rsp),%rax - pxor %xmm0,%xmm0 -.Lxts_dec_bzero: - movdqa %xmm0,0(%rax) - movdqa %xmm0,16(%rax) - leaq 32(%rax),%rax - cmpq %rax,%rbp - ja .Lxts_dec_bzero - - leaq 120(%rbp),%rax -.cfi_def_cfa %rax,8 - movq -48(%rax),%r15 -.cfi_restore %r15 - movq -40(%rax),%r14 -.cfi_restore %r14 - movq -32(%rax),%r13 -.cfi_restore %r13 - movq -24(%rax),%r12 -.cfi_restore %r12 - movq -16(%rax),%rbx -.cfi_restore %rbx - movq -8(%rax),%rbp -.cfi_restore %rbp - leaq (%rax),%rsp -.cfi_def_cfa_register %rsp -.Lxts_dec_epilogue: - .byte 0xf3,0xc3 -.cfi_endproc -.size bsaes_xts_decrypt,.-bsaes_xts_decrypt -.type _bsaes_const,@object -.align 64 -_bsaes_const: -.LM0ISR: -.quad 0x0a0e0206070b0f03, 0x0004080c0d010509 -.LISRM0: -.quad 0x01040b0e0205080f, 0x0306090c00070a0d -.LISR: -.quad 0x0504070602010003, 0x0f0e0d0c080b0a09 -.LBS0: -.quad 0x5555555555555555, 0x5555555555555555 -.LBS1: -.quad 0x3333333333333333, 0x3333333333333333 -.LBS2: -.quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f -.LSR: -.quad 0x0504070600030201, 0x0f0e0d0c0a09080b -.LSRM0: -.quad 0x0304090e00050a0f, 0x01060b0c0207080d -.LM0SR: -.quad 0x0a0e02060f03070b, 0x0004080c05090d01 -.LSWPUP: -.quad 0x0706050403020100, 0x0c0d0e0f0b0a0908 -.LSWPUPM0SR: -.quad 0x0a0d02060c03070b, 0x0004080f05090e01 -.LADD1: -.quad 0x0000000000000000, 0x0000000100000000 -.LADD2: -.quad 0x0000000000000000, 0x0000000200000000 -.LADD3: -.quad 0x0000000000000000, 0x0000000300000000 -.LADD4: -.quad 0x0000000000000000, 0x0000000400000000 -.LADD5: -.quad 0x0000000000000000, 0x0000000500000000 -.LADD6: -.quad 0x0000000000000000, 0x0000000600000000 -.LADD7: -.quad 0x0000000000000000, 0x0000000700000000 -.LADD8: -.quad 0x0000000000000000, 0x0000000800000000 -.Lxts_magic: -.long 0x87,0,1,0 -.Lmasks: -.quad 0x0101010101010101, 0x0101010101010101 -.quad 0x0202020202020202, 0x0202020202020202 -.quad 0x0404040404040404, 0x0404040404040404 -.quad 0x0808080808080808, 0x0808080808080808 -.LM0: -.quad 0x02060a0e03070b0f, 0x0004080c0105090d -.L63: -.quad 0x6363636363636363, 0x6363636363636363 -.byte 66,105,116,45,115,108,105,99,101,100,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,69,109,105,108,105,97,32,75,195,164,115,112,101,114,44,32,80,101,116,101,114,32,83,99,104,119,97,98,101,44,32,65,110,100,121,32,80,111,108,121,97,107,111,118,0 -.align 64 -.size _bsaes_const,.-_bsaes_const diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s index f8e4a80588..7876e0b8f9 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s @@ -29,7 +29,7 @@ rsaz_512_sqr: subq $128+24,%rsp .cfi_adjust_cfa_offset 128+24 .Lsqr_body: - movq %rdx,%rbp +.byte 102,72,15,110,202 movq (%rsi),%rdx movq 8(%rsi),%rax movq %rcx,128(%rsp) @@ -44,6 +44,7 @@ rsaz_512_sqr: movl %r8d,128+8(%rsp) movq %rdx,%rbx + movq %rax,%rbp mulq %rdx movq %rax,%r8 movq 16(%rsi),%rax @@ -82,31 +83,29 @@ rsaz_512_sqr: mulq %rbx addq %rax,%r14 movq %rbx,%rax - movq %rdx,%r15 - adcq $0,%r15 + adcq $0,%rdx + xorq %rcx,%rcx addq %r8,%r8 - movq %r9,%rcx - adcq %r9,%r9 + movq %rdx,%r15 + adcq $0,%rcx mulq %rax - movq %rax,(%rsp) - addq %rdx,%r8 - adcq $0,%r9 + addq %r8,%rdx + adcq $0,%rcx - 
movq %r8,8(%rsp) - shrq $63,%rcx + movq %rax,(%rsp) + movq %rdx,8(%rsp) - movq 8(%rsi),%r8 movq 16(%rsi),%rax - mulq %r8 + mulq %rbp addq %rax,%r10 movq 24(%rsi),%rax movq %rdx,%rbx adcq $0,%rbx - mulq %r8 + mulq %rbp addq %rax,%r11 movq 32(%rsi),%rax adcq $0,%rdx @@ -114,7 +113,7 @@ rsaz_512_sqr: movq %rdx,%rbx adcq $0,%rbx - mulq %r8 + mulq %rbp addq %rax,%r12 movq 40(%rsi),%rax adcq $0,%rdx @@ -122,7 +121,7 @@ rsaz_512_sqr: movq %rdx,%rbx adcq $0,%rbx - mulq %r8 + mulq %rbp addq %rax,%r13 movq 48(%rsi),%rax adcq $0,%rdx @@ -130,7 +129,7 @@ rsaz_512_sqr: movq %rdx,%rbx adcq $0,%rbx - mulq %r8 + mulq %rbp addq %rax,%r14 movq 56(%rsi),%rax adcq $0,%rdx @@ -138,39 +137,39 @@ rsaz_512_sqr: movq %rdx,%rbx adcq $0,%rbx - mulq %r8 + mulq %rbp addq %rax,%r15 - movq %r8,%rax + movq %rbp,%rax adcq $0,%rdx addq %rbx,%r15 - movq %rdx,%r8 - movq %r10,%rdx - adcq $0,%r8 + adcq $0,%rdx - addq %rdx,%rdx - leaq (%rcx,%r10,2),%r10 - movq %r11,%rbx - adcq %r11,%r11 + xorq %rbx,%rbx + addq %r9,%r9 + movq %rdx,%r8 + adcq %r10,%r10 + adcq $0,%rbx mulq %rax + + addq %rcx,%rax + movq 16(%rsi),%rbp addq %rax,%r9 + movq 24(%rsi),%rax adcq %rdx,%r10 - adcq $0,%r11 + adcq $0,%rbx movq %r9,16(%rsp) movq %r10,24(%rsp) - shrq $63,%rbx - movq 16(%rsi),%r9 - movq 24(%rsi),%rax - mulq %r9 + mulq %rbp addq %rax,%r12 movq 32(%rsi),%rax movq %rdx,%rcx adcq $0,%rcx - mulq %r9 + mulq %rbp addq %rax,%r13 movq 40(%rsi),%rax adcq $0,%rdx @@ -178,7 +177,7 @@ rsaz_512_sqr: movq %rdx,%rcx adcq $0,%rcx - mulq %r9 + mulq %rbp addq %rax,%r14 movq 48(%rsi),%rax adcq $0,%rdx @@ -186,9 +185,7 @@ rsaz_512_sqr: movq %rdx,%rcx adcq $0,%rcx - mulq %r9 - movq %r12,%r10 - leaq (%rbx,%r12,2),%r12 + mulq %rbp addq %rax,%r15 movq 56(%rsi),%rax adcq $0,%rdx @@ -196,36 +193,40 @@ rsaz_512_sqr: movq %rdx,%rcx adcq $0,%rcx - mulq %r9 - shrq $63,%r10 + mulq %rbp addq %rax,%r8 - movq %r9,%rax + movq %rbp,%rax adcq $0,%rdx addq %rcx,%r8 - movq %rdx,%r9 - adcq $0,%r9 + adcq $0,%rdx - movq %r13,%rcx - leaq (%r10,%r13,2),%r13 + xorq %rcx,%rcx + addq %r11,%r11 + movq %rdx,%r9 + adcq %r12,%r12 + adcq $0,%rcx mulq %rax + + addq %rbx,%rax + movq 24(%rsi),%r10 addq %rax,%r11 + movq 32(%rsi),%rax adcq %rdx,%r12 - adcq $0,%r13 + adcq $0,%rcx movq %r11,32(%rsp) movq %r12,40(%rsp) - shrq $63,%rcx - movq 24(%rsi),%r10 - movq 32(%rsi),%rax + movq %rax,%r11 mulq %r10 addq %rax,%r14 movq 40(%rsi),%rax movq %rdx,%rbx adcq $0,%rbx + movq %rax,%r12 mulq %r10 addq %rax,%r15 movq 48(%rsi),%rax @@ -234,9 +235,8 @@ rsaz_512_sqr: movq %rdx,%rbx adcq $0,%rbx + movq %rax,%rbp mulq %r10 - movq %r14,%r12 - leaq (%rcx,%r14,2),%r14 addq %rax,%r8 movq 56(%rsi),%rax adcq $0,%rdx @@ -245,32 +245,33 @@ rsaz_512_sqr: adcq $0,%rbx mulq %r10 - shrq $63,%r12 addq %rax,%r9 movq %r10,%rax adcq $0,%rdx addq %rbx,%r9 - movq %rdx,%r10 - adcq $0,%r10 + adcq $0,%rdx - movq %r15,%rbx - leaq (%r12,%r15,2),%r15 + xorq %rbx,%rbx + addq %r13,%r13 + movq %rdx,%r10 + adcq %r14,%r14 + adcq $0,%rbx mulq %rax + + addq %rcx,%rax addq %rax,%r13 + movq %r12,%rax adcq %rdx,%r14 - adcq $0,%r15 + adcq $0,%rbx movq %r13,48(%rsp) movq %r14,56(%rsp) - shrq $63,%rbx - movq 32(%rsi),%r11 - movq 40(%rsi),%rax mulq %r11 addq %rax,%r8 - movq 48(%rsi),%rax + movq %rbp,%rax movq %rdx,%rcx adcq $0,%rcx @@ -278,97 +279,99 @@ rsaz_512_sqr: addq %rax,%r9 movq 56(%rsi),%rax adcq $0,%rdx - movq %r8,%r12 - leaq (%rbx,%r8,2),%r8 addq %rcx,%r9 movq %rdx,%rcx adcq $0,%rcx + movq %rax,%r14 mulq %r11 - shrq $63,%r12 addq %rax,%r10 movq %r11,%rax adcq $0,%rdx addq %rcx,%r10 - movq %rdx,%r11 - adcq $0,%r11 + adcq $0,%rdx - movq %r9,%rcx 
- leaq (%r12,%r9,2),%r9 + xorq %rcx,%rcx + addq %r15,%r15 + movq %rdx,%r11 + adcq %r8,%r8 + adcq $0,%rcx mulq %rax + + addq %rbx,%rax addq %rax,%r15 + movq %rbp,%rax adcq %rdx,%r8 - adcq $0,%r9 + adcq $0,%rcx movq %r15,64(%rsp) movq %r8,72(%rsp) - shrq $63,%rcx - movq 40(%rsi),%r12 - movq 48(%rsi),%rax mulq %r12 addq %rax,%r10 - movq 56(%rsi),%rax + movq %r14,%rax movq %rdx,%rbx adcq $0,%rbx mulq %r12 addq %rax,%r11 movq %r12,%rax - movq %r10,%r15 - leaq (%rcx,%r10,2),%r10 adcq $0,%rdx - shrq $63,%r15 addq %rbx,%r11 - movq %rdx,%r12 - adcq $0,%r12 + adcq $0,%rdx - movq %r11,%rbx - leaq (%r15,%r11,2),%r11 + xorq %rbx,%rbx + addq %r9,%r9 + movq %rdx,%r12 + adcq %r10,%r10 + adcq $0,%rbx mulq %rax + + addq %rcx,%rax addq %rax,%r9 + movq %r14,%rax adcq %rdx,%r10 - adcq $0,%r11 + adcq $0,%rbx movq %r9,80(%rsp) movq %r10,88(%rsp) - movq 48(%rsi),%r13 - movq 56(%rsi),%rax - mulq %r13 + mulq %rbp addq %rax,%r12 - movq %r13,%rax - movq %rdx,%r13 - adcq $0,%r13 + movq %rbp,%rax + adcq $0,%rdx - xorq %r14,%r14 - shlq $1,%rbx + xorq %rcx,%rcx + addq %r11,%r11 + movq %rdx,%r13 adcq %r12,%r12 - adcq %r13,%r13 - adcq %r14,%r14 + adcq $0,%rcx mulq %rax + + addq %rbx,%rax addq %rax,%r11 + movq %r14,%rax adcq %rdx,%r12 - adcq $0,%r13 + adcq $0,%rcx movq %r11,96(%rsp) movq %r12,104(%rsp) - movq 56(%rsi),%rax - mulq %rax - addq %rax,%r13 - adcq $0,%rdx + xorq %rbx,%rbx + addq %r13,%r13 + adcq $0,%rbx - addq %rdx,%r14 + mulq %rax - movq %r13,112(%rsp) - movq %r14,120(%rsp) + addq %rcx,%rax + addq %r13,%rax + adcq %rbx,%rdx movq (%rsp),%r8 movq 8(%rsp),%r9 @@ -378,6 +381,10 @@ rsaz_512_sqr: movq 40(%rsp),%r13 movq 48(%rsp),%r14 movq 56(%rsp),%r15 +.byte 102,72,15,126,205 + + movq %rax,112(%rsp) + movq %rdx,120(%rsp) call __rsaz_512_reduce @@ -406,9 +413,9 @@ rsaz_512_sqr: .Loop_sqrx: movl %r8d,128+8(%rsp) .byte 102,72,15,110,199 -.byte 102,72,15,110,205 mulxq %rax,%r8,%r9 + movq %rax,%rbx mulxq 16(%rsi),%rcx,%r10 xorq %rbp,%rbp @@ -416,40 +423,39 @@ rsaz_512_sqr: mulxq 24(%rsi),%rax,%r11 adcxq %rcx,%r9 - mulxq 32(%rsi),%rcx,%r12 +.byte 0xc4,0x62,0xf3,0xf6,0xa6,0x20,0x00,0x00,0x00 adcxq %rax,%r10 - mulxq 40(%rsi),%rax,%r13 +.byte 0xc4,0x62,0xfb,0xf6,0xae,0x28,0x00,0x00,0x00 adcxq %rcx,%r11 -.byte 0xc4,0x62,0xf3,0xf6,0xb6,0x30,0x00,0x00,0x00 + mulxq 48(%rsi),%rcx,%r14 adcxq %rax,%r12 adcxq %rcx,%r13 -.byte 0xc4,0x62,0xfb,0xf6,0xbe,0x38,0x00,0x00,0x00 + mulxq 56(%rsi),%rax,%r15 adcxq %rax,%r14 adcxq %rbp,%r15 - movq %r9,%rcx - shldq $1,%r8,%r9 - shlq $1,%r8 - - xorl %ebp,%ebp - mulxq %rdx,%rax,%rdx - adcxq %rdx,%r8 - movq 8(%rsi),%rdx - adcxq %rbp,%r9 + mulxq %rdx,%rax,%rdi + movq %rbx,%rdx + xorq %rcx,%rcx + adoxq %r8,%r8 + adcxq %rdi,%r8 + adoxq %rbp,%rcx + adcxq %rbp,%rcx movq %rax,(%rsp) movq %r8,8(%rsp) - mulxq 16(%rsi),%rax,%rbx +.byte 0xc4,0xe2,0xfb,0xf6,0x9e,0x10,0x00,0x00,0x00 adoxq %rax,%r10 adcxq %rbx,%r11 -.byte 0xc4,0x62,0xc3,0xf6,0x86,0x18,0x00,0x00,0x00 + mulxq 24(%rsi),%rdi,%r8 adoxq %rdi,%r11 +.byte 0x66 adcxq %r8,%r12 mulxq 32(%rsi),%rax,%rbx @@ -467,24 +473,25 @@ rsaz_512_sqr: .byte 0xc4,0x62,0xc3,0xf6,0x86,0x38,0x00,0x00,0x00 adoxq %rdi,%r15 adcxq %rbp,%r8 + mulxq %rdx,%rax,%rdi adoxq %rbp,%r8 +.byte 0x48,0x8b,0x96,0x10,0x00,0x00,0x00 - movq %r11,%rbx - shldq $1,%r10,%r11 - shldq $1,%rcx,%r10 + xorq %rbx,%rbx + adoxq %r9,%r9 - xorl %ebp,%ebp - mulxq %rdx,%rax,%rcx - movq 16(%rsi),%rdx + adcxq %rcx,%rax + adoxq %r10,%r10 adcxq %rax,%r9 - adcxq %rcx,%r10 - adcxq %rbp,%r11 + adoxq %rbp,%rbx + adcxq %rdi,%r10 + adcxq %rbp,%rbx movq %r9,16(%rsp) .byte 0x4c,0x89,0x94,0x24,0x18,0x00,0x00,0x00 -.byte 
0xc4,0x62,0xc3,0xf6,0x8e,0x18,0x00,0x00,0x00 + mulxq 24(%rsi),%rdi,%r9 adoxq %rdi,%r12 adcxq %r9,%r13 @@ -492,7 +499,7 @@ rsaz_512_sqr: adoxq %rax,%r13 adcxq %rcx,%r14 - mulxq 40(%rsi),%rdi,%r9 +.byte 0xc4,0x62,0xc3,0xf6,0x8e,0x28,0x00,0x00,0x00 adoxq %rdi,%r14 adcxq %r9,%r15 @@ -500,27 +507,28 @@ rsaz_512_sqr: adoxq %rax,%r15 adcxq %rcx,%r8 -.byte 0xc4,0x62,0xc3,0xf6,0x8e,0x38,0x00,0x00,0x00 + mulxq 56(%rsi),%rdi,%r9 adoxq %rdi,%r8 adcxq %rbp,%r9 + mulxq %rdx,%rax,%rdi adoxq %rbp,%r9 + movq 24(%rsi),%rdx - movq %r13,%rcx - shldq $1,%r12,%r13 - shldq $1,%rbx,%r12 + xorq %rcx,%rcx + adoxq %r11,%r11 - xorl %ebp,%ebp - mulxq %rdx,%rax,%rdx + adcxq %rbx,%rax + adoxq %r12,%r12 adcxq %rax,%r11 - adcxq %rdx,%r12 - movq 24(%rsi),%rdx - adcxq %rbp,%r13 + adoxq %rbp,%rcx + adcxq %rdi,%r12 + adcxq %rbp,%rcx movq %r11,32(%rsp) -.byte 0x4c,0x89,0xa4,0x24,0x28,0x00,0x00,0x00 + movq %r12,40(%rsp) -.byte 0xc4,0xe2,0xfb,0xf6,0x9e,0x20,0x00,0x00,0x00 + mulxq 32(%rsi),%rax,%rbx adoxq %rax,%r14 adcxq %rbx,%r15 @@ -535,25 +543,25 @@ rsaz_512_sqr: mulxq 56(%rsi),%rdi,%r10 adoxq %rdi,%r9 adcxq %rbp,%r10 + mulxq %rdx,%rax,%rdi adoxq %rbp,%r10 + movq 32(%rsi),%rdx -.byte 0x66 - movq %r15,%rbx - shldq $1,%r14,%r15 - shldq $1,%rcx,%r14 + xorq %rbx,%rbx + adoxq %r13,%r13 - xorl %ebp,%ebp - mulxq %rdx,%rax,%rdx + adcxq %rcx,%rax + adoxq %r14,%r14 adcxq %rax,%r13 - adcxq %rdx,%r14 - movq 32(%rsi),%rdx - adcxq %rbp,%r15 + adoxq %rbp,%rbx + adcxq %rdi,%r14 + adcxq %rbp,%rbx movq %r13,48(%rsp) movq %r14,56(%rsp) -.byte 0xc4,0x62,0xc3,0xf6,0x9e,0x28,0x00,0x00,0x00 + mulxq 40(%rsi),%rdi,%r11 adoxq %rdi,%r8 adcxq %r11,%r9 @@ -564,18 +572,19 @@ rsaz_512_sqr: mulxq 56(%rsi),%rdi,%r11 adoxq %rdi,%r10 adcxq %rbp,%r11 + mulxq %rdx,%rax,%rdi + movq 40(%rsi),%rdx adoxq %rbp,%r11 - movq %r9,%rcx - shldq $1,%r8,%r9 - shldq $1,%rbx,%r8 + xorq %rcx,%rcx + adoxq %r15,%r15 - xorl %ebp,%ebp - mulxq %rdx,%rax,%rdx + adcxq %rbx,%rax + adoxq %r8,%r8 adcxq %rax,%r15 - adcxq %rdx,%r8 - movq 40(%rsi),%rdx - adcxq %rbp,%r9 + adoxq %rbp,%rcx + adcxq %rdi,%r8 + adcxq %rbp,%rcx movq %r15,64(%rsp) movq %r8,72(%rsp) @@ -588,18 +597,19 @@ rsaz_512_sqr: .byte 0xc4,0x62,0xc3,0xf6,0xa6,0x38,0x00,0x00,0x00 adoxq %rdi,%r11 adcxq %rbp,%r12 + mulxq %rdx,%rax,%rdi adoxq %rbp,%r12 + movq 48(%rsi),%rdx - movq %r11,%rbx - shldq $1,%r10,%r11 - shldq $1,%rcx,%r10 + xorq %rbx,%rbx + adoxq %r9,%r9 - xorl %ebp,%ebp - mulxq %rdx,%rax,%rdx + adcxq %rcx,%rax + adoxq %r10,%r10 adcxq %rax,%r9 - adcxq %rdx,%r10 - movq 48(%rsi),%rdx - adcxq %rbp,%r11 + adcxq %rdi,%r10 + adoxq %rbp,%rbx + adcxq %rbp,%rbx movq %r9,80(%rsp) movq %r10,88(%rsp) @@ -609,31 +619,31 @@ rsaz_512_sqr: adoxq %rax,%r12 adoxq %rbp,%r13 - xorq %r14,%r14 - shldq $1,%r13,%r14 - shldq $1,%r12,%r13 - shldq $1,%rbx,%r12 + mulxq %rdx,%rax,%rdi + xorq %rcx,%rcx + movq 56(%rsi),%rdx + adoxq %r11,%r11 - xorl %ebp,%ebp - mulxq %rdx,%rax,%rdx + adcxq %rbx,%rax + adoxq %r12,%r12 adcxq %rax,%r11 - adcxq %rdx,%r12 - movq 56(%rsi),%rdx - adcxq %rbp,%r13 + adoxq %rbp,%rcx + adcxq %rdi,%r12 + adcxq %rbp,%rcx .byte 0x4c,0x89,0x9c,0x24,0x60,0x00,0x00,0x00 .byte 0x4c,0x89,0xa4,0x24,0x68,0x00,0x00,0x00 mulxq %rdx,%rax,%rdx - adoxq %rax,%r13 - adoxq %rbp,%rdx + xorq %rbx,%rbx + adoxq %r13,%r13 -.byte 0x66 - addq %rdx,%r14 + adcxq %rcx,%rax + adoxq %rbp,%rbx + adcxq %r13,%rax + adcxq %rdx,%rbx - movq %r13,112(%rsp) - movq %r14,120(%rsp) .byte 102,72,15,126,199 .byte 102,72,15,126,205 @@ -647,6 +657,9 @@ rsaz_512_sqr: movq 48(%rsp),%r14 movq 56(%rsp),%r15 + movq %rax,112(%rsp) + movq %rbx,120(%rsp) + call __rsaz_512_reducex 
addq 64(%rsp),%r8 @@ -1440,6 +1453,7 @@ rsaz_512_mul_by_one: .type __rsaz_512_reduce,@function .align 32 __rsaz_512_reduce: +.cfi_startproc movq %r8,%rbx imulq 128+8(%rsp),%rbx movq 0(%rbp),%rax @@ -1519,10 +1533,12 @@ __rsaz_512_reduce: jne .Lreduction_loop .byte 0xf3,0xc3 +.cfi_endproc .size __rsaz_512_reduce,.-__rsaz_512_reduce .type __rsaz_512_reducex,@function .align 32 __rsaz_512_reducex: +.cfi_startproc imulq %r8,%rdx xorq %rsi,%rsi @@ -1575,10 +1591,12 @@ __rsaz_512_reducex: jne .Lreduction_loopx .byte 0xf3,0xc3 +.cfi_endproc .size __rsaz_512_reducex,.-__rsaz_512_reducex .type __rsaz_512_subtract,@function .align 32 __rsaz_512_subtract: +.cfi_startproc movq %r8,(%rdi) movq %r9,8(%rdi) movq %r10,16(%rdi) @@ -1632,10 +1650,12 @@ __rsaz_512_subtract: movq %r15,56(%rdi) .byte 0xf3,0xc3 +.cfi_endproc .size __rsaz_512_subtract,.-__rsaz_512_subtract .type __rsaz_512_mul,@function .align 32 __rsaz_512_mul: +.cfi_startproc leaq 8(%rsp),%rdi movq (%rsi),%rax @@ -1774,10 +1794,12 @@ __rsaz_512_mul: movq %r15,56(%rdi) .byte 0xf3,0xc3 +.cfi_endproc .size __rsaz_512_mul,.-__rsaz_512_mul .type __rsaz_512_mulx,@function .align 32 __rsaz_512_mulx: +.cfi_startproc mulxq (%rsi),%rbx,%r8 movq $-6,%rcx @@ -1894,11 +1916,13 @@ __rsaz_512_mulx: movq %r15,8+64+56(%rsp) .byte 0xf3,0xc3 +.cfi_endproc .size __rsaz_512_mulx,.-__rsaz_512_mulx .globl rsaz_512_scatter4 .type rsaz_512_scatter4,@function .align 16 rsaz_512_scatter4: +.cfi_startproc leaq (%rdi,%rdx,8),%rdi movl $8,%r9d jmp .Loop_scatter @@ -1911,12 +1935,14 @@ rsaz_512_scatter4: decl %r9d jnz .Loop_scatter .byte 0xf3,0xc3 +.cfi_endproc .size rsaz_512_scatter4,.-rsaz_512_scatter4 .globl rsaz_512_gather4 .type rsaz_512_gather4,@function .align 16 rsaz_512_gather4: +.cfi_startproc movd %edx,%xmm8 movdqa .Linc+16(%rip),%xmm1 movdqa .Linc(%rip),%xmm0 @@ -1980,6 +2006,7 @@ rsaz_512_gather4: jnz .Loop_gather .byte 0xf3,0xc3 .LSEH_end_rsaz_512_gather4: +.cfi_endproc .size rsaz_512_gather4,.-rsaz_512_gather4 .align 64 diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/x86_64-mont5.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/x86_64-mont5.s index df4b6610a0..40a60a3c8f 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/x86_64-mont5.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/x86_64-mont5.s @@ -550,6 +550,7 @@ bn_mul4x_mont_gather5: .type mul4x_internal,@function .align 32 mul4x_internal: +.cfi_startproc shlq $5,%r9 movd 8(%rax),%xmm5 leaq .Linc(%rip),%rax @@ -1071,6 +1072,7 @@ mul4x_internal: movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp .Lsqr4x_sub_entry +.cfi_endproc .size mul4x_internal,.-mul4x_internal .globl bn_power5 .type bn_power5,@function @@ -1213,6 +1215,7 @@ bn_power5: .align 32 bn_sqr8x_internal: __bn_sqr8x_internal: +.cfi_startproc @@ -1987,10 +1990,12 @@ __bn_sqr8x_reduction: cmpq %rdx,%rdi jb .L8x_reduction_loop .byte 0xf3,0xc3 +.cfi_endproc .size bn_sqr8x_internal,.-bn_sqr8x_internal .type __bn_post4x_internal,@function .align 32 __bn_post4x_internal: +.cfi_startproc movq 0(%rbp),%r12 leaq (%rdi,%r9,1),%rbx movq %r9,%rcx @@ -2041,15 +2046,18 @@ __bn_post4x_internal: movq %r9,%r10 negq %r9 .byte 0xf3,0xc3 +.cfi_endproc .size __bn_post4x_internal,.-__bn_post4x_internal .globl bn_from_montgomery .type bn_from_montgomery,@function .align 32 bn_from_montgomery: +.cfi_startproc testl $7,%r9d jz bn_from_mont8x xorl %eax,%eax .byte 0xf3,0xc3 +.cfi_endproc .size bn_from_montgomery,.-bn_from_montgomery .type bn_from_mont8x,@function @@ -2333,6 +2341,7 @@ bn_mulx4x_mont_gather5: .type 
mulx4x_internal,@function .align 32 mulx4x_internal: +.cfi_startproc movq %r9,8(%rsp) movq %r9,%r10 negq %r9 @@ -2751,6 +2760,7 @@ mulx4x_internal: movq 16(%rbp),%r14 movq 24(%rbp),%r15 jmp .Lsqrx4x_sub_entry +.cfi_endproc .size mulx4x_internal,.-mulx4x_internal .type bn_powerx5,@function .align 32 @@ -3509,6 +3519,7 @@ __bn_sqrx8x_reduction: .size bn_sqrx8x_internal,.-bn_sqrx8x_internal .align 32 __bn_postx4x_internal: +.cfi_startproc movq 0(%rbp),%r12 movq %rcx,%r10 movq %rcx,%r9 @@ -3556,11 +3567,13 @@ __bn_postx4x_internal: negq %r9 .byte 0xf3,0xc3 +.cfi_endproc .size __bn_postx4x_internal,.-__bn_postx4x_internal .globl bn_get_bits5 .type bn_get_bits5,@function .align 16 bn_get_bits5: +.cfi_startproc leaq 0(%rdi),%r10 leaq 1(%rdi),%r11 movl %esi,%ecx @@ -3574,12 +3587,14 @@ bn_get_bits5: shrl %cl,%eax andl $31,%eax .byte 0xf3,0xc3 +.cfi_endproc .size bn_get_bits5,.-bn_get_bits5 .globl bn_scatter5 .type bn_scatter5,@function .align 16 bn_scatter5: +.cfi_startproc cmpl $0,%esi jz .Lscatter_epilogue leaq (%rdx,%rcx,8),%rdx @@ -3592,6 +3607,7 @@ bn_scatter5: jnz .Lscatter .Lscatter_epilogue: .byte 0xf3,0xc3 +.cfi_endproc .size bn_scatter5,.-bn_scatter5 .globl bn_gather5 @@ -3599,6 +3615,7 @@ bn_scatter5: .align 32 bn_gather5: .LSEH_begin_bn_gather5: +.cfi_startproc .byte 0x4c,0x8d,0x14,0x24 .byte 0x48,0x81,0xec,0x08,0x01,0x00,0x00 @@ -3756,6 +3773,7 @@ bn_gather5: leaq (%r10),%rsp .byte 0xf3,0xc3 .LSEH_end_bn_gather5: +.cfi_endproc .size bn_gather5,.-bn_gather5 .align 64 .Linc: diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h index 17e8991485..83d6c611ca 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-x86_64" -#define DATE "built on: Fri Sep 13 16:00:16 2019 UTC" +#define DATE "built on: Wed Mar 18 21:07:06 2020 UTC" /* * Generate compiler_flags as an array of individual characters. 
This is a @@ -37,10 +37,10 @@ static const char compiler_flags[] = { ' ','-','D','S','H','A','5','1','2','_','A','S','M',' ','-','D', 'K','E','C','C','A','K','1','6','0','0','_','A','S','M',' ','-', 'D','R','C','4','_','A','S','M',' ','-','D','M','D','5','_','A', - 'S','M',' ','-','D','V','P','A','E','S','_','A','S','M',' ','-', - 'D','G','H','A','S','H','_','A','S','M',' ','-','D','E','C','P', - '_','N','I','S','T','Z','2','5','6','_','A','S','M',' ','-','D', - 'X','2','5','5','1','9','_','A','S','M',' ','-','D','P','O','L', - 'Y','1','3','0','5','_','A','S','M',' ','-','D','N','D','E','B', - 'U','G','\0' + 'S','M',' ','-','D','A','E','S','N','I','_','A','S','M',' ','-', + 'D','V','P','A','E','S','_','A','S','M',' ','-','D','G','H','A', + 'S','H','_','A','S','M',' ','-','D','E','C','P','_','N','I','S', + 'T','Z','2','5','6','_','A','S','M',' ','-','D','X','2','5','5', + '1','9','_','A','S','M',' ','-','D','P','O','L','Y','1','3','0', + '5','_','A','S','M',' ','-','D','N','D','E','B','U','G','\0' }; diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/camellia/cmll-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/camellia/cmll-x86_64.s index 405566b01c..eeb20dd229 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/camellia/cmll-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/camellia/cmll-x86_64.s @@ -5,11 +5,13 @@ .type Camellia_EncryptBlock,@function .align 16 Camellia_EncryptBlock: +.cfi_startproc movl $128,%eax subl %edi,%eax movl $3,%edi adcl $0,%edi jmp .Lenc_rounds +.cfi_endproc .size Camellia_EncryptBlock,.-Camellia_EncryptBlock .globl Camellia_EncryptBlock_Rounds @@ -83,6 +85,7 @@ Camellia_EncryptBlock_Rounds: .type _x86_64_Camellia_encrypt,@function .align 16 _x86_64_Camellia_encrypt: +.cfi_startproc xorl 0(%r14),%r9d xorl 4(%r14),%r8d xorl 8(%r14),%r11d @@ -285,6 +288,7 @@ _x86_64_Camellia_encrypt: movl %edx,%r11d .byte 0xf3,0xc3 +.cfi_endproc .size _x86_64_Camellia_encrypt,.-_x86_64_Camellia_encrypt @@ -292,11 +296,13 @@ _x86_64_Camellia_encrypt: .type Camellia_DecryptBlock,@function .align 16 Camellia_DecryptBlock: +.cfi_startproc movl $128,%eax subl %edi,%eax movl $3,%edi adcl $0,%edi jmp .Ldec_rounds +.cfi_endproc .size Camellia_DecryptBlock,.-Camellia_DecryptBlock .globl Camellia_DecryptBlock_Rounds @@ -370,6 +376,7 @@ Camellia_DecryptBlock_Rounds: .type _x86_64_Camellia_decrypt,@function .align 16 _x86_64_Camellia_decrypt: +.cfi_startproc xorl 0(%r14),%r9d xorl 4(%r14),%r8d xorl 8(%r14),%r11d @@ -573,6 +580,7 @@ _x86_64_Camellia_decrypt: movl %ebx,%r11d .byte 0xf3,0xc3 +.cfi_endproc .size _x86_64_Camellia_decrypt,.-_x86_64_Camellia_decrypt .globl Camellia_Ekeygen .type Camellia_Ekeygen,@function diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/ec/ecp_nistz256-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/ec/ecp_nistz256-x86_64.s index 62b9ac6616..5c9e405041 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/ec/ecp_nistz256-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/ec/ecp_nistz256-x86_64.s @@ -3874,10 +3874,12 @@ ecp_nistz256_ord_sqr_montx: .type ecp_nistz256_to_mont,@function .align 32 ecp_nistz256_to_mont: +.cfi_startproc movl $0x80100,%ecx andl OPENSSL_ia32cap_P+8(%rip),%ecx leaq .LRR(%rip),%rdx jmp .Lmul_mont +.cfi_endproc .size ecp_nistz256_to_mont,.-ecp_nistz256_to_mont @@ -4821,6 +4823,7 @@ ecp_nistz256_from_mont: .type ecp_nistz256_scatter_w5,@function .align 32 ecp_nistz256_scatter_w5: +.cfi_startproc leal -3(%rdx,%rdx,2),%edx movdqa 0(%rsi),%xmm0 shll 
$5,%edx @@ -4837,6 +4840,7 @@ ecp_nistz256_scatter_w5: movdqa %xmm5,80(%rdi,%rdx,1) .byte 0xf3,0xc3 +.cfi_endproc .size ecp_nistz256_scatter_w5,.-ecp_nistz256_scatter_w5 @@ -4910,6 +4914,7 @@ ecp_nistz256_gather_w5: .type ecp_nistz256_scatter_w7,@function .align 32 ecp_nistz256_scatter_w7: +.cfi_startproc movdqu 0(%rsi),%xmm0 shll $6,%edx movdqu 16(%rsi),%xmm1 @@ -4921,6 +4926,7 @@ ecp_nistz256_scatter_w7: movdqa %xmm3,48(%rdi,%rdx,1) .byte 0xf3,0xc3 +.cfi_endproc .size ecp_nistz256_scatter_w7,.-ecp_nistz256_scatter_w7 @@ -5655,26 +5661,16 @@ ecp_nistz256_point_add: orq %r8,%r12 orq %r9,%r12 -.byte 0x3e - jnz .Ladd_proceedq .byte 102,73,15,126,208 .byte 102,73,15,126,217 - testq %r8,%r8 - jnz .Ladd_proceedq - testq %r9,%r9 - jz .Ladd_doubleq -.byte 102,72,15,126,199 - pxor %xmm0,%xmm0 - movdqu %xmm0,0(%rdi) - movdqu %xmm0,16(%rdi) - movdqu %xmm0,32(%rdi) - movdqu %xmm0,48(%rdi) - movdqu %xmm0,64(%rdi) - movdqu %xmm0,80(%rdi) - jmp .Ladd_doneq + orq %r8,%r12 + orq %r9,%r12 + + +.byte 0x3e + jnz .Ladd_proceedq -.align 32 .Ladd_doubleq: .byte 102,72,15,126,206 .byte 102,72,15,126,199 @@ -6774,26 +6770,16 @@ ecp_nistz256_point_addx: orq %r8,%r12 orq %r9,%r12 -.byte 0x3e - jnz .Ladd_proceedx .byte 102,73,15,126,208 .byte 102,73,15,126,217 - testq %r8,%r8 - jnz .Ladd_proceedx - testq %r9,%r9 - jz .Ladd_doublex -.byte 102,72,15,126,199 - pxor %xmm0,%xmm0 - movdqu %xmm0,0(%rdi) - movdqu %xmm0,16(%rdi) - movdqu %xmm0,32(%rdi) - movdqu %xmm0,48(%rdi) - movdqu %xmm0,64(%rdi) - movdqu %xmm0,80(%rdi) - jmp .Ladd_donex + orq %r8,%r12 + orq %r9,%r12 + + +.byte 0x3e + jnz .Ladd_proceedx -.align 32 .Ladd_doublex: .byte 102,72,15,126,206 .byte 102,72,15,126,199 diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/ec/x25519-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/ec/x25519-x86_64.s index 2a18eaee28..1788e568cd 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/ec/x25519-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/ec/x25519-x86_64.s @@ -400,12 +400,14 @@ x25519_fe51_mul121666: .type x25519_fe64_eligible,@function .align 32 x25519_fe64_eligible: +.cfi_startproc movl OPENSSL_ia32cap_P+8(%rip),%ecx xorl %eax,%eax andl $0x80100,%ecx cmpl $0x80100,%ecx cmovel %ecx,%eax .byte 0xf3,0xc3 +.cfi_endproc .size x25519_fe64_eligible,.-x25519_fe64_eligible .globl x25519_fe64_mul @@ -648,6 +650,7 @@ x25519_fe64_sqr: .align 32 x25519_fe64_mul121666: .Lfe64_mul121666_body: +.cfi_startproc movl $121666,%edx mulxq 0(%rsi),%r8,%rcx mulxq 8(%rsi),%r9,%rax @@ -676,6 +679,7 @@ x25519_fe64_mul121666: .Lfe64_mul121666_epilogue: .byte 0xf3,0xc3 +.cfi_endproc .size x25519_fe64_mul121666,.-x25519_fe64_mul121666 .globl x25519_fe64_add @@ -683,6 +687,7 @@ x25519_fe64_mul121666: .align 32 x25519_fe64_add: .Lfe64_add_body: +.cfi_startproc movq 0(%rsi),%r8 movq 8(%rsi),%r9 movq 16(%rsi),%r10 @@ -711,6 +716,7 @@ x25519_fe64_add: .Lfe64_add_epilogue: .byte 0xf3,0xc3 +.cfi_endproc .size x25519_fe64_add,.-x25519_fe64_add .globl x25519_fe64_sub @@ -718,6 +724,7 @@ x25519_fe64_add: .align 32 x25519_fe64_sub: .Lfe64_sub_body: +.cfi_startproc movq 0(%rsi),%r8 movq 8(%rsi),%r9 movq 16(%rsi),%r10 @@ -746,6 +753,7 @@ x25519_fe64_sub: .Lfe64_sub_epilogue: .byte 0xf3,0xc3 +.cfi_endproc .size x25519_fe64_sub,.-x25519_fe64_sub .globl x25519_fe64_tobytes @@ -753,6 +761,7 @@ x25519_fe64_sub: .align 32 x25519_fe64_tobytes: .Lfe64_to_body: +.cfi_startproc movq 0(%rsi),%r8 movq 8(%rsi),%r9 movq 16(%rsi),%r10 @@ -788,5 +797,6 @@ x25519_fe64_tobytes: .Lfe64_to_epilogue: .byte 0xf3,0xc3 +.cfi_endproc 
.size x25519_fe64_tobytes,.-x25519_fe64_tobytes .byte 88,50,53,53,49,57,32,112,114,105,109,105,116,105,118,101,115,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/include/internal/bn_conf.h b/deps/openssl/config/archs/linux-x86_64/asm/crypto/include/internal/bn_conf.h index 34bd8b78b4..5312ef5a7a 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/include/internal/bn_conf.h +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/include/internal/bn_conf.h @@ -1,5 +1,5 @@ /* WARNING: do not edit! */ -/* Generated by Makefile from crypto/include/internal/bn_conf.h.in */ +/* Generated by Makefile from include/crypto/bn_conf.h.in */ /* * Copyright 2016 The OpenSSL Project Authors. All Rights Reserved. * @@ -9,8 +9,8 @@ * https://www.openssl.org/source/license.html */ -#ifndef HEADER_BN_CONF_H -# define HEADER_BN_CONF_H +#ifndef OSSL_CRYPTO_BN_CONF_H +# define OSSL_CRYPTO_BN_CONF_H /* * The contents of this file are not used in the UEFI build, as diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/include/internal/dso_conf.h b/deps/openssl/config/archs/linux-x86_64/asm/crypto/include/internal/dso_conf.h index 252266ec2a..4b1167c3d8 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/include/internal/dso_conf.h +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/include/internal/dso_conf.h @@ -1,5 +1,5 @@ /* WARNING: do not edit! */ -/* Generated by Makefile from crypto/include/internal/dso_conf.h.in */ +/* Generated by Makefile from include/crypto/dso_conf.h.in */ /* * Copyright 2016-2019 The OpenSSL Project Authors. All Rights Reserved. * @@ -9,8 +9,8 @@ * https://www.openssl.org/source/license.html */ -#ifndef HEADER_DSO_CONF_H -# define HEADER_DSO_CONF_H +#ifndef OSSL_CRYPTO_DSO_CONF_H +# define OSSL_CRYPTO_DSO_CONF_H # define DSO_DLFCN # define HAVE_DLFCN_H # define DSO_EXTENSION ".so" diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/modes/aesni-gcm-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/modes/aesni-gcm-x86_64.s index 6a7a9577c7..01d89630a4 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/modes/aesni-gcm-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/modes/aesni-gcm-x86_64.s @@ -3,6 +3,7 @@ .type _aesni_ctr32_ghash_6x,@function .align 32 _aesni_ctr32_ghash_6x: +.cfi_startproc vmovdqu 32(%r11),%xmm2 subq $6,%rdx vpxor %xmm4,%xmm4,%xmm4 @@ -310,6 +311,7 @@ _aesni_ctr32_ghash_6x: vpxor %xmm4,%xmm8,%xmm8 .byte 0xf3,0xc3 +.cfi_endproc .size _aesni_ctr32_ghash_6x,.-_aesni_ctr32_ghash_6x .globl aesni_gcm_decrypt .type aesni_gcm_decrypt,@function @@ -416,6 +418,7 @@ aesni_gcm_decrypt: .type _aesni_ctr32_6x,@function .align 32 _aesni_ctr32_6x: +.cfi_startproc vmovdqu 0-128(%rcx),%xmm4 vmovdqu 32(%r11),%xmm2 leaq -1(%rbp),%r13 @@ -502,6 +505,7 @@ _aesni_ctr32_6x: vpshufb %xmm0,%xmm1,%xmm1 vpxor %xmm4,%xmm14,%xmm14 jmp .Loop_ctr32 +.cfi_endproc .size _aesni_ctr32_6x,.-_aesni_ctr32_6x .globl aesni_gcm_encrypt diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/poly1305/poly1305-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/poly1305/poly1305-x86_64.s index deb4f74bfb..987a65aab3 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/poly1305/poly1305-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/poly1305/poly1305-x86_64.s @@ -12,6 +12,7 @@ .type poly1305_init,@function .align 
32 poly1305_init: +.cfi_startproc xorq %rax,%rax movq %rax,0(%rdi) movq %rax,8(%rdi) @@ -47,6 +48,7 @@ poly1305_init: movl $1,%eax .Lno_key: .byte 0xf3,0xc3 +.cfi_endproc .size poly1305_init,.-poly1305_init .type poly1305_blocks,@function @@ -167,6 +169,7 @@ poly1305_blocks: .type poly1305_emit,@function .align 32 poly1305_emit: +.cfi_startproc .Lemit: movq 0(%rdi),%r8 movq 8(%rdi),%r9 @@ -187,10 +190,12 @@ poly1305_emit: movq %rcx,8(%rsi) .byte 0xf3,0xc3 +.cfi_endproc .size poly1305_emit,.-poly1305_emit .type __poly1305_block,@function .align 32 __poly1305_block: +.cfi_startproc mulq %r14 movq %rax,%r9 movq %r11,%rax @@ -230,11 +235,13 @@ __poly1305_block: adcq $0,%rbx adcq $0,%rbp .byte 0xf3,0xc3 +.cfi_endproc .size __poly1305_block,.-__poly1305_block .type __poly1305_init_avx,@function .align 32 __poly1305_init_avx: +.cfi_startproc movq %r11,%r14 movq %r12,%rbx xorq %rbp,%rbp @@ -392,6 +399,7 @@ __poly1305_init_avx: leaq -48-64(%rdi),%rdi .byte 0xf3,0xc3 +.cfi_endproc .size __poly1305_init_avx,.-__poly1305_init_avx .type poly1305_blocks_avx,@function @@ -1232,6 +1240,7 @@ poly1305_blocks_avx: .type poly1305_emit_avx,@function .align 32 poly1305_emit_avx: +.cfi_startproc cmpl $0,20(%rdi) je .Lemit @@ -1282,6 +1291,7 @@ poly1305_emit_avx: movq %rcx,8(%rsi) .byte 0xf3,0xc3 +.cfi_endproc .size poly1305_emit_avx,.-poly1305_emit_avx .type poly1305_blocks_avx2,@function .align 32 @@ -2478,6 +2488,7 @@ poly1305_blocks_avx512: .type poly1305_init_base2_44,@function .align 32 poly1305_init_base2_44: +.cfi_startproc xorq %rax,%rax movq %rax,0(%rdi) movq %rax,8(%rdi) @@ -2511,10 +2522,12 @@ poly1305_init_base2_44: movq %r11,8(%rdx) movl $1,%eax .byte 0xf3,0xc3 +.cfi_endproc .size poly1305_init_base2_44,.-poly1305_init_base2_44 .type poly1305_blocks_vpmadd52,@function .align 32 poly1305_blocks_vpmadd52: +.cfi_startproc shrq $4,%rdx jz .Lno_data_vpmadd52 @@ -2621,10 +2634,12 @@ poly1305_blocks_vpmadd52: .Lno_data_vpmadd52: .byte 0xf3,0xc3 +.cfi_endproc .size poly1305_blocks_vpmadd52,.-poly1305_blocks_vpmadd52 .type poly1305_blocks_vpmadd52_4x,@function .align 32 poly1305_blocks_vpmadd52_4x: +.cfi_startproc shrq $4,%rdx jz .Lno_data_vpmadd52_4x @@ -3049,10 +3064,12 @@ poly1305_blocks_vpmadd52_4x: .Lno_data_vpmadd52_4x: .byte 0xf3,0xc3 +.cfi_endproc .size poly1305_blocks_vpmadd52_4x,.-poly1305_blocks_vpmadd52_4x .type poly1305_blocks_vpmadd52_8x,@function .align 32 poly1305_blocks_vpmadd52_8x: +.cfi_startproc shrq $4,%rdx jz .Lno_data_vpmadd52_8x @@ -3393,10 +3410,12 @@ poly1305_blocks_vpmadd52_8x: .Lno_data_vpmadd52_8x: .byte 0xf3,0xc3 +.cfi_endproc .size poly1305_blocks_vpmadd52_8x,.-poly1305_blocks_vpmadd52_8x .type poly1305_emit_base2_44,@function .align 32 poly1305_emit_base2_44: +.cfi_startproc movq 0(%rdi),%r8 movq 8(%rdi),%r9 movq 16(%rdi),%r10 @@ -3427,6 +3446,7 @@ poly1305_emit_base2_44: movq %rcx,8(%rsi) .byte 0xf3,0xc3 +.cfi_endproc .size poly1305_emit_base2_44,.-poly1305_emit_base2_44 .align 64 .Lconst: @@ -3465,6 +3485,7 @@ poly1305_emit_base2_44: .type xor128_encrypt_n_pad,@function .align 16 xor128_encrypt_n_pad: +.cfi_startproc subq %rdx,%rsi subq %rdx,%rdi movq %rcx,%r10 @@ -3506,12 +3527,14 @@ xor128_encrypt_n_pad: .Ldone_enc: movq %rdx,%rax .byte 0xf3,0xc3 +.cfi_endproc .size xor128_encrypt_n_pad,.-xor128_encrypt_n_pad .globl xor128_decrypt_n_pad .type xor128_decrypt_n_pad,@function .align 16 xor128_decrypt_n_pad: +.cfi_startproc subq %rdx,%rsi subq %rdx,%rdi movq %rcx,%r10 @@ -3557,4 +3580,5 @@ xor128_decrypt_n_pad: .Ldone_dec: movq %rdx,%rax .byte 0xf3,0xc3 +.cfi_endproc .size 
xor128_decrypt_n_pad,.-xor128_decrypt_n_pad diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/rc4/rc4-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/rc4/rc4-x86_64.s index fba70351d4..b97c757550 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/rc4/rc4-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/rc4/rc4-x86_64.s @@ -4,11 +4,12 @@ .globl RC4 .type RC4,@function .align 16 -RC4: orq %rsi,%rsi +RC4: +.cfi_startproc + orq %rsi,%rsi jne .Lentry .byte 0xf3,0xc3 .Lentry: -.cfi_startproc pushq %rbx .cfi_adjust_cfa_offset 8 .cfi_offset %rbx,-16 @@ -533,6 +534,7 @@ RC4: orq %rsi,%rsi .type RC4_set_key,@function .align 16 RC4_set_key: +.cfi_startproc leaq 8(%rdi),%rdi leaq (%rdx,%rsi,1),%rdx negq %rsi @@ -599,12 +601,14 @@ RC4_set_key: movl %eax,-8(%rdi) movl %eax,-4(%rdi) .byte 0xf3,0xc3 +.cfi_endproc .size RC4_set_key,.-RC4_set_key .globl RC4_options .type RC4_options,@function .align 16 RC4_options: +.cfi_startproc leaq .Lopts(%rip),%rax movl OPENSSL_ia32cap_P(%rip),%edx btl $20,%edx @@ -617,6 +621,7 @@ RC4_options: addq $12,%rax .Ldone: .byte 0xf3,0xc3 +.cfi_endproc .align 64 .Lopts: .byte 114,99,52,40,56,120,44,105,110,116,41,0 diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/keccak1600-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/keccak1600-x86_64.s index e511f25035..09617d014b 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/keccak1600-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/keccak1600-x86_64.s @@ -3,6 +3,7 @@ .type __KeccakF1600,@function .align 32 __KeccakF1600: +.cfi_startproc movq 60(%rdi),%rax movq 68(%rdi),%rbx movq 76(%rdi),%rcx @@ -255,6 +256,7 @@ __KeccakF1600: leaq -192(%r15),%r15 .byte 0xf3,0xc3 +.cfi_endproc .size __KeccakF1600,.-__KeccakF1600 .type KeccakF1600,@function diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha1-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha1-x86_64.s index e436521a04..98541727e5 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha1-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha1-x86_64.s @@ -1421,8 +1421,8 @@ _shaext_shortcut: pshufd $27,%xmm1,%xmm1 movdqu %xmm0,(%rdi) movd %xmm1,16(%rdi) -.cfi_endproc .byte 0xf3,0xc3 +.cfi_endproc .size sha1_block_data_order_shaext,.-sha1_block_data_order_shaext .type sha1_block_data_order_ssse3,@function .align 16 diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha256-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha256-x86_64.s index 42b24df18e..9357385da3 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha256-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha256-x86_64.s @@ -1775,6 +1775,7 @@ K256: .align 64 sha256_block_data_order_shaext: _shaext_shortcut: +.cfi_startproc leaq K256+128(%rip),%rcx movdqu (%rdi),%xmm1 movdqu 16(%rdi),%xmm2 @@ -1977,6 +1978,7 @@ _shaext_shortcut: movdqu %xmm1,(%rdi) movdqu %xmm2,16(%rdi) .byte 0xf3,0xc3 +.cfi_endproc .size sha256_block_data_order_shaext,.-sha256_block_data_order_shaext .type sha256_block_data_order_ssse3,@function .align 64 @@ -4238,7 +4240,15 @@ sha256_block_data_order_avx2: vmovdqa %ymm4,0(%rsp) xorl %r14d,%r14d vmovdqa %ymm5,32(%rsp) + + movq 88(%rsp),%rdi +.cfi_def_cfa %rdi,8 leaq -64(%rsp),%rsp + + + + movq %rdi,-8(%rsp) +.cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 movl %ebx,%edi vmovdqa %ymm6,0(%rsp) xorl %ecx,%edi @@ -4250,6 +4260,12 @@ 
sha256_block_data_order_avx2: .align 16 .Lavx2_00_47: leaq -64(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x38,0x06,0x23,0x08 + + pushq 64-8(%rsp) +.cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08 + leaq 8(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 vpalignr $4,%ymm0,%ymm1,%ymm4 addl 0+128(%rsp),%r11d andl %r8d,%r12d @@ -4505,6 +4521,12 @@ sha256_block_data_order_avx2: movl %r9d,%r12d vmovdqa %ymm6,32(%rsp) leaq -64(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x38,0x06,0x23,0x08 + + pushq 64-8(%rsp) +.cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08 + leaq 8(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 vpalignr $4,%ymm2,%ymm3,%ymm4 addl 0+128(%rsp),%r11d andl %r8d,%r12d @@ -5380,6 +5402,8 @@ sha256_block_data_order_avx2: leaq 448(%rsp),%rsp +.cfi_escape 0x0f,0x06,0x77,0xd8,0x00,0x06,0x23,0x08 + addl 0(%rdi),%eax addl 4(%rdi),%ebx addl 8(%rdi),%ecx @@ -5405,9 +5429,11 @@ sha256_block_data_order_avx2: jbe .Loop_avx2 leaq (%rsp),%rbp + +.cfi_escape 0x0f,0x06,0x76,0xd8,0x00,0x06,0x23,0x08 + .Ldone_avx2: - leaq (%rbp),%rsp - movq 88(%rsp),%rsi + movq 88(%rbp),%rsi .cfi_def_cfa %rsi,8 vzeroupper movq -48(%rsi),%r15 diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha512-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha512-x86_64.s index 5931a2a932..939f1ca71c 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha512-x86_64.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/sha/sha512-x86_64.s @@ -4165,7 +4165,15 @@ sha512_block_data_order_avx2: vmovdqa %ymm10,64(%rsp) vpaddq 64(%rbp),%ymm6,%ymm10 vmovdqa %ymm11,96(%rsp) + + movq 152(%rsp),%rdi +.cfi_def_cfa %rdi,8 leaq -128(%rsp),%rsp + + + + movq %rdi,-8(%rsp) +.cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 vpaddq 96(%rbp),%ymm7,%ymm11 vmovdqa %ymm8,0(%rsp) xorq %r14,%r14 @@ -4181,6 +4189,12 @@ sha512_block_data_order_avx2: .align 16 .Lavx2_00_47: leaq -128(%rsp),%rsp +.cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08 + + pushq 128-8(%rsp) +.cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08 + leaq 8(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 vpalignr $8,%ymm0,%ymm1,%ymm8 addq 0+256(%rsp),%r11 andq %r8,%r12 @@ -4474,6 +4488,12 @@ sha512_block_data_order_avx2: movq %r9,%r12 vmovdqa %ymm10,96(%rsp) leaq -128(%rsp),%rsp +.cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08 + + pushq 128-8(%rsp) +.cfi_escape 0x0f,0x05,0x77,0x00,0x06,0x23,0x08 + leaq 8(%rsp),%rsp +.cfi_escape 0x0f,0x05,0x77,0x78,0x06,0x23,0x08 vpalignr $8,%ymm4,%ymm5,%ymm8 addq 0+256(%rsp),%r11 andq %r8,%r12 @@ -5387,6 +5407,8 @@ sha512_block_data_order_avx2: leaq 1152(%rsp),%rsp +.cfi_escape 0x0f,0x06,0x77,0x98,0x01,0x06,0x23,0x08 + addq 0(%rdi),%rax addq 8(%rdi),%rbx addq 16(%rdi),%rcx @@ -5412,9 +5434,11 @@ sha512_block_data_order_avx2: jbe .Loop_avx2 leaq (%rsp),%rbp + +.cfi_escape 0x0f,0x06,0x76,0x98,0x01,0x06,0x23,0x08 + .Ldone_avx2: - leaq (%rbp),%rsp - movq 152(%rsp),%rsi + movq 152(%rbp),%rsi .cfi_def_cfa %rsi,8 vzeroupper movq -48(%rsi),%r15 diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/x86_64cpuid.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/x86_64cpuid.s index fd17eaaba4..9268ce8c9a 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/x86_64cpuid.s +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/x86_64cpuid.s @@ -12,6 +12,7 @@ .type OPENSSL_atomic_add,@function .align 16 OPENSSL_atomic_add: +.cfi_startproc movl (%rdi),%eax .Lspin: leaq (%rsi,%rax,1),%r8 .byte 0xf0 @@ -20,16 +21,19 @@ OPENSSL_atomic_add: movl %r8d,%eax .byte 0x48,0x98 .byte 
0xf3,0xc3 +.cfi_endproc .size OPENSSL_atomic_add,.-OPENSSL_atomic_add .globl OPENSSL_rdtsc .type OPENSSL_rdtsc,@function .align 16 OPENSSL_rdtsc: +.cfi_startproc rdtsc shlq $32,%rdx orq %rdx,%rax .byte 0xf3,0xc3 +.cfi_endproc .size OPENSSL_rdtsc,.-OPENSSL_rdtsc .globl OPENSSL_ia32_cpuid @@ -205,6 +209,7 @@ OPENSSL_ia32_cpuid: .type OPENSSL_cleanse,@function .align 16 OPENSSL_cleanse: +.cfi_startproc xorq %rax,%rax cmpq $15,%rsi jae .Lot @@ -234,12 +239,14 @@ OPENSSL_cleanse: cmpq $0,%rsi jne .Little .byte 0xf3,0xc3 +.cfi_endproc .size OPENSSL_cleanse,.-OPENSSL_cleanse .globl CRYPTO_memcmp .type CRYPTO_memcmp,@function .align 16 CRYPTO_memcmp: +.cfi_startproc xorq %rax,%rax xorq %r10,%r10 cmpq $0,%rdx @@ -268,11 +275,13 @@ CRYPTO_memcmp: shrq $63,%rax .Lno_data: .byte 0xf3,0xc3 +.cfi_endproc .size CRYPTO_memcmp,.-CRYPTO_memcmp .globl OPENSSL_wipe_cpu .type OPENSSL_wipe_cpu,@function .align 16 OPENSSL_wipe_cpu: +.cfi_startproc pxor %xmm0,%xmm0 pxor %xmm1,%xmm1 pxor %xmm2,%xmm2 @@ -299,11 +308,13 @@ OPENSSL_wipe_cpu: xorq %r11,%r11 leaq 8(%rsp),%rax .byte 0xf3,0xc3 +.cfi_endproc .size OPENSSL_wipe_cpu,.-OPENSSL_wipe_cpu .globl OPENSSL_instrument_bus .type OPENSSL_instrument_bus,@function .align 16 OPENSSL_instrument_bus: +.cfi_startproc movq %rdi,%r10 movq %rsi,%rcx movq %rsi,%r11 @@ -330,12 +341,14 @@ OPENSSL_instrument_bus: movq %r11,%rax .byte 0xf3,0xc3 +.cfi_endproc .size OPENSSL_instrument_bus,.-OPENSSL_instrument_bus .globl OPENSSL_instrument_bus2 .type OPENSSL_instrument_bus2,@function .align 16 OPENSSL_instrument_bus2: +.cfi_startproc movq %rdi,%r10 movq %rsi,%rcx movq %rdx,%r11 @@ -378,11 +391,13 @@ OPENSSL_instrument_bus2: movq 8(%rsp),%rax subq %rcx,%rax .byte 0xf3,0xc3 +.cfi_endproc .size OPENSSL_instrument_bus2,.-OPENSSL_instrument_bus2 .globl OPENSSL_ia32_rdrand_bytes .type OPENSSL_ia32_rdrand_bytes,@function .align 16 OPENSSL_ia32_rdrand_bytes: +.cfi_startproc xorq %rax,%rax cmpq $0,%rsi je .Ldone_rdrand_bytes @@ -419,11 +434,13 @@ OPENSSL_ia32_rdrand_bytes: .Ldone_rdrand_bytes: xorq %r10,%r10 .byte 0xf3,0xc3 +.cfi_endproc .size OPENSSL_ia32_rdrand_bytes,.-OPENSSL_ia32_rdrand_bytes .globl OPENSSL_ia32_rdseed_bytes .type OPENSSL_ia32_rdseed_bytes,@function .align 16 OPENSSL_ia32_rdseed_bytes: +.cfi_startproc xorq %rax,%rax cmpq $0,%rsi je .Ldone_rdseed_bytes @@ -460,4 +477,5 @@ OPENSSL_ia32_rdseed_bytes: .Ldone_rdseed_bytes: xorq %r10,%r10 .byte 0xf3,0xc3 +.cfi_endproc .size OPENSSL_ia32_rdseed_bytes,.-OPENSSL_ia32_rdseed_bytes